Apache Ignite: Column id not found, even when it is there

Hi, I am getting this message on the DBeaver console:
Column 'businesslogicid' not found in table '"BusinesslogicsCache".BUSINESSLOGICS' for PK
'"BusinesslogicsCache".BUSINESSLOGICS."businesslogicid"'
The businesslogicid column is present in the cache and in the cache configuration.
Query:
SELECT LOGICNAME, CONFIGURATION, CREATEDON, MODIFIEDON, CREATEDBY, MODIFIEDBY, ISACTIVE, BUSINESSLOGICID
FROM "BusinesslogicsCache".BUSINESSLOGICS;
Table configuration with JDBC types:
public static CacheConfiguration cacheBusinesslogicsCache() throws Exception {
    CacheConfiguration ccfg = new CacheConfiguration();
    ccfg.setName("BusinesslogicsCache");
    ccfg.setCacheMode(CacheMode.PARTITIONED);
    ccfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
    CacheJdbcPojoStoreFactory cacheStoreFactory = new CacheJdbcPojoStoreFactory();
    cacheStoreFactory.setDataSourceFactory(new Factory<DataSource>() {
        /** {@inheritDoc} */
        @Override
        public DataSource create() {
            return DataSources.INSTANCE_dsMySQL_CcplatformQa;
        }
    });
    cacheStoreFactory.setDialect(new MySQLDialect());
    cacheStoreFactory.setTypes(jdbcTypeBusinesslogics(ccfg.getName()));
    ccfg.setCacheStoreFactory(cacheStoreFactory);
    ccfg.setReadThrough(true);
    ccfg.setWriteThrough(true);
    ccfg.setBackups(1);
    ArrayList<QueryEntity> qryEntities = new ArrayList<>();
    QueryEntity qryEntity = new QueryEntity();
    qryEntity.setKeyType("java.lang.Long");
    qryEntity.setValueType("org.netlink.ignite.model.Businesslogics");
    qryEntity.setKeyFieldName("businesslogicid");
    HashSet<String> keyFields = new HashSet<>();
    keyFields.add("businesslogicid");
    qryEntity.setKeyFields(keyFields);
    LinkedHashMap<String, String> fields = new LinkedHashMap<>();
    fields.put("logicname", "java.lang.String");
    fields.put("type", "java.lang.String");
    fields.put("configuration", "java.lang.String");
    fields.put("createdon", "java.sql.Timestamp");
    fields.put("modifiedon", "java.sql.Timestamp");
    fields.put("createdby", "java.lang.Long");
    fields.put("modifiedby", "java.lang.Long");
    fields.put("isactive", "java.lang.Boolean");
    fields.put("businesslogicid", "java.lang.Long");
    qryEntity.setFields(fields);
    ArrayList<QueryIndex> indexes = new ArrayList<>();
    QueryIndex index = new QueryIndex();
    index.setName("createdBy");
    index.setIndexType(QueryIndexType.SORTED);
    LinkedHashMap<String, Boolean> indFlds = new LinkedHashMap<>();
    indFlds.put("createdby", false);
    index.setFields(indFlds);
    indexes.add(index);
    index = new QueryIndex();
    index.setName("modifiedBy");
    index.setIndexType(QueryIndexType.SORTED);
    indFlds = new LinkedHashMap<>();
    indFlds.put("modifiedby", false);
    index.setFields(indFlds);
    indexes.add(index);
    qryEntity.setIndexes(indexes);
    qryEntities.add(qryEntity);
    ccfg.setQueryEntities(qryEntities);
    return ccfg;
}
/**
 * Create JDBC type for "jdbcTypeBusinesslogics".
 *
 * @param cacheName Cache name.
 * @return Configured JDBC type.
 */
private static JdbcType jdbcTypeBusinesslogics(String cacheName) {
    JdbcType type = new JdbcType();
    type.setCacheName(cacheName);
    type.setKeyType(Long.class);
    type.setValueType("org.netlink.ignite.model.Businesslogics");
    type.setDatabaseSchema(schema);
    type.setDatabaseTable("businesslogics");
    type.setKeyFields(new JdbcTypeField(Types.BIGINT, "businessLogicId", long.class, "businesslogicid"));
    type.setValueFields(
        new JdbcTypeField(Types.VARCHAR, "logicName", String.class, "logicname"),
        new JdbcTypeField(Types.VARCHAR, "type", String.class, "type"),
        new JdbcTypeField(Types.LONGVARCHAR, "configuration", String.class, "configuration"),
        new JdbcTypeField(Types.TIMESTAMP, "createdOn", Timestamp.class, "createdon"),
        new JdbcTypeField(Types.TIMESTAMP, "modifiedOn", Timestamp.class, "modifiedon"),
        new JdbcTypeField(Types.BIGINT, "createdBy", long.class, "createdby"),
        new JdbcTypeField(Types.BIGINT, "modifiedBy", long.class, "modifiedby"),
        new JdbcTypeField(Types.BIT, "isActive", boolean.class, "isactive"));
    return type;
}
Please guide me.

I think you should remove setKeyFields, since it is only needed when your key is not a primitive type.
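A minimal sketch of that change against the QueryEntity from the question (an assumption, not a confirmed fix: with a primitive/wrapper key like java.lang.Long, setKeyFieldName should be the only key mapping left):

QueryEntity qryEntity = new QueryEntity();
qryEntity.setKeyType("java.lang.Long");
qryEntity.setValueType("org.netlink.ignite.model.Businesslogics");
// With a primitive/wrapper key, only name the key column.
// setKeyFields(...) is meant for composite (POJO) keys and can
// confuse the column-to-PK mapping.
qryEntity.setKeyFieldName("businesslogicid");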


Apache Ignite POJO for key

I have Apache Ignite running in a cluster with 3 nodes and populated it with some random data, using a Long as the key:
IgniteCache<Long, String> cache = ignite.getOrCreateCache("myCache");
Map<Long, String> data = new HashMap<>();
data.put(1L,"Data for 1");
data.put(2L,"Data for 2");
cache.putAll(data);
For retrieval:
Set<Long> keys = new HashSet<Long>(Arrays.asList(new Long[]{1L, 2L}));
Map<Long, String> data = cache.getAll(keys);
data.forEach((k, v) -> {
    System.out.println(k + " " + v);
});
This all works great, but when changing the key of the map to a POJO, I am unable to retrieve the data:
IgniteCache<IdTimeStamp, String> cache = ignite.getOrCreateCache("myCache");
Map<IdTimeStamp, String> data = new HashMap<>();
data.put(new IdTimeStamp(1L, 1514759400000L),"Data for 1514759400000");
data.put(new IdTimeStamp(1L, 1514757600000L),"Data for 1514757600000L");
cache.putAll(data);
For retrieval:
Set<IdTimeStamp> keys = new HashSet<IdTimeStamp>();
keys.add(new IdTimeStamp(1L, 1514757600000L));
keys.add(new IdTimeStamp(1L, 1514759400000L));
Map<IdTimeStamp, String> data = cache.getAll(keys);
System.out.println(data.size());
data.forEach((k, v) -> {
    System.out.println(k + " " + v);
});
and the IdTimeStamp class:
public class IdTimeStamp {
    private Long id;
    private Long timestamp;

    public IdTimeStamp(Long id, Long timestamp) {
        this.id = id;
        this.timestamp = timestamp;
    }
}
Not working:
ClientConfiguration cfg = new ClientConfiguration().setAddresses("127.0.0.1:10800");
IgniteClient client = Ignition.startClient(cfg);
ClientCache<IdTimeStamp, String> cache = client.cache("myCache");
Working:
public static IgniteCache<IdTimeStamp, String> getIgnite() {
    IgniteConfiguration cfg = new IgniteConfiguration();
    cfg.setClientMode(true);
    cfg.setPeerClassLoadingEnabled(false); // true?
    // Set up an IP finder to ensure the client can locate the servers.
    TcpDiscoveryMulticastIpFinder ipFinder = new TcpDiscoveryMulticastIpFinder();
    ipFinder.setAddresses(Collections.singletonList("127.0.0.1:47500..47509"));
    TcpDiscoverySpi discoverySpi = new TcpDiscoverySpi();
    discoverySpi.setClientReconnectDisabled(true);
    discoverySpi.setIpFinder(ipFinder);
    cfg.setDiscoverySpi(discoverySpi);
    // Start the node.
    Ignite ignite = Ignition.start(cfg);
    // Create an IgniteCache and put some values in it.
    IgniteCache<IdTimeStamp, String> cache = ignite.getOrCreateCache("myCache");
    return cache;
}
This looks like a known limitation when you are using different clients for data population and for retrieving the records. Take a look at this question and check whether configuring compactFooter=true solves the problem:
clientConfig.setBinaryConfiguration(new BinaryConfiguration().setCompactFooter(true));
Otherwise, your code looks fine and should work as expected.
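For reference, a sketch of the thin client configured that way, reusing the address from the question (ClientConfiguration, BinaryConfiguration, and Ignition.startClient are the standard Ignite APIs):

// Thin client with compact footers enabled, so its binary metadata
// matches data written by a thick client.
ClientConfiguration clientCfg = new ClientConfiguration()
    .setAddresses("127.0.0.1:10800")
    .setBinaryConfiguration(new BinaryConfiguration().setCompactFooter(true));
IgniteClient client = Ignition.startClient(clientCfg);
ClientCache<IdTimeStamp, String> cache = client.cache("myCache");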

The reasoner's realisation step does not work on slightly larger ontologies. How can I solve this?

I'm new to the semantic web field and I'm trying to compare several reasoners. This is my code:
public static void main(String[] args) throws Exception { // signature assumed; the original snippet starts mid-method
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    File file = new File(args[0]);
    OWLOntology ontology = manager.loadOntologyFromOntologyDocument(file);
    Set<OWLClass> classes = ontology.getClassesInSignature();
    String inferredFile = args[1];
    // test that the ontology was loaded correctly
    OWLDataFactory df = manager.getOWLDataFactory();
    Reasoner jfact = Reasoner.JFACT;
    System.out.println(RunReasoner(jfact, df, ontology, manager, inferredFile));
}
// An enum of the reasoners under comparison.
public enum Reasoner {
    HERMIT,
    PELLET,
    KONCLUDE,
    JFACT,
    FACT,
    ELK
}
public static String RunReasoner(Reasoner reasoner, OWLDataFactory df, OWLOntology ontology, OWLOntologyManager manager, String inferredFile) throws OWLOntologyCreationException, FileNotFoundException, IOException, OWLOntologyStorageException {
    String esito = "";
    OWLReasoner reasoner_object = null;
    if (reasoner == Reasoner.HERMIT) {
        /**************** HERMIT ****************/
        OWLReasonerFactory rf = new ReasonerFactory();
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        Configuration configuration = new Configuration();
        configuration.reasonerProgressMonitor = progressMonitor;
        configuration.ignoreUnsupportedDatatypes = true;
        reasoner_object = rf.createReasoner(ontology, configuration);
    }
    else if (reasoner == Reasoner.KONCLUDE) {
        // Configure the server end-point.
        URL url = new URL("http://localhost:8080");
        OWLlinkHTTPXMLReasonerFactory factory = new OWLlinkHTTPXMLReasonerFactory();
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        //OWLlinkReasonerConfiguration conf = (OWLlinkReasonerConfiguration) new SimpleConfiguration(progressMonitor);
        reasoner_object = factory.createNonBufferingReasoner(ontology);
    }
    else if (reasoner == Reasoner.JFACT) {
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        JFactFactory factory = new JFactFactory();
        reasoner_object = factory.createNonBufferingReasoner(ontology, conf);
    }
    // else if (reasoner == Reasoner.FACT) {
    //     TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    //     OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
    //     FaCTPlusPlusReasonerFactory factory = new FaCTPlusPlusReasonerFactory();
    //     reasoner_object = factory.createNonBufferingReasoner(ontology, conf);
    // }
    else if (reasoner == Reasoner.ELK) {
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        ElkReasonerFactory factory = new ElkReasonerFactory();
        reasoner_object = factory.createNonBufferingReasoner(ontology, conf);
    }
    else if (reasoner == Reasoner.PELLET) {
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        reasoner_object = OpenlletReasonerFactory.getInstance().createReasoner(ontology, conf);
    }
    else {
        // Return early: otherwise reasoner_object is null and the
        // isConsistent() call below would throw a NullPointerException.
        esito = "Invalid reasoner";
        return esito;
    }
    boolean consistencyCheck = reasoner_object.isConsistent();
    if (consistencyCheck) {
        reasoner_object.precomputeInferences(InferenceType.CLASS_HIERARCHY,
            InferenceType.CLASS_ASSERTIONS, InferenceType.OBJECT_PROPERTY_HIERARCHY,
            InferenceType.DATA_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS);
        List<InferredAxiomGenerator<? extends OWLAxiom>> generators = new ArrayList<>();
        generators.add(new InferredSubClassAxiomGenerator());
        generators.add(new InferredClassAssertionAxiomGenerator());
        generators.add(new InferredDataPropertyCharacteristicAxiomGenerator());
        generators.add(new InferredEquivalentClassAxiomGenerator());
        generators.add(new InferredEquivalentDataPropertiesAxiomGenerator());
        generators.add(new InferredEquivalentObjectPropertyAxiomGenerator());
        generators.add(new InferredInverseObjectPropertiesAxiomGenerator());
        generators.add(new InferredObjectPropertyCharacteristicAxiomGenerator());
        // NOTE: InferredPropertyAssertionGenerator significantly slows down
        // inference computation
        generators.add(new org.semanticweb.owlapi.util.InferredPropertyAssertionGenerator());
        generators.add(new InferredSubClassAxiomGenerator());
        generators.add(new InferredSubDataPropertyAxiomGenerator());
        generators.add(new InferredSubObjectPropertyAxiomGenerator());
        List<InferredIndividualAxiomGenerator<? extends OWLIndividualAxiom>> individualAxioms = new ArrayList<>();
        generators.addAll(individualAxioms);
        generators.add(new InferredDisjointClassesAxiomGenerator());
        // Generates an ontology based on inferred axioms, which are essentially supplied by a reasoner.
        InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner_object, generators);
        OWLOntology inferredAxiomsOntology = manager.createOntology();
        iog.fillOntology(df, inferredAxiomsOntology);
        System.out.println(inferredAxiomsOntology.getAxiomCount());
        // for (InferredAxiomGenerator<?> i : iog.getAxiomGenerators()) {
        //     System.out.println(i);
        // }
        File inferredOntologyFile = new File(inferredFile);
        // Create a stream so the ontology manager can write to it.
        try (OutputStream outputStream = new FileOutputStream(inferredOntologyFile)) {
            // We use the same format as for the input ontology.
            manager.saveOntology(inferredAxiomsOntology, outputStream);
        }
        esito = "done " + reasoner.toString();
        reasoner_object.dispose();
    } // end if consistencyCheck
    else {
        esito = reasoner.toString() + " -- Inconsistent input ontology, please check the OWL file";
    }
    return esito;
}
When I run it on a small ontology (40 axioms), my code works perfectly. If instead I run it on a slightly larger ontology (750 axioms), the code keeps running for hours but never reaches the actual realisation, and the inferred-axioms file remains empty. I think it's due to memory overload or some hidden buffer. I hope you can help me solve this problem. Thanks, Rita
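One way to narrow this down, picking up on the note already in the code that InferredPropertyAssertionGenerator significantly slows down inference: run with a minimal generator list first, then re-add generators one at a time (a debugging sketch, not a guaranteed fix):

// Start with only the cheap structural generators; if this completes
// quickly, re-add the remaining generators one by one to find the
// generator that blows up on the 750-axiom ontology.
List<InferredAxiomGenerator<? extends OWLAxiom>> generators = new ArrayList<>();
generators.add(new InferredSubClassAxiomGenerator());
generators.add(new InferredClassAssertionAxiomGenerator());
InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner_object, generators);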

TEXT Query in Apache Ignite not giving any result

Here is what I did:
I started Ignite in remote mode.
I created a cache and added some data (I also created the cache configuration).
I am running a text query.
My code looks like this:
TcpDiscoverySpi spi = new TcpDiscoverySpi();
TcpDiscoveryVmIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
String hosts[] = new String[]{"ip:48500"};
ipFinder.setAddresses(Arrays.asList(hosts));
// Alternative: a multicast IP finder could be used instead:
// TcpDiscoveryMulticastIpFinder tcMp = new TcpDiscoveryMulticastIpFinder();
// tcMp.setAddresses(Arrays.asList("localhost")); // change your IP address here
// Set the IP finder on the discovery SPI.
spi.setIpFinder(ipFinder);
// Create a new Ignite configuration.
IgniteConfiguration cfg = new IgniteConfiguration();
cfg.setClientMode(true);
cfg.setPeerClassLoadingEnabled(true);
// Set the cache configuration.
@SuppressWarnings("rawtypes")
CacheConfiguration cacheConfig = cacheConfigure();
cfg.setCacheConfiguration(cacheConfig);
// Set the discovery SPI on the Ignite configuration.
cfg.setDiscoverySpi(spi);
// Start Ignite.
Ignite ignite = Ignition.getOrStart(cfg);
and my cache configuration is:
CacheConfiguration ccfg = new CacheConfiguration(DEFAULT_CACHE_NAME);
QueryEntity queryEntity = new QueryEntity();
queryEntity.setKeyType(Integer.class.getName());
queryEntity.setValueType(Account.class.getName());
LinkedHashMap<String, String> fields = new LinkedHashMap<>();
fields.put("accid", Integer.class.getName());
fields.put("attrbool", Boolean.class.getName());
fields.put("accbalance", BigDecimal.class.getName());
fields.put("acctype", String.class.getName());
fields.put("attrbyte", Byte.class.getName());
fields.put("accifsc", String.class.getName());
queryEntity.setFields(fields);
// List the indexes.
Collection<QueryIndex> indexes = new ArrayList<>(3);
indexes.add(new QueryIndex("accid"));
indexes.add(new QueryIndex("accifsc"));
indexes.add(new QueryIndex("acctype"));
queryEntity.setIndexes(indexes);
ccfg.setQueryEntities(Arrays.asList(queryEntity));
and I am putting data into the cache:
for (int i = 0; i < 5; i++) {
    Account account = new Account();
    account.setAccid(1234 + i);
    account.setAttrbool(true);
    account.setAccbalance(new BigDecimal(100000 + i));
    account.setAcctype("Demat");
    account.setAttrbyte(new Byte("1"));
    account.setAccifsc("Master Degree Pstgraduate");
    cache.put(new Integer(i), account);
}
and now I am running the text query:
TextQuery txt = new TextQuery(Account.class, "IFC");
try (@SuppressWarnings("unchecked")
     QueryCursor<Entry<Integer, Account>> masters = cache.query(txt)) {
    for (Entry<Integer, Account> e : masters)
        System.out.println("results " + e.getValue().toString());
}
My data class is:
public class Account {
    // primary key
    @QueryTextField
    private Integer accid;
    @QueryTextField
    private BigDecimal accbalance;
    @QueryTextField
    @QuerySqlField
    private String accifsc;
    private BigInteger accnum;
    private String accstr;
    @QueryTextField
    private String acctype;
    @QueryTextField
    private Boolean attrbool;
    @QueryTextField
    private Byte attrbyte;
    // getters and setters
}
What am I doing wrong? There is no error in the log.
I changed the text query part of the code a bit, and it worked for me:
TextQuery txt = new TextQuery(Account.class, "IFC");
try (@SuppressWarnings({ "unchecked", "rawtypes" })
     QueryCursor masters = cache.query(txt)) {
    @SuppressWarnings("unchecked")
    List<CacheEntryImpl<Integer, Account>> accounts = masters.getAll();
    Iterator<CacheEntryImpl<Integer, Account>> iterator = accounts.iterator();
    while (iterator.hasNext()) {
        System.out.println(iterator.next().getValue().getAccifsc());
    }
}
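Two things may be worth checking here, hedged since the full setup is not shown. Ignite text queries are served by a Lucene index, so the queried fields must actually be text-indexed (with pure QueryEntity configuration that would be a FULLTEXT index; the SORTED indexes above only serve SQL queries), and Lucene matching is token-based, so the query string has to match a whole indexed token. A sketch against the sample data above:

// Assumption: the field is text-indexed, e.g. via @QueryTextField or a
// full-text index declared on the query entity:
// indexes.add(new QueryIndex("accifsc", QueryIndexType.FULLTEXT));
// Token-based matching: "Master" matches "Master Degree Pstgraduate",
// while a fragment like "IFC" may not match any indexed token.
TextQuery<Integer, Account> txt = new TextQuery<>(Account.class, "Master");
try (QueryCursor<Entry<Integer, Account>> cursor = cache.query(txt)) {
    for (Entry<Integer, Account> e : cursor)
        System.out.println(e.getValue().getAccifsc());
}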

Get data (single value) from kSOAP to Android through a web service

final String SOAP_ACTION = "http://tempuri.org/get_data2";
final String OPERATION_NAME = "get_data2";
final String WSDL_TARGET_NAMESPACE = "http://tempuri.org/";
final String SOAP_ADDRESS = "****";
final String req_no, req_date, bolnumber, container_no, current_loc, truck_no;
SoapObject request = new SoapObject(WSDL_TARGET_NAMESPACE, OPERATION_NAME);
SoapSerializationEnvelope envelope = new SoapSerializationEnvelope(SoapEnvelope.VER11);
envelope.dotNet = true;
envelope.setOutputSoapObject(request);
HttpTransportSE httpTransport = new HttpTransportSE(SOAP_ADDRESS);
try {
    httpTransport.call(SOAP_ACTION, envelope);
    SoapObject resultsString = (SoapObject) envelope.getResponse();
    Log.d("-----------------------", resultsString.getProperty("emp_name").toString());
    // setText(int) would be treated as a string resource id, so convert to a String first.
    tv.setText(String.valueOf(resultsString.getPropertyCount()));
} catch (Exception exception) {
    exception.printStackTrace();
    tv.setText(exception.toString());
}
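A minimal sketch of pulling a single value out of the response (assuming the service returns an emp_name property, as the Log.d call above suggests; getProperty and getPropertyCount are standard ksoap2 SoapObject methods):

// Read one value by name...
String empName = resultsString.getProperty("emp_name").toString();
tv.setText(empName);
// ...or walk all returned properties by index.
for (int i = 0; i < resultsString.getPropertyCount(); i++) {
    Log.d("soap", String.valueOf(resultsString.getProperty(i)));
}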

How to add a test case when posting a test case result

JsonObject's addProperty method does not support adding another JsonObject.
The official test is shown below:
@Test
public void shouldConstructTheCorrectUrlWithExtraParam() {
    JsonObject body = new JsonObject();
    CreateRequest req = new CreateRequest("Defect", body);
    req.addParam("foo", "Bar");
    Assert.assertEquals(req.toUrl(), "/defect/create.js?foo=Bar&fetch=true");
}
What I need is this (the ??? marks the part I don't know how to write):
public void shouldConstructTheCorrectUrlWithExtraParam() {
    JsonObject body = new JsonObject();
    body.add("testcase", ???);
    CreateRequest req = new CreateRequest("testcaseresult", body);
    req.addParam("foo", "Bar");
    Assert.assertEquals(req.toUrl(), "/defect/create.js?foo=Bar&fetch=true");
}
I made a mistake when adding the other JsonObject: it should be a ref rather than an instance.
Working code:
public void createTestCaseResult(JsonObject testCaseJsonObject) throws IOException, URISyntaxException {
    log.println("createTestCaseResult...");
    String testCaseRef = testCaseJsonObject.get("_ref").getAsString();
    QueryRequest userRequest = new QueryRequest("user");
    userRequest.setFetch(new Fetch("UserName", "Subscription", "DisplayName"));
    userRequest.setQueryFilter(new QueryFilter("UserName", "=", "lu.han@technicolor.com"));
    QueryResponse userQueryResponse = restApi.query(userRequest);
    JsonArray userQueryResults = userQueryResponse.getResults();
    JsonElement userQueryElement = userQueryResults.get(0);
    JsonObject userQueryObject = userQueryElement.getAsJsonObject();
    String userRef = userQueryObject.get("_ref").getAsString();
    close();
    getRestApi();
    Date now = new Date();
    String pattern = "yyyy-MM-dd'T'HH:mm:ssZ";
    SimpleDateFormat format = new SimpleDateFormat(pattern);
    JsonObject newResult = new JsonObject();
    newResult.addProperty("Verdict", "Pass");
    newResult.addProperty("Build", "2014.01.08.1234567");
    newResult.addProperty("Tester", userRef);
    newResult.addProperty("Date", format.format(now));
    newResult.addProperty("CreationDate", format.format(now));
    newResult.addProperty("TestCase", testCaseRef);
    newResult.addProperty("Workspace", workspaceRef);
    CreateRequest createRequest = new CreateRequest("testcaseresult", newResult);
    CreateResponse createResponse = restApi.create(createRequest);
    log.println("createTestCaseResult DONE:");
    log.println(String.format("Created %s", createResponse.getObject().get("_ref").getAsString()));
}
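The underlying Gson rule, as a minimal sketch: addProperty only accepts primitives and strings, so a Rally reference goes in as its ref string, while an actual nested JsonObject needs add (the ref value and field name below are hypothetical):

JsonObject newResult = new JsonObject();
// A reference is just a string, so addProperty works:
newResult.addProperty("TestCase", "/testcase/12345"); // hypothetical ref
// A genuinely nested object needs add(), not addProperty():
JsonObject nested = new JsonObject();
nested.addProperty("Verdict", "Pass");
newResult.add("SomeNestedField", nested); // hypothetical field name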