javax.cache.CacheException: Failed to parse query. Table not found - ignite

My code starts the node and executes the loadCache() method, but when a SQL query is run against the cache it fails with the error in the title. Here is my code for the configuration, cache store, and loadCache:
Configuration.java:
public class Configuration {
/** Helper class for datasource creation. **/
public static class DataSources {
public static final JdbcDataSource dataSource = createdataSource();
private static JdbcDataSource createdataSource() {
JdbcDataSource dataSource = new JdbcDataSource();
dataSource.setURL("");
dataSource.setUser("");
dataSource.setPassword("");
return dataSource;
}
}
/**
* Configure grid.
*
* @return Ignite configuration.
* @throws Exception If failed to construct Ignite configuration instance.
**/
public static IgniteConfiguration createConfiguration() throws Exception {
//int cpus = Runtime.getRuntime().availableProcessors();
IgniteConfiguration cfg = new IgniteConfiguration();
cfg.setClientMode(true);
cfg.setIgniteInstanceName("Reports");
TcpDiscoverySpi discovery = new TcpDiscoverySpi();
TcpDiscoveryMulticastIpFinder ipFinder = new TcpDiscoveryMulticastIpFinder();
ipFinder.setAddresses(Collections.singletonList(""));
discovery.setIpFinder(ipFinder);
cfg.setDiscoverySpi(discovery);
//cfg.setPeerClassLoadingEnabled(true);
cfg.setCacheConfiguration(cacheOutputReportCache());
return cfg;
}
//Configuration for cache "OutputReportCache".
public static CacheConfiguration<Long, OutputReport> cacheOutputReportCache() throws Exception {
CacheConfiguration<Long, OutputReport> ccfg = new CacheConfiguration<>();
ccfg.setName("OutputReportCache");
ccfg.setCacheMode(CacheMode.PARTITIONED);
ccfg.setAtomicityMode(CacheAtomicityMode.ATOMIC);
CacheJdbcPojoStoreFactory<Long, OutputReport> cacheStoreFactory = new CacheJdbcPojoStoreFactory<>();
cacheStoreFactory.setDataSourceFactory(new Factory<DataSource>() {
/**
*
*/
private static final long serialVersionUID = 1L;
@Override
public DataSource create() {
return DataSources.dataSource;
}
});
cacheStoreFactory.setDialect(new OracleDialect());
ccfg.setCacheStoreFactory(cacheStoreFactory);
ccfg.setReadThrough(true);
ccfg.setWriteThrough(true);
ArrayList<QueryEntity> qryEntities = new ArrayList<>();
QueryEntity qryEntity = new QueryEntity();
qryEntity.setKeyType(Long.class.getName());
qryEntity.setValueType(OutputReport.class.getName());
LinkedHashMap<String, String> fields = new LinkedHashMap<>();
fields.put("sid", "java.lang.String");
fields.put("sName", "java.lang.String");
fields.put("cname", "java.lang.String");
fields.put("cnumber", "java.lang.String");
fields.put("cname", "java.lang.String");
fields.put("cid", "java.lang.String");
fields.put("r", "java.lang.String");
fields.put("b", "java.lang.String");
fields.put("Date", "java.lang.String");
fields.put("Side", "java.lang.String");
fields.put("Quant", "java.lang.Float");
fields.put("price", "java.lang.Float");
fields.put("local", "java.lang.Float");
fields.put("usd", "java.lang.Float");
fields.put("trcy", "java.lang.String");
fields.put("Count", "java.lang.Integer");
fields.put("Type", "java.lang.String");
fields.put("Category", "java.lang.String");
fields.put("Subcategory", "java.lang.String");
fields.put("Country", "java.lang.String");
fields.put("eCountry", "java.lang.String");
fields.put("Desc", "java.lang.String");
fields.put("Sector", "java.lang.String");
fields.put("isector", "java.lang.String");
fields.put("Flag", "java.lang.String");
fields.put("Region", "java.lang.String");
fields.put("rowNum", "java.lang.Long");
qryEntity.setFields(fields);
// Listing indexes.
Collection<QueryIndex> indexes = new ArrayList<>(3);
indexes.add(new QueryIndex("sid"));
indexes.add(new QueryIndex("Region"));
indexes.add(new QueryIndex("cnumber"));
indexes.add(new QueryIndex("eCountry"));
qryEntity.setIndexes(indexes);
qryEntities.add(qryEntity);
ccfg.setQueryEntities(qryEntities);
return ccfg;
}
}
OutputReportStore.java:
public class OutputReportStore implements CacheStore<Long, OutputReport> {
//@SpringResource(resourceName = "dataSource")
private DataSource dataSource = DataSources.dataSource;
// This method is called whenever IgniteCache.loadCache() method is called.
@Override
public void loadCache(IgniteBiInClosure<Long, OutputReport> clo, @Nullable Object... objects) throws CacheLoaderException {
System.out.println(">> Loading cache from store...");
try (Connection conn = dataSource.getConnection()) {
try (PreparedStatement st = conn.prepareStatement("select * from OUTPUTREPORT")) {
try (ResultSet rs = st.executeQuery()) {
while (rs.next()) {
OutputReport outputreport = new OutputReport(rs.getString(1), rs.getString(2), rs.getString(3), rs.getString(4), rs.getString(5), rs.getString(6), rs.getString(7), rs.getString(8), rs.getString(9), rs.getString(10), rs.getFloat(11), rs.getFloat(12), rs.getFloat(13), rs.getFloat(14), rs.getString(15), rs.getInt(16), rs.getString(17), rs.getString(18), rs.getString(19), rs.getString(20), rs.getString(21), rs.getString(22), rs.getString(23), rs.getString(24), rs.getString(25), rs.getString(26), rs.getLong(27));
clo.apply(outputreport.getrowNum(), outputreport);
}
}
}
}
catch (SQLException e) {
throw new CacheLoaderException("Failed to load values from cache store.", e);
}
}
// Other CacheStore method implementations.
@Override
public OutputReport load(Long arg0) throws CacheLoaderException {
// TODO Auto-generated method stub
return null;
}
@Override
public void delete(Object arg0) throws CacheWriterException {
// TODO Auto-generated method stub
}
@Override
public void write(Entry<? extends Long, ? extends OutputReport> arg0)
throws CacheWriterException {
// TODO Auto-generated method stub
}
@Override
public Map<Long, OutputReport> loadAll(Iterable<? extends Long> arg0)
throws CacheLoaderException {
// TODO Auto-generated method stub
return null;
}
@Override
public void deleteAll(Collection<?> arg0) throws CacheWriterException {
// TODO Auto-generated method stub
}
@Override
public void writeAll(
Collection<Entry<? extends Long, ? extends OutputReport>> arg0)
throws CacheWriterException {
// TODO Auto-generated method stub
}
@Override
public void sessionEnd(boolean arg0) throws CacheWriterException {
// TODO Auto-generated method stub
}
}
LoadCaches.java:
public class LoadCaches {
//Load caches from database.
//1. Start cluster
//2. Start this utility and wait while load complete
public static void main(String[] args) throws Exception {
try (Ignite ignite = Ignition.start(Configuration.createConfiguration())) {
System.out.println(">>> Loading cache: OutputReportCache");
IgniteCache<Long, OutputReport> cache = ignite.getOrCreateCache("OutputReportCache");
cache.loadCache(null);
System.out.println(">>> Cache loaded!");
QueryCursor<List<?>> cursor = cache.query(new SqlFieldsQuery("select count(*) from outputreport"));
System.out.println(cursor.getAll());
}
}
}
Here is the stack trace:
[16:59:22] Ignite node started OK (id=90c9464d, instance name=Reports)
[16:59:22] Topology snapshot [ver=29, servers=1, clients=6, CPUs=12, offheap=90.0GB, heap=36.0GB]
[16:59:22] ^-- Node [id=90C9464D-A48A-4DB7-8812-65CE5FD4B8FD, clusterState=ACTIVE]
Loading cache: OutputReportCache
Cache loaded!
Exception in thread "main" javax.cache.CacheException: Failed to parse query. Table "OUTPUTREPORT" not found; SQL statement: select count(*) from outputreport [42102-196]
at org.apache.ignite.internal.processors.cache.IgniteCacheProxyImpl.query(IgniteCacheProxyImpl.java:676)
at org.apache.ignite.internal.processors.cache.IgniteCacheProxyImpl.query(IgniteCacheProxyImpl.java:615)
at org.apache.ignite.internal.processors.cache.GatewayProtectedCacheProxy.query(GatewayProtectedCacheProxy.java:356)
at project4.LoadCaches.main(LoadCaches.java:26)
Caused by: class org.apache.ignite.internal.processors.query.IgniteSQLException: Failed to parse query. Table "OUTPUTREPORT" not found; SQL statement: select count(*) from outputreport [42102-196]
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.prepareStatementAndCaches(IgniteH2Indexing.java:2026)
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.parseAndSplit(IgniteH2Indexing.java:1796)
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.querySqlFields(IgniteH2Indexing.java:1652)
at org.apache.ignite.internal.processors.query.GridQueryProcessor$4.applyx(GridQueryProcessor.java:2035)
at org.apache.ignite.internal.processors.query.GridQueryProcessor$4.applyx(GridQueryProcessor.java:2030)
at org.apache.ignite.internal.util.lang.IgniteOutClosureX.apply(IgniteOutClosureX.java:36)
at org.apache.ignite.internal.processors.query.GridQueryProcessor.executeQuery(GridQueryProcessor.java:2578)
at org.apache.ignite.internal.processors.query.GridQueryProcessor.querySqlFields(GridQueryProcessor.java:2044)
at org.apache.ignite.internal.processors.cache.IgniteCacheProxyImpl.query(IgniteCacheProxyImpl.java:664)
... 3 more
Caused by: org.h2.jdbc.JdbcSQLException: Table "OUTPUTREPORT" not found; SQL statement: select count(*) from outputreport [42102-196]
at org.h2.message.DbException.getJdbcSQLException(DbException.java:345)
at org.h2.message.DbException.get(DbException.java:179)
at org.h2.message.DbException.get(DbException.java:155)
at org.h2.command.Parser.readTableOrView(Parser.java:5552)
at org.h2.command.Parser.readTableFilter(Parser.java:1266)
at org.h2.command.Parser.parseSelectSimpleFromPart(Parser.java:1946)
at org.h2.command.Parser.parseSelectSimple(Parser.java:2095)
at org.h2.command.Parser.parseSelectSub(Parser.java:1940)
at org.h2.command.Parser.parseSelectUnion(Parser.java:1755)
at org.h2.command.Parser.parseSelect(Parser.java:1743)
at org.h2.command.Parser.parsePrepared(Parser.java:449)
at org.h2.command.Parser.parse(Parser.java:321)
at org.h2.command.Parser.parse(Parser.java:293)
at org.h2.command.Parser.prepareCommand(Parser.java:258)
at org.h2.engine.Session.prepareLocal(Session.java:578)
at org.h2.engine.Session.prepareCommand(Session.java:519)
at org.h2.jdbc.JdbcConnection.prepareCommand(JdbcConnection.java:1204)
at org.h2.jdbc.JdbcPreparedStatement.<init>(JdbcPreparedStatement.java:73)
at org.h2.jdbc.JdbcConnection.prepareStatement(JdbcConnection.java:288)
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.prepare0(IgniteH2Indexing.java:484)
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.prepareStatement(IgniteH2Indexing.java:452)
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.prepareStatement(IgniteH2Indexing.java:419)
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.prepareStatementAndCaches(IgniteH2Indexing.java:2008)
... 11 more
How can this be resolved?

I guess the root cause of the exception is that CacheJdbcPojoStoreFactory#types is not defined in your code. That can be done via the Java API as follows:
JdbcType storeType = new JdbcType();
storeType.setDatabaseSchema("Your-custom-schema");
storeType.setDatabaseTable("Table-name");
storeType.setKeyType(Long.class);
storeType.setValueType(OutputReport.class);
storeType.setValueFields(new JdbcTypeField(Types.VARCHAR, "sName", String.class, "sName"));
...
cacheStoreFactory.setTypes(storeType);
You can find an example here: CacheJdbcPojoStoreTest
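For completeness, here is a minimal sketch of how the store types could be wired into your cacheOutputReportCache() method. It assumes the Oracle table is OUTPUTREPORT with a numeric ROW_NUM key column; the schema, table, and column names below are placeholders I made up, not taken from your code:
JdbcType jdbcType = new JdbcType();
jdbcType.setCacheName("OutputReportCache"); // must match the cache name
jdbcType.setDatabaseSchema("MYSCHEMA"); // placeholder: your schema
jdbcType.setDatabaseTable("OUTPUTREPORT"); // placeholder: your table
jdbcType.setKeyType(Long.class);
jdbcType.setValueType(OutputReport.class);
// Placeholder column-to-field mappings; add one JdbcTypeField per column.
jdbcType.setKeyFields(new JdbcTypeField(Types.NUMERIC, "ROW_NUM", Long.class, "rowNum"));
jdbcType.setValueFields(
new JdbcTypeField(Types.VARCHAR, "SID", String.class, "sid"),
new JdbcTypeField(Types.VARCHAR, "SNAME", String.class, "sName"));
cacheStoreFactory.setTypes(jdbcType);
With the types registered, CacheJdbcPojoStore knows which database table backs the cache, and loadCache() can run through the POJO store instead of a hand-written SQL loop.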

Related

Google Cloud Memory Store (Redis), can't connect to redis when instance is just started

I have a problem connecting to Redis when my instance has just started.
I use:
runtime: java
env: flex
runtime_config:
jdk: openjdk8
I get the following exception:
Caused by: redis.clients.jedis.exceptions.JedisConnectionException: java.net.SocketTimeoutException: connect timed out
RedisConnectionFailureException: Cannot get Jedis connection; nested exception is redis.clients.jedis.exceptions.JedisConnectionException: Could not get a resource from the pool
java.net.SocketTimeoutException: connect timed out
After 2-3 minutes, it works smoothly.
Do I need to add some check to my code, or how should I fix this properly?
P.S.
I also use Spring Boot, with the following configuration:
@Value("${spring.redis.host}")
private String redisHost;
@Bean
JedisConnectionFactory jedisConnectionFactory() {
// https://cloud.google.com/memorystore/docs/redis/quotas
RedisStandaloneConfiguration config = new RedisStandaloneConfiguration(redisHost, 6379);
return new JedisConnectionFactory(config);
}
@Bean
public RedisTemplate<String, Object> redisTemplate(
@Autowired JedisConnectionFactory jedisConnectionFactory
) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
template.setConnectionFactory(jedisConnectionFactory);
template.setKeySerializer(new StringRedisSerializer());
template.setValueSerializer(new GenericJackson2JsonRedisSerializer(new ObjectMapper()));
return template;
}
In pom.xml:
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-redis</artifactId>
<version>2.1.2.RELEASE</version>
I solved this problem as follows: in short, I added a “ping” method that tries to set and get a value from Redis; if that succeeds, the application is ready.
Implementation:
First, you need to update app.yaml by adding the following:
readiness_check:
path: "/readiness_check"
check_interval_sec: 5
timeout_sec: 4
failure_threshold: 2
success_threshold: 2
app_start_timeout_sec: 300
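(As I understand the Flex health-check settings, app_start_timeout_sec: 300 gives a newly deployed instance up to five minutes to start passing the readiness check before the deployment is considered failed.)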
Second, in your rest controller:
@GetMapping("/readiness_check")
public ResponseEntity<?> readiness_check() {
if (!cacheConfig.ping()) {
return ResponseEntity.notFound().build();
}
return ResponseEntity.ok().build();
}
Third, class CacheConfig:
public boolean ping() {
long prefix = System.currentTimeMillis();
try {
redisTemplate.opsForValue().set("readiness_check_" + prefix, Boolean.TRUE, 100, TimeUnit.SECONDS);
Boolean val = (Boolean) redisTemplate.opsForValue().get("readiness_check_" + prefix);
return Boolean.TRUE.equals(val);
} catch (Exception e) {
LOGGER.info("ping failed for " + System.currentTimeMillis());
return false;
}
}
P.S.
Also if somebody needs the full implementation of CacheConfig:
@Configuration
public class CacheConfig {
private static final Logger LOGGER = Logger.getLogger(CacheConfig.class.getName());
@Value("${spring.redis.host}")
private String redisHost;
private final RedisTemplate<String, Object> redisTemplate;
@Autowired
public CacheConfig(@Lazy RedisTemplate<String, Object> redisTemplate) {
this.redisTemplate = redisTemplate;
}
@Bean
JedisConnectionFactory jedisConnectionFactory(
@Autowired JedisPoolConfig poolConfig
) {
// https://cloud.google.com/memorystore/docs/redis/quotas
RedisStandaloneConfiguration config = new RedisStandaloneConfiguration(redisHost, 6379);
JedisClientConfiguration clientConfig = JedisClientConfiguration
.builder()
.usePooling()
.poolConfig(poolConfig)
.build();
return new JedisConnectionFactory(config, clientConfig);
}
@Bean
public RedisTemplate<String, Object> redisTemplate(
@Autowired JedisConnectionFactory jedisConnectionFactory
) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
template.setConnectionFactory(jedisConnectionFactory);
template.setKeySerializer(new StringRedisSerializer());
template.setValueSerializer(new GenericJackson2JsonRedisSerializer(new ObjectMapper()));
return template;
}
/**
* Example: https://github.com/PengliuIBM/pws_demo/blob/1becdca1bc19320c2742504baa1cada3260f8d93/redisData/src/main/java/com/pivotal/wangyu/study/springdataredis/config/RedisConfig.java
*/
@Bean
redis.clients.jedis.JedisPoolConfig jedisPoolConfig() {
final redis.clients.jedis.JedisPoolConfig poolConfig = new redis.clients.jedis.JedisPoolConfig();
// Maximum active connections to Redis instance
poolConfig.setMaxTotal(16);
// Number of connections to Redis that just sit there and do nothing
poolConfig.setMaxIdle(16);
// Minimum number of idle connections to Redis - these can be seen as always open and ready to serve
poolConfig.setMinIdle(8);
// Tests whether connection is dead when connection retrieval method is called
poolConfig.setTestOnBorrow(true);
// Tests whether connection is dead when returning a connection to the pool
poolConfig.setTestOnReturn(true);
// Tests whether connections are dead during idle periods
poolConfig.setTestWhileIdle(true);
return poolConfig;
}
public boolean ping() {
long prefix = System.currentTimeMillis();
try {
redisTemplate.opsForValue().set("readiness_check_" + prefix, Boolean.TRUE, 100, TimeUnit.SECONDS);
Boolean val = (Boolean) redisTemplate.opsForValue().get("readiness_check_" + prefix);
return Boolean.TRUE.equals(val);
} catch (Exception e) {
LOGGER.info("ping failed for " + System.currentTimeMillis());
return false;
}
}
}

How To update google-cloud-dataflow running in app engine without clearing bigquery tables

I have a google-cloud-dataflow process running on App Engine.
It listens to messages sent via Pub/Sub and streams them to BigQuery.
I updated my code and I am trying to rerun the app, but I receive this error:
Exception in thread "main" java.lang.IllegalArgumentException: BigQuery table is not empty
Is there any way to update the dataflow without deleting the table? My code might change quite often, and I do not want to delete the data in the table.
Here is my code:
public class MyPipline {
private static final Logger LOG = LoggerFactory.getLogger(BotPipline.class);
private static String name;
public static void main(String[] args) {
List<TableFieldSchema> fields = new ArrayList<>();
fields.add(new TableFieldSchema().setName("a").setType("string"));
fields.add(new TableFieldSchema().setName("b").setType("string"));
fields.add(new TableFieldSchema().setName("c").setType("string"));
TableSchema tableSchema = new TableSchema().setFields(fields);
DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
options.setRunner(BlockingDataflowPipelineRunner.class);
options.setProject("my-data-analysis");
options.setStagingLocation("gs://my-bucket/dataflow-jars");
options.setStreaming(true);
Pipeline pipeline = Pipeline.create(options);
PCollection<String> input = pipeline
.apply(PubsubIO.Read.subscription(
"projects/my-data-analysis/subscriptions/myDataflowSub"));
input.apply(ParDo.of(new DoFn<String, Void>() {
@Override
public void processElement(DoFn<String, Void>.ProcessContext c) throws Exception {
LOG.info("json" + c.element());
}
}));
String fileName = UUID.randomUUID().toString().replaceAll("-", "");
input.apply(ParDo.of(new DoFn<String, String>() {
@Override
public void processElement(DoFn<String, String>.ProcessContext c) throws Exception {
JSONObject firstJSONObject = new JSONObject(c.element());
firstJSONObject.put("a", firstJSONObject.get("a").toString()+ "1000");
c.output(firstJSONObject.toString());
}
}).named("update json")).apply(ParDo.of(new DoFn<String, TableRow>() {
@Override
public void processElement(DoFn<String, TableRow>.ProcessContext c) throws Exception {
JSONObject json = new JSONObject(c.element());
TableRow row = new TableRow().set("a", json.get("a")).set("b", json.get("b")).set("c", json.get("c"));
c.output(row);
}
}).named("convert json to table row"))
.apply(BigQueryIO.Write.to("my-data-analysis:mydataset.mytable").withSchema(tableSchema)
);
pipeline.run();
}
}
You need to specify withWriteDisposition on your BigQueryIO.Write - see documentation of the method and of its argument. Depending on your requirements, you need either WRITE_TRUNCATE or WRITE_APPEND.
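For example, here is a minimal sketch of the change to the write step from the question (WRITE_APPEND keeps the rows already in the table, which sounds like what you want for a streaming pipeline):
.apply(BigQueryIO.Write.to("my-data-analysis:mydataset.mytable")
.withSchema(tableSchema)
.withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND));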

com.google.android.voicesearch.speechservice.ConnectionException: Failed to establish connection

I am testing an app that does voice recognition, but now I am getting this error:
11-04 16:25:58.249: E/ServerConnectorImpl(13716): Failed to create TCP connection
11-04 16:25:58.249: E/ServerConnectorImpl(13716): com.google.android.voicesearch.speechservice.ConnectionException: Failed to establish connection
11-04 16:25:58.249: E/ServerConnectorImpl(13716): at com.google.android.voicesearch.tcp.TcpConnectionImpl.<init>(TcpConnectionImpl.java:87)
....
Here is my code:
sr = SpeechRecognizer.createSpeechRecognizer(getApplicationContext());
MyRecognition listener = new MyRecognition();
sr.setRecognitionListener(listener);
The MyRecognition class implements the methods from RecognitionListener:
class MyRecognition implements RecognitionListener{
public void onBeginningOfSpeech() {
}
public void onBufferReceived(byte[] buffer) {
}
public void onEndOfSpeech() {
}
public void onError(int error) {
MediaPlayer mp = new MediaPlayer();
AssetFileDescriptor asset;
try {
asset = getAssets().openFd("error.mp3");
mp.setDataSource(asset.getFileDescriptor(), asset.getStartOffset(), asset.getLength());
asset.close();
mp.prepare();
mp.start();
mp.setOnCompletionListener(AddActivity.this);
} catch (IOException e) {
e.printStackTrace();
}
}
....
public void onResults(Bundle results) {
....
}
....
}
The method that performs the voice recognition:
private void reconheceVoz(final MediaPlayer mp){
try{
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.br.test");
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5);
sr.startListening(intent);
mp.release();
}
catch(Exception e){
Toast.makeText(AdicaoActivity.this, "Erro: " + e.getMessage(), Toast.LENGTH_SHORT).show();
}
}
Does this error occur often? How should I handle it?
Thanks.

ActiveMQ and JMS : Basic steps for novice

Hi all, please give some basics about ActiveMQ with JMS for a novice, including the configuration steps.
We are going to create a console-based application that uses multithreading, so create a Java project for a console application.
Now follow these steps:
Add javax.jms.jar, activemq-all-5.3.0.jar, and log4j-1.2.15.jar to your project library.
(You can download all of the above jar files from http://www.jarfinder.com/ .)
Create a file named jndi.properties and paste in the following text. (For details on jndi.properties, just Google it.)
# START SNIPPET: jndi
java.naming.factory.initial = org.apache.activemq.jndi.ActiveMQInitialContextFactory
# use the following property to configure the default connector
java.naming.provider.url = tcp://localhost:61616
# use the following property to specify the JNDI name the connection factory
# should appear as.
#connectionFactoryNames = connectionFactory, queueConnectionFactory, topicConnectionFactry
connectionFactoryNames = connectionFactory, queueConnectionFactory, topicConnectionFactry
# register some queues in JNDI using the form
# queue.[jndiName] = [physicalName]
queue.MyQueue = example.MyQueue
# register some topics in JNDI using the form
# topic.[jndiName] = [physicalName]
topic.MyTopic = example.MyTopic
# END SNIPPET: jndi
Add JMSConsumer.java
import javax.jms.*;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class JMSConsumer implements Runnable{
private static final Log LOG = LogFactory.getLog(JMSConsumer.class);
public void run() {
Context jndiContext = null;
ConnectionFactory connectionFactory = null;
Connection connection = null;
Session session = null;
MessageConsumer consumer = null;
Destination destination = null;
String sourceName = null;
final int numMsgs;
sourceName= "MyQueue";
numMsgs = 1;
LOG.info("Source name is " + sourceName);
/*
* Create a JNDI API InitialContext object
*/
try {
jndiContext = new InitialContext();
} catch (NamingException e) {
LOG.info("Could not create JNDI API context: " + e.toString());
System.exit(1);
}
/*
* Look up connection factory and destination.
*/
try {
connectionFactory = (ConnectionFactory)jndiContext.lookup("queueConnectionFactory");
destination = (Destination)jndiContext.lookup(sourceName);
} catch (NamingException e) {
LOG.info("JNDI API lookup failed: " + e);
System.exit(1);
}
try {
connection = connectionFactory.createConnection();
session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
consumer = session.createConsumer(destination);
connection.start();
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
MessageListener listener = new MyQueueMessageListener();
consumer.setMessageListener(listener );
//Let the thread run for some time so that the Consumer has sufficient time to consume the message
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (JMSException e) {
LOG.info("Exception occurred: " + e);
} finally {
if (connection != null) {
try {
connection.close();
} catch (JMSException e) {
}
}
}
}
}
Add JMSProducer.java
import javax.jms.*;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class JMSProducer implements Runnable{
private static final Log LOG = LogFactory.getLog(JMSProducer.class);
public JMSProducer() {
}
//Run method implemented to run this as a thread.
public void run(){
Context jndiContext = null;
ConnectionFactory connectionFactory = null;
Connection connection = null;
Session session = null;
Destination destination = null;
MessageProducer producer = null;
String destinationName = null;
final int numMsgs;
destinationName = "MyQueue";
numMsgs = 5;
LOG.info("Destination name is " + destinationName);
/*
* Create a JNDI API InitialContext object
*/
try {
jndiContext = new InitialContext();
} catch (NamingException e) {
LOG.info("Could not create JNDI API context: " + e.toString());
System.exit(1);
}
/*
* Look up connection factory and destination.
*/
try {
connectionFactory = (ConnectionFactory)jndiContext.lookup("queueConnectionFactory");
destination = (Destination)jndiContext.lookup(destinationName);
} catch (NamingException e) {
LOG.info("JNDI API lookup failed: " + e);
System.exit(1);
}
/*
* Create connection. Create session from connection; false means
* session is not transacted. Create the producer, set the text message, set the correlation id, and send the message.
*/
try {
connection = connectionFactory.createConnection();
session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
producer = session.createProducer(destination);
TextMessage message = session.createTextMessage();
for (int i = 0; i < numMsgs; i++) {
message.setText("This is message " + (i + 1));
message.setJMSCorrelationID("MyCorrelationID_" + i);
LOG.info("Sending message: " + message.getText());
producer.send(message);
}
} catch (JMSException e) {
LOG.info("Exception occurred: " + e);
} finally {
if (connection != null) {
try {
connection.close();
} catch (JMSException e) {
}
}
}
}
}
Add MyQueueMessageListener.java
import java.io.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.jms.*;
public class MyQueueMessageListener implements MessageListener {
private static final Log LOG = LogFactory.getLog(MyQueueMessageListener.class);
/**
*
*/
public MyQueueMessageListener() {
// TODO Auto-generated constructor stub
}
/** (non-Javadoc)
* @see javax.jms.MessageListener#onMessage(javax.jms.Message)
* This is called on receiving a text message.
*/
public void onMessage(Message arg0) {
LOG.info("onMessage() called!");
if(arg0 instanceof TextMessage){
try {
//Print it out
System.out.println("Recieved message in listener: " + ((TextMessage)arg0).getText());
System.out.println("Co-Rel Id: " + ((TextMessage)arg0).getJMSCorrelationID());
try {
//Log it to a file
BufferedWriter outFile = new BufferedWriter(new FileWriter("MyQueueConsumer.txt"));
outFile.write("Recieved message in listener: " + ((TextMessage)arg0).getText());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} catch (JMSException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}else{
System.out.println("~~~~Listener : Error in message format~~~~");
}
}
}
Add SimpleApp.java
public class SimpleApp {
//Run the producer first, then the consumer
public static void main(String[] args) throws Exception {
runInNewthread(new JMSProducer());
runInNewthread(new JMSConsumer());
}
public static void runInNewthread(Runnable runnable) {
Thread brokerThread = new Thread(runnable);
brokerThread.setDaemon(false);
brokerThread.start();
}
}
Now run the SimpleApp.java class.
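Note that the producer and consumer expect a broker listening on tcp://localhost:61616 (the java.naming.provider.url configured above). If you do not have a standalone ActiveMQ broker running, one option, which is my addition and not part of the original steps, is to start an embedded broker in the same JVM before starting the two threads:
// Embedded broker sketch; BrokerService comes from org.apache.activemq.broker.
BrokerService broker = new BrokerService();
broker.setPersistent(false); // in-memory only, fine for a demo
broker.addConnector("tcp://localhost:61616"); // must match jndi.properties
broker.start();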
All the best. Happy coding.
Here is a simple JUnit test for ActiveMQ and Apache Camel. These two technologies work very well together.
If you want more details about the code, you can find a post on my blog:
http://ignaciosuay.com/unit-testing-active-mq/
public class ActiveMQTest extends CamelTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
camelContext.addComponent("activemq", jmsComponentClientAcknowledge(connectionFactory));
return camelContext;
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("mina:tcp://localhost:6666?textline=true&sync=false")
.to("activemq:processHL7");
from("activemq:processHL7")
.to("mock:end");
}
};
}
@Test
public void testSendHL7Message() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:end");
String m = "MSH|^~\\&|hl7Integration|hl7Integration|||||ADT^A01|||2.5|\r" +
"EVN|A01|20130617154644\r" +
"PID|1|465 306 5961||407623|Wood^Patrick^^^MR||19700101|1|\r" +
"PV1|1||Location||||||||||||||||261938_6_201306171546|||||||||||||||||||||||||20130617134644|";
mock.expectedBodiesReceived(m);
template.sendBody("mina:tcp://localhost:6666?textline=true&sync=false", m);
mock.assertIsSatisfied();
}
}
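One detail worth mentioning: jmsComponentClientAcknowledge(connectionFactory) is a static factory helper; if I remember the Camel API correctly it lives in org.apache.camel.component.jms.JmsComponent, so the test class needs the matching static import (import static org.apache.camel.component.jms.JmsComponent.jmsComponentClientAcknowledge;).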

can not run multiple jobs with quartz

Hi, I am trying to run two jobs using the Spring Batch framework.
My problem is that SimpleJobLauncher is running only one job, the last one in the jobs list.
Here is what I am doing:
I have two jobs in my database, along with the steps for those jobs.
I read the job data from the database and process it as follows:
public class BatchJobScheduler {
private static Log sLog = LogFactory.getLog(BatchJobScheduler.class);
private ApplicationContext ac;
private DataSourceTransactionManager mTransactionManager;
private SimpleJobLauncher mJobLauncher;
private JobRepository mJobRepository;
private SimpleStepFactoryBean stepFactory;
private MapJobRegistry mapJobRegistry;
private JobDetailBean jobDetail;
private CronTriggerBean cronTrigger;
private SimpleJob job;
private SchedulerFactoryBean schedulerFactory;
private static String mDriverClass;
private static String mConnectionUrl;
private static String mUser;
private static String mPassword;
public static JobMetaDataFeeder metadataFeeder;
static {
try {
loadProperties();
metadataFeeder = new JobMetaDataFeeder();
metadataFeeder.configureDataSource(mDriverClass, mConnectionUrl,
mUser, mPassword);
} catch (FileNotFoundException e) {
} catch (IOException e) {
} catch (SQLException e) {
} catch (ClassNotFoundException e) {
}
}
private static void loadProperties() throws FileNotFoundException,
IOException {
Properties properties = new Properties();
InputStream is;
if (BatchJobScheduler.class.getClassLoader() != null) {
is = BatchJobScheduler.class.getClassLoader().getResourceAsStream(
"batch.properties");
} else {
is = System.class.getClassLoader().getResourceAsStream(
"batch.properties");
}
properties.load(is);
mDriverClass = properties.getProperty("batch.jdbc.driver");
mConnectionUrl = properties.getProperty("batch.jdbc.url");
mUser = properties.getProperty("batch.jdbc.user");
mPassword = properties.getProperty("batch.jdbc.password");
}
public void start(WebApplicationContext wac) throws Exception {
try {
ac = new FileSystemXmlApplicationContext("batch-spring.xml");
mTransactionManager = (DataSourceTransactionManager) ac
.getBean("mTransactionManager");
mJobLauncher = (SimpleJobLauncher) ac.getBean("mJobLauncher");
mJobRepository = (JobRepository) ac.getBean("mRepositoryFactory");
mJobLauncher.afterPropertiesSet();
List<JobMetadata> jobsMetaData = getJobsData(mDriverClass,
mConnectionUrl, mUser, mPassword, null);
createAndRunScheduler(jobsMetaData);
} catch (Exception e) {
e.printStackTrace();
sLog.error("Exception while starting job", e);
}
}
@SuppressWarnings("unchecked")
public List<CronTriggerBean> getJobTriggers(List<JobMetadata> jobsMetaData)
throws Exception {
List<CronTriggerBean> triggers = new ArrayList<CronTriggerBean>();
for (JobMetadata jobMetadata : jobsMetaData) {
job = (SimpleJob) ac.getBean("job");
job.setName(jobMetadata.getJobName());
ArrayList<Step> steps = new ArrayList<Step>();
for (StepMetadata stepMetadata : jobMetadata.getSteps()) {
// System.err.println(ac.getBean("stepFactory").getClass());
stepFactory = new SimpleStepFactoryBean<String, Object>();
stepFactory.setTransactionManager(mTransactionManager);
stepFactory.setJobRepository(mJobRepository);
stepFactory.setCommitInterval(stepMetadata.getCommitInterval());
stepFactory.setStartLimit(stepMetadata.getStartLimit());
T5CItemReader itemReader = (T5CItemReader) BeanUtils
.instantiateClass(Class.forName(stepMetadata
.getStepReaderClass()));
itemReader
.setItems(getItemList(jobMetadata.getJobParameters()));
stepFactory.setItemReader(itemReader);
stepFactory.setItemProcessor((ItemProcessor) BeanUtils
.instantiateClass(Class.forName(stepMetadata
.getStepProcessorClass())));
stepFactory.setItemWriter((ItemWriter) BeanUtils
.instantiateClass(Class.forName(stepMetadata
.getStepWriterClass())));
stepFactory.setBeanName(stepMetadata.getStepName());
steps.add((Step) stepFactory.getObject());
}
job.setSteps(steps);
ReferenceJobFactory jobFactory = new ReferenceJobFactory(job);
mapJobRegistry = (MapJobRegistry) ac.getBean("jobRegistry");
mapJobRegistry.register(jobFactory);
jobDetail = (JobDetailBean) ac.getBean("jobDetail");
jobDetail.setJobClass(Class.forName(jobMetadata.getMJoblauncher()));
jobDetail.setGroup(jobMetadata.getJobGroupName());
jobDetail.setName(jobMetadata.getJobName());
Map<String, Object> jobDataMap = new HashMap<String, Object>();
jobDataMap.put("jobName", jobMetadata.getJobName());
jobDataMap.put("jobLocator", mapJobRegistry);
jobDataMap.put("jobLauncher", mJobLauncher);
jobDataMap.put("timestamp", new Date());
// jobDataMap.put("jobParams", jobMetadata.getJobParameters());
jobDetail.setJobDataAsMap(jobDataMap);
jobDetail.afterPropertiesSet();
cronTrigger = (CronTriggerBean) ac.getBean("cronTrigger");
cronTrigger.setJobDetail(jobDetail);
cronTrigger.setJobName(jobMetadata.getJobName());
cronTrigger.setJobGroup(jobMetadata.getJobGroupName());
cronTrigger.setCronExpression(jobMetadata.getCronExpression());
triggers.add(cronTrigger);
}
return triggers;
}
private void createAndRunScheduler(List<JobMetadata> jobsMetaData)
throws Exception {
// System.err.println(ac.getBean("schedulerFactory").getClass());
schedulerFactory = new SchedulerFactoryBean();
List<CronTriggerBean> triggerList = getJobTriggers(jobsMetaData);
Trigger[] triggers = new Trigger[triggerList.size()];
int triggerCount = 0;
for (CronTriggerBean trigger : triggerList) {
triggers[triggerCount] = trigger;
triggerCount++;
}
schedulerFactory.setTriggers(triggers);
schedulerFactory.afterPropertiesSet();
}
private List<JobMetadata> getJobsData(String driverClass,
String connectionURL, String user, String password, String query)
throws SQLException, ClassNotFoundException {
metadataFeeder.createJobMetadata(query);
return metadataFeeder.getJobsMetadata();
}
private List<String> getItemList(String jobParameterString) {
List<String> itemList = new ArrayList<String>();
String[] parameters = jobParameterString.split(";");
for (String string : parameters) {
String[] mapKeyValue = string.split("=");
if (mapKeyValue.length == 2) {
itemList.add(mapKeyValue[0] + ":" + mapKeyValue[1]);
} else {
// exception for invalid job parameters
System.out.println("exception for invalid job parameters");
}
}
return itemList;
}
private Map<String, Object> getParameterMap(String jobParameterString) {
Map<String, Object> parameterMap = new HashMap<String, Object>();
String[] parameters = jobParameterString.split(";");
for (String string : parameters) {
String[] mapKeyValue = string.split("=");
if (mapKeyValue.length == 2) {
parameterMap.put(mapKeyValue[0], mapKeyValue[1]);
} else {
// exception for invalid job parameters
System.out.println("exception for invalid job parameters");
}
}
return parameterMap;
}
}
public class MailJobLauncher extends QuartzJobBean {
/**
* Special key in job data map for the name of a job to run.
*/
static final String JOB_NAME = "jobName";
private static Log sLog = LogFactory.getLog(MailJobLauncher.class);
private JobLocator mJobLocator;
private JobLauncher mJobLauncher;
/**
* Public setter for the {@link JobLocator}.
*
* @param jobLocator
* the {@link JobLocator} to set
*/
public void setJobLocator(JobLocator jobLocator) {
this.mJobLocator = jobLocator;
}
/**
* Public setter for the {@link JobLauncher}.
*
* @param jobLauncher
* the {@link JobLauncher} to set
*/
public void setJobLauncher(JobLauncher jobLauncher) {
this.mJobLauncher = jobLauncher;
}
@Override
@SuppressWarnings("unchecked")
protected void executeInternal(JobExecutionContext context) {
Map<String, Object> jobDataMap = context.getMergedJobDataMap();
executeRecursive(jobDataMap);
}
private void executeRecursive(Map<String, Object> jobDataMap) {
String jobName = (String) jobDataMap.get(JOB_NAME);
JobParameters jobParameters = getJobParametersFromJobMap(jobDataMap);
sLog.info("Quartz trigger firing with Spring Batch jobName=" + jobName
+ jobDataMap + jobParameters);
try {
mJobLauncher.run(mJobLocator.getJob(jobName), jobParameters);
} catch (JobInstanceAlreadyCompleteException e) {
jobDataMap.remove("timestamp");
jobDataMap.put("timestamp", new Date());
executeRecursive(jobDataMap);
} catch (NoSuchJobException e) {
sLog.error("Could not find job.", e);
} catch (JobExecutionException e) {
sLog.error("Could not execute job.", e);
}
}
/*
* Copy parameters that are of the correct type over to {@link
* JobParameters}, ignoring jobName.
* @return a {@link JobParameters} instance
*/
private JobParameters getJobParametersFromJobMap(
Map<String, Object> jobDataMap) {
JobParametersBuilder builder = new JobParametersBuilder();
for (Entry<String, Object> entry : jobDataMap.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (value instanceof String && !key.equals(JOB_NAME)) {
builder.addString(key, (String) value);
} else if (value instanceof Float || value instanceof Double) {
builder.addDouble(key, ((Number) value).doubleValue());
} else if (value instanceof Integer || value instanceof Long) {
builder.addLong(key, ((Number) value).longValue());
} else if (value instanceof Date) {
builder.addDate(key, (Date) value);
} else {
sLog.debug("JobDataMap contains values which are not job parameters (ignoring).");
}
}
return builder.toJobParameters();
}
}
I couldn't figure out why the launcher is ignoring all the other jobs. Please help me.
Regards
Make sure these properties are set:
org.quartz.threadPool.class=org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount=3
org.quartz.threadPool.threadPriority=5
This will allow a few jobs to run at the same time. Adjust the settings as needed.
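These are standard Quartz settings and normally live in a quartz.properties file on the classpath. Since you build the SchedulerFactoryBean programmatically, another way to apply them, sketched here against your createAndRunScheduler method and untested against your code, is to set them directly:
Properties quartzProperties = new Properties();
quartzProperties.setProperty("org.quartz.threadPool.class", "org.quartz.simpl.SimpleThreadPool");
quartzProperties.setProperty("org.quartz.threadPool.threadCount", "3");
quartzProperties.setProperty("org.quartz.threadPool.threadPriority", "5");
schedulerFactory.setQuartzProperties(quartzProperties);
schedulerFactory.setTriggers(triggers);
schedulerFactory.afterPropertiesSet();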