1 request exchange, 1 reply exchange, N responses - RabbitMQ

I want to push out one request and wait X milliseconds for N responses, matched on a correlation ID in the headers.
My current code is pretty simple: send a call, then start polling indiscriminately. That works ... for one call.
I know there is talk of a JMS solution ("JMSReader?") that spawns N listeners watching for the correlation ID and lets those futures time out, but I am not finding anything remotely related.

Here is a demo app that shows one way to do it...
@SpringBootApplication
public class So57377491Application {

    public static void main(String[] args) {
        SpringApplication.run(So57377491Application.class, args);
    }

    private final ConcurrentMap<String, List<String>> pending = new ConcurrentHashMap<>();

    private final ConcurrentMap<String, SettableListenableFuture<List<String>>> futures = new ConcurrentHashMap<>();

    @Bean
    public ApplicationRunner runner(RabbitTemplate template) {
        return args -> {
            // register the correlation ID before sending so no reply can race us
            this.pending.put("bar", new ArrayList<>());
            this.futures.put("bar", new SettableListenableFuture<>());
            template.convertAndSend("so57377491", "", "Foo", msg -> {
                msg.getMessageProperties().setCorrelationId("bar");
                msg.getMessageProperties().setReplyTo("replyExchange/so57377491-replies");
                return msg;
            });
            try {
                List<String> list = this.futures.get("bar").get(5, TimeUnit.SECONDS);
                System.out.println(list);
            }
            catch (TimeoutException toe) {
                System.out.println("Partial result after timeout " + this.pending.remove("bar"));
            }
            finally {
                this.futures.remove("bar");
            }
        };
    }

    @RabbitListener(bindings = @QueueBinding(
            value = @Queue(value = "so57377491-1"),
            exchange = @Exchange(value = "so57377491", type = "fanout")))
    public String listen1(String in) {
        System.out.println(in);
        return in.toUpperCase();
    }

    @RabbitListener(bindings = @QueueBinding(
            value = @Queue(value = "so57377491-2"),
            exchange = @Exchange(value = "so57377491", type = "fanout")))
    public String listen2(String in) {
        System.out.println(in);
        return in.toLowerCase();
    }

    @RabbitListener(bindings = @QueueBinding(
            value = @Queue(value = "so57377491-3"),
            exchange = @Exchange(value = "so57377491", type = "fanout")))
    public String listen3(String in) {
        System.out.println(in);
        return in + in;
    }

    @RabbitListener(bindings = @QueueBinding(
            value = @Queue(value = "so57377491-replies"),
            exchange = @Exchange(value = "replyExchange", type = "fanout")))
    public void replies(String in, @Header(AmqpHeaders.CORRELATION_ID) String correlationId) {
        System.out.println(in);
        List<String> list = this.pending.get(correlationId);
        if (list == null) {
            System.out.println("Late reply for " + correlationId);
        }
        else {
            list.add(in);
            if (list.size() == 3) { // all N (= 3) replies are in
                this.futures.get(correlationId).set(list);
                this.pending.remove(correlationId);
            }
        }
    }

}
Result
Foo
Foo
Foo
foo
FOO
FooFoo
[foo, FOO, FooFoo]
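To generalize the demo beyond the hard-coded "bar" correlation ID and the literal reply count of 3, the send-and-gather logic can be pulled into a helper method. A minimal sketch, assuming the same pending/futures maps and exchange names as the demo above; scatterGather and expectedReplies are illustrative names, not part of the original:

private final ConcurrentMap<String, Integer> expectedReplies = new ConcurrentHashMap<>();

public List<String> scatterGather(RabbitTemplate template, String payload,
        int replyCount, long timeoutMillis) throws Exception {
    String correlationId = UUID.randomUUID().toString();
    this.pending.put(correlationId, new ArrayList<>());
    this.expectedReplies.put(correlationId, replyCount);
    SettableListenableFuture<List<String>> future = new SettableListenableFuture<>();
    this.futures.put(correlationId, future);
    template.convertAndSend("so57377491", "", payload, msg -> {
        msg.getMessageProperties().setCorrelationId(correlationId);
        msg.getMessageProperties().setReplyTo("replyExchange/so57377491-replies");
        return msg;
    });
    try {
        return future.get(timeoutMillis, TimeUnit.MILLISECONDS); // all N replies arrived
    }
    catch (TimeoutException toe) {
        return this.pending.remove(correlationId); // partial result after X ms
    }
    finally {
        this.futures.remove(correlationId);
        this.expectedReplies.remove(correlationId);
    }
}

The replies() listener would then compare list.size() against expectedReplies.get(correlationId) instead of the literal 3, completing the future once all expected replies have arrived.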

Related

Netty: a different channel pool per group, and exception handling

I am new to Netty and I have a problem with my Netty program.
In the initConnection method I want to make a different channel pool for each group: when user group A comes into sendToMessage I want to create channelPool A; likewise, when user group B comes in I want to create channelPool B; and the next time group A comes in again, I want to get channelPool A back. Is it right to try doing this? Is it possible? (One way is sketched after the code below.)
FixedChannelPool error handling
Also, how can I handle FixedChannelPool errors? Can I use acquireTimeoutMillis for that, and if so, how?
Here is my code
@Service
public class NettyPoolService {

    public static final AttributeKey<CompletableFuture<String>> FUTURE = AttributeKey.valueOf("future");

    private static final StringDecoder stringDecoder = new StringDecoder(CharsetUtil.UTF_8);
    private static final StringEncoder stringEncoder = new StringEncoder(CharsetUtil.UTF_8);

    private static ChannelPool channelPool;
    private static EventLoopGroup eventLoopGroup;

    @Value("${host}")
    private String host;

    @Value("${port}")
    private String port;

    @Value("${connection.count}")
    private String numberOfConnections;

    @Value("${thread.count}")
    private String numberOfThreads;

    private synchronized void initConnection(String host, int port, int numberOfThreads, int numberOfConnections, String userGroup) {
        if ((channelPool != null) && (eventLoopGroup != null)) {
            return;
        }
        System.out.println("#############################################");
        System.out.println("initConnection start");
        eventLoopGroup = new NioEventLoopGroup(numberOfThreads);
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT);
        bootstrap.option(ChannelOption.SO_KEEPALIVE, true);
        //bootstrap.option(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 32 * 1024);
        //bootstrap.option(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 8 * 1024);
        //bootstrap.option(ChannelOption.TCP_NODELAY, true);
        bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class).remoteAddress(host, port);
        int acquireTimeoutMillis = 10000;
        int maxPendingAcquires = Integer.MAX_VALUE;
        channelPool = new FixedChannelPool(bootstrap,
                new AbstractChannelPoolHandler() {
                    public void channelCreated(Channel ch) throws Exception {
                        ChannelPipeline pipeline = ch.pipeline();
                        // decoders
                        pipeline.addLast("framer", new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
                        pipeline.addLast("stringDecoder", stringDecoder);
                        // encoders
                        pipeline.addLast("stringEncoder", stringEncoder);
                        // business logic handler
                        pipeline.addLast("clientHandler", new ClientPoolHandler(channelPool));
                    }
                },
                ChannelHealthChecker.ACTIVE,  // health check on acquire
                AcquireTimeoutAction.NEW,     // on acquire timeout, open a new connection
                acquireTimeoutMillis,         // acquire timeout (ms)
                numberOfConnections,          // max connections
                maxPendingAcquires);          // max pending acquires
        System.out.println("initConnection End");
        System.out.println("#############################################");
    } // initConnection

    public void sendToMessage(String message, String GroupId) {
        System.out.println("=============GroupId=============:" + GroupId);
        if (channelPool == null) {
            // note: initConnection takes five parameters, so the group must be passed too
            initConnection(host, Integer.parseInt(port.trim()), Integer.parseInt(numberOfThreads.trim()), Integer.parseInt(numberOfConnections.trim()), GroupId);
        }
        final CompletableFuture<String> future = new CompletableFuture<String>();
        Future<Channel> channelFuture = channelPool.acquire();
        System.out.println("=============channelFuture.get()=============:" + channelFuture.toString());
        channelFuture.addListener(new FutureListener<Channel>() {
            public void operationComplete(Future<Channel> f) {
                if (f.isSuccess()) {
                    Channel channel = f.getNow();
                    channel.attr(NettyPoolClientService.FUTURE).set(future);
                    channel.writeAndFlush(message, channel.voidPromise());
                }
            }
        });
        channelFuture.syncUninterruptibly();
    } // sendToMessage

}
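To get a separate pool per user group, Netty 4.x ships a ChannelPoolMap abstraction that lazily creates and caches one pool per key, which matches the "group A gets pool A back" requirement. A minimal sketch, assuming a Bootstrap configured as in initConnection above; poolMap and connectionsPerGroup are illustrative names, not part of the original:

private final ChannelPoolMap<String, FixedChannelPool> poolMap =
        new AbstractChannelPoolMap<String, FixedChannelPool>() {
            @Override
            protected FixedChannelPool newPool(String groupId) {
                // one FixedChannelPool per group key, created on first get(groupId)
                return new FixedChannelPool(bootstrap, new AbstractChannelPoolHandler() {
                    @Override
                    public void channelCreated(Channel ch) {
                        // same pipeline setup as in initConnection
                    }
                }, ChannelHealthChecker.ACTIVE, AcquireTimeoutAction.FAIL,
                10000, connectionsPerGroup, Integer.MAX_VALUE);
            }
        };

public void sendToMessage(String message, String groupId) {
    Future<Channel> acquired = poolMap.get(groupId).acquire();
    acquired.addListener((FutureListener<Channel>) f -> {
        if (f.isSuccess()) {
            Channel channel = f.getNow();
            channel.writeAndFlush(message, channel.voidPromise());
            // release the channel back to this group's pool when done
        } else {
            // with AcquireTimeoutAction.FAIL the acquire future fails (e.g. with a
            // TimeoutException) once acquireTimeoutMillis elapses
            f.cause().printStackTrace();
        }
    });
}

On the error-handling sub-question: with AcquireTimeoutAction.FAIL the acquire() future fails after acquireTimeoutMillis and f.cause() carries the exception, whereas AcquireTimeoutAction.NEW (as in the question's code) opens a new connection instead of failing, so the failure branch of the listener is rarely taken.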

How to use paging in the new Facebook Graph API v4

I have tried the code below, but I am getting an error. How can I use paging in v4 of the Facebook SDK after/within login?
// Callback registration
loginButton.registerCallback(callbackManager, new FacebookCallback<LoginResult>() {
    @Override
    public void onSuccess(LoginResult loginResult) {
        GraphRequest request2 = GraphRequest.newMeRequest(
                loginResult.getAccessToken(),
                new GraphRequest.GraphJSONObjectCallback() {
                    @Override
                    public void onCompleted(JSONObject object, GraphResponse response) {
                        JSONObject uu = response.getJSONObject();
                        if (uu != null) {
                            Log.w(TAG, "response: " + response.toString());
                        }
                        GraphRequest nextPageRequest = response.getRequestForPagedResults(GraphResponse.PagingDirection.NEXT);
                        if (nextPageRequest != null) {
                            nextPageRequest.setCallback(new GraphRequest.Callback() {
                                @Override
                                public void onCompleted(GraphResponse response) {
                                }
                            });
                            nextPageRequest.executeAsync();
                        }
                    }
                });
        Bundle parameters2 = new Bundle();
        parameters2.putString("fields", "likes");
        parameters2.putString("limit", "999");
        request2.setParameters(parameters2);
        request2.executeAsync();
    }
});
I use this code within my User class. Every time a new user signs up using Facebook, this gets executed by calling get_user_data() within the User constructor.
For the below example to work for you too, you'll need an access token with the following permissions:
user_friends
email
Constants:
private ArrayList<HashMap> user_fb_friends;
private final String FIELDS = "id,first_name,last_name,name,gender,locale,timezone,updated_time,email,link,picture,friends{id,first_name,last_name,name,gender,updated_time,link,picture}";
private final String USER_FB_ID_TAG = "id";
private final String F_NAME_TAG = "first_name";
private final String L_NAME_TAG = "last_name";
private final String FULL_NAME_TAG = "name";
private final String GENDER_TAG = "gender";
private final String LOCALE_TAG = "locale";
private final String TIMEZONE_TAG = "timezone";
private final String UPDATED_TIME_TAG = "updated_time";
private final String EMAIL_TAG = "email";
private final String LINK_TAG = "link";
private final String PICTURE_TAG = "picture";
private final String DATA_TAG = "data";
private final String IS_SILHOUETTE_TAG = "is_silhouette";
private final String URL_TAG = "url";
private final String FRIENDS_TAG = "friends";
private final String PAGING_TAG = "paging";
private final String NEXT_TAG = "next";
private final String SUMMARY_TAG = "summary";
private final String TOTAL_COUNT_TAG = "total_count";
Actual code:
(the following code includes local setters)
private void get_user_data() {
    GraphRequest request = GraphRequest.newMeRequest(
            Signup_fragment.mAccessToken,
            new GraphRequest.GraphJSONObjectCallback() {
                @Override
                public void onCompleted(JSONObject object, GraphResponse response) {
                    JSONObject json = response.getJSONObject();
                    if (json != null) {
                        try {
                            user_fb_friends = new ArrayList<>();
                            /*
                             * Start parsing the JSON
                             * 1. Parse the user's personal details and save them on the new User class
                             */
                            setUser_fb_id(json.getString(USER_FB_ID_TAG));
                            setF_name(json.getString(F_NAME_TAG));
                            setL_name(json.getString(L_NAME_TAG));
                            setFull_name(json.getString(FULL_NAME_TAG));
                            setGender(json.getString(GENDER_TAG));
                            setLocale(json.getString(LOCALE_TAG));
                            setTimezone(json.getInt(TIMEZONE_TAG));
                            setUpdated_time((Date) json.get(UPDATED_TIME_TAG));
                            setEmail(json.getString(EMAIL_TAG));
                            setFb_profile_link(json.getString(LINK_TAG));
                            Utils.log("User personal data was collected (" + getFull_name() + ")");
                            JSONObject pic_wrapper = json.getJSONObject(PICTURE_TAG);
                            JSONObject pic_data = pic_wrapper.getJSONObject(DATA_TAG);
                            if (!pic_data.getBoolean(IS_SILHOUETTE_TAG)) {
                                setFb_profile_pic_link(pic_data.getString(URL_TAG));
                            }
                            /*
                             * 2. Go over the JSON array of friends, parse and save each friend
                             * in a HashMap object and store it in the user_fb_friends array
                             */
                            JSONObject friends_wrapper = json.getJSONObject(FRIENDS_TAG);
                            JSONArray friends_json_array = friends_wrapper.getJSONArray(DATA_TAG);
                            if (friends_json_array.length() > 0) {
                                for (int i = 0; i < friends_json_array.length(); i++) {
                                    HashMap<String, String> friend_hashmap = new HashMap<String, String>();
                                    JSONObject friend_json = friends_json_array.getJSONObject(i);
                                    friend_hashmap.put(USER_FB_ID_TAG, friend_json.getString(USER_FB_ID_TAG));
                                    friend_hashmap.put(F_NAME_TAG, friend_json.getString(F_NAME_TAG));
                                    friend_hashmap.put(L_NAME_TAG, friend_json.getString(L_NAME_TAG));
                                    friend_hashmap.put(FULL_NAME_TAG, friend_json.getString(FULL_NAME_TAG));
                                    friend_hashmap.put(GENDER_TAG, friend_json.getString(GENDER_TAG));
                                    friend_hashmap.put(UPDATED_TIME_TAG, friend_json.getString(UPDATED_TIME_TAG));
                                    friend_hashmap.put(LINK_TAG, friend_json.getString(LINK_TAG));
                                    // the picture must come from the friend's own JSON, not the top-level object
                                    JSONObject friend_pic_wrapper = friend_json.getJSONObject(PICTURE_TAG);
                                    JSONObject friend_pic_data = friend_pic_wrapper.getJSONObject(DATA_TAG);
                                    if (!friend_pic_data.getBoolean(IS_SILHOUETTE_TAG)) {
                                        friend_hashmap.put(URL_TAG, friend_pic_data.getString(URL_TAG));
                                    }
                                    user_fb_friends.add(friend_hashmap);
                                    Utils.log("A friend was added to user_fb_friends (" + i + ")");
                                }
                                /*
                                 * 3. Get the URL for the next "friends" JSONObject and send
                                 * a GET request
                                 */
                                JSONObject paging_wrapper = json.getJSONObject(PAGING_TAG);
                                String next_friends_json_url = null;
                                if (paging_wrapper.getString(NEXT_TAG) != null) {
                                    next_friends_json_url = paging_wrapper.getString(NEXT_TAG);
                                }
                                JSONObject summary_wrapper = json.getJSONObject(SUMMARY_TAG);
                                int total_friends_count = summary_wrapper.getInt(TOTAL_COUNT_TAG);
                                if (next_friends_json_url != null) {
                                    // Send a GET request for the next JSONObject
                                    get_paging_data(response);
                                }
                            } else {
                                Utils.log("friends_json_array == null");
                            }
                        } catch (JSONException e) {
                            e.printStackTrace();
                        }
                    }
                }
            });
    Bundle parameters = new Bundle();
    parameters.putString("fields", FIELDS);
    request.setParameters(parameters);
    request.executeAsync();
}
private void get_paging_data(GraphResponse response) {
    GraphRequest nextRequest = response.getRequestForPagedResults(GraphResponse.PagingDirection.NEXT);
    Utils.log("get_paging_data was called");
    nextRequest.setCallback(new GraphRequest.Callback() {
        @Override
        public void onCompleted(GraphResponse response) {
            JSONObject json = response.getJSONObject();
            if (json != null) {
                try {
                    JSONArray friends_json_array = json.getJSONArray(DATA_TAG);
                    for (int i = 0; i < friends_json_array.length(); i++) {
                        HashMap<String, String> friend_hashmap = new HashMap<String, String>();
                        JSONObject friend_json = friends_json_array.getJSONObject(i);
                        friend_hashmap.put(USER_FB_ID_TAG, friend_json.getString(USER_FB_ID_TAG));
                        friend_hashmap.put(F_NAME_TAG, friend_json.getString(F_NAME_TAG));
                        friend_hashmap.put(L_NAME_TAG, friend_json.getString(L_NAME_TAG));
                        friend_hashmap.put(FULL_NAME_TAG, friend_json.getString(FULL_NAME_TAG));
                        friend_hashmap.put(GENDER_TAG, friend_json.getString(GENDER_TAG));
                        friend_hashmap.put(UPDATED_TIME_TAG, friend_json.getString(UPDATED_TIME_TAG));
                        friend_hashmap.put(LINK_TAG, friend_json.getString(LINK_TAG));
                        // again, read the picture from the friend's own JSON
                        JSONObject friend_pic_wrapper = friend_json.getJSONObject(PICTURE_TAG);
                        JSONObject friend_pic_data = friend_pic_wrapper.getJSONObject(DATA_TAG);
                        if (!friend_pic_data.getBoolean(IS_SILHOUETTE_TAG)) {
                            friend_hashmap.put(URL_TAG, friend_pic_data.getString(URL_TAG));
                        }
                        user_fb_friends.add(friend_hashmap);
                        Utils.log("A friend was added to user_fb_friends (" + i + ")");
                    }
                    JSONObject paging_wrapper = json.getJSONObject(PAGING_TAG);
                    String next_friends_json_url = null;
                    if (paging_wrapper.getString(NEXT_TAG) != null) {
                        next_friends_json_url = paging_wrapper.getString(NEXT_TAG);
                    }
                    JSONObject summary_wrapper = json.getJSONObject(SUMMARY_TAG);
                    if (next_friends_json_url != null) {
                        // 3. Send a GET request for the next JSONObject
                        get_paging_data(response);
                    }
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
        }
    });
    response = nextRequest.executeAndWait();
}
Before you go ahead and try to parse your JSON, first check what it will look like using Facebook's Graph API Explorer: https://developers.facebook.com/tools/explorer/
IMPORTANT: Only friends who have installed this app are returned in API v2.0 and higher. total_count in summary represents the total number of friends, including those who haven't installed the app.
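Stripped of the app-specific parsing, the paging pattern used above reduces to a handful of SDK calls. A minimal sketch, assuming the Facebook Android SDK v4; fetchAllPages and handlePage are illustrative names, not part of the original:

// kicks off the first /me request, then follows "next" links recursively
private void fetchAllPages(AccessToken token) {
    GraphRequest first = GraphRequest.newMeRequest(token,
            (object, response) -> handlePage(response));
    Bundle params = new Bundle();
    params.putString("fields", "friends");
    first.setParameters(params);
    first.executeAsync();
}

private void handlePage(GraphResponse response) {
    // ... consume response.getJSONObject() here ...
    GraphRequest next = response.getRequestForPagedResults(GraphResponse.PagingDirection.NEXT);
    if (next != null) {                       // null means there are no more pages
        next.setCallback(r -> handlePage(r)); // reuse the same handler for every page
        next.executeAsync();
    }
}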

VCR for ServiceStack's JsonServiceClient

The Ruby VCR library enables you to "Record your test suite's HTTP interactions and replay them during future test runs for fast, deterministic, accurate tests."
I'd like to create something similar using ServiceStack's JsonServiceClient, but I can't get it to work. My most recent failed attempt follows; I'd like either to make it work or to hear suggestions for another approach that will.
public static class Memoization
{
    public static Func<T, TResult> AsCached<T, TResult>(this Func<T, TResult> function)
    {
        var cachedResults = new Dictionary<T, TResult>();
        string filename = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\" + (typeof(TResult)).Name + ".jsv";
        var serializer = MessagePackSerializer.Create<Dictionary<T, TResult>>();
        if (cachedResults.Count == 0 && File.Exists(filename))
        {
            // load cache from file (must be opened for reading, not with FileMode.Create,
            // which would truncate the file)
            using (FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
            {
                cachedResults = serializer.Unpack(fs);
            }
        }
        return (argument) =>
        {
            TResult result;
            lock (cachedResults)
            {
                if (!cachedResults.TryGetValue(argument, out result))
                {
                    result = function(argument);
                    cachedResults.Add(argument, result);
                    // update cache file
                    using (FileStream fs = new FileStream(filename, FileMode.Create, FileAccess.Write))
                    {
                        serializer.Pack(fs, cachedResults);
                    }
                }
            }
            return result;
        };
    }
}
class MemoizeJsonClient<TResponse> : JsonServiceClient, IServiceClient, IRestClient
{
    private Func<IReturn<TResponse>, TResponse> _getCached;
    private JsonServiceClient client;

    public TResponse Get(IReturn<TResponse> request)
    {
        if (_getCached == null)
        {
            Func<IReturn<TResponse>, TResponse> func = GetImpl;
            _getCached = func.AsCached();
        }
        return _getCached(request);
    }

    private TResponse GetImpl(IReturn<TResponse> request)
    {
        return client.Get(request);
    }

    public MemoizeJsonClient(string BaseUri)
    {
        client = new JsonServiceClient(BaseUri);
    }
}
Called like this:
[Test]
public void TestReports2()
{
    string Host = "http://localhost:1337";
    string BaseUri = Host + "/";
    List<Options> testcases = new List<Options>();
    testcases.Add(new Options("Name", "20130815", "20130815"));
    foreach (Options options in testcases)
    {
        TransactionsReq transRequest = new TransactionsReq();
        transRequest.source = "Source";
        transRequest.name = new List<String>(new string[] { options.Name });
        transRequest.startDate = options.StartDate;
        transRequest.endDate = options.EndDate;
        MemoizeJsonClient<TransactionsReqResponse> client = new MemoizeJsonClient<TransactionsReqResponse>(BaseUri);
        List<Transaction> transactions;
        TransactionsReqResponse transResponse = client.Get(transRequest);
        transactions = transResponse.data;
    }
}
But I get the following error:
System.Runtime.Serialization.SerializationException occurred
HResult=-2146233076
Message=Cannot serialize type 'ServiceStack.ServiceHost.IReturn`1[ImagineServerWrapper.DTO.TransactionsReqResponse]' because it does not have any serializable fields nor properties.
Source=MsgPack
StackTrace:
at MsgPack.Serialization.SerializerBuilder`1.CreateSerializer()
InnerException:

How to access the ActiveMQ statistics plugin in .NET

I am trying to access the ActiveMQ statistics plugin information (http://activemq.apache.org/statisticsplugin.html) in C#.
This is what I have so far. I am not able to get a reply from the consumer, although I can see the count increase for the queue in the monitoring website.
public class Statistics
{
    private readonly string queueName = string.Empty;
    private readonly string queueToMonitor = string.Empty;
    private readonly IConnectionFactory connectionFactory;
    private readonly IConnection connection;
    private readonly ISession session;
    private readonly IMessageProducer producer;
    private readonly ActiveMQQueue queue;

    public Statistics(string qName, string brokerUri, string queueToMon)
    {
        this.queueName = qName;
        this.queueToMonitor = "ActiveMQ.Statistics.Destination." + queueToMon;
        this.connectionFactory = new ConnectionFactory(brokerUri);
        this.connection = connectionFactory.CreateConnection();
        this.connection.Start();
        this.session = connection.CreateSession();
        queue = new ActiveMQQueue(qName);
        producer = session.CreateProducer(queue);
    }

    public void GetStats()
    {
        try
        {
            var statusQueue = session.CreateTemporaryQueue();
            var consumer = session.CreateConsumer(statusQueue);
            ActiveMQQueue query = new ActiveMQQueue(queueToMonitor);
            var msg = session.CreateMessage();
            msg.NMSReplyTo = statusQueue;
            producer.Send(queue, msg); // note: this sends to the data queue, not the statistics destination
            var reply = (ActiveMQMapMessage)consumer.Receive();
            if (reply != null)
            {
                var test = reply.Content.ToString();
            }
        }
        catch (Exception e)
        {
            var t = e.Message + " " + e.StackTrace;
        }
    }
}
You are sending the message to the wrong queue. You need to send the message to the ActiveMQ.Statistics.Destination.QueueToMonitor destination. I re-wrote your GetStats() function to show that it works. The critical change is which destination the producer sends the message to.
public void GetStats()
{
    try
    {
        IDestination statusQueue = session.CreateTemporaryQueue();
        IMessageConsumer consumer = session.CreateConsumer(statusQueue);
        IDestination query = session.GetQueue(queueToMonitor);
        IMessage msg = session.CreateMessage();
        IMessageProducer producer = session.CreateProducer(query);
        msg.NMSReplyTo = statusQueue;
        producer.Send(msg);
        IMapMessage reply = (IMapMessage)consumer.Receive();
        if (reply != null)
        {
            IPrimitiveMap statsMap = reply.Body;
            foreach (string statKey in statsMap.Keys)
            {
                Console.WriteLine("{0} = {1}", statKey, statsMap[statKey]);
            }
        }
    }
    catch (Exception e)
    {
        var t = e.Message + " " + e.StackTrace;
    }
}

Cannot run multiple jobs with Quartz

Hi, I am trying to run two jobs using the Spring Batch framework.
My problem is that SimpleJobLauncher is running only one job, whichever is last in the jobs list.
Here is what I am doing:
I have two jobs in my database, along with the steps for each job.
I read the job data from the database and process it as follows.
public class BatchJobScheduler {

    private static Log sLog = LogFactory.getLog(BatchJobScheduler.class);

    private ApplicationContext ac;
    private DataSourceTransactionManager mTransactionManager;
    private SimpleJobLauncher mJobLauncher;
    private JobRepository mJobRepository;
    private SimpleStepFactoryBean stepFactory;
    private MapJobRegistry mapJobRegistry;
    private JobDetailBean jobDetail;
    private CronTriggerBean cronTrigger;
    private SimpleJob job;
    private SchedulerFactoryBean schedulerFactory;

    private static String mDriverClass;
    private static String mConnectionUrl;
    private static String mUser;
    private static String mPassword;
    public static JobMetaDataFeeder metadataFeeder;

    static {
        try {
            loadProperties();
            metadataFeeder = new JobMetaDataFeeder();
            metadataFeeder.configureDataSource(mDriverClass, mConnectionUrl, mUser, mPassword);
        } catch (FileNotFoundException e) {
        } catch (IOException e) {
        } catch (SQLException e) {
        } catch (ClassNotFoundException e) {
        }
    }

    private static void loadProperties() throws FileNotFoundException, IOException {
        Properties properties = new Properties();
        InputStream is;
        if (BatchJobScheduler.class.getClassLoader() != null) {
            is = BatchJobScheduler.class.getClassLoader().getResourceAsStream("batch.properties");
        } else {
            is = System.class.getClassLoader().getResourceAsStream("batch.properties");
        }
        properties.load(is);
        mDriverClass = properties.getProperty("batch.jdbc.driver");
        mConnectionUrl = properties.getProperty("batch.jdbc.url");
        mUser = properties.getProperty("batch.jdbc.user");
        mPassword = properties.getProperty("batch.jdbc.password");
    }

    public void start(WebApplicationContext wac) throws Exception {
        try {
            ac = new FileSystemXmlApplicationContext("batch-spring.xml");
            mTransactionManager = (DataSourceTransactionManager) ac.getBean("mTransactionManager");
            mJobLauncher = (SimpleJobLauncher) ac.getBean("mJobLauncher");
            mJobRepository = (JobRepository) ac.getBean("mRepositoryFactory");
            mJobLauncher.afterPropertiesSet();
            List<JobMetadata> jobsMetaData = getJobsData(mDriverClass, mConnectionUrl, mUser, mPassword, null);
            createAndRunScheduler(jobsMetaData);
        } catch (Exception e) {
            e.printStackTrace();
            sLog.error("Exception while starting job", e);
        }
    }

    @SuppressWarnings("unchecked")
    public List<CronTriggerBean> getJobTriggers(List<JobMetadata> jobsMetaData) throws Exception {
        List<CronTriggerBean> triggers = new ArrayList<CronTriggerBean>();
        for (JobMetadata jobMetadata : jobsMetaData) {
            job = (SimpleJob) ac.getBean("job");
            job.setName(jobMetadata.getJobName());
            ArrayList<Step> steps = new ArrayList<Step>();
            for (StepMetadata stepMetadata : jobMetadata.getSteps()) {
                // System.err.println(ac.getBean("stepFactory").getClass());
                stepFactory = new SimpleStepFactoryBean<String, Object>();
                stepFactory.setTransactionManager(mTransactionManager);
                stepFactory.setJobRepository(mJobRepository);
                stepFactory.setCommitInterval(stepMetadata.getCommitInterval());
                stepFactory.setStartLimit(stepMetadata.getStartLimit());
                T5CItemReader itemReader = (T5CItemReader) BeanUtils.instantiateClass(Class.forName(stepMetadata.getStepReaderClass()));
                itemReader.setItems(getItemList(jobMetadata.getJobParameters()));
                stepFactory.setItemReader(itemReader);
                stepFactory.setItemProcessor((ItemProcessor) BeanUtils.instantiateClass(Class.forName(stepMetadata.getStepProcessorClass())));
                stepFactory.setItemWriter((ItemWriter) BeanUtils.instantiateClass(Class.forName(stepMetadata.getStepWriterClass())));
                stepFactory.setBeanName(stepMetadata.getStepName());
                steps.add((Step) stepFactory.getObject());
            }
            job.setSteps(steps);
            ReferenceJobFactory jobFactory = new ReferenceJobFactory(job);
            mapJobRegistry = (MapJobRegistry) ac.getBean("jobRegistry");
            mapJobRegistry.register(jobFactory);
            jobDetail = (JobDetailBean) ac.getBean("jobDetail");
            jobDetail.setJobClass(Class.forName(jobMetadata.getMJoblauncher()));
            jobDetail.setGroup(jobMetadata.getJobGroupName());
            jobDetail.setName(jobMetadata.getJobName());
            Map<String, Object> jobDataMap = new HashMap<String, Object>();
            jobDataMap.put("jobName", jobMetadata.getJobName());
            jobDataMap.put("jobLocator", mapJobRegistry);
            jobDataMap.put("jobLauncher", mJobLauncher);
            jobDataMap.put("timestamp", new Date());
            // jobDataMap.put("jobParams", jobMetadata.getJobParameters());
            jobDetail.setJobDataAsMap(jobDataMap);
            jobDetail.afterPropertiesSet();
            cronTrigger = (CronTriggerBean) ac.getBean("cronTrigger");
            cronTrigger.setJobDetail(jobDetail);
            cronTrigger.setJobName(jobMetadata.getJobName());
            cronTrigger.setJobGroup(jobMetadata.getJobGroupName());
            cronTrigger.setCronExpression(jobMetadata.getCronExpression());
            triggers.add(cronTrigger);
        }
        return triggers;
    }

    private void createAndRunScheduler(List<JobMetadata> jobsMetaData) throws Exception {
        // System.err.println(ac.getBean("schedulerFactory").getClass());
        schedulerFactory = new SchedulerFactoryBean();
        List<CronTriggerBean> triggerList = getJobTriggers(jobsMetaData);
        Trigger[] triggers = new Trigger[triggerList.size()];
        int triggerCount = 0;
        for (CronTriggerBean trigger : triggerList) {
            triggers[triggerCount] = trigger;
            triggerCount++;
        }
        schedulerFactory.setTriggers(triggers);
        schedulerFactory.afterPropertiesSet();
    }

    private List<JobMetadata> getJobsData(String driverClass, String connectionURL, String user, String password, String query) throws SQLException, ClassNotFoundException {
        metadataFeeder.createJobMetadata(query);
        return metadataFeeder.getJobsMetadata();
    }

    private List<String> getItemList(String jobParameterString) {
        List<String> itemList = new ArrayList<String>();
        String[] parameters = jobParameterString.split(";");
        for (String string : parameters) {
            String[] mapKeyValue = string.split("=");
            if (mapKeyValue.length == 2) {
                itemList.add(mapKeyValue[0] + ":" + mapKeyValue[1]);
            } else {
                // exception for invalid job parameters
                System.out.println("exception for invalid job parameters");
            }
        }
        return itemList;
    }

    private Map<String, Object> getParameterMap(String jobParameterString) {
        Map<String, Object> parameterMap = new HashMap<String, Object>();
        String[] parameters = jobParameterString.split(";");
        for (String string : parameters) {
            String[] mapKeyValue = string.split("=");
            if (mapKeyValue.length == 2) {
                parameterMap.put(mapKeyValue[0], mapKeyValue[1]);
            } else {
                // exception for invalid job parameters
                System.out.println("exception for invalid job parameters");
            }
        }
        return parameterMap;
    }
}
public class MailJobLauncher extends QuartzJobBean {

    /**
     * Special key in job data map for the name of a job to run.
     */
    static final String JOB_NAME = "jobName";

    private static Log sLog = LogFactory.getLog(MailJobLauncher.class);

    private JobLocator mJobLocator;
    private JobLauncher mJobLauncher;

    /**
     * Public setter for the {@link JobLocator}.
     *
     * @param jobLocator the {@link JobLocator} to set
     */
    public void setJobLocator(JobLocator jobLocator) {
        this.mJobLocator = jobLocator;
    }

    /**
     * Public setter for the {@link JobLauncher}.
     *
     * @param jobLauncher the {@link JobLauncher} to set
     */
    public void setJobLauncher(JobLauncher jobLauncher) {
        this.mJobLauncher = jobLauncher;
    }

    @Override
    @SuppressWarnings("unchecked")
    protected void executeInternal(JobExecutionContext context) {
        Map<String, Object> jobDataMap = context.getMergedJobDataMap();
        executeRecursive(jobDataMap);
    }

    private void executeRecursive(Map<String, Object> jobDataMap) {
        String jobName = (String) jobDataMap.get(JOB_NAME);
        JobParameters jobParameters = getJobParametersFromJobMap(jobDataMap);
        sLog.info("Quartz trigger firing with Spring Batch jobName=" + jobName + jobDataMap + jobParameters);
        try {
            mJobLauncher.run(mJobLocator.getJob(jobName), jobParameters);
        } catch (JobInstanceAlreadyCompleteException e) {
            // refresh the timestamp parameter so a new JobInstance is created, then retry
            jobDataMap.remove("timestamp");
            jobDataMap.put("timestamp", new Date());
            executeRecursive(jobDataMap);
        } catch (NoSuchJobException e) {
            sLog.error("Could not find job.", e);
        } catch (JobExecutionException e) {
            sLog.error("Could not execute job.", e);
        }
    }

    /*
     * Copy parameters that are of the correct type over to {@link JobParameters},
     * ignoring jobName.
     * @return a {@link JobParameters} instance
     */
    private JobParameters getJobParametersFromJobMap(Map<String, Object> jobDataMap) {
        JobParametersBuilder builder = new JobParametersBuilder();
        for (Entry<String, Object> entry : jobDataMap.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            if (value instanceof String && !key.equals(JOB_NAME)) {
                builder.addString(key, (String) value);
            } else if (value instanceof Float || value instanceof Double) {
                builder.addDouble(key, ((Number) value).doubleValue());
            } else if (value instanceof Integer || value instanceof Long) {
                builder.addLong(key, ((Number) value).longValue());
            } else if (value instanceof Date) {
                builder.addDate(key, (Date) value);
            } else {
                sLog.debug("JobDataMap contains values which are not job parameters (ignoring).");
            }
        }
        return builder.toJobParameters();
    }
}
I couldn't figure out why the launcher is ignoring all the other jobs. Please help me.
Regards
Make sure these properties are set:
org.quartz.threadPool.class=org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount=3
org.quartz.threadPool.threadPriority=5
This will allow a few jobs to run at the same time. Adjust the settings as needed.
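If the scheduler is built programmatically, as in createAndRunScheduler() above, the same settings can be handed to Spring's SchedulerFactoryBean directly. A minimal sketch, assuming the schedulerFactory and triggers variables from the question's code:

// equivalent of quartz.properties, set on the factory before afterPropertiesSet()
Properties quartzProperties = new Properties();
quartzProperties.setProperty("org.quartz.threadPool.class", "org.quartz.simpl.SimpleThreadPool");
quartzProperties.setProperty("org.quartz.threadPool.threadCount", "3");
quartzProperties.setProperty("org.quartz.threadPool.threadPriority", "5");

schedulerFactory = new SchedulerFactoryBean();
schedulerFactory.setQuartzProperties(quartzProperties);
schedulerFactory.setTriggers(triggers);
schedulerFactory.afterPropertiesSet();

With only a single worker thread, triggers that fire at the same time are queued behind one another, which can look as if only one of the scheduled jobs ever runs.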