DataStreamer fails with partition map exchange errors - Ignite

I'm using Ignite 2.1.0 and wrote a simple program to try the DataStreamer, but I often get errors like this:
"[diagnostic] Failed to wait for partition map exchange" or "Attempted to release write lock while not holding it".
I started two local nodes: one in a Windows CMD window using the example XML configuration, and the other in Eclipse. My code in Eclipse looks like this:
public class TestDataStreamer {
public static void main(String[] args) {
long bgn,end;
IgniteConfiguration cfg = new IgniteConfiguration();
cfg.setPeerClassLoadingEnabled(true);
Ignite ignite = Ignition.start(cfg);
CacheConfiguration<Long, TestObj> cacheConf = new CacheConfiguration<>(); // value type should be TestObj (what the streamer loads), not Map
cacheConf.setName("TestDataStreamer").setCacheMode(CacheMode.REPLICATED);
cacheConf.setBackups(0);
IgniteCache<Long, TestObj> cache = ignite.getOrCreateCache(cacheConf);
cache.clear();
File dataFile = new File("D:/data/1503307171374.data"); // 10,000,000 rows of text data
bgn = System.currentTimeMillis();
try {
loadByStreamer(dataFile,ignite,"TestDataStreamer");
} catch (Exception e) {
e.printStackTrace();
} finally {
end = System.currentTimeMillis();
System.out.println("---------------");
System.out.println((end-bgn)/1000.0+" s");
}
cache.destroy();
System.out.println("cache destroy...");
ignite.close();
System.out.println("finish");
}
private static void loadByStreamer(File dataFile, Ignite ignite, String cacheName) throws Exception {
IgniteDataStreamer<Long,TestObj> ds = ignite.dataStreamer(cacheName);
//ds.allowOverwrite(true);
ds.autoFlushFrequency(10000);
ds.perNodeBufferSize(4096);
BufferedReader br = new BufferedReader(new InputStreamReader(
new FileInputStream(dataFile),"UTF-8"));
String line = null;
long count = 0;
while((line=br.readLine())!=null){
// Note: System.currentTimeMillis() is a questionable key -- many of the
// 10,000,000 rows fall into the same millisecond, so most entries collide on the same key.
ds.addData(System.currentTimeMillis(), parseData(line, Constants.DEFAULT_SEPARATOR,
"id,sn,type_code,trade_ts,bill_ts,company_code,company_name,biz_type,charge_amt,pay_mode".split(",")));
if(++count%10000==0){
System.out.println(count+" loaded...");
}
//System.out.println(count+":"+line);
}
System.out.println("flushing...");
ds.flush();
System.out.println("flushed");
br.close();
ds.close();
System.out.println("file handled...");
}
private static TestObj parseData(String data, String separator, String[] fields) { // note: fields is currently unused
TestObj obj = new TestObj();
if (data != null && separator.trim().length() > 0) {
String[] values = data.split(separator);
obj.setId(values[0]);
obj.setSn(values[1]);
obj.setType_code(values[2]);
obj.setTrade_ts(values[3]);
obj.setBill_ts(values[4]);
obj.setCompany_code(values[5]);
obj.setCompany_name(values[6]);
obj.setBiz_type(values[7]);
obj.setCharge_amt(values[8]);
obj.setPay_mode(values[9]);
}
return obj;
}
}
class TestObj {
private String id;
private String sn;
private String type_code;
private String trade_ts;
private String bill_ts;
private String company_code;
private String company_name;
private String biz_type;
private String charge_amt;
private String pay_mode;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getSn() {
return sn;
}
public void setSn(String sn) {
this.sn = sn;
}
public String getType_code() {
return type_code;
}
public void setType_code(String type_code) {
this.type_code = type_code;
}
public String getTrade_ts() {
return trade_ts;
}
public void setTrade_ts(String trade_ts) {
this.trade_ts = trade_ts;
}
public String getBill_ts() {
return bill_ts;
}
public void setBill_ts(String bill_ts) {
this.bill_ts = bill_ts;
}
public String getCompany_code() {
return company_code;
}
public void setCompany_code(String company_code) {
this.company_code = company_code;
}
public String getCompany_name() {
return company_name;
}
public void setCompany_name(String company_name) {
this.company_name = company_name;
}
public String getBiz_type() {
return biz_type;
}
public void setBiz_type(String biz_type) {
this.biz_type = biz_type;
}
public String getCharge_amt() {
return charge_amt;
}
public void setCharge_amt(String charge_amt) {
this.charge_amt = charge_amt;
}
public String getPay_mode() {
return pay_mode;
}
public void setPay_mode(String pay_mode) {
this.pay_mode = pay_mode;
}
}
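As an aside, if an exception is thrown mid-load, neither the reader nor the streamer in loadByStreamer is closed. A try-with-resources variant of the loading loop (a sketch reusing the names above; FIELDS stands in for the column-name array and the tuning calls are omitted) avoids leaking either resource:
// Both IgniteDataStreamer and BufferedReader implement AutoCloseable, so
// try-with-resources closes them even when parsing fails mid-file;
// closing the streamer also flushes any remaining buffered data.
try (IgniteDataStreamer<Long, TestObj> ds = ignite.dataStreamer(cacheName);
     BufferedReader br = new BufferedReader(new InputStreamReader(
             new FileInputStream(dataFile), "UTF-8"))) {
    String line;
    while ((line = br.readLine()) != null) {
        ds.addData(System.currentTimeMillis(), parseData(line, Constants.DEFAULT_SEPARATOR, FIELDS));
    }
}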
If I stop the node started in CMD and run the program on a single node, it works fine.
Can anyone help me?

Update the JDK on both nodes to the same version, for example 1.8.0_144 (as you already have it installed), or at least try to update the JDK used by Eclipse to the latest 1.7 release; it should help.
There is a thread on the Ignite user list where people faced pretty much the same exception, and updating the Java version fixed it for them.
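To confirm a version mismatch, it can help to log the JVM version on each node at startup (a trivial sketch, not Ignite-specific):
// Print the exact JVM this node runs on; compare the output of the
// CMD-started node with the Eclipse-started one.
System.out.println("java.version = " + System.getProperty("java.version")
        + ", java.vendor = " + System.getProperty("java.vendor"));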

Related

Add weights to documents while indexing (Lucene 8 + Solr 8)

I am migrating Solr from 5.4.3 to 8.11 for one of my search apps and have successfully upgraded to 7.7.3. With the further upgrades, however, the order of the response data has changed from what it was earlier. I am trying to use FunctionScoreQuery along with DoubleValuesSource, since CustomScoreQuery was deprecated in 7.7.3 and removed in 8.
Below is my code snippet (I am now using Solr 8.5.2 and Lucene 8.5.2):
public class CustomQueryParser extends QParserPlugin {
@Override
public QParser createParser(final String qstr, final SolrParams localParams, final SolrParams params,
final SolrQueryRequest req) {
return new MyParser(qstr, localParams, params, req);
}
private static class MyParser extends QParser {
private Query innerQuery;
private String queryString;
public MyParser(final String qstr, final SolrParams localParams, final SolrParams params,
final SolrQueryRequest req) {
super(qstr, localParams, params, req);
if (qstr == null || qstr.trim().length() == 0) {
this.queryString = DEFAULT_SEARCH_QUERY;
setString(this.queryString);
} else {
this.queryString = qstr;
}
try {
if (queryString.contains(":")) {
final QParser parser = getParser(queryString, "edismax", getReq());
this.innerQuery = parser.parse();
} else {
final QParser parser = getParser(queryString, "dismax", getReq());
this.innerQuery = parser.parse();
}
} catch (final SyntaxError ex) {
throw new RuntimeException("Error parsing query", ex);
}
}
@Override
public Query parse() throws SyntaxError {
final Query query = new MyCustomQuery(innerQuery);
final CustomValuesSource customValuesSource = new CustomValuesSource(queryString,innerQuery);
final FunctionScoreQuery fsq = FunctionScoreQuery.boostByValue(query, customValuesSource.fromFloatField("score"));
return fsq;
}
}
}
public class MyCustomQuery extends Query {
@Override
public Weight createWeight(final IndexSearcher searcher, final ScoreMode scoreMode, final float boost) throws IOException {
Weight weight;
if(query == null){
weight = new ConstantScoreWeight(this, boost) {
@Override
public Scorer scorer(final LeafReaderContext context) throws IOException {
return new ConstantScoreScorer(this,score(),scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
}
@Override
public boolean isCacheable(final LeafReaderContext leafReaderContext) {
return false;
}
};
}else {
weight = searcher.createWeight(query,scoreMode,boost);
}
return weight;
}
}
public class CustomValuesSource extends DoubleValuesSource {
@Override
public DoubleValues getValues(final LeafReaderContext leafReaderContext,final DoubleValues doubleValues) throws IOException {
final DoubleValues dv = new CustomDoubleValues(leafReaderContext);
return dv;
}
class CustomDoubleValues extends DoubleValues {
@Override
public boolean advanceExact(final int doc) throws IOException {
final Document document = leafReaderContext.reader().document(doc);
final List<IndexableField> fields = document.getFields();
for (final IndexableField field : fields) {
// total_score is being calculated with my own preferences
document.add(new FloatDocValuesField("score",total_score));
//can we include the **score** here?
// This custom logic, which computes the score, is never even invoked.
}
}
}
I have been trying for a long time but have not found a single working example. Can anybody help me out here?
Thank you,
Syamala.
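For what it's worth, per-document values cannot be attached from inside advanceExact: document.add(...) at search time never reaches the index, which may be why the custom logic appears to never run. If total_score can instead be computed at index time and stored as a FloatDocValuesField, the boost reduces to Lucene's built-in factory methods, with no custom DoubleValuesSource at all (a sketch, not from the original post; the field name "total_score" is assumed):
// Sketch: boost each hit by a float doc-values field written at INDEX time.
// Assumes indexing did: doc.add(new FloatDocValuesField("total_score", value));
Query boosted = FunctionScoreQuery.boostByValue(
        innerQuery,                                        // the dismax/edismax query parsed above
        DoubleValuesSource.fromFloatField("total_score")); // reads the stored per-doc value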

How do you write data into a Redis custom state store using Kafka Streams

I've recently been learning about how to use the Kafka Streams client and one thing that I've been struggling with is how to switch from the default state store (RocksDB) to a custom state store using something like Redis. The Confluent documentation makes it clear you have to implement the StateStore interface for your custom store and you must provide an implementation of StoreBuilder for creating instances of that store.
Here's what I have so far for my custom store. I've also added a simple write method to append a new entry into a specified stream via the Redis XADD command.
public class MyCustomStore<K,V> implements StateStore, MyWriteableCustomStore<K,V> {
private String name;
private volatile boolean open = false;
private boolean loggingEnabled = false;
public MyCustomStore(String name, boolean loggingEnabled) {
this.name = name;
this.loggingEnabled = loggingEnabled;
}
@Override
public String name() {
return this.name;
}
@Override
public void init(ProcessorContext context, StateStore root) {
if (root != null) {
// register the store
context.register(root, (key, value) -> {
write(key.toString(), value.toString());
});
}
this.open = true;
}
@Override
public void flush() {
// TODO Auto-generated method stub
}
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public boolean persistent() {
// TODO Auto-generated method stub
return true;
}
@Override
public boolean isOpen() {
// TODO Auto-generated method stub
return false;
}
@Override
public void write(String key, String value) {
try(Jedis jedis = new Jedis("localhost", 6379)) {
Map<String, String> hash = new HashMap<>();
hash.put(key, value);
jedis.xadd("MyStream", StreamEntryID.NEW_ENTRY, hash);
}
}
}
public class MyCustomStoreBuilder implements StoreBuilder<MyCustomStore<String,String>> {
private boolean cached = true;
private String name;
private Map<String,String> logConfig=new HashMap<>();
private boolean loggingEnabled;
public MyCustomStoreBuilder(String name, boolean loggingEnabled){
this.name = name;
this.loggingEnabled = loggingEnabled;
}
@Override
public StoreBuilder<MyCustomStore<String,String>> withCachingEnabled() {
this.cached = true;
return this;
}
@Override
public StoreBuilder<MyCustomStore<String,String>> withCachingDisabled() {
this.cached = false;
return this; // was: return null, which would break builder chaining
}
@Override
public StoreBuilder<MyCustomStore<String,String>> withLoggingEnabled(Map<String, String> config) {
this.logConfig = config; // keep the supplied config instead of discarding it
this.loggingEnabled = true;
return this;
}
@Override
public StoreBuilder<MyCustomStore<String,String>> withLoggingDisabled() {
this.loggingEnabled = false;
return this;
}
@Override
public MyCustomStore<String,String> build() {
return new MyCustomStore<String,String>(this.name, this.loggingEnabled);
}
@Override
public Map<String, String> logConfig() {
return logConfig;
}
@Override
public boolean loggingEnabled() {
return loggingEnabled;
}
@Override
public String name() {
return name;
}
}
And here's what my setup and topology look like.
@Bean
public KafkaStreams kafkaStreams(KafkaProperties kafkaProperties) {
final Properties props = new Properties();
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
props.put(StreamsConfig.APPLICATION_ID_CONFIG, appName);
props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Long().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Double().getClass());
props.put(StreamsConfig.STATE_DIR_CONFIG, "data");
props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, appServerConfig);
props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, JsonNode.class);
props.put(DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, LogAndContinueExceptionHandler.class);
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
final String storeName = "the-custome-store";
Topology topology = new Topology();
// Create the custom StoreBuilder for the store named above
MyCustomStoreBuilder customStoreBuilder = new MyCustomStoreBuilder(storeName, false);
topology.addSource("input","inputTopic");
topology.addProcessor("redis-processor", () -> new RedisProcessor(storeName), "input");
topology.addStateStore(customStoreBuilder, "redis-processor");
KafkaStreams kafkaStreams = new KafkaStreams(topology, props);
kafkaStreams.start();
return kafkaStreams;
}
public class MyCustomStoreType<K,V> implements QueryableStoreType<MyReadableCustomStore<String,String>> {
@Override
public boolean accepts(StateStore stateStore) {
return stateStore instanceof MyCustomStore;
}
@Override
public MyReadableCustomStore<String,String> create(final StateStoreProvider storeProvider, final String storeName) {
return new MyCustomStoreTypeWrapper<>(storeProvider, storeName, this);
}
}
public class MyCustomStoreTypeWrapper<K,V> implements MyReadableCustomStore<K,V> {
private final QueryableStoreType<MyReadableCustomStore<String, String>> customStoreType;
private final String storeName;
private final StateStoreProvider provider;
public MyCustomStoreTypeWrapper(final StateStoreProvider provider,
final String storeName,
final QueryableStoreType<MyReadableCustomStore<String, String>> customStoreType) {
this.provider = provider;
this.storeName = storeName;
this.customStoreType = customStoreType;
}
@Override
public String read(String key) {
try (Jedis jedis = new Jedis("localhost", 6379)) {
StreamEntryID start = new StreamEntryID(0, 0);
StreamEntryID end = null; // null -> until the last item in the stream
int count = 2;
List<StreamEntry> list = jedis.xrange("MyStream", start, end, count);
if (list != null && !list.isEmpty()) { // guard against an empty stream, not just null
// Get the most recently added item, which is also the last item
StreamEntry streamData = list.get(list.size() - 1);
return streamData.toString();
} else {
System.out.println("No new data in the stream");
}
return "";
}
}
}
// This throws the InvalidStateStoreException when I try to get access to the custom store
MyReadableCustomStore<String,String> store = streams.store("the-custome-store", new MyCustomStoreType<String,String>());
String value = store.read("testKey");
So, my question is how do I actually get the state store data to persist into Redis now? I feel like I'm missing something in the state store initialization or with the StateRestoreCallback. Any help or clarification with this would be greatly appreciated.
It looks to me that you have the store wired up to the topology correctly. But you don't have any processors using the store.
It could look something like this:
final String storeName = "the-custome-store";
MyCustomStoreBuilder customStoreBuilder = new MyCustomStoreBuilder(storeName, false);
Topology topology = new Topology();
topology.addSource("input", "input-topic");
// makes the processor a child of the source node
// the source node forwards its records to the child processor node
topology.addProcessor("redis-processor", () -> new RedisProcessor(storeName), "input");
// add the store and specify the processor(s) that access the store
topology.addStateStore(customStoreBuilder, "redis-processor"); // use the builder declared above
class RedisProcessor implements Processor<byte[], byte[]> {
final String storeName;
MyCustomStore<byte[],byte[]> stateStore;
public RedisProcessor(String storeName) {
this.storeName = storeName;
}
@Override
public void init(ProcessorContext context) {
stateStore = (MyCustomStore<byte[], byte[]>) context.getStateStore(storeName);
}
@Override
public void process(byte[] key, byte[] value) {
stateStore.write(key, value);
}
@Override
public void close() {
}
}
HTH, and let me know how it works out for you.
Update to answer from comments:
I think you need to update MyCustomStore.isOpen() to return the open variable.
Right now it's hardcoded to return false:
@Override
public boolean isOpen() {
// TODO Auto-generated method stub
return false;
}
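A corrected version, using the open flag the class already maintains, might look like:
@Override
public boolean isOpen() {
    return this.open; // set to true in init(), so the store reports its real state
}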

Kafka consumer receives null value when sending a custom object

So I want to implement an application which reads data from JSON-format files. I have created a custom object for the data in the JSON, and I want to send these objects through a Kafka topic. So far I have successfully sent String messages from producer to consumer, but when I try to send the object and call .value().toString() on the consumer side, I get a null value. The following is the code I have used:
This is the producer:
public class MyProducer {
public static void main(String[] args) throws Exception {
Properties properties = new Properties();
properties.put("bootstrap.servers", "kafka.kafka-cluster-shared.non-prod-5-az-scus.prod.us.walmart.net:9092");
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer", "xxxxxxxxx.KafkaJsonSerializer");
properties.put("acks", "1");
properties.put("retries", "2");
properties.put("batch.size", "16384");
properties.put("linger.ms", "1");
properties.put("buffer.memory", "33554432");
KafkaProducer<String, pharmacyData> kafkaProducer = new KafkaProducer<String, pharmacyData>(
properties);
String topic = "insights";
//try {
Gson gson = new Gson();
Reader reader = Files.newBufferedReader(Paths.get("......./part.json"));
List<pharmacyData> pdata = gson.fromJson(reader, new TypeToken<List<pharmacyData>>() {}.getType());
//pdata.forEach(System.out::println);
reader.close();
//} catch (Exception e) {
//e.printStackTrace();
//}
for (pharmacyData data : pdata) {
kafkaProducer.send(new ProducerRecord<String, pharmacyData>(topic, data), new Callback() {
@Override
public void onCompletion(RecordMetadata recordMetadata, Exception e) {
if (e == null) {
System.out.println(recordMetadata.partition() + "--" + recordMetadata.serializedValueSize());
} else {
e.printStackTrace();
}
}
});
}
kafkaProducer.close();
}
}
This is the custom object class:
public class pharmacyData {
private String load_date;
private String store_nbr;
private String state;
private String pmp_flag;
private String zero_flag;
private String submit_ts;
public pharmacyData(String load_date, String store_nbr, String state, String pmp_flag, String zero_flag, String submit_ts) {
this.load_date = load_date;
this.store_nbr = store_nbr;
this.state = state;
this.pmp_flag = pmp_flag;
this.zero_flag = zero_flag;
this.submit_ts = submit_ts;
}
public String getLoad_date() {
return load_date;
}
public void setLoad_date(String load_date) {
this.load_date = load_date;
}
public String getStore_nbr() {
return store_nbr;
}
public void setStore_nbr(String store_nbr) {
this.store_nbr = store_nbr;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getPmp_flag() {
return pmp_flag;
}
public void setPmp_flag(String pmp_flag) {
this.pmp_flag = pmp_flag;
}
public String getZero_flag() {
return zero_flag;
}
public void setZero_flag(String zero_flag) {
this.zero_flag = zero_flag;
}
public String getSubmit_ts() {
return submit_ts;
}
public void setSubmit_ts(String submit_ts) {
this.submit_ts = submit_ts;
}
@Override
public String toString() {
return "pharmacyData{" +
"load_date='" + load_date + '\'' +
", store_nbr='" + store_nbr + '\'' +
", state='" + state + '\'' +
", pmp_flag='" + pmp_flag + '\'' +
", zero_flag='" + zero_flag + '\'' +
", submit_ts='" + submit_ts + '\'' +
'}';
}
}
This is the custom Serializer:
public class KafkaJsonSerializer implements Serializer {
private Logger logger = LogManager.getLogger(this.getClass());
@Override
public void configure(Map map, boolean b) {
}
@Override
public byte[] serialize(String s, Object o) {
byte[] retVal = null;
ObjectMapper objectMapper = new ObjectMapper();
try {
retVal = objectMapper.writeValueAsBytes(o);
} catch (Exception e) {
logger.error(e.getMessage());
}
return retVal;
}
@Override
public void close() {
}
}
This is the custom Deserializer:
public class KafkaJsonDeserializer implements Deserializer {
@Override
public void configure(Map map, boolean b) {
}
@Override
public Object deserialize(String s, byte[] bytes) {
ObjectMapper mapper = new ObjectMapper();
pharmacyData pdata = null;
try {
pdata = mapper.readValue(bytes, pharmacyData.class);
} catch (Exception e) {
e.printStackTrace();
}
return pdata;
}
@Override
public void close() {
}
}
This is the consumer:
public class MyConsumer {
public static void main(String[] args) {
Properties properties = new Properties();
properties.put("bootstrap.servers", "kafka.kafka-cluster-shared.non-prod-5-az-scus.prod.us.walmart.net:9092");
properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
properties.put("value.deserializer", "xxxxxxxx.KafkaJsonDeserializer");
properties.put("group.id", "consumer-group-1");
properties.put("enable.auto.commit", "true");
properties.put("auto.commit.interval.ms", "1000");
properties.put("auto.offset.reset", "earliest");
properties.put("session.timeout.ms", "30000");
KafkaConsumer<String, pharmacyData> consumer = new KafkaConsumer<>(properties);
String topic = "insights";
consumer.subscribe(Collections.singletonList(topic));
while (true) {
ConsumerRecords<String, pharmacyData> consumerRecords = consumer.poll(100);
for (ConsumerRecord<String, pharmacyData> consumerRecord : consumerRecords) {
System.out.println(consumerRecord.key() + "--" + consumerRecord.toString());
//System.out.println(consumerRecord.offset() + "--" + consumerRecord.partition());
}
}
}
}
Can someone please help me with these issues? Thank you very much!
Problem solved:
The fix was simply to add a default (no-argument) constructor, as below:
public pharmacyData() {
}
See this page for more details.
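For context: Jackson's ObjectMapper instantiates the target class through its no-argument constructor before populating fields, so a class that only declares the parameterized constructor fails to deserialize; the custom deserializer above catches that exception and returns null, which is exactly the null value seen on the consumer. A minimal round trip demonstrating the fix (the sample field values are made up):
import com.fasterxml.jackson.databind.ObjectMapper;

public class RoundTrip {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        byte[] bytes = mapper.writeValueAsBytes(
                new pharmacyData("2019-01-01", "100", "TX", "N", "N", "ts"));
        // With recent Jackson versions this throws InvalidDefinitionException
        // ("no Creators, like default constructor") unless pharmacyData() exists.
        pharmacyData back = mapper.readValue(bytes, pharmacyData.class);
        System.out.println(back);
    }
}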

Deserialize JSON with Gson - Expected BEGIN_OBJECT but was String - Reddit's JSON

I'm trying to deserialize JSON from Reddit that you can obtain by appending .json to the url. An example would be:
http://www.reddit.com/r/pics/comments/1wvx52/.json?sort=top
However, I am getting the error message:
Exception in thread "main" com.google.gson.JsonSyntaxException: java.lang.IllegalStateException: Expected BEGIN_OBJECT but was STRING at line 1 column 9765
At line 1, column 9765 in the JSON there is the following code: "replies": "", whereas normally this would contain an object like this: replies: {
kind: "Listing",
data: {}
},
Does this mean that the JSON is a String when there is no data, but an object otherwise? If so, how can I deserialize it properly with Gson? I've included my classes below. I still need to figure out how to handle the JSON starting off with an array of basically two different objects (the first listing describes the link, while the second describes the comments), but I'll cross that bridge when I get there. Thanks in advance if anyone can shed some light on this issue.
Main Class
public static void main(String[] args)
{
ArrayList<CommentsResults> commentsResults = new ArrayList<CommentsResults>();
String commentsURL = "http://www.reddit.com/r/pics/comments/1wvx52/.json?sort=top";
URL url = null;
try
{
url = new URL(commentsURL);
} catch (MalformedURLException ex)
{
System.out.println(ex.getMessage());
}
try
{
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(url.openStream()));
String jsonText = readAll(bufferedReader);
Gson gson = new GsonBuilder().create();
commentsResults = gson.fromJson(jsonText, new TypeToken<ArrayList<CommentsResults>>(){}.getType());
} catch (IOException ex)
{
System.out.println(ex.getMessage());
}
}
private static String readAll(Reader reader) throws IOException
{
StringBuilder stringBuilder = new StringBuilder();
int cp;
while ((cp = reader.read()) != -1)
{
stringBuilder.append((char) cp);
}
return stringBuilder.toString();
}
CommentsResults Class
public class CommentsResults {
private String kind;
private CommentsData data;
public CommentsResults()
{
}
public CommentsResults(String kind, CommentsData data)
{
this.kind = kind;
this.data = data;
}
public String getKind()
{
return kind;
}
public CommentsData getData()
{
return data;
}
public void setKind(String kind)
{
this.kind = kind;
}
public void setData(CommentsData data)
{
this.data = data;
}
}
CommentsData Class
public class CommentsData {
private String modhash;
private List <CommentsChild> children;
public CommentsData()
{
}
public CommentsData(String modhash, List<CommentsChild> children)
{
this.modhash = modhash;
this.children = children;
}
public String getModhash()
{
return modhash;
}
public List<CommentsChild> getChildren()
{
return children;
}
public void setModhash(String modhash)
{
this.modhash = modhash;
}
public void setChildren(List<CommentsChild> children)
{
this.children = children;
}
}
CommentsChild Class
public class CommentsChild {
private String kind;
private Comment data;
public CommentsChild()
{
}
public CommentsChild(String kind, Comment comment)
{
this.kind = kind;
this.data = comment;
}
public String getKind()
{
return kind;
}
public Comment getComment()
{
return data;
}
public void setKind(String kind)
{
this.kind = kind;
}
public void setComment(Comment comment)
{
this.data = comment;
}
}
Comment Class
public class Comment {
private CommentsResults replies;
private String id;
private int gilded;
private String author;
private String parent_id;
private String body;
private int downs;
private String link_id;
private boolean score_hidden;
private int created_utc;
private String distinguished;
public Comment()
{
}
public Comment(CommentsResults replies, String id, int gilded, String author, String parent_id, String body, int downs, String link_id, boolean score_hidden, int created_utc, String distinguished)
{
this.replies = replies;
this.id = id;
this.gilded = gilded;
this.author = author;
this.parent_id = parent_id;
this.body = body;
this.downs = downs;
this.link_id = link_id;
this.score_hidden = score_hidden;
this.created_utc = created_utc;
this.distinguished = distinguished;
}
public CommentsResults getReplies()
{
return replies;
}
public String getId()
{
return id;
}
public int getGilded()
{
return gilded;
}
public String getAuthor()
{
return author;
}
public String getParent_id()
{
return parent_id;
}
public String getBody()
{
return body;
}
public int getDowns()
{
return downs;
}
public String getLink_id()
{
return link_id;
}
public boolean isScore_hidden()
{
return score_hidden;
}
public int getCreated_utc()
{
return created_utc;
}
public String getDistinguished()
{
return distinguished;
}
public void setReplies(CommentsResults replies)
{
this.replies = replies;
}
public void setId(String id)
{
this.id = id;
}
public void setGilded(int gilded)
{
this.gilded = gilded;
}
public void setAuthor(String author)
{
this.author = author;
}
public void setParent_id(String parent_id)
{
this.parent_id = parent_id;
}
public void setBody(String body)
{
this.body = body;
}
public void setDowns(int downs)
{
this.downs = downs;
}
public void setLink_id(String link_id)
{
this.link_id = link_id;
}
public void setScore_hidden(boolean score_hidden)
{
this.score_hidden = score_hidden;
}
public void setCreated_utc(int created_utc)
{
this.created_utc = created_utc;
}
public void setDistinguished(String distinguished)
{
this.distinguished = distinguished;
}
}
So on the off chance this helps somebody (which seems dubious at this point), I decided to parse the JSON manually using recursion. Here's how I did it:
public static void getCommentsOnLink()
{
String commentsURL= "http://www.reddit.com/r/pics/comments/1wvx52/.json?sort=top";
URL url = null;
try
{
url = new URL(commentsURL);
} catch (MalformedURLException ex)
{
System.out.println(ex.getMessage());
}
String JsonText = readCommentJsonFromURL(url);
RedditCommentResults redditCommentResults = getCommentResults(JsonText);
}
private static String readCommentJsonFromURL(URL url)
{
String JSONText = null;
try
{
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(url.openStream()));
JSONText = readAll(bufferedReader);
} catch (IOException ex)
{
System.out.println(ex.getMessage());
}
return JSONText;
}
private static String readAll(Reader reader) throws IOException
{
StringBuilder stringBuilder = new StringBuilder();
int cp;
while ((cp = reader.read()) != -1)
{
stringBuilder.append((char) cp);
}
return stringBuilder.toString();
}
private static RedditCommentResults getCommentResults(String JsonText)
{
JsonParser parser = new JsonParser();
JsonArray completeJson = (JsonArray) parser.parse(JsonText);
//get link and comment object from the array containing an object for each
JsonObject linkParentJson = (JsonObject) completeJson.get(0);
JsonObject commentParentJson = (JsonObject) completeJson.get(1);
//use automatic deserializer for redditLink
JsonObject linkListingDataJson = linkParentJson.getAsJsonObject("data");
JsonObject linkChildrenJson = linkListingDataJson.getAsJsonArray("children").get(0).getAsJsonObject();
JsonObject linkDataJson = linkChildrenJson.getAsJsonObject("data");
Link commentLink = gson.fromJson(linkDataJson, Link.class);
RedditLink redditCommentLink = new RedditLink(commentLink);
//parse comments manually
JsonObject commentDataJson = commentParentJson.getAsJsonObject("data");
JsonArray commentChildrenJson = commentDataJson.getAsJsonArray("children");
//get all of the comments from the JsonArray
ArrayList<RedditComment> redditComments = getNestedComments(commentChildrenJson);
RedditCommentResults redditCommentResults = new RedditCommentResults(redditComments, redditCommentLink);
return redditCommentResults;
}
private static ArrayList<RedditComment> getNestedComments(JsonArray commentWrapperJsonArray)
{
ArrayList<RedditComment> redditComments = new ArrayList<>();
for (JsonElement commentWrapperJson : commentWrapperJsonArray)
{
//cast Element to Object so we can search for the primitive "kind". Finally we get it as a String
String kind = commentWrapperJson.getAsJsonObject().getAsJsonPrimitive("kind").getAsString();
//if the comment is of type t1 meaning it is a comment and not a "more" (a "more" is a comment which
//hasn't been loaded yet because it does not have a great deal of upvotes relative to other comments)
if (kind.equals("t1"))
{
JsonObject commentJson = commentWrapperJson.getAsJsonObject().getAsJsonObject("data");
Comment comment = gson.fromJson(commentJson, Comment.class);
RedditComment redditComment = new RedditComment(comment);
JsonElement repliesJson = commentJson.get("replies");
//if the reply is not equal to an empty String (i.e. if there is at least one reply)
if (!repliesJson.isJsonPrimitive())
{
JsonObject dataJson = repliesJson.getAsJsonObject().getAsJsonObject("data");
JsonArray childrenJson = dataJson.getAsJsonArray("children");
ArrayList<RedditComment> nestedComments = getNestedComments(childrenJson);
redditComment.setReplies(nestedComments);
}
redditComments.add(redditComment);
}
}
return redditComments;
}
You have to remove the private CommentsResults replies; field from the Comment class and compose replies in the CommentsChild class instead. According to the JSON format, your model is:
CommentsResults -
CommentsData --
List<CommentsChild> children ---
CommentsResults replies (recursion / repetition of CommentsResults)
public class CommentsChild {
private String kind;
private Comment data;
//
private CommentsResults replies;
}
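Alternatively, the empty-string case can be absorbed by a custom deserializer registered for CommentsResults, so the rest of the model stays on Gson's automatic mapping (a sketch, not from the original post):
import com.google.gson.*;
import java.lang.reflect.Type;

// Treat "replies": "" as null; deserialize real objects field by field
// (calling context.deserialize on CommentsResults itself would recurse forever).
class CommentsResultsDeserializer implements JsonDeserializer<CommentsResults> {
    @Override
    public CommentsResults deserialize(JsonElement json, Type typeOfT,
            JsonDeserializationContext context) throws JsonParseException {
        if (json.isJsonPrimitive()) {
            return null; // Reddit sends "" when there are no replies
        }
        JsonObject obj = json.getAsJsonObject();
        CommentsData data = context.deserialize(obj.get("data"), CommentsData.class);
        return new CommentsResults(obj.get("kind").getAsString(), data);
    }
}
// Registered when building the Gson instance:
// Gson gson = new GsonBuilder()
//         .registerTypeAdapter(CommentsResults.class, new CommentsResultsDeserializer())
//         .create();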

REST Web Service using Java and Jersey API: Unable to Read data from DB

I am creating a REST web service using Java and the Jersey API. The basic REST service works fine, but when I add a DB connection I get a ClassNotFoundException and an SQLException - no driver found. I have included the ojdbc6.jar file in the Eclipse build path; with the same code in a plain Java application it runs fine.
I have added my code below. Can someone please suggest something?
EDIT: I included the jar file in the WEB-INF/lib directory, but when I try to execute the code I now get the following error: HTTP Status 405 - Method Not Allowed.
public class Note {
private int noteId;
private String content;
private Date createdDate;
public Note() {}
public Note(int noteId, String content, Date createdDate) {
this.noteId = noteId;
this.content = content;
this.createdDate = createdDate;
}
public int getNoteId() {
return noteId;
}
public void setNoteId(int noteId) {
this.noteId = noteId;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
public Date getCreatedDate() {
return createdDate;
}
public void setCreatedDate(Date createdDate) {
this.createdDate = createdDate;
}
@Override
public String toString() {
return "Note [content=" + content + ", createdDate=" + createdDate
+ ", noteId=" + noteId + "]";
}
}
public class NoteDAO {
DatabaseAccess data;
Connection connection;
public NoteDAO()
{
try {
data = new DatabaseAccess();
connect();
} catch (SQLException e) {
e.printStackTrace();
}
}
private void connect() throws SQLException
{
try
{
data.connect();
connection = data.connection;
}
catch (SQLException e)
{
e.printStackTrace();
}
}
public Note getNoteById(int id)
{
PreparedStatement prepStmt = null;
try {
String cSQL = "SELECT * FROM NOTE WHERE NOTEID = ?"; // bind variable instead of a hardcoded id
prepStmt = connection.prepareStatement(cSQL);
prepStmt.setInt(1, id); // setInt(1, ...) needs a '?' placeholder in the SQL, or it throws
ResultSet result = prepStmt.executeQuery();
Note note = new Note();
while (result.next())
{
note.setNoteId(result.getInt(1));
note.setContent(result.getString(2));
note.setCreatedDate(new java.util.Date(result.getDate(3).getTime()));
}
return note;
} catch (SQLException e) {
e.printStackTrace();
prepStmt = null;
return null;
}
}
}
#Path("/notes")
public class Notes {
@Context
UriInfo uriInfo;
@Context
Request request;
NoteDAO dao = new NoteDAO();
#Path("{note}")
#GET
#Produces(MediaType.APPLICATION_XML)
public Note getNote(
@PathParam("note") String idStr) {
int id = Integer.parseInt(idStr);
Note note = dao.getNoteById(id);
if(note==null)
throw new RuntimeException("Get: Note with " + id + " not found");
return note;
}
}
public class DatabaseAccess {
Connection connection = null;
public void connect() throws SQLException
{
String DRIVER = "oracle.jdbc.driver.OracleDriver";
String URL = "jdbc:oracle:thin:#xx.xxx.xx.xxx:1521:XXXX";
String UserName = "username";
String Password = "password";
try
{
Class.forName(DRIVER);
}
catch (ClassNotFoundException e)
{
e.printStackTrace();
}
try
{
connection = DriverManager.getConnection(URL,UserName,Password);
}
catch (SQLException e)
{
e.printStackTrace();
}
}
public void disconnect() throws SQLException
{
connection.close();
}
}
If you are using datasources that are managed by the application server, you need to put the ojdbc6.jar library inside the lib folder of your application server.
On JBoss, for example, it would be $JBOSS_HOME/server/default/lib.
This is required because in that case the datasource is built when the server starts, independently of your application, which means the server cannot use your application's JARs.
If, however, you are pooling the connections yourself, you need to make sure that ojdbc6.jar is inside the WEB-INF/lib folder of your application's WAR archive.
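If you go the container-managed route, a JNDI lookup replaces DriverManager entirely (a sketch; the JNDI name java:comp/env/jdbc/NotesDS is hypothetical and must match your server's datasource configuration):
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;

public class DatabaseAccess {
    Connection connection = null;

    public void connect() throws SQLException, NamingException {
        // The container created and pooled this DataSource at startup;
        // no Class.forName() and no credentials in application code.
        DataSource ds = (DataSource) new InitialContext()
                .lookup("java:comp/env/jdbc/NotesDS"); // hypothetical JNDI name
        connection = ds.getConnection();
    }
}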