What's wrong with this Lucene TokenFilter?

Disclaimer: I've been coding for 36 of the last 41 hours. I have a headache. And I can't figure out why this combining TokenFilter is returning 2 tokens, both the first token from the source stream.
public class TokenCombiner extends TokenFilter {
/*
* Recombines all tokens back into a single token using the specified delimiter.
*/
public TokenCombiner(TokenStream in, int delimiter) {
super(in);
this.delimiter = delimiter;
}
int delimiter;
private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
private boolean firstToken = true;
int startOffset = 0;
@Override
public final boolean incrementToken() throws IOException {
while (true){
boolean eos = input.incrementToken(); //We have to process tokens even if they return end of file.
CharTermAttribute token = input.getAttribute(CharTermAttribute.class);
if (eos && token.length() == 0) break; //Break early to avoid extra whitespace.
if (firstToken){
startOffset = input.getAttribute(OffsetAttribute.class).startOffset();
firstToken = false;
}else{
termAtt.append(Character.toString((char)delimiter));
}
termAtt.append(token);
if (eos) break;
}
offsetAtt.setOffset(startOffset, input.getAttribute(OffsetAttribute.class).endOffset());
return false;
}
@Override
public void reset() throws IOException {
super.reset();
firstToken = true;
startOffset = 0;
}
}

I think the fundamental problem here is that you must realize both TokenCombiner and the producer it consumes (input) share and reuse the same attributes! So token == termAtt always (try adding an assert!).
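To see this concretely, here is a hedged sketch of that assert inside incrementToken() (it uses only names already in the code above):

boolean eos = input.incrementToken();
CharTermAttribute token = input.getAttribute(CharTermAttribute.class);
// token and termAtt resolve to the same shared attribute instance,
// so termAtt.append(token) appends the term buffer to itself.
assert token == termAtt;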
Man, that sucks if you have been coding for 36 hours on a weekend... try this:
public class TokenCombiner extends TokenFilter {
private final StringBuilder sb = new StringBuilder();
private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
private final char separator;
private boolean consumed; // true if we already consumed
protected TokenCombiner(TokenStream input, char separator) {
super(input);
this.separator = separator;
}
@Override
public final boolean incrementToken() throws IOException {
if (consumed) {
return false; // don't call input.incrementToken() after it returns false
}
consumed = true;
int startOffset = 0;
int endOffset = 0;
boolean found = false; // true if we actually consumed any tokens
while (input.incrementToken()) {
if (!found) {
startOffset = offsetAtt.startOffset();
found = true;
}
sb.append(termAtt);
sb.append(separator);
endOffset = offsetAtt.endOffset();
}
if (found) {
assert sb.length() > 0; // always: because we append separator
sb.setLength(sb.length() - 1);
clearAttributes();
termAtt.setEmpty().append(sb);
offsetAtt.setOffset(startOffset, endOffset);
return true;
} else {
return false;
}
}
@Override
public void reset() throws IOException {
super.reset();
sb.setLength(0);
consumed = false;
}
}
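For completeness, a hedged usage sketch of the standard TokenStream consumption workflow (the analyzer and field name are illustrative, and the protected constructor above would need to be widened or called from the same package):

// Combine "a b c" into the single token "a b c".
Analyzer analyzer = new WhitespaceAnalyzer(); // older Lucene versions take a Version argument
try (TokenStream ts = new TokenCombiner(analyzer.tokenStream("field", "a b c"), ' ')) {
  CharTermAttribute term = ts.getAttribute(CharTermAttribute.class);
  ts.reset(); // required before the first incrementToken()
  while (ts.incrementToken()) {
    System.out.println(term); // prints the single combined token: a b c
  }
  ts.end(); // records end-of-stream state (final offset)
}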

Related

Optimizing Transposing and Comparison of Concurrent List in Java 8

I have a Java 8 application that collects data from multiple threads using a BlockingQueue.
I need to perform comparisons between samples.
Because my real application is very large, I implemented a mock application (on GitHub), also in Java 8.
I generate chunks of bytes (effectively in random order).
The bytes are stored in the ChunkDTO class.
I capture the ChunkDTOs into a List; the code is in the Capturer class.
Each ChunkDTO in the List is translated into a List of samples (TimePitchValue, to be exact), yielding a nested List (a List of Lists of TimePitchValue).
Later the nested List is transposed in order to perform comparisons between TimePitchValue instances with the same time value.
Due to the enormous volume of TimePitchValue instances, this consumes a huge amount of time in my application.
Here is some code (the complete working code is on GitHub, since it is too large for this site).
public class Generator {
final static Logger LOGGER = Logger.getLogger("SampleComparator");
public static void main(String[] args) {
long previous = System.nanoTime();
final int minBufferSize = 2048;
int sampleRate = 8192;
int numChannels = 1;
int numBytesPerSample = 1;
int samplesChunkPerSecond = sampleRate / minBufferSize;
int minutes = 0;
int seconds = 10;
int time = 60 * minutes + seconds;
int chunksBySecond = samplesChunkPerSecond * numBytesPerSample * numChannels;
int pitchs = 32;
boolean signed = false;
boolean endianness = false;
AudioFormat audioformat = new AudioFormat(sampleRate, 8 * numBytesPerSample, numChannels, signed, endianness);
ControlDSP controlDSP = new ControlDSP(audioformat);
BlockingQueue<ChunkDTO> generatorBlockingQueue = new LinkedBlockingQueue<>();
Capturer capturer = new Capturer(controlDSP, pitchs, pitchs * time * chunksBySecond, generatorBlockingQueue);
controlDSP.getListFuture().add(controlDSP.getExecutorService().submit(capturer));
for (int i = 0; i < time * chunksBySecond; i++) {
for (int p = 0; p < pitchs; p++) {
ChunkDTO chunkDTO = new ChunkDTO(UtilClass.getArrayByte(minBufferSize), i, p);
LOGGER.info(String.format("chunkDTO: %s", chunkDTO));
try {
generatorBlockingQueue.put(chunkDTO);
} catch (InterruptedException ex) {
LOGGER.info(ex.getMessage());
}
}
try {
Thread.sleep(1000 / chunksBySecond);
} catch (Exception ex) {
}
}
controlDSP.tryFinishThreads(Thread.currentThread());
long current = System.nanoTime();
long interval = TimeUnit.NANOSECONDS.toSeconds(current - previous);
System.out.println("Seconds Interval: " + interval);
}
}
Capturer Class
public class Capturer implements Callable<Void> {
private final ControlDSP controlDSP;
private final int pitchs;
private final int totalChunks;
private final BlockingQueue<ChunkDTO> capturerBlockingQueue;
private final Counter intCounter;
private final Map<Long, List<ChunkDTO>> mapIndexListChunkDTO = Collections.synchronizedMap(new HashMap<>());
private volatile boolean isRunning = false;
private final String threadName;
private static final Logger LOGGER = Logger.getLogger("SampleComparator");
public Capturer(ControlDSP controlDSP, int pitchs, int totalChunks, BlockingQueue<ChunkDTO> capturerBlockingQueue) {
this.controlDSP = controlDSP;
this.pitchs = pitchs;
this.totalChunks = totalChunks;
this.capturerBlockingQueue = capturerBlockingQueue;
this.intCounter = new Counter();
this.controlDSP.getListFuture().add(this.controlDSP.getExecutorService().submit(() -> {
while (intCounter.getValue() < totalChunks) {
try {
Thread.sleep(100);
} catch (InterruptedException ex) {
LOGGER.log(Level.SEVERE, null, ex);
}
}
capturerBlockingQueue.add(new ChunkDTOStopper());
}));
this.threadName = this.getClass().getSimpleName();
}
@Override
public Void call() throws Exception {
long quantity = 0;
isRunning = true;
while (isRunning) {
try {
ChunkDTO chunkDTO = capturerBlockingQueue.take();
if (chunkDTO instanceof ChunkDTOStopper) {
break;
}
//Find or Create List (according to Index) to add the incoming Chunk
long index = chunkDTO.getIndex();
int sizeChunk = chunkDTO.getChunk().length;
List<ChunkDTO> listChunkDTOWithIndex = getListChunkDTOByIndex(chunkDTO);
//When the List (according to Index) is completed and processed
if (listChunkDTOWithIndex.size() == pitchs) {
mapIndexListChunkDTO.remove(index);
TransposerComparator transposerComparator = new TransposerComparator(controlDSP, controlDSP.getAudioformat(), index, sizeChunk, listChunkDTOWithIndex);
controlDSP.getListFuture().add(controlDSP.getExecutorService().submit(transposerComparator));
}
quantity++;
intCounter.setValue(quantity);
LOGGER.info(String.format("%s\tConsumes:%s\ttotal:%05d", threadName, chunkDTO, quantity));
} catch (Exception ex) {
LOGGER.log(Level.SEVERE, null, ex);
}
}
LOGGER.info(String.format("%s\tReceived:%05d\tQty:%s\tPitchs:%s\tEND\n", threadName, quantity, quantity / pitchs, pitchs));
return null;
}
private List<ChunkDTO> getListChunkDTOByIndex(ChunkDTO chunkDTO) {
List<ChunkDTO> listChunkDTOWithIndex = mapIndexListChunkDTO.get(chunkDTO.getIndex());
if (listChunkDTOWithIndex == null) {
listChunkDTOWithIndex = new ArrayList<>();
mapIndexListChunkDTO.put(chunkDTO.getIndex(), listChunkDTOWithIndex);
listChunkDTOWithIndex = mapIndexListChunkDTO.get(chunkDTO.getIndex());
}
listChunkDTOWithIndex.add(chunkDTO);
return listChunkDTOWithIndex;
}
}
TransposerComparator class.
The optimization required is in this code, specifically in the transposedNestedList method.
public class TransposerComparator implements Callable<Void> {
private final ControlDSP controlDSP;
private final AudioFormat audioformat;
private final long index;
private final int sizeChunk;
private final List<ChunkDTO> listChunkDTOWithIndex;
private final String threadName;
private static final Logger LOGGER = Logger.getLogger("SampleComparator");
public TransposerComparator(ControlDSP controlDSP, AudioFormat audioformat, long index, int sizeChunk, List<ChunkDTO> listChunkDTOWithIndex) {
this.controlDSP = controlDSP;
this.audioformat = audioformat;
this.index = index;
this.sizeChunk = sizeChunk;
this.listChunkDTOWithIndex = listChunkDTOWithIndex;
this.threadName = this.getClass().getSimpleName() + "_" + String.format("%05d", index);
}
@Override
public Void call() throws Exception {
Thread.currentThread().setName(threadName);
LOGGER.info(String.format("%s\tINI", threadName));
try {
int numBytesPerSample = audioformat.getSampleSizeInBits() / 8;
int quantitySamples = sizeChunk / numBytesPerSample;
long baseTime = quantitySamples * index;
// Convert the List of Chunk Bytes to Nested List of TimePitchValue
List<List<TimePitchValue>> nestedListTimePitchValue = listChunkDTOWithIndex
.stream()
.map(chunkDTO -> {
return IntStream
.range(0, quantitySamples)
.mapToObj(time -> {
int value = extractValue(chunkDTO.getChunk(), numBytesPerSample, time);
return new TimePitchValue(chunkDTO.getPitch(), baseTime + time, value);
}).collect(Collectors.toList());
}).collect(Collectors.toList());
List<List<TimePitchValue>> timeNestedListTimePitchValue = transposedNestedList(nestedListTimePitchValue);
} catch (Exception ex) {
ex.printStackTrace();
LOGGER.log(Level.SEVERE, null, ex);
throw ex;
}
return null;
}
private static int extractValue(byte[] bytesSamples, int numBytesPerSample, int time) {
byte[] bytesSingleNumber = Arrays.copyOfRange(bytesSamples, time * numBytesPerSample, (time + 1) * numBytesPerSample);
int value = numBytesPerSample == 2
? (UtilClass.Byte2IntLit(bytesSingleNumber[0], bytesSingleNumber[1]))
: (UtilClass.byte2intSmpl(bytesSingleNumber[0]));
return value;
}
private static List<List<TimePitchValue>> transposedNestedList(List<List<TimePitchValue>> nestedList) {
List<List<TimePitchValue>> outNestedList = new ArrayList<>();
nestedList.forEach(pitchList -> {
pitchList.forEach(pitchValue -> {
List<TimePitchValue> listTimePitchValueWithTime = listTimePitchValueWithTime(outNestedList, pitchValue.getTime());
if (!outNestedList.contains(listTimePitchValueWithTime)) {
outNestedList.add(listTimePitchValueWithTime);
}
listTimePitchValueWithTime.add(pitchValue);
});
});
outNestedList.forEach(pitchList -> {
pitchList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
});
return outNestedList;
}
private static List<TimePitchValue> listTimePitchValueWithTime(List<List<TimePitchValue>> nestedList, long time) {
List<TimePitchValue> listTimePitchValueWithTime = nestedList
.stream()
.filter(innerList -> innerList.stream()
.anyMatch(timePitchValue -> timePitchValue.getTime() == time))
.findAny()
.orElseGet(ArrayList::new);
return listTimePitchValueWithTime;
}
}
I was testing:
With 5 seconds in the Generator class: with the line List<List<TimePitchValue>> timeNestedListTimePitchValue = transposedNestedList(nestedListTimePitchValue); in the TransposerComparator class commented out, 7 seconds were needed; uncommented, 211 seconds.
With 10 seconds in the Generator class: with the same line commented out, 12 seconds were needed; uncommented, 574 seconds.
I need to run the application for at least 60 minutes.
To reduce the consumed time, I see two ways:
The short one, which I chose, is to optimize the methods I am currently using.
The longer one, which should also succeed, is to use GPGPU, but I don't know where to start implementing it yet.
QUESTIONS
This question is about the first way: what changes do you recommend in the code of the transposedNestedList method in order to improve speed?
Is there a better alternative to this comparison?
outNestedList.forEach(pitchList -> {
pitchList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
});
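One way to remove the quadratic cost (listTimePitchValueWithTime rescans outNestedList for every single sample) is to group by time in a map; a minimal sketch, assuming only the TimePitchValue getters already used above:

// Group samples by time with O(1) map lookups instead of scanning the
// output list per element (java.util imports as in the classes above).
private static List<List<TimePitchValue>> transposedNestedList(List<List<TimePitchValue>> nestedList) {
    Map<Long, List<TimePitchValue>> byTime = new LinkedHashMap<>();
    for (List<TimePitchValue> pitchList : nestedList) {
        for (TimePitchValue pitchValue : pitchList) {
            byTime.computeIfAbsent(pitchValue.getTime(), t -> new ArrayList<>()).add(pitchValue);
        }
    }
    List<List<TimePitchValue>> outNestedList = new ArrayList<>(byTime.values());
    for (List<TimePitchValue> timeList : outNestedList) {
        timeList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
    }
    return outNestedList;
}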

Error occurring while creating a custom tokenizer in Lucene 7.3

I am trying to create a new tokenizer with the new Lucene 7.3 API, following the book Taming Text (which uses the Lucene 3.x API), but it is giving me the error below:
java.lang.IllegalStateException: TokenStream contract violation: reset()/close() call missing, reset() called multiple times, or subclass does not call super.reset(). Please see Javadocs of TokenStream class for more information about the correct consuming workflow.
at org.apache.lucene.analysis.Tokenizer$1.read(Tokenizer.java:109)
at java.io.Reader.read(Reader.java:140)
at solr.SentenceTokenizer.fillSentences(SentenceTokenizer.java:43)
at solr.SentenceTokenizer.incrementToken(SentenceTokenizer.java:55)
at solr.NameFilter.fillSpans(NameFilter.java:56)
at solr.NameFilter.incrementToken(NameFilter.java:88)
at spec.solr.NameFilterTest.testNameFilter(NameFilterTest.java:81)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
Here is my SentenceTokenizer class.
The initializing method; in the older API there was super(reader), but in the current API the constructor has no access to a Reader:
public SentenceTokenizer(SentenceDetector detector) {
super();
setReader(reader);
this.sentenceDetector = detector;
}
Here is my reset method
@Override
public void reset() throws IOException {
super.reset();
sentenceDetector = null;
}
When I try to access this method from my custom TokenFilter, I get the above error:
public void fillSentences() throws IOException {
char[] c = new char[256];
int size = 0;
StringBuilder stringBuilder = new StringBuilder();
while ((size = input.read(c)) >= 0) {
stringBuilder.append(c, 0, size);
}
String temp = stringBuilder.toString();
inputSentence = temp.toCharArray();
sentenceSpans = sentenceDetector.sentPosDetect(temp);
tokenOffset = 0;
}
@Override
public final boolean incrementToken() throws IOException {
if (sentenceSpans == null) {
//invoking following method
fillSentences();
}
if (tokenOffset == sentenceSpans.length) {
return false;
}
Span sentenceSpan = sentenceSpans[tokenOffset];
clearAttributes();
int start = sentenceSpan.getStart();
int end = sentenceSpan.getEnd();
charTermAttribute.copyBuffer(inputSentence, start, end - start);
positionIncrementAttribute.setPositionIncrement(1);
offsetAttribute.setOffset(start, end);
tokenOffset++;
return true;
}
Here is my custom TokenFilter class
public final class NameFilter extends TokenFilter {
public static final String NE_PREFIX = "NE_";
private final Tokenizer tokenizer;
private final String[] tokenTypeNames;
private final NameFinderME[] nameFinderME;
private final KeywordAttribute keywordAttribute = addAttribute(KeywordAttribute.class);
private final PositionIncrementAttribute positionIncrementAttribute = addAttribute(PositionIncrementAttribute.class);
private final CharTermAttribute charTermAttribute = addAttribute(CharTermAttribute.class);
private final OffsetAttribute offsetAttribute = addAttribute(OffsetAttribute.class);
private String text;
private int baseOffset;
private Span[] spans;
private String[] tokens;
private Span[][] foundNames;
private boolean[][] tokenTypes;
private int spanOffsets = 0;
private final Queue<AttributeSource.State> tokenQueue =
new LinkedList<>();
public NameFilter(TokenStream in, String[] modelNames, NameFinderME[] nameFinderME) {
super(in);
this.tokenizer = SimpleTokenizer.INSTANCE;
this.nameFinderME = nameFinderME;
this.tokenTypeNames = new String[modelNames.length];
for (int i = 0; i < modelNames.length; i++) {
this.tokenTypeNames[i] = NE_PREFIX + modelNames[i];
}
}
//consumes tokens from the upstream tokenizer and buffer them in a
//StringBuilder whose contents will be passed to opennlp
protected boolean fillSpans() throws IOException {
if (!this.input.incrementToken()) return false;
//process the next sentence from the upstream tokenizer
this.text = input.getAttribute(CharTermAttribute.class).toString();
this.baseOffset = this.input.getAttribute(OffsetAttribute.class).startOffset();
this.spans = this.tokenizer.tokenizePos(text);
this.tokens = Span.spansToStrings(spans, text);
this.foundNames = new Span[this.nameFinderME.length][];
for (int i = 0; i < nameFinderME.length; i++) {
this.foundNames[i] = nameFinderME[i].find(tokens);
}
//initialize the token-type table
this.tokenTypes = new boolean[this.tokens.length][this.nameFinderME.length];
for (int i = 0; i < nameFinderME.length; i++) {
Span[] spans = foundNames[i];
for (int j = 0; j < spans.length; j++) {
int start = spans[j].getStart();
int end = spans[j].getEnd();
for (int k = start; k < end; k++) {
this.tokenTypes[k][i] = true;
}
}
}
spanOffsets = 0;
return true;
}
@Override
public boolean incrementToken() throws IOException {
//if there's nothing in the queue
if(tokenQueue.peek()==null){
//no span or spans consumed
if (spans==null||spanOffsets>=spans.length){
if (!fillSpans())return false;
}
if (spanOffsets>=spans.length)return false;
//copy the token and any types
clearAttributes();
keywordAttribute.setKeyword(false);
positionIncrementAttribute.setPositionIncrement(1);
int startOffset = baseOffset +spans[spanOffsets].getStart();
int endOffset = baseOffset+spans[spanOffsets].getEnd();
offsetAttribute.setOffset(startOffset,endOffset);
charTermAttribute.setEmpty()
.append(tokens[spanOffsets]);
//determine if the current token is of a named entity type; if so,
//push the current state into the queue and add a token reflecting
// any matching entity types.
boolean [] types = tokenTypes[spanOffsets];
for (int i = 0; i < nameFinderME.length; i++) {
if (types[i]){
keywordAttribute.setKeyword(true);
positionIncrementAttribute.setPositionIncrement(0);
tokenQueue.add(captureState());
positionIncrementAttribute.setPositionIncrement(1);
charTermAttribute.setEmpty().append(tokenTypeNames[i]);
}
}
}
spanOffsets++;
return true;
}
@Override
public void close() throws IOException {
super.close();
reset();
}
@Override
public void reset() throws IOException {
super.reset();
this.spanOffsets = 0;
this.spans = null;
}
@Override
public void end() throws IOException {
super.end();
reset();
}
}
Here is my test case for the class above.
@Test
public void testNameFilter() throws IOException {
Reader in = new StringReader(input);
Tokenizer tokenizer = new SentenceTokenizer( detector);
tokenizer.reset();
NameFilter nameFilter = new NameFilter(tokenizer, modelName, nameFinderMES);
nameFilter.reset();
CharTermAttribute charTermAttribute;
PositionIncrementAttribute positionIncrementAttribute;
OffsetAttribute offsetAttribute;
int pass = 0;
while (pass < 2) {
int pos = 0;
int lastStart = 0;
int lastEnd = 0;
//error occurs on the invocation below
while (nameFilter.incrementToken()) {
}
pass++;
}
}
I have made the following change in my code and it works fine, but I am not sure it is the correct answer:
public SentenceTokenizer(Reader reader,SentenceDetector sentenceDetector) {
super();
this.input =reader;
this.sentenceDetector = sentenceDetector;
}
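Assigning this.input directly bypasses the state checks that setReader()/reset() perform, so it works by accident at best. For comparison, a hedged sketch of the usual Lucene 7.x pattern (assuming OpenNLP's SentenceDetector and Span as in the code above): the consumer calls setReader(), the inherited protected input is read only between reset() and end(), and reset() clears per-stream state rather than the detector. Note also that NameFilter.close() and end() above call reset(), which itself violates the TokenStream contract and can produce exactly the "reset() called multiple times" error.

public final class SentenceTokenizer extends Tokenizer {
  private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
  private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
  private final SentenceDetector sentenceDetector;
  private Span[] sentenceSpans;
  private char[] inputSentence;
  private int tokenOffset;

  public SentenceTokenizer(SentenceDetector detector) {
    this.sentenceDetector = detector; // no Reader here; the consumer calls setReader()
  }

  @Override
  public final boolean incrementToken() throws IOException {
    if (sentenceSpans == null) {
      fillSentences(); // safe: we are between reset() and end() here
    }
    if (tokenOffset >= sentenceSpans.length) {
      return false;
    }
    Span span = sentenceSpans[tokenOffset++];
    clearAttributes();
    termAtt.copyBuffer(inputSentence, span.getStart(), span.getEnd() - span.getStart());
    offsetAtt.setOffset(correctOffset(span.getStart()), correctOffset(span.getEnd()));
    return true;
  }

  private void fillSentences() throws IOException {
    StringBuilder sb = new StringBuilder();
    char[] buf = new char[256];
    int size;
    while ((size = input.read(buf)) >= 0) { // 'input' is only valid after reset()
      sb.append(buf, 0, size);
    }
    String text = sb.toString();
    inputSentence = text.toCharArray();
    sentenceSpans = sentenceDetector.sentPosDetect(text);
    tokenOffset = 0;
  }

  @Override
  public void reset() throws IOException {
    super.reset();
    sentenceSpans = null; // clear per-stream state, not the detector
    tokenOffset = 0;
  }
}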

JSON response Using Volley in a recycler view in Android app

I am trying to print a JSON response in a RecyclerView. My RecyclerView is like an expandable ListView, and my JSON response is like this:
{"Table1":
[
{"filedate":"Oct 26, 2016"},
{"filedate":"Oct 18, 2016"}
],
"Table2":
[{
"filedate":"Oct 18, 2016",
"file1":"12.txt"
},
{
"filedate":"Oct 26, 2016",
"file1":"acerinvoice.pdf"
}
]}
and I am trying to print this JSON response using this code:
private void prepareListData() {
// Volley's json array request object
StringRequest stringRequest = new StringRequest(Request.Method.POST, REGISTER_URL,
new Response.Listener<String>() {
//I think the problem is here
@Override
public void onResponse(String response) {
try {
JSONObject object = new JSONObject(response);
JSONArray jsonarray = object.getJSONArray("Table1");
JSONArray jsonarray1 = object.getJSONArray("Table2");
for (int i = 0; i < jsonarray.length(); i++) {
try {
JSONObject obj = jsonarray.getJSONObject(i);
String str = obj.optString("filedate").trim();
int lth = jsonarray1.length();
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.HEADER, str));
for (int j = 0; j < jsonarray1.length(); j++) {
try {
JSONObject obj1 = jsonarray1.getJSONObject(j);
String str1 = obj1.optString("filedate").trim();
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str1));
Toast.makeText(getApplicationContext(), str1, Toast.LENGTH_LONG).show();
//if condition
} catch (JSONException e) {
// Log.e(TAG, "JSON Parsing error: " + e.getMessage());
}
}
// adding movie to movies array
} catch (JSONException e) {
Log.e("gdshfsjkg", "JSON Parsing error: " + e.getMessage());
}
}
Log.d("test", String.valueOf(data));
Toast.makeText(getApplicationContext(), (CharSequence) data, Toast.LENGTH_LONG).show();
} catch (JSONException e) {
e.printStackTrace();
}
// notifying list adapter about data changes
// so that it renders the list view with updated data
recyclerview.setAdapter(new ExpandableListAdapter(data));
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
}
}){
@Override
protected Map<String, String> getParams() {
Map<String, String> params = new HashMap<String, String>();
params.put(CLIENT, "4");
return params;
}
};
// Adding request to request queue
MyApplication.getInstance().addToRequestQueue(stringRequest);
}
ExpandableListAdapter
public class ExpandableListAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
public static final int HEADER = 0;
public static final int CHILD = 1;
private List<Item> data;
public ExpandableListAdapter(List<Item> data) {
this.data = data;
}
#Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int type) {
View view = null;
Context context = parent.getContext();
float dp = context.getResources().getDisplayMetrics().density;
int subItemPaddingLeft = (int) (18 * dp);
int subItemPaddingTopAndBottom = (int) (5 * dp);
switch (type) {
case HEADER:
LayoutInflater inflater = (LayoutInflater) parent.getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
view = inflater.inflate(R.layout.list_header, parent, false);
ListHeaderViewHolder header = new ListHeaderViewHolder(view);
return header;
case CHILD:
TextView itemTextView = new TextView(context);
itemTextView.setPadding(subItemPaddingLeft, subItemPaddingTopAndBottom, 0, subItemPaddingTopAndBottom);
itemTextView.setTextColor(0x88000000);
itemTextView.setLayoutParams(
new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.WRAP_CONTENT));
return new RecyclerView.ViewHolder(itemTextView) {
};
}
return null;
}
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
final Item item = data.get(position);
switch (item.type) {
case HEADER:
final ListHeaderViewHolder itemController = (ListHeaderViewHolder) holder;
itemController.refferalItem = item;
itemController.header_title.setText(item.text);
if (item.invisibleChildren == null) {
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_minus);
} else {
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_plus);
}
itemController.btn_expand_toggle.setOnClickListener(new View.OnClickListener() {
#Override
public void onClick(View v) {
if (item.invisibleChildren == null) {
item.invisibleChildren = new ArrayList<Item>();
int count = 0;
int pos = data.indexOf(itemController.refferalItem);
while (data.size() > pos + 1 && data.get(pos + 1).type == CHILD) {
item.invisibleChildren.add(data.remove(pos + 1));
count++;
}
notifyItemRangeRemoved(pos + 1, count);
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_plus);
} else {
int pos = data.indexOf(itemController.refferalItem);
int index = pos + 1;
for (Item i : item.invisibleChildren) {
data.add(index, i);
index++;
}
notifyItemRangeInserted(pos + 1, index - pos - 1);
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_minus);
item.invisibleChildren = null;
}
}
});
break;
case CHILD:
TextView itemTextView = (TextView) holder.itemView;
itemTextView.setText(data.get(position).text);
break;
}
}
#Override
public int getItemViewType(int position) {
return data.get(position).type;
}
#Override
public int getItemCount() {
return data.size();
}
private static class ListHeaderViewHolder extends RecyclerView.ViewHolder {
public TextView header_title;
public ImageView btn_expand_toggle;
public Item refferalItem;
public ListHeaderViewHolder(View itemView) {
super(itemView);
header_title = (TextView) itemView.findViewById(R.id.header_title);
btn_expand_toggle = (ImageView) itemView.findViewById(R.id.btn_expand_toggle);
}
}
public static class Item {
public int type;
public String text;
public List<Item> invisibleChildren;
public Item() {
}
public Item(int type, String text) {
this.type = type;
this.text = text;
}
}
}
But this code prints nothing in my recycler UI; it shows only an empty layout. I have also tried to print the response in my log, and it prints the whole response together. I want to print the Table1 contents as headers and the Table2 contents as children (items) under their respective headers. How can I do this? If any other information is needed, please ask.
First of all, you should check your response. As per your requirements, you should use a HashMap and an ArrayList. Follow these steps:
1. Take an ArrayList and store the child data of the 1st heading (and so on), with the index starting from 0.
2. Store the headers in the HashMap as keys.
ex: HashMap<String, ArrayList<Data>> hm;
hm.put("your first heading string", relatedArrayList);
Then use an ExpandableListView to arrange parent and child items.
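A minimal sketch of that grouping, hedged (the names are illustrative, Data is narrowed to the file-name string, and it assumes the Table1/Table2 layout from the question; wrap it in try/catch for JSONException as in the question's code):

// Build a header -> children map from the two tables (org.json types).
HashMap<String, ArrayList<String>> hm = new HashMap<>();
JSONObject root = new JSONObject(response);
JSONArray headers = root.getJSONArray("Table1");
JSONArray children = root.getJSONArray("Table2");
for (int i = 0; i < headers.length(); i++) {
    String heading = headers.getJSONObject(i).optString("filedate").trim();
    ArrayList<String> files = new ArrayList<>();
    for (int j = 0; j < children.length(); j++) {
        JSONObject child = children.getJSONObject(j);
        if (heading.equals(child.optString("filedate").trim())) { // match by value, not ==
            files.add(child.optString("file1"));
        }
    }
    hm.put(heading, files);
}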
Please check your JSON response; if it is invalid, first validate your JSON format, then try to decode it and add it to the RecyclerView.
Check your response in Logcat, and also the exception. I've tested the response you supplied; it contains some unnecessary "," characters that may cause the exception.
I have solved my problem by changing the above code like this:
private void prepareListData() {
// Volley's json array request object
StringRequest stringRequest = new StringRequest(Request.Method.POST, REGISTER_URL,
new Response.Listener<String>() {
@Override
public void onResponse(String response) {
JSONObject object = null;
try {
object = new JSONObject(response);
} catch (JSONException e) {
e.printStackTrace();
}
JSONArray jsonarray = null;
JSONArray jsonarray1 = null;
try {
jsonarray = object.getJSONArray("Table1");
jsonarray1 = object.getJSONArray("Table2");
} catch (JSONException e) {
e.printStackTrace();
}
for (int i = 0; i < jsonarray.length(); i++) {
try {
JSONObject obj = jsonarray.getJSONObject(i);
//
String str = obj.optString("filedate").trim();
Log.d("test", str);
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.HEADER, str));
// Toast.makeText(getApplicationContext(), lth, Toast.LENGTH_LONG).show();
for (int j = 0; j < jsonarray1.length(); j++) {
try {
JSONObject obj1 = jsonarray1.getJSONObject(j);
String str1 = obj1.optString("filedate").trim();
String str3 = obj1.optString("category").trim();
String str2 = obj1.optString("file1").trim();
String str4 = obj1.optString("4").trim();
Toast.makeText(getApplicationContext(), "server data respone", Toast.LENGTH_LONG).show();
Toast.makeText(getApplicationContext(), "test"+str1, Toast.LENGTH_LONG).show();
if (str == str1) {
// data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str1));
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str3));
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str2));
}
Toast.makeText(getApplicationContext(), str1, Toast.LENGTH_LONG).show();
//if condition
} catch (JSONException e) {
// Log.e(TAG, "JSON Parsing error: " + e.getMessage());
}
}
// adding movie to movies array
} catch (JSONException e) {
Log.e("gdshfsjkg", "JSON Parsing error: " + e.getMessage());
}
}
Log.d("test", String.valueOf(data));
// notifying list adapter about data changes
// so that it renders the list view with updated data
// adapterheader.notifyDataSetChanged();
recyclerview.setAdapter(new ExpandableListAdapter(data));
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
// VolleyLog.d(TAG, "Error: " + error.getMessage());
// hidePDialog();
}
}){
@Override
protected Map<String, String> getParams() {
Map<String, String> params = new HashMap<String, String>();
params.put(CLIENT, "4");
return params;
}
};
// Adding request to request queue
MyApplication.getInstance().addToRequestQueue(stringRequest);
}

How to deserialize a JSON array to a singly linked list by using Jackson

I want to deserialize a JSON array to a singly linked list in Java.
The definition of the singly linked list is as follows:
public class SinglyLinkedListNode<T> {
public T value;
public SinglyLinkedListNode next;
public SinglyLinkedListNode(final T value) {
this.value = value;
}
}
How do I deserialize a JSON string such as [1,2,3,4,5] into a singly linked list?
public void typeReferenceTest() throws IOException {
ObjectMapper objectMapper = new ObjectMapper();
final ArrayList<Integer> intArray = objectMapper.readValue("[1,2,3,4,5]",
new TypeReference<ArrayList<Integer>>() {});
System.out.println(intArray);
// How to achieve this?
final SinglyLinkedListNode<Integer> intList = objectMapper.readValue("[1,2,3,4,5]",
new TypeReference<SinglyLinkedListNode<Integer>>() {});
System.out.println(intList);
}
Moreover, I want SinglyLinkedListNode to be a first-class citizen, the same as ArrayList, usable in all kinds of combinations, such as HashSet<SinglyLinkedListNode<Integer>> and SinglyLinkedListNode<HashMap<String, Integer>>.
For example, what happens if I want to deserialize [[1,2,3], [4,5,6]] into an ArrayList<SinglyLinkedListNode<Integer>>?
As far as I know, a customized deserializer extending JsonDeserializer is not enough to do this.
When you want it to be deserialized to ArrayList<SinglyLinkedListNode<Integer>>, for example, your code specifies that as the expected type. Therefore, if a deserializer for SinglyLinkedListNode<Integer> is registered, it should succeed.
From the jackson-user Google group I got the right answer, from @Tatu Saloranta.
The answer is simple: just implement the java.util.List interface, and Jackson will automatically serialize/deserialize between JSON array and SinglyLinkedListNode.
So I implemented the java.util.List interface for SinglyLinkedListNode; the code is as follows:
import java.util.*;
import java.util.function.Consumer;
/**
* Singly Linked List.
*
* <p>As to singly linked list, a node can be viewed as a single node,
* and it can be viewed as a list too.</p>
*
* @param <E> the type of elements held in this collection
* @see java.util.LinkedList
*/
public class SinglyLinkedListNode<E>
extends AbstractSequentialList<E>
implements Cloneable, java.io.Serializable {
public E value;
public SinglyLinkedListNode<E> next;
/**
* Constructs an empty list.
*/
public SinglyLinkedListNode() {
value = null;
next = null;
}
/**
* Constructs a list with one element.
*/
public SinglyLinkedListNode(final E value) {
this.value = value;
next = null;
}
/**
* Constructs a list containing the elements of the specified
* collection, in the order they are returned by the collection's
* iterator.
*
* @param c the collection whose elements are to be placed into this list
* @throws NullPointerException if the specified collection is null
*/
public SinglyLinkedListNode(Collection<? extends E> c) {
this();
addAll(c);
}
/**
* Links e as last element.
*/
void linkLast(E e) {
final SinglyLinkedListNode<E> l = last();
final SinglyLinkedListNode<E> newNode = new SinglyLinkedListNode<>(e);
if (l == null)
this.value = e;
else
l.next = newNode;
modCount++;
}
/**
* Inserts element e before non-null Node succ.
*/
void linkBefore(E e, SinglyLinkedListNode<E> succ) {
assert succ != null;
final SinglyLinkedListNode<E> prev = this.previous(succ);
final SinglyLinkedListNode<E> newNode = new SinglyLinkedListNode<>(e);
if (prev == null)
this.value = e;
else
prev.next = newNode;
modCount++;
}
/**
* Return the node before x.
*
* @param x current node
* @return the node before x
*/
private SinglyLinkedListNode<E> previous(final SinglyLinkedListNode<E> x) {
assert (x != null);
if (size() < 2) return null;
if (this == x) return null;
SinglyLinkedListNode<E> prev = new SinglyLinkedListNode<>();
prev.next = this;
SinglyLinkedListNode<E> cur = this;
while (cur != x) {
prev = prev.next;
cur = cur.next;
}
return prev;
}
/**
* Return the last node.
* @return the last node.
*/
private SinglyLinkedListNode<E> last() {
if (size() == 0) return null;
if (size() == 1) return this;
SinglyLinkedListNode<E> prev = new SinglyLinkedListNode<>();
prev.next = this;
SinglyLinkedListNode<E> cur = this;
while (cur != null) {
prev = prev.next;
cur = cur.next;
}
return prev;
}
/**
* Unlinks non-null node x.
*/
E unlink(SinglyLinkedListNode<E> x) {
assert x != null;
final E element = x.value;
final SinglyLinkedListNode<E> next = x.next;
final SinglyLinkedListNode<E> prev = previous(x);
if (prev == null) {
this.value = next.value;
this.next = next.next;
} else {
prev.next = next;
}
x.next = null;
modCount++;
return element;
}
/**
* {@inheritDoc}
*/
public ListIterator<E> listIterator(int index) {
checkPositionIndex(index);
return new ListItr(index);
}
private class ListItr implements ListIterator<E> {
private SinglyLinkedListNode<E> lastReturned;
private SinglyLinkedListNode<E> next;
private int nextIndex;
private int expectedModCount = modCount;
ListItr(int index) {
assert isPositionIndex(index);
next = (index == size()) ? null : node(index);
nextIndex = index;
}
public boolean hasNext() {
return nextIndex < size();
}
public E next() {
checkForComodification();
if (!hasNext())
throw new NoSuchElementException();
lastReturned = next;
next = next.next;
nextIndex++;
return lastReturned.value;
}
public boolean hasPrevious() {
return nextIndex > 0;
}
public E previous() {
throw new UnsupportedOperationException();
}
public int nextIndex() {
return nextIndex;
}
public int previousIndex() {
return nextIndex - 1;
}
public void remove() {
checkForComodification();
if (lastReturned == null)
throw new IllegalStateException();
unlink(lastReturned);
nextIndex--;
lastReturned = null;
expectedModCount++;
}
public void set(E e) {
if (lastReturned == null)
throw new IllegalStateException();
checkForComodification();
lastReturned.value = e;
}
public void add(E e) {
checkForComodification();
lastReturned = null;
if (next == null)
linkLast(e);
else
linkBefore(e, next);
nextIndex++;
expectedModCount++;
}
public void forEachRemaining(Consumer<? super E> action) {
Objects.requireNonNull(action);
while (modCount == expectedModCount && nextIndex < size()) {
action.accept(next.value);
lastReturned = next;
next = next.next;
nextIndex++;
}
checkForComodification();
}
final void checkForComodification() {
if (modCount != expectedModCount)
throw new ConcurrentModificationException();
}
}
/**
* {@inheritDoc}
*/
public int size() {
int size = 0;
if (value == null) return size;
SinglyLinkedListNode<E> cur = this;
while (cur != null) {
size++;
cur = cur.next;
}
return size;
}
private void checkPositionIndex(int index) {
if (!isPositionIndex(index))
throw new IndexOutOfBoundsException(outOfBoundsMsg(index));
}
/**
* Returns the (non-null) Node at the specified element index.
*/
SinglyLinkedListNode<E> node(int index) {
assert isElementIndex(index);
SinglyLinkedListNode<E> x = this;
for (int i = 0; i < index; i++)
x = x.next;
return x;
}
/**
* Tells if the argument is the index of an existing element.
*/
private boolean isElementIndex(int index) {
return index >= 0 && index < size();
}
/**
* Tells if the argument is the index of a valid position for an
* iterator or an add operation.
*/
private boolean isPositionIndex(int index) {
return index >= 0 && index <= size();
}
/**
* Constructs an IndexOutOfBoundsException detail message.
* Of the many possible refactorings of the error handling code,
* this "outlining" performs best with both server and client VMs.
*/
private String outOfBoundsMsg(int index) {
return "Index: "+index+", Size: " + size();
}
}
Here is the unit test code:
@Test public void typeReferenceTest() throws IOException {
ObjectMapper objectMapper = new ObjectMapper();
final SinglyLinkedListNode<Integer> intList = objectMapper.readValue("[1,2,3,4,5]",
new TypeReference<SinglyLinkedListNode<Integer>>() {});
System.out.println(intList);
final ArrayList<SinglyLinkedListNode<Integer>> arrayOfList = objectMapper.readValue("[[1,2,3], [4,5,6]]",
new TypeReference<ArrayList<SinglyLinkedListNode<Integer>>>() {});
System.out.println(arrayOfList);
}
@Tatu Saloranta Thank you very much!
Here is my original blog, Deserialize a JSON Array to a Singly Linked List

With Lucene 4.3.1, How to get all terms which occur in sub-range of all docs

Suppose a Lucene index with fields: date, content.
I want to get all term values and their frequencies for docs whose date is yesterday. The date field is a keyword field; the content field is analyzed and indexed.
Please help me with sample code.
My solution source is as follows...
/**
*
*
* @param searcher
* @param fromDateTime
* - yyyymmddhhmmss
* @param toDateTime
* - yyyymmddhhmmss
* @return top-10 query string
*/
static public String top10(IndexSearcher searcher, String fromDateTime,
String toDateTime) {
String top10Query = "";
try {
Query query = new TermRangeQuery("tweetDate", new BytesRef(
fromDateTime), new BytesRef(toDateTime), true, false);
final BitSet bits = new BitSet(searcher.getIndexReader().maxDoc());
searcher.search(query, new Collector() {
private int docBase;
@Override
public void setScorer(Scorer scorer) throws IOException {
}
@Override
public void setNextReader(AtomicReaderContext context)
throws IOException {
this.docBase = context.docBase;
}
@Override
public void collect(int doc) throws IOException {
bits.set(doc + docBase);
}
@Override
public boolean acceptsDocsOutOfOrder() {
return false;
}
});
//
Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_43,
EnglishStopWords.getEnglishStopWords());
//
HashMap<String, Long> wordFrequency = new HashMap<>();
for (int wx = 0; wx < bits.length(); ++wx) {
if (bits.get(wx)) {
Document wd = searcher.doc(wx);
//
TokenStream tokenStream = analyzer.tokenStream("temp",
new StringReader(wd.get("content")));
// OffsetAttribute offsetAttribute = tokenStream
// .addAttribute(OffsetAttribute.class);
CharTermAttribute charTermAttribute = tokenStream
.addAttribute(CharTermAttribute.class);
tokenStream.reset();
while (tokenStream.incrementToken()) {
// int startOffset = offsetAttribute.startOffset();
// int endOffset = offsetAttribute.endOffset();
String term = charTermAttribute.toString();
if (term.length() < 2)
continue;
Long wl;
if ((wl = wordFrequency.get(term)) == null)
wordFrequency.put(term, 1L);
else {
wl += 1;
wordFrequency.put(term, wl);
}
}
tokenStream.end();
tokenStream.close();
}
}
analyzer.close();
// sort
List<String> occurterm = new ArrayList<String>();
for (String ws : wordFrequency.keySet()) {
occurterm.add(String.format("%06d\t%s", wordFrequency.get(ws),
ws));
}
Collections.sort(occurterm, Collections.reverseOrder());
// make query string by top 10 words
int topCount = 10;
for (String ws : occurterm) {
if (topCount-- == 0)
break;
String[] tks = ws.split("\\t");
top10Query += tks[1] + " ";
}
top10Query = top10Query.trim(); // trim() returns a new String; the result must be kept
} catch (IOException e) {
e.printStackTrace();
} finally {
}
// return top10 word string
return top10Query;
}
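Re-analyzing the stored text works, but if the content field stores term vectors, the counts can be read straight from the index instead. A hedged sketch against the Lucene 4.x API, reusing the variables above (it assumes term vectors were enabled at index time, which the question does not state):

// Inside the loop over matching docs: read per-document term frequencies
// from the stored term vector instead of re-running the analyzer.
// Requires FieldType.setStoreTermVectors(true) when indexing "content".
Terms terms = searcher.getIndexReader().getTermVector(wx, "content");
if (terms != null) {
    TermsEnum termsEnum = terms.iterator(null); // Lucene 4.x signature
    BytesRef term;
    while ((term = termsEnum.next()) != null) {
        String word = term.utf8ToString();
        if (word.length() < 2)
            continue;
        Long wl = wordFrequency.get(word);
        // totalTermFreq() of a single-document term vector is the in-doc count
        wordFrequency.put(word, (wl == null ? 0L : wl) + termsEnum.totalTermFreq());
    }
}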