OutOfDirectMemoryError exception with Redisson - Redis

I'm trying to learn Redis through Redisson. Here is my code that inserts into Redis using multiple threads.
package redisson;

import java.io.File;
import java.util.concurrent.atomic.AtomicInteger;
import org.redisson.Redisson;
import org.redisson.api.RBatch;
import org.redisson.api.RMap;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;

public class RedisTest extends Thread {

    static RMap<String, String> dMap = null;
    static RMap<String, String> wMap = null;
    static RMap<String, String> mMap = null;
    static RedissonClient redisson = null;

    public static void main(String[] args) throws Exception {
        Config config = Config.fromJSON(new File("C:\\Users\\neon-workspace\\RedisProject\\src\\main\\resources\\SingleNodeConfig.json"));
        RedissonClient redisson = Redisson.create(config);
        dMap = redisson.getMap("Daily");
        wMap = redisson.getMap("Weekly");
        mMap = redisson.getMap("Monthly");
        connectHbse(dMap, wMap, mMap, redisson);
        redisson.shutdown();
    }

    public static void connectHbse(RMap<String, String> dMap, RMap<String, String> wMap, RMap<String, String> mMap, RedissonClient redisson) {
        int totalSize = 500000;
        int totalThread = 2;
        int chunkSize = totalSize / totalThread;
        AtomicInteger total = new AtomicInteger(chunkSize);
        RedisTest test1[] = new RedisTest[totalThread];
        for (int i = 0; i < test1.length; i++) {
            test1[i] = new RedisTest(total, dMap, wMap, mMap, redisson);
            total.set(total.intValue() + chunkSize);
        }
        long t1 = System.currentTimeMillis();
        for (int i = 0; i < test1.length; i++) {
            test1[i].start();
        }
        try {
            for (int i = 0; i < test1.length; i++) {
                test1[i].join();
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        System.out.println("Final Total Time Taken ::>>>>>>>>>>>>>>>>> " + ((System.currentTimeMillis() - t1)) + "ms");
    }

    private AtomicInteger total = null;

    public RedisTest(AtomicInteger total, RMap<String, String> dMap, RMap<String, String> wMap, RMap<String, String> mMap, RedissonClient redisson) {
        this.total = new AtomicInteger(total.intValue());
        this.dMap = dMap;
        this.wMap = wMap;
        this.mMap = mMap;
        this.redisson = redisson;
    }

    public static int getRandomInteger(int maximum, int minimum) {
        return ((int) (Math.random() * (maximum - minimum))) + minimum;
    }

    public void run() {
        try {
            long t1 = System.currentTimeMillis();
            dMap.clear();
            wMap.clear();
            mMap.clear();
            RBatch batch = redisson.createBatch();
            for (; total.decrementAndGet() >= 0;) {
                String dvalue = "" + getRandomInteger(100, 200);
                String wvalue = "" + getRandomInteger(200, 300);
                String mvalue = "" + getRandomInteger(300, 400);
                batch.getMap("Daily").fastPutAsync("" + total.get(), dvalue);
                batch.getMap("Weekly").fastPutAsync("" + total.get(), wvalue);
                batch.getMap("Monthly").fastPutAsync("" + total.get(), mvalue);
                synchronized (total) {
                    if (total.get() % 100 == 0)
                        System.out.println(total.get() + " Records in Seconds:::::" + ((System.currentTimeMillis() - t1)) / 1000);
                }
            }
            batch.execute();
            System.out.println("Time Taken for completion::::: " + ((System.currentTimeMillis() - t1)) + " by thread:::::" + Thread.currentThread().getName());
            System.out.println("Done !!!");
        } catch (Exception e) {
            System.out.println("Done !!!" + e.getMessage());
            e.printStackTrace();
        } finally {
        }
    }
}
This code works fine up to totalSize = 400000.
When I set totalSize = 500000, it throws the following exception.
io.netty.handler.codec.EncoderException: io.netty.util.internal.OutOfDirectMemoryError: failed to allocate 16777216 byte(s) of direct memory (used: 939524096, max: 954466304)
at io.netty.handler.codec.MessageToByteEncoder.write(MessageToByteEncoder.java:125)
at org.redisson.client.handler.CommandBatchEncoder.write(CommandBatchEncoder.java:45)
at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:738)
... 25 more
Caused by: io.netty.util.internal.OutOfDirectMemoryError: failed to allocate 16777216 byte(s) of direct memory (used: 939524096, max: 954466304)
at io.netty.util.internal.PlatformDependent.incrementMemoryCounter(PlatformDependent.java:627)
at io.netty.util.internal.PlatformDependent.allocateDirectNoCleaner(PlatformDependent.java:581)
at io.netty.buffer.PoolArena$DirectArena.allocateDirect(PoolArena.java:764)
at io.netty.buffer.PoolArena$DirectArena.newChunk(PoolArena.java:740)
at io.netty.buffer.PoolArena.allocateNormal(PoolArena.java:244)
at io.netty.buffer.PoolArena.allocate(PoolArena.java:226)
at io.netty.buffer.PoolArena.reallocate(PoolArena.java:397)
at io.netty.buffer.PooledByteBuf.capacity(PooledByteBuf.java:118)
at io.netty.buffer.AbstractByteBuf.ensureWritable0(AbstractByteBuf.java:285)
at io.netty.buffer.AbstractByteBuf.ensureWritable(AbstractByteBuf.java:265)
at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:1046)
at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:1054)
at org.redisson.client.handler.CommandEncoder.writeArgument(CommandEncoder.java:169)
at org.redisson.client.handler.CommandEncoder.encode(CommandEncoder.java:110)
at org.redisson.client.handler.CommandBatchEncoder.encode(CommandBatchEncoder.java:52)
at org.redisson.client.handler.CommandBatchEncoder.encode(CommandBatchEncoder.java:32)
at io.netty.handler.codec.MessageToByteEncoder.write(MessageToByteEncoder.java:107)
... 27 more
But I have about 7 GB of RAM free.
Can someone explain why I'm getting this exception?

It seems I should provide more memory to my JVM instance using -Xmx, which solved the issue for me.
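(Editor's note, not from the original post.) A plausible reason -Xmx helps: Netty caps its pooled direct memory at roughly the maximum heap size unless overridden (for example with -XX:MaxDirectMemorySize), and a single RBatch buffers every queued command until execute() is called, so 1.5 million fastPutAsync commands can exhaust that cap at encode time. An alternative that bounds memory instead of raising it is to flush the batch periodically. A minimal sketch of the loop body, reusing the names from the question; the flush threshold of 10,000 is an illustrative assumption to tune:

RBatch batch = redisson.createBatch();
int buffered = 0;
while (total.decrementAndGet() >= 0) {
    String key = "" + total.get();
    batch.getMap("Daily").fastPutAsync(key, "" + getRandomInteger(100, 200));
    batch.getMap("Weekly").fastPutAsync(key, "" + getRandomInteger(200, 300));
    batch.getMap("Monthly").fastPutAsync(key, "" + getRandomInteger(300, 400));
    // Flush every 10,000 commands so encoded requests do not pile up
    // in Netty's direct buffers (threshold is an assumption, not measured).
    if (++buffered % 10_000 == 0) {
        batch.execute();
        batch = redisson.createBatch();
    }
}
batch.execute();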

Related

JVM Condition and LockSupport: which is faster?

I made an experimental example of a synchronous call: each thread task waits for a callback carrying its own value plus 1, and I compare the performance of Condition against LockSupport. The result is unexpected: the two total times are the same, but the difference on the flame graph is very big. Does this mean the JVM has not optimized LockSupport?
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.ReentrantLock;

public class LockerTest {

    static int max = 100000;
    static boolean usePark = true;
    static Map<Long, Long> msg = new ConcurrentHashMap<>();
    static ExecutorService producer = Executors.newFixedThreadPool(4);
    static ExecutorService consumer = Executors.newFixedThreadPool(16);
    static AtomicLong record = new AtomicLong(0);
    static CountDownLatch latch = new CountDownLatch(max);
    static ReentrantLock lock = new ReentrantLock();
    static Condition cond = lock.newCondition();
    static Map<Long, Thread> parkMap = new ConcurrentHashMap<>();
    static AtomicLong cN = new AtomicLong(0);

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        for (int num = 0; num < max; num++) {
            consumer.execute(() -> {
                long id = record.incrementAndGet();
                msg.put(id, -1L);
                call(id);
                if (usePark) {
                    Thread thread = Thread.currentThread();
                    parkMap.put(id, thread);
                    while (msg.get(id) == -1) {
                        cN.incrementAndGet();
                        LockSupport.park(thread);
                    }
                } else {
                    lock.lock();
                    try {
                        while (msg.get(id) == -1) {
                            cN.incrementAndGet();
                            cond.await();
                        }
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    } finally {
                        lock.unlock();
                    }
                }
                latch.countDown();
            });
        }
        latch.await();
        consumer.shutdown();
        producer.shutdown();
        System.out.printf("park %s suc %s cost %s cn %s"
                , usePark
                , msg.entrySet().stream().noneMatch(entry -> entry.getKey() + 1 != entry.getValue())
                , System.currentTimeMillis() - start
                , cN.get()
        );
    }

    private static void call(long id) {
        producer.execute(() -> {
            try {
                Thread.sleep((id * 13) % 100);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (usePark) {
                msg.put(id, id + 1);
                LockSupport.unpark(parkMap.remove(id));
            } else {
                lock.lock();
                try {
                    msg.put(id, id + 1);
                    cond.signalAll();
                } finally {
                    lock.unlock();
                }
            }
        });
    }
}
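(Editor's note, not from the original post.) One detail that would explain the identical timings: in OpenJDK, Condition.await() on a ReentrantLock is implemented by AbstractQueuedSynchronizer's ConditionObject, which itself blocks the thread with LockSupport.park. Both branches of the test therefore end up parked the same way; the difference on a flame graph is extra queue management in the Condition path, not a different blocking primitive. A small self-contained sketch that makes this visible by dumping the stack of a thread blocked in await():

import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.ReentrantLock;

public class AwaitParkDemo {
    public static void main(String[] args) throws InterruptedException {
        ReentrantLock lock = new ReentrantLock();
        Condition cond = lock.newCondition();
        Thread waiter = new Thread(() -> {
            lock.lock();
            try {
                cond.await(); // internally ends up in LockSupport.park(this)
            } catch (InterruptedException ignored) {
            } finally {
                lock.unlock();
            }
        });
        waiter.start();
        Thread.sleep(200); // give the waiter time to park
        // The stack of the parked thread shows LockSupport.park called
        // from AbstractQueuedSynchronizer$ConditionObject.await.
        for (StackTraceElement el : waiter.getStackTrace()) {
            System.out.println(el);
        }
        waiter.interrupt();
        waiter.join();
    }
}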

Optimizing Transposing and Comparison of Concurrent List in Java 8

I have an application in Java 8 that collects data from multiple threads using a BlockingQueue,
and I need to perform comparisons of samples.
My real application is very large, so I implemented a mock application (on GitHub) in Java 8.
It generates chunks of bytes (in effectively random order).
The bytes are stored in the ChunkDTO class.
The Capturer class collects the ChunkDTOs into a List.
Each ChunkDTO in the List is translated into a List of samples (TimePitchValue, specifically), yielding a nested List (a List of Lists of TimePitchValue).
The nested List is then transposed so that TimePitchValues with the same time value can be compared.
Because of the enormous number of TimePitchValue instances, this consumes a huge amount of time in my application.
Here is some code (the complete working code is on GitHub, since it is too large for this site).
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import javax.sound.sampled.AudioFormat;

public class Generator {

    final static Logger LOGGER = Logger.getLogger("SampleComparator");

    public static void main(String[] args) {
        long previous = System.nanoTime();
        final int minBufferSize = 2048;
        int sampleRate = 8192;
        int numChannels = 1;
        int numBytesPerSample = 1;
        int samplesChunkPerSecond = sampleRate / minBufferSize;
        int minutes = 0;
        int seconds = 10;
        int time = 60 * minutes + seconds;
        int chunksBySecond = samplesChunkPerSecond * numBytesPerSample * numChannels;
        int pitchs = 32;
        boolean signed = false;
        boolean endianness = false;
        AudioFormat audioformat = new AudioFormat(sampleRate, 8 * numBytesPerSample, numChannels, signed, endianness);
        ControlDSP controlDSP = new ControlDSP(audioformat);
        BlockingQueue<ChunkDTO> generatorBlockingQueue = new LinkedBlockingQueue<>();
        Capturer capturer = new Capturer(controlDSP, pitchs, pitchs * time * chunksBySecond, generatorBlockingQueue);
        controlDSP.getListFuture().add(controlDSP.getExecutorService().submit(capturer));
        for (int i = 0; i < time * chunksBySecond; i++) {
            for (int p = 0; p < pitchs; p++) {
                ChunkDTO chunkDTO = new ChunkDTO(UtilClass.getArrayByte(minBufferSize), i, p);
                LOGGER.info(String.format("chunkDTO: %s", chunkDTO));
                try {
                    generatorBlockingQueue.put(chunkDTO);
                } catch (InterruptedException ex) {
                    LOGGER.info(ex.getMessage());
                }
            }
            try {
                Thread.sleep(1000 / chunksBySecond);
            } catch (Exception ex) {
            }
        }
        controlDSP.tryFinishThreads(Thread.currentThread());
        long current = System.nanoTime();
        long interval = TimeUnit.NANOSECONDS.toSeconds(current - previous);
        System.out.println("Seconds Interval: " + interval);
    }
}
Capturer class:
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;

public class Capturer implements Callable<Void> {

    private final ControlDSP controlDSP;
    private final int pitchs;
    private final int totalChunks;
    private final BlockingQueue<ChunkDTO> capturerBlockingQueue;
    private final Counter intCounter;
    private final Map<Long, List<ChunkDTO>> mapIndexListChunkDTO = Collections.synchronizedMap(new HashMap<>());
    private volatile boolean isRunning = false;
    private final String threadName;
    private static final Logger LOGGER = Logger.getLogger("SampleComparator");

    public Capturer(ControlDSP controlDSP, int pitchs, int totalChunks, BlockingQueue<ChunkDTO> capturerBlockingQueue) {
        this.controlDSP = controlDSP;
        this.pitchs = pitchs;
        this.totalChunks = totalChunks;
        this.capturerBlockingQueue = capturerBlockingQueue;
        this.intCounter = new Counter();
        this.controlDSP.getListFuture().add(this.controlDSP.getExecutorService().submit(() -> {
            while (intCounter.getValue() < totalChunks) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException ex) {
                    LOGGER.log(Level.SEVERE, null, ex);
                }
            }
            capturerBlockingQueue.add(new ChunkDTOStopper());
        }));
        this.threadName = this.getClass().getSimpleName();
    }

    @Override
    public Void call() throws Exception {
        long quantity = 0;
        isRunning = true;
        while (isRunning) {
            try {
                ChunkDTO chunkDTO = capturerBlockingQueue.take();
                if (chunkDTO instanceof ChunkDTOStopper) {
                    break;
                }
                //Find or Create List (according to Index) to add the incoming Chunk
                long index = chunkDTO.getIndex();
                int sizeChunk = chunkDTO.getChunk().length;
                List<ChunkDTO> listChunkDTOWithIndex = getListChunkDTOByIndex(chunkDTO);
                //When the List (according to Index) is completed and processed
                if (listChunkDTOWithIndex.size() == pitchs) {
                    mapIndexListChunkDTO.remove(index);
                    TransposerComparator transposerComparator = new TransposerComparator(controlDSP, controlDSP.getAudioformat(), index, sizeChunk, listChunkDTOWithIndex);
                    controlDSP.getListFuture().add(controlDSP.getExecutorService().submit(transposerComparator));
                }
                quantity++;
                intCounter.setValue(quantity);
                LOGGER.info(String.format("%s\tConsumes:%s\ttotal:%05d", threadName, chunkDTO, quantity));
            } catch (Exception ex) {
                LOGGER.log(Level.SEVERE, null, ex);
            }
        }
        LOGGER.info(String.format("%s\tReceived:%05d\tQty:%s\tPitchs:%s\tEND\n", threadName, quantity, quantity / pitchs, pitchs));
        return null;
    }

    private List<ChunkDTO> getListChunkDTOByIndex(ChunkDTO chunkDTO) {
        List<ChunkDTO> listChunkDTOWithIndex = mapIndexListChunkDTO.get(chunkDTO.getIndex());
        if (listChunkDTOWithIndex == null) {
            listChunkDTOWithIndex = new ArrayList<>();
            mapIndexListChunkDTO.put(chunkDTO.getIndex(), listChunkDTOWithIndex);
            listChunkDTOWithIndex = mapIndexListChunkDTO.get(chunkDTO.getIndex());
        }
        listChunkDTOWithIndex.add(chunkDTO);
        return listChunkDTOWithIndex;
    }
}
TransposerComparator class.
The optimization needed is in this code, specifically in the transposedNestedList method.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import javax.sound.sampled.AudioFormat;

public class TransposerComparator implements Callable<Void> {

    private final ControlDSP controlDSP;
    private final AudioFormat audioformat;
    private final long index;
    private final int sizeChunk;
    private final List<ChunkDTO> listChunkDTOWithIndex;
    private final String threadName;
    private static final Logger LOGGER = Logger.getLogger("SampleComparator");

    public TransposerComparator(ControlDSP controlDSP, AudioFormat audioformat, long index, int sizeChunk, List<ChunkDTO> listChunkDTOWithIndex) {
        this.controlDSP = controlDSP;
        this.audioformat = audioformat;
        this.index = index;
        this.sizeChunk = sizeChunk;
        this.listChunkDTOWithIndex = listChunkDTOWithIndex;
        this.threadName = this.getClass().getSimpleName() + "_" + String.format("%05d", index);
    }

    @Override
    public Void call() throws Exception {
        Thread.currentThread().setName(threadName);
        LOGGER.info(String.format("%s\tINI", threadName));
        try {
            int numBytesPerSample = audioformat.getSampleSizeInBits() / 8;
            int quantitySamples = sizeChunk / numBytesPerSample;
            long baseTime = quantitySamples * index;
            // Convert the List of Chunk Bytes to Nested List of TimePitchValue
            List<List<TimePitchValue>> nestedListTimePitchValue = listChunkDTOWithIndex
                    .stream()
                    .map(chunkDTO -> {
                        return IntStream
                                .range(0, quantitySamples)
                                .mapToObj(time -> {
                                    int value = extractValue(chunkDTO.getChunk(), numBytesPerSample, time);
                                    return new TimePitchValue(chunkDTO.getPitch(), baseTime + time, value);
                                }).collect(Collectors.toList());
                    }).collect(Collectors.toList());
            List<List<TimePitchValue>> timeNestedListTimePitchValue = transposedNestedList(nestedListTimePitchValue);
        } catch (Exception ex) {
            ex.printStackTrace();
            LOGGER.log(Level.SEVERE, null, ex);
            throw ex;
        }
        return null;
    }

    private static int extractValue(byte[] bytesSamples, int numBytesPerSample, int time) {
        byte[] bytesSingleNumber = Arrays.copyOfRange(bytesSamples, time * numBytesPerSample, (time + 1) * numBytesPerSample);
        int value = numBytesPerSample == 2
                ? (UtilClass.Byte2IntLit(bytesSingleNumber[0], bytesSingleNumber[1]))
                : (UtilClass.byte2intSmpl(bytesSingleNumber[0]));
        return value;
    }

    private static List<List<TimePitchValue>> transposedNestedList(List<List<TimePitchValue>> nestedList) {
        List<List<TimePitchValue>> outNestedList = new ArrayList<>();
        nestedList.forEach(pitchList -> {
            pitchList.forEach(pitchValue -> {
                List<TimePitchValue> listTimePitchValueWithTime = listTimePitchValueWithTime(outNestedList, pitchValue.getTime());
                if (!outNestedList.contains(listTimePitchValueWithTime)) {
                    outNestedList.add(listTimePitchValueWithTime);
                }
                listTimePitchValueWithTime.add(pitchValue);
            });
        });
        outNestedList.forEach(pitchList -> {
            pitchList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
        });
        return outNestedList;
    }

    private static List<TimePitchValue> listTimePitchValueWithTime(List<List<TimePitchValue>> nestedList, long time) {
        List<TimePitchValue> listTimePitchValueWithTime = nestedList
                .stream()
                .filter(innerList -> innerList.stream()
                        .anyMatch(timePitchValue -> timePitchValue.getTime() == time))
                .findAny()
                .orElseGet(ArrayList::new);
        return listTimePitchValueWithTime;
    }
}
I was testing:
With 5 seconds in the Generator class, and the line List<List<TimePitchValue>> timeNestedListTimePitchValue = transposedNestedList(nestedListTimePitchValue); in the TransposerComparator class commented out, 7 seconds were needed; uncommented, 211 seconds were needed.
With 10 seconds in the Generator class: commented out, 12 seconds were needed; uncommented, 574 seconds were needed.
I need to run the application for at least 60 minutes.
To reduce the time consumed, I see two ways:
The one I chose, for the short term, is to optimize the methods I am currently using.
The other should also succeed but would take longer: using GPGPU, though I don't know where to start implementing it yet.
QUESTIONS
This question is about the first way: what changes do you recommend in the code of the transposedNestedList method in order to improve speed?
Is there a better alternative to this comparison?
outNestedList.forEach(pitchList -> {
    pitchList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
});
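(Editor's note, not from the original post, as an illustration of the usual fix.) The quadratic cost comes from listTimePitchValueWithTime scanning every inner list for every element. Grouping by time in a single pass turns the transpose into roughly linear work plus the final sorts. A minimal sketch, assuming TimePitchValue exposes getTime() and getValue() as in the question and that java.util.Map, java.util.TreeMap and the other java.util imports above are available:

private static List<List<TimePitchValue>> transposedNestedList(List<List<TimePitchValue>> nestedList) {
    // One pass: bucket every sample by its time stamp. A TreeMap keeps
    // the buckets ordered by time; a HashMap works if order is irrelevant.
    Map<Long, List<TimePitchValue>> byTime = new TreeMap<>();
    for (List<TimePitchValue> pitchList : nestedList) {
        for (TimePitchValue tpv : pitchList) {
            byTime.computeIfAbsent(tpv.getTime(), t -> new ArrayList<>()).add(tpv);
        }
    }
    // Sort each time bucket by descending value, as the original code does.
    List<List<TimePitchValue>> outNestedList = new ArrayList<>(byTime.values());
    for (List<TimePitchValue> bucket : outNestedList) {
        bucket.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
    }
    return outNestedList;
}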

Error occurring while creating a custom tokenizer in Lucene 7.3

I'm trying to create a new tokenizer by referring to the book Taming Text (which uses the Lucene 3.x API) while using the new Lucene 7.3 API, but it gives me the error mentioned below:
java.lang.IllegalStateException: TokenStream contract violation: reset()/close() call missing, reset() called multiple times, or subclass does not call super.reset(). Please see Javadocs of TokenStream class for more information about the correct consuming workflow.
at org.apache.lucene.analysis.Tokenizer$1.read(Tokenizer.java:109)
at java.io.Reader.read(Reader.java:140)
at solr.SentenceTokenizer.fillSentences(SentenceTokenizer.java:43)
at solr.SentenceTokenizer.incrementToken(SentenceTokenizer.java:55)
at solr.NameFilter.fillSpans(NameFilter.java:56)
at solr.NameFilter.incrementToken(NameFilter.java:88)
at spec.solr.NameFilterTest.testNameFilter(NameFilterTest.java:81)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
Here is my SentenceTokenizer class.
This is the initializing method; in the older API there was super(reader), but in the current API the constructor gives no access to a Reader:
public SentenceTokenizer(SentenceDetector detector) {
    super();
    setReader(reader);
    this.sentenceDetector = detector;
}
Here is my reset method
@Override
public void reset() throws IOException {
    super.reset();
    sentenceDetector = null;
}
When I try to access this method from my custom TokenFilter, I get the above error:
public void fillSentences() throws IOException {
    char[] c = new char[256];
    int size = 0;
    StringBuilder stringBuilder = new StringBuilder();
    while ((size = input.read(c)) >= 0) {
        stringBuilder.append(c, 0, size);
    }
    String temp = stringBuilder.toString();
    inputSentence = temp.toCharArray();
    sentenceSpans = sentenceDetector.sentPosDetect(temp);
    tokenOffset = 0;
}

@Override
public final boolean incrementToken() throws IOException {
    if (sentenceSpans == null) {
        //invoking following method
        fillSentences();
    }
    if (tokenOffset == sentenceSpans.length) {
        return false;
    }
    Span sentenceSpan = sentenceSpans[tokenOffset];
    clearAttributes();
    int start = sentenceSpan.getStart();
    int end = sentenceSpan.getEnd();
    charTermAttribute.copyBuffer(inputSentence, start, end - start);
    positionIncrementAttribute.setPositionIncrement(1);
    offsetAttribute.setOffset(start, end);
    tokenOffset++;
    return true;
}
Here is my custom TokenFilter class
import java.io.IOException;
import java.util.LinkedList;
import java.util.Queue;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.KeywordAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.util.AttributeSource;
import opennlp.tools.namefind.NameFinderME;
import opennlp.tools.tokenize.SimpleTokenizer;
import opennlp.tools.tokenize.Tokenizer;
import opennlp.tools.util.Span;

public final class NameFilter extends TokenFilter {

    public static final String NE_PREFIX = "NE_";
    private final Tokenizer tokenizer;
    private final String[] tokenTypeNames;
    private final NameFinderME[] nameFinderME;
    private final KeywordAttribute keywordAttribute = addAttribute(KeywordAttribute.class);
    private final PositionIncrementAttribute positionIncrementAttribute = addAttribute(PositionIncrementAttribute.class);
    private final CharTermAttribute charTermAttribute = addAttribute(CharTermAttribute.class);
    private final OffsetAttribute offsetAttribute = addAttribute(OffsetAttribute.class);
    private String text;
    private int baseOffset;
    private Span[] spans;
    private String[] tokens;
    private Span[][] foundNames;
    private boolean[][] tokenTypes;
    private int spanOffsets = 0;
    private final Queue<AttributeSource.State> tokenQueue = new LinkedList<>();

    public NameFilter(TokenStream in, String[] modelNames, NameFinderME[] nameFinderME) {
        super(in);
        this.tokenizer = SimpleTokenizer.INSTANCE;
        this.nameFinderME = nameFinderME;
        this.tokenTypeNames = new String[modelNames.length];
        for (int i = 0; i < modelNames.length; i++) {
            this.tokenTypeNames[i] = NE_PREFIX + modelNames[i];
        }
    }

    //consumes tokens from the upstream tokenizer and buffers them in a
    //StringBuilder whose contents will be passed to opennlp
    protected boolean fillSpans() throws IOException {
        if (!this.input.incrementToken()) return false;
        //process the next sentence from the upstream tokenizer
        this.text = input.getAttribute(CharTermAttribute.class).toString();
        this.baseOffset = this.input.getAttribute(OffsetAttribute.class).startOffset();
        this.spans = this.tokenizer.tokenizePos(text);
        this.tokens = Span.spansToStrings(spans, text);
        this.foundNames = new Span[this.nameFinderME.length][];
        for (int i = 0; i < nameFinderME.length; i++) {
            this.foundNames[i] = nameFinderME[i].find(tokens);
        }
        //insize
        this.tokenTypes = new boolean[this.tokens.length][this.nameFinderME.length];
        for (int i = 0; i < nameFinderME.length; i++) {
            Span[] spans = foundNames[i];
            for (int j = 0; j < spans.length; j++) {
                int start = spans[j].getStart();
                int end = spans[j].getEnd();
                for (int k = start; k < end; k++) {
                    this.tokenTypes[k][i] = true;
                }
            }
        }
        spanOffsets = 0;
        return true;
    }

    @Override
    public boolean incrementToken() throws IOException {
        //if there's nothing in the queue
        if (tokenQueue.peek() == null) {
            //no span or spans consumed
            if (spans == null || spanOffsets >= spans.length) {
                if (!fillSpans()) return false;
            }
            if (spanOffsets >= spans.length) return false;
            //copy the token and any types
            clearAttributes();
            keywordAttribute.setKeyword(false);
            positionIncrementAttribute.setPositionIncrement(1);
            int startOffset = baseOffset + spans[spanOffsets].getStart();
            int endOffset = baseOffset + spans[spanOffsets].getEnd();
            offsetAttribute.setOffset(startOffset, endOffset);
            charTermAttribute.setEmpty()
                    .append(tokens[spanOffsets]);
            //determine if the current token is of a named entity type; if so,
            //push the current state into the queue and add a token reflecting
            //any matching entity types.
            boolean[] types = tokenTypes[spanOffsets];
            for (int i = 0; i < nameFinderME.length; i++) {
                if (types[i]) {
                    keywordAttribute.setKeyword(true);
                    positionIncrementAttribute.setPositionIncrement(0);
                    tokenQueue.add(captureState());
                    positionIncrementAttribute.setPositionIncrement(1);
                    charTermAttribute.setEmpty().append(tokenTypeNames[i]);
                }
            }
        }
        spanOffsets++;
        return true;
    }

    @Override
    public void close() throws IOException {
        super.close();
        reset();
    }

    @Override
    public void reset() throws IOException {
        super.reset();
        this.spanOffsets = 0;
        this.spans = null;
    }

    @Override
    public void end() throws IOException {
        super.end();
        reset();
    }
}
Here is my test case for the above classes:
@Test
public void testNameFilter() throws IOException {
    Reader in = new StringReader(input);
    Tokenizer tokenizer = new SentenceTokenizer(detector);
    tokenizer.reset();
    NameFilter nameFilter = new NameFilter(tokenizer, modelName, nameFinderMES);
    nameFilter.reset();
    CharTermAttribute charTermAttribute;
    PositionIncrementAttribute positionIncrementAttribute;
    OffsetAttribute offsetAttribute;
    int pass = 0;
    while (pass < 2) {
        int pos = 0;
        int lastStart = 0;
        int lastEnd = 0;
        //error occurs on the invocation below
        while (nameFilter.incrementToken()) {
        }
    }
}
I have made the following change in my code and it works fine, but I'm not sure it is the correct answer:
public SentenceTokenizer(Reader reader, SentenceDetector sentenceDetector) {
    super();
    this.input = reader;
    this.sentenceDetector = sentenceDetector;
}
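(Editor's note, not from the original post, for context.) The TokenStream Javadoc prescribes a strict consumer workflow: supply input via setReader for a Tokenizer, then call reset() exactly once on the outermost stream, then incrementToken() until it returns false, then end(), then close(). The IllegalStateException above is raised when that order is violated; calling reset() on both the tokenizer and the filter in the test, or calling reset() from inside the close()/end() overrides as the NameFilter shown does, breaks the contract. A minimal sketch of the documented sequence, reusing the names from the test and assuming a constructor that only stores the detector:

// Consume a TokenStream in the order its contract requires.
Tokenizer tokenizer = new SentenceTokenizer(detector);
tokenizer.setReader(new StringReader(input));    // supply input before reset()
NameFilter nameFilter = new NameFilter(tokenizer, modelName, nameFinderMES);
nameFilter.reset();                              // reset only the outermost stream, once
while (nameFilter.incrementToken()) {
    // read token attributes here
}
nameFilter.end();                                // finalize offsets etc.
nameFilter.close();                              // releases the underlying Reader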

Multithreading and Chronicle Queue

I'm trying to see what the fastest way is to write to a Chronicle Queue in a multithreaded environment, so I have the following:
public static void main(String[] args) throws Exception {
    final String path = args[0];
    int times = Integer.parseInt(args[1]);
    int num = Integer.parseInt(args[2]);
    AtomicInteger nextid = new AtomicInteger(0);
    ThreadLocal<Integer> id = ThreadLocal.withInitial(() -> nextid.getAndIncrement());
    ChronicleTest test = new ChronicleTest();
    ChronicleWriter writer = test.new ChronicleWriter(path);
    CountDownLatch start = new CountDownLatch(1);
    CountDownLatch done = new CountDownLatch(num);
    Thread[] threads = new Thread[num];
    long[] samples = new long[times * num];
    for (int i = 0; i < num; i++) {
        threads[i] = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    start.await();
                    for (int i = 0; i < times; i++) {
                        int j = i + times * id.get().intValue();
                        long s = System.nanoTime();
writer.write(j + " DGirr5JgGVmxhvmaoO0c5MVVOUIEJxWa6nVStPnqmRl3T4hKE9tiwNjn6322uhgr2Fs4hDG8aKYvIB4O0733fx18EqGqNsshaSKoouky5ZekGK3vO87nfSUOz6uDD0olOp35QJQKPgr7tNlFgQP7BImcCyMPFCCm3yhSvOUgiVAD9W9BC3cqlKjQebOG4EkqzRIzwZjxbnIeK2YttfrvOvUJs0e9WBhXUVibi5Ks2j9ROQu2q0PJII4NYyN1a5YW2UKxyng3bRrtBVFqMSamtFzJ23EE4Y7rbQyeCVJhIKRM1LRvcGLUYZqKICWwDtOjGcbXUIlLLYiJcnVRZ4gNRvbFXvTL4XDjhD3uP5S8DjnkeAIBZcQ4VEUf30x65pTGLhWjOMV6jtiEQOWKB3nsuPMhcS1lP3wTQztViW7T8IsQlA5kvVAsnT5A7pojS1CffcYz4a2Rwqf9w6mhTPPZXgpDGArtThW3a69rwjsQxEY9aTwi0Pu0jlSAMmvOA158QFsSeJvLoJXILfocgjNEkj7iVcO0Rc6eC6b5EhJU3wv80EEHnORMXpQVuAuPyao7vJsx06TMcXf9t7Py4qxplVPhptIzrKs2qke2t5A8O4LQzq19OfEQsQGEjqHSbnfWXjfuntuFR8rV4VMyLZO1z3K7HmHtCEy14p5u0C0lj7vmtCnrOum0bnG2MwaAR7DJPIpOtwRObli5K5grv54AWnJyagpRX5e3eTEA8NAKO8oDZuLaoCvgavv9ciFrJcIDmyleVweiVrHs1fQXJoELzFpH4BmvzBuUjfZ8ORSIZsVuq4Hpls19GIA8opb1mSBt7MTifLPauo8WDWRoNi9DvjL4Z08Je6DvrhAFXasU2CMugg5EZ06OXexU17qnvxx2Vz9s9E5U50jDemySZ78KcZ6nqhQKIXUrkTktoEan2JXxI2NNSqEYifwly8ZO2MDquPe4J11rAcOqYp9y6Kb4NtFpNysM1evrLPvCx8oe");
                        long e = System.nanoTime();
                        samples[j] = e - s;
                    }
                    done.countDown();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }
    for (int i = 0; i < num; i++) {
        try {
            threads[i].start();
        } catch (Exception e) {
        }
    }
    long startT = System.currentTimeMillis();
    start.countDown();
    done.await();
    long endT = System.currentTimeMillis();
    System.out.println("Time to complete [" + times + "] iteration in [" + (endT - startT) + " ms] and threads [" + num + "]");
    System.out.println("#######");
    for (int i = 0; i < times * num; i++) {
        System.out.println(samples[i]);
    }
}

private class ChronicleWriter {

    SingleChronicleQueue m_cqueue;
    ThreadLocal<ExcerptAppender> m_appender;

    ChronicleWriter(String path) {
        m_cqueue = SingleChronicleQueueBuilder.binary(path).build();
        m_appender = new ThreadLocal<ExcerptAppender>() {
            protected ExcerptAppender initialValue() {
                return m_cqueue.acquireAppender();
            }
        };
    }

    void write(String msg) {
        m_appender.get().writeText(msg);
    }
}
And I ran it with the parameters:
path 2500 40
For some reason, this keeps crashing with a core dump. What am I doing wrong? My disk has lots of free space, so that shouldn't matter. Thanks!!
If your program is crashing due to an OutOfMemoryError, then
note that disk space and the memory actually used by the program are different things.
You may need to increase the JVM heap size.
See the link below on increasing the JVM heap size:
What are the Xms and Xmx parameters when starting JVMs?
Or see the link below if you are running your program through Eclipse:
http://www.planetofbits.com/eclipse/increase-jvm-heap-size-in-eclipse/
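(Editor's note, not from the original answer; the flag values below are illustrative only.) The heap is raised on the command line with -Xms (initial size) and -Xmx (maximum size), and the configured cap can be confirmed from inside the program:

// Illustrative launch, not from the original answer:
//   java -Xms512m -Xmx4g ChronicleTest <path> 2500 40
// Inside the program, confirm the configured maximum heap:
System.out.println("Max heap bytes: " + Runtime.getRuntime().maxMemory());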
I have tried your program with the following version of chronicle-queue and it works fine:
<dependency>
    <groupId>net.openhft</groupId>
    <artifactId>chronicle-queue</artifactId>
    <version>4.5.14</version>
</dependency>

SQL query into JTable

I've found a fully working query that gets columns and their data from an Oracle DB and prints the output to the console.
I've spent three hours trying to display this data in a Swing JTable.
When I try to bind the data to the JTable:
jTable1.setModel(new javax.swing.table.DefaultTableModel(
        data, header
));
it keeps telling me that the constructor is invalid. That's true, because that constructor needs arrays ([] and [][]). Any ideas how this can be implemented?
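(Editor's note, not from the original post, as one common approach.) DefaultTableModel also has a constructor taking two Vectors, so the ResultSet can be converted without knowing the row count up front. A minimal sketch; the class and method names are hypothetical, and it assumes an open ResultSet like the one in the code below:

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Vector;
import javax.swing.table.DefaultTableModel;

public class ResultSetTableModelBuilder {
    // Builds a DefaultTableModel straight from an open ResultSet,
    // so no Object[][] array (and no row count) is needed up front.
    public static DefaultTableModel buildTableModel(ResultSet resultSet) throws SQLException {
        ResultSetMetaData metadata = resultSet.getMetaData();
        int columnCount = metadata.getColumnCount();
        Vector<String> columnNames = new Vector<>();
        for (int i = 1; i <= columnCount; i++) {          // JDBC columns are 1-indexed
            columnNames.add(metadata.getColumnName(i));
        }
        Vector<Vector<Object>> data = new Vector<>();
        while (resultSet.next()) {
            Vector<Object> row = new Vector<>();
            for (int i = 1; i <= columnCount; i++) {
                row.add(resultSet.getObject(i));
            }
            data.add(row);
        }
        return new DefaultTableModel(data, columnNames);
    }
}

Then jTable1.setModel(ResultSetTableModelBuilder.buildTableModel(resultSet)); binds the query results directly.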
Here is the original query:
package com.javacoderanch.example.sql;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;

public class MetadataColumnExample {

    private static final String DRIVER = "oracle.jdbc.OracleDriver";
    private static final String URL = "jdbc:oracle:thin:@//XXX";
    private static final String USERNAME = "XXX";
    private static final String PASSWORD = "XXX";

    public static void main(String[] args) throws Exception {
        Connection connection = null;
        try {
            //
            // As the usual ritual, load the driver class and get connection
            // from database.
            //
            Class.forName(DRIVER);
            connection = DriverManager.getConnection(URL, USERNAME, PASSWORD);
            //
            // In the statement below we'll select all records from the booking
            // table and then try to find all the columns it has.
            //
            Statement statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery("select *\n"
                    + "from booking\n"
                    + "where TRACKING_NUMBER = 1000001741");
            //
            // The ResultSetMetaData is where all metadata related information
            // for a result set is stored.
            //
            ResultSetMetaData metadata = resultSet.getMetaData();
            int columnCount = metadata.getColumnCount();
            //
            // To get the column names we loop up to the column count returned
            // above. And please remember a JDBC operation is 1-indexed, so
            // every index begins from 1, not 0 as in arrays (hence <= below,
            // so the last column is not skipped).
            //
            ArrayList<String> columns = new ArrayList<String>();
            for (int i = 1; i <= columnCount; i++) {
                String columnName = metadata.getColumnName(i);
                columns.add(columnName);
            }
            //
            // Later we use the collected column names to get the value of the
            // column itself.
            //
            while (resultSet.next()) {
                for (String columnName : columns) {
                    String value = resultSet.getString(columnName);
                    System.out.println(columnName + " = " + value);
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            connection.close();
        }
    }
}
OK, I found a slightly better way of getting the SQL query into the JTable, without even using an ArrayList. Maybe it will be helpful for someone; it works completely:
import java.awt.*;
import javax.swing.*;
import java.sql.*;
import java.awt.image.BufferedImage;

public class report extends JFrame {

    PreparedStatement ps;
    Connection con;
    ResultSet rs;
    Statement st;
    JLabel l1;
    String bn;
    int bid;
    Date d1, d2;
    int rows = 0;
    Object data1[][];
    JScrollPane scroller;
    JTable table;

    public report() {
        Container cp = getContentPane();
        cp.setLayout(new BorderLayout());
        setSize(600, 600);
        setLocation(50, 50);
        setLayout(new BorderLayout());
        setTitle("Library Report");
        try {
            Class.forName("oracle.jdbc.OracleDriver");
            con = DriverManager.getConnection("jdbc:oracle:thin:@XXXXX", "XXXXX", "XXXXX");
        } catch (Exception e) {
        }
        try {
            /*
             * JDBC 2.0 provides a way to retrieve a rowcount from a ResultSet without having to scan through all the rows
             * So we add TYPE_SCROLL_INSENSITIVE & CONCUR_READ_ONLY
             */
            st = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); //Creating Statement Object
        } catch (SQLException sqlex) {
            System.out.println("!!!###");
        }
        try {
            rs = st.executeQuery("select TRACKING_NUMBER, INCO_TERM_CODE, MODE_OF_TRANSPORT\n"
                    + "from salog.booking\n"
                    + "where rownum < 5");
            // Counting rows
            rs.last();
            int rows = rs.getRow();
            rs.beforeFirst();
            System.out.println("cc " + rows);
            ResultSetMetaData metaData = rs.getMetaData();
            int colummm = metaData.getColumnCount();
            System.out.println("colms =" + colummm);
            Object[] Colheads = {"BookId", "BookName", "rtyry"};
            if (Colheads.length != colummm) {
                // System.out.println("EPT!!");
                JOptionPane.showMessageDialog(rootPane, "Incorrect Column Headers quantity listed in array! The program will now exit.", "System Error", JOptionPane.ERROR_MESSAGE);
                System.exit(0);
            }
            data1 = new Object[rows][Colheads.length];
            for (int i1 = 0; i1 < rows; i1++) {
                rs.next();
                for (int j1 = 0; j1 < Colheads.length; j1++) {
                    data1[i1][j1] = rs.getString(j1 + 1);
                }
            }
            JTable table = new JTable(data1, Colheads);
            JScrollPane jsp = new JScrollPane(table);
            getContentPane().add(jsp);
        } catch (Exception e) {
        }
        setVisible(true);
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    }

    public static void main(String args[]) {
        JFrame frm = new report();
        frm.setSize(600, 600);
        frm.setLocation(50, 50);
        BufferedImage image = null;
        frm.setIconImage(image);
        frm.setVisible(true);
        frm.show();
    }
}