JVM: Condition and LockSupport, which is faster?

I made an experiment with synchronous calls: each thread task waits for a callback that returns its own value + 1, and I compared the performance of Condition against LockSupport. The result is unexpected. The two total times are the same, but the difference on the flame graph is very big. Does this mean that the JVM has not optimized LockSupport?
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.ReentrantLock;

public class LockerTest {
    static int max = 100000;
    static boolean usePark = true;
    static Map<Long, Long> msg = new ConcurrentHashMap<>();
    static ExecutorService producer = Executors.newFixedThreadPool(4);
    static ExecutorService consumer = Executors.newFixedThreadPool(16);
    static AtomicLong record = new AtomicLong(0);
    static CountDownLatch latch = new CountDownLatch(max);
    static ReentrantLock lock = new ReentrantLock();
    static Condition cond = lock.newCondition();
    static Map<Long, Thread> parkMap = new ConcurrentHashMap<>();
    // counts how many times a waiter found the value not ready and blocked (again)
    static AtomicLong cN = new AtomicLong(0);

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        for (int num = 0; num < max; num++) {
            consumer.execute(() -> {
                long id = record.incrementAndGet();
                msg.put(id, -1L);
                call(id);
                if (usePark) {
                    // LockSupport variant: park until the producer publishes id + 1
                    Thread thread = Thread.currentThread();
                    parkMap.put(id, thread);
                    while (msg.get(id) == -1) {
                        cN.incrementAndGet();
                        LockSupport.park(thread);
                    }
                } else {
                    // Condition variant: await on a single shared condition
                    lock.lock();
                    try {
                        while (msg.get(id) == -1) {
                            cN.incrementAndGet();
                            cond.await();
                        }
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    } finally {
                        lock.unlock();
                    }
                }
                latch.countDown();
            });
        }
        latch.await();
        consumer.shutdown();
        producer.shutdown();
        System.out.printf("park %s suc %s cost %s cn %s"
                , usePark
                , msg.entrySet().stream().noneMatch(entry -> entry.getKey() + 1 != entry.getValue())
                , System.currentTimeMillis() - start
                , cN.get()
        );
    }

    private static void call(long id) {
        producer.execute(() -> {
            try {
                Thread.sleep((id * 13) % 100);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (usePark) {
                msg.put(id, id + 1);
                LockSupport.unpark(parkMap.remove(id));
            } else {
                lock.lock();
                try {
                    msg.put(id, id + 1);
                    cond.signalAll();
                } finally {
                    lock.unlock();
                }
            }
        });
    }
}
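For reference, the two wait/notify idioms the test exercises reduce to the following minimal pattern (a stripped-down sketch; HandoffSketch and its members are invented names, not part of the benchmark). Internally, Condition.await in AbstractQueuedSynchronizer parks the thread via LockSupport.park as well, which is why the two runs can cost the same overall while the flame graphs attribute the time to very different frames.

import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.ReentrantLock;

// Minimal one-shot handoff, shown both ways. Illustrative only.
class HandoffSketch {
    private volatile long value = -1;
    private volatile Thread waiter;           // for the park/unpark variant
    private final ReentrantLock lock = new ReentrantLock();
    private final Condition ready = lock.newCondition();

    // Variant 1: LockSupport. No lock; the waiter publishes itself, then parks.
    long awaitWithPark() {
        waiter = Thread.currentThread();
        while (value == -1) {
            LockSupport.park(this);           // may wake spuriously; loop re-checks
        }
        return value;
    }
    void completeWithUnpark(long v) {
        value = v;
        LockSupport.unpark(waiter);           // unpark(null) is a no-op
    }

    // Variant 2: Condition. await() must hold the lock and releases it while blocked.
    long awaitWithCondition() throws InterruptedException {
        lock.lock();
        try {
            while (value == -1) {
                ready.await();
            }
            return value;
        } finally {
            lock.unlock();
        }
    }
    void completeWithSignal(long v) {
        lock.lock();
        try {
            value = v;
            ready.signal();
        } finally {
            lock.unlock();
        }
    }
}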

Related

Optimizing Transposing and Comparison of Concurrent List in Java 8

I have a Java 8 application that collects data from multiple threads using a BlockingQueue, and I need to compare samples.
Because my real application is very large, I implemented a mock application (on GitHub) in Java 8.
It generates chunks of bytes (in effectively random order).
The bytes are stored in the ChunkDTO class.
I implemented a capturer that collects the ChunkDTOs into a List; the code is in the Capturer class.
Each ChunkDTO of the List is translated into a List of samples (TimePitchValue, to be exact), yielding a nested List (a List of Lists of TimePitchValue).
Later the nested List is transposed so that TimePitchValues with the same time value can be compared.
Due to the enormous number of TimePitchValue instances, this consumes a huge amount of time in my application.
Here is some code (the complete functional code is on GitHub, since it is too large for this site).
public class Generator {
final static Logger LOGGER = Logger.getLogger("SampleComparator");
public static void main(String[] args) {
long previous = System.nanoTime();
final int minBufferSize = 2048;
int sampleRate = 8192;
int numChannels = 1;
int numBytesPerSample = 1;
int samplesChunkPerSecond = sampleRate / minBufferSize;
int minutes = 0;
int seconds = 10;
int time = 60 * minutes + seconds;
int chunksBySecond = samplesChunkPerSecond * numBytesPerSample * numChannels;
int pitchs = 32;
boolean signed = false;
boolean endianness = false;
AudioFormat audioformat = new AudioFormat(sampleRate, 8 * numBytesPerSample, numChannels, signed, endianness);
ControlDSP controlDSP = new ControlDSP(audioformat);
BlockingQueue<ChunkDTO> generatorBlockingQueue = new LinkedBlockingQueue<>();
Capturer capturer = new Capturer(controlDSP, pitchs, pitchs * time * chunksBySecond, generatorBlockingQueue);
controlDSP.getListFuture().add(controlDSP.getExecutorService().submit(capturer));
for (int i = 0; i < time * chunksBySecond; i++) {
for (int p = 0; p < pitchs; p++) {
ChunkDTO chunkDTO = new ChunkDTO(UtilClass.getArrayByte(minBufferSize), i, p);
LOGGER.info(String.format("chunkDTO: %s", chunkDTO));
try {
generatorBlockingQueue.put(chunkDTO);
} catch (InterruptedException ex) {
LOGGER.info(ex.getMessage());
}
}
try {
Thread.sleep(1000 / chunksBySecond);
} catch (Exception ex) {
}
}
controlDSP.tryFinishThreads(Thread.currentThread());
long current = System.nanoTime();
long interval = TimeUnit.NANOSECONDS.toSeconds(current - previous);
System.out.println("Seconds Interval: " + interval);
}
}
Capturer Class
public class Capturer implements Callable<Void> {
private final ControlDSP controlDSP;
private final int pitchs;
private final int totalChunks;
private final BlockingQueue<ChunkDTO> capturerBlockingQueue;
private final Counter intCounter;
private final Map<Long, List<ChunkDTO>> mapIndexListChunkDTO = Collections.synchronizedMap(new HashMap<>());
private volatile boolean isRunning = false;
private final String threadName;
private static final Logger LOGGER = Logger.getLogger("SampleComparator");
public Capturer(ControlDSP controlDSP, int pitchs, int totalChunks, BlockingQueue<ChunkDTO> capturerBlockingQueue) {
this.controlDSP = controlDSP;
this.pitchs = pitchs;
this.totalChunks = totalChunks;
this.capturerBlockingQueue = capturerBlockingQueue;
this.intCounter = new Counter();
this.controlDSP.getListFuture().add(this.controlDSP.getExecutorService().submit(() -> {
while (intCounter.getValue() < totalChunks) {
try {
Thread.sleep(100);
} catch (InterruptedException ex) {
LOGGER.log(Level.SEVERE, null, ex);
}
}
capturerBlockingQueue.add(new ChunkDTOStopper());
}));
this.threadName = this.getClass().getSimpleName();
}
@Override
public Void call() throws Exception {
long quantity = 0;
isRunning = true;
while (isRunning) {
try {
ChunkDTO chunkDTO = capturerBlockingQueue.take();
if (chunkDTO instanceof ChunkDTOStopper) {
break;
}
//Find or Create List (according to Index) to add the incoming Chunk
long index = chunkDTO.getIndex();
int sizeChunk = chunkDTO.getChunk().length;
List<ChunkDTO> listChunkDTOWithIndex = getListChunkDTOByIndex(chunkDTO);
//When the List (according to Index) is completed and processed
if (listChunkDTOWithIndex.size() == pitchs) {
mapIndexListChunkDTO.remove(index);
TransposerComparator transposerComparator = new TransposerComparator(controlDSP, controlDSP.getAudioformat(), index, sizeChunk, listChunkDTOWithIndex);
controlDSP.getListFuture().add(controlDSP.getExecutorService().submit(transposerComparator));
}
quantity++;
intCounter.setValue(quantity);
LOGGER.info(String.format("%s\tConsumes:%s\ttotal:%05d", threadName, chunkDTO, quantity));
} catch (Exception ex) {
LOGGER.log(Level.SEVERE, null, ex);
}
}
LOGGER.info(String.format("%s\tReceived:%05d\tQty:%s\tPitchs:%s\tEND\n", threadName, quantity, quantity / pitchs, pitchs));
return null;
}
private List<ChunkDTO> getListChunkDTOByIndex(ChunkDTO chunkDTO) {
List<ChunkDTO> listChunkDTOWithIndex = mapIndexListChunkDTO.get(chunkDTO.getIndex());
if (listChunkDTOWithIndex == null) {
listChunkDTOWithIndex = new ArrayList<>();
mapIndexListChunkDTO.put(chunkDTO.getIndex(), listChunkDTOWithIndex);
listChunkDTOWithIndex = mapIndexListChunkDTO.get(chunkDTO.getIndex());
}
listChunkDTOWithIndex.add(chunkDTO);
return listChunkDTOWithIndex;
}
}
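As an aside, getListChunkDTOByIndex re-reads the map immediately after the put, and the get/put pair is not atomic even on a synchronized map. A compact equivalent is sketched below; Java 8's computeIfAbsent runs under the Collections.synchronizedMap wrapper's lock:

// Sketch: atomic find-or-create of the per-index list, then append.
private List<ChunkDTO> getListChunkDTOByIndex(ChunkDTO chunkDTO) {
    List<ChunkDTO> list = mapIndexListChunkDTO
            .computeIfAbsent(chunkDTO.getIndex(), k -> new ArrayList<>());
    list.add(chunkDTO);
    return list;
}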
TransposerComparator class.
The optimization required is in this code, specifically in the transposedNestedList method.
public class TransposerComparator implements Callable<Void> {
private final ControlDSP controlDSP;
private final AudioFormat audioformat;
private final long index;
private final int sizeChunk;
private final List<ChunkDTO> listChunkDTOWithIndex;
private final String threadName;
private static final Logger LOGGER = Logger.getLogger("SampleComparator");
public TransposerComparator(ControlDSP controlDSP, AudioFormat audioformat, long index, int sizeChunk, List<ChunkDTO> listChunkDTOWithIndex) {
this.controlDSP = controlDSP;
this.audioformat = audioformat;
this.index = index;
this.sizeChunk = sizeChunk;
this.listChunkDTOWithIndex = listChunkDTOWithIndex;
this.threadName = this.getClass().getSimpleName() + "_" + String.format("%05d", index);
}
@Override
public Void call() throws Exception {
Thread.currentThread().setName(threadName);
LOGGER.info(String.format("%s\tINI", threadName));
try {
int numBytesPerSample = audioformat.getSampleSizeInBits() / 8;
int quantitySamples = sizeChunk / numBytesPerSample;
long baseTime = quantitySamples * index;
// Convert the List of Chunk Bytes to Nested List of TimePitchValue
List<List<TimePitchValue>> nestedListTimePitchValue = listChunkDTOWithIndex
.stream()
.map(chunkDTO -> {
return IntStream
.range(0, quantitySamples)
.mapToObj(time -> {
int value = extractValue(chunkDTO.getChunk(), numBytesPerSample, time);
return new TimePitchValue(chunkDTO.getPitch(), baseTime + time, value);
}).collect(Collectors.toList());
}).collect(Collectors.toList());
List<List<TimePitchValue>> timeNestedListTimePitchValue = transposedNestedList(nestedListTimePitchValue);
} catch (Exception ex) {
ex.printStackTrace();
LOGGER.log(Level.SEVERE, null, ex);
throw ex;
}
return null;
}
private static int extractValue(byte[] bytesSamples, int numBytesPerSample, int time) {
byte[] bytesSingleNumber = Arrays.copyOfRange(bytesSamples, time * numBytesPerSample, (time + 1) * numBytesPerSample);
int value = numBytesPerSample == 2
? (UtilClass.Byte2IntLit(bytesSingleNumber[0], bytesSingleNumber[1]))
: (UtilClass.byte2intSmpl(bytesSingleNumber[0]));
return value;
}
private static List<List<TimePitchValue>> transposedNestedList(List<List<TimePitchValue>> nestedList) {
List<List<TimePitchValue>> outNestedList = new ArrayList<>();
nestedList.forEach(pitchList -> {
pitchList.forEach(pitchValue -> {
List<TimePitchValue> listTimePitchValueWithTime = listTimePitchValueWithTime(outNestedList, pitchValue.getTime());
if (!outNestedList.contains(listTimePitchValueWithTime)) {
outNestedList.add(listTimePitchValueWithTime);
}
listTimePitchValueWithTime.add(pitchValue);
});
});
outNestedList.forEach(pitchList -> {
pitchList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
});
return outNestedList;
}
private static List<TimePitchValue> listTimePitchValueWithTime(List<List<TimePitchValue>> nestedList, long time) {
List<TimePitchValue> listTimePitchValueWithTime = nestedList
.stream()
.filter(innerList -> innerList.stream()
.anyMatch(timePitchValue -> timePitchValue.getTime() == time))
.findAny()
.orElseGet(ArrayList::new);
return listTimePitchValueWithTime;
}
}
I was testing:
With 5 seconds in the Generator class: with the line List<List<TimePitchValue>> timeNestedListTimePitchValue = transposedNestedList(nestedListTimePitchValue); in the TransposerComparator class commented out, 7 seconds were needed; uncommented, 211 seconds.
With 10 seconds in the Generator class: commented out, 12 seconds were needed; uncommented, 574 seconds.
I need to run the application for at least 60 minutes.
To reduce the consumed time, I have two options:
The short one, which I chose, is to optimize the methods I am currently using.
The longer one, which should also succeed, is to use GPGPU, but I don't know where to start implementing it yet.
QUESTIONS
This question is about the first option: what changes do you recommend in the code of the transposedNestedList method in order to improve speed? (A sketch of one option follows below the snippet.)
Is there a better alternative for this comparison?
outNestedList.forEach(pitchList -> {
pitchList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
});
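For what it's worth, the quadratic cost comes from listTimePitchValueWithTime scanning every existing inner list for every element, and outNestedList.contains adds another linear scan on top. A common alternative is to group by time with a map in a single pass. A minimal sketch, assuming TimePitchValue exposes getTime() and getValue() as the code above implies, and the usual java.util imports:

// Sketch: group by time in O(n) with a map instead of scanning outNestedList.
private static List<List<TimePitchValue>> transposedNestedList(List<List<TimePitchValue>> nestedList) {
    // LinkedHashMap keeps the insertion order of first occurrence, like the original.
    Map<Long, List<TimePitchValue>> byTime = new LinkedHashMap<>();
    for (List<TimePitchValue> pitchList : nestedList) {
        for (TimePitchValue pv : pitchList) {
            byTime.computeIfAbsent(pv.getTime(), t -> new ArrayList<>()).add(pv);
        }
    }
    List<List<TimePitchValue>> out = new ArrayList<>(byTime.values());
    // Same sort as before; this step is already close to optimal.
    for (List<TimePitchValue> timeList : out) {
        timeList.sort(Comparator.comparingInt(TimePitchValue::getValue).reversed());
    }
    return out;
}

This replaces the two nested linear scans per element with a single hash lookup per element.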

OutOfDirectMemoryError with Redisson

I'm trying to learn Redis through Redisson. Here is my code to insert into Redis using multiple threads.
package redisson;
import java.io.File;
import java.util.concurrent.atomic.AtomicInteger;
import org.redisson.Redisson;
import org.redisson.api.RBatch;
import org.redisson.api.RMap;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;
public class RedisTest extends Thread {
static RMap<String, String> dMap = null;
static RMap<String, String> wMap = null;
static RMap<String, String> mMap = null;
static RedissonClient redisson = null;
public static void main(String[] args) throws Exception {
Config config = Config.fromJSON(new File("C:\\Users\\neon-workspace\\RedisProject\\src\\main\\resources\\SingleNodeConfig.json"));
RedissonClient redisson = Redisson.create(config);
dMap = redisson.getMap("Daily");
wMap = redisson.getMap("Weekly");
mMap = redisson.getMap("Monthly");
connectHbse(dMap,wMap,mMap,redisson);
redisson.shutdown();
}
public static void connectHbse(RMap<String, String> dMap,RMap<String, String> wMap,RMap<String, String> mMap,RedissonClient redisson) {
int totalSize=500000;
int totalThread=2;
int chunkSize = totalSize/totalThread;
AtomicInteger total = new AtomicInteger(chunkSize);
RedisTest test1[] = new RedisTest[totalThread];
for (int i = 0; i < test1.length; i++) {
test1[i] = new RedisTest(total,dMap,wMap,mMap,redisson);
total.set(total.intValue()+chunkSize);
}
long t1 = System.currentTimeMillis();
for (int i = 0; i < test1.length; i++) {
test1[i].start();
}
try {
for (int i = 0; i < test1.length; i++) {
test1[i].join();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("Final Total Time Taken ::>>>>>>>>>>>>>>>>> " + ((System.currentTimeMillis() - t1))+"ms");
}
private AtomicInteger total = null;
public RedisTest(AtomicInteger total,RMap<String, String> dMap,RMap<String, String> wMap,RMap<String, String> mMap,RedissonClient redisson) {
this.total = new AtomicInteger(total.intValue());
this.dMap = dMap;
this.wMap = wMap;
this.mMap = mMap;
this.redisson = redisson;
}
public static int getRandomInteger(int maximum, int minimum) {
return ((int) (Math.random() * (maximum - minimum))) + minimum;
}
public void run() {
try {
long t1 = System.currentTimeMillis();
dMap.clear();
wMap.clear();
mMap.clear();
RBatch batch = redisson.createBatch();
for (;total.decrementAndGet()>=0;) {
String dvalue = ""+getRandomInteger(100,200);
String wvalue = "" +getRandomInteger(200, 300);
String mvalue = "" +getRandomInteger(300, 400);
batch.getMap("Daily").fastPutAsync(""+total.get(), dvalue);
batch.getMap("Weekly").fastPutAsync(""+total.get(), wvalue);
batch.getMap("Monthly").fastPutAsync(""+total.get(), mvalue);
synchronized (total) {
if(total.get()%100==0)
System.out.println(total.get()+" Records in Seconds:::::" + ((System.currentTimeMillis() - t1))/1000);
}
}
batch.execute();
System.out.println("Time Taken for completion::::: " + ((System.currentTimeMillis() - t1))+" by thread:::::"+Thread.currentThread().getName());
System.out.println("Done !!!");
} catch (Exception e) {
System.out.println("Done !!!" + e.getMessage());
e.printStackTrace();
} finally {
}
}
}
This code works fine up to totalSize = 400000.
When I set totalSize = 500000, it throws the following exception.
io.netty.handler.codec.EncoderException: io.netty.util.internal.OutOfDirectMemoryError: failed to allocate 16777216 byte(s) of direct memory (used: 939524096, max: 954466304)
at io.netty.handler.codec.MessageToByteEncoder.write(MessageToByteEncoder.java:125)
at org.redisson.client.handler.CommandBatchEncoder.write(CommandBatchEncoder.java:45)
at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:738)
... 25 more
Caused by: io.netty.util.internal.OutOfDirectMemoryError: failed to allocate 16777216 byte(s) of direct memory (used: 939524096, max: 954466304)
at io.netty.util.internal.PlatformDependent.incrementMemoryCounter(PlatformDependent.java:627)
at io.netty.util.internal.PlatformDependent.allocateDirectNoCleaner(PlatformDependent.java:581)
at io.netty.buffer.PoolArena$DirectArena.allocateDirect(PoolArena.java:764)
at io.netty.buffer.PoolArena$DirectArena.newChunk(PoolArena.java:740)
at io.netty.buffer.PoolArena.allocateNormal(PoolArena.java:244)
at io.netty.buffer.PoolArena.allocate(PoolArena.java:226)
at io.netty.buffer.PoolArena.reallocate(PoolArena.java:397)
at io.netty.buffer.PooledByteBuf.capacity(PooledByteBuf.java:118)
at io.netty.buffer.AbstractByteBuf.ensureWritable0(AbstractByteBuf.java:285)
at io.netty.buffer.AbstractByteBuf.ensureWritable(AbstractByteBuf.java:265)
at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:1046)
at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:1054)
at org.redisson.client.handler.CommandEncoder.writeArgument(CommandEncoder.java:169)
at org.redisson.client.handler.CommandEncoder.encode(CommandEncoder.java:110)
at org.redisson.client.handler.CommandBatchEncoder.encode(CommandBatchEncoder.java:52)
at org.redisson.client.handler.CommandBatchEncoder.encode(CommandBatchEncoder.java:32)
at io.netty.handler.codec.MessageToByteEncoder.write(MessageToByteEncoder.java:107)
... 27 more
But I have about 7 GB of RAM free.
Can someone explain why I'm getting this exception?
It seems I should give my JVM instance more memory using -Xmx; that solved the issue for me.
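That works because Netty allocates its buffers from direct (off-heap) memory, whose ceiling is -XX:MaxDirectMemorySize; when that flag is unset it defaults to the -Xmx heap size, so raising -Xmx also raises the direct-memory limit (the max: 954466304 in the trace is roughly a 910 MB default). The sizes and jar name below are illustrative:

# Raise the heap (and with it the default direct-memory ceiling)
java -Xmx4g -jar redis-test.jar

# Or pin the direct-memory ceiling explicitly, independent of the heap
java -Xmx2g -XX:MaxDirectMemorySize=2g -jar redis-test.jar

Also worth noting: the loop queues all of a thread's fastPutAsync commands into a single RBatch before execute(), so every command is buffered in direct memory at once; executing the batch in smaller slices (say, every 10,000 puts) keeps the encoder's buffer usage bounded.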

SCALA: executing complete .sql file

I have a .sql script file and need to execute the complete .sql file at once. I have found many related answers where the programmer reads the file and executes the queries one by one.
I need to run the .sql file all at once, i.e., pick up the file and execute it from within Scala.
Currently I am doing it with the following code, copied from https://gist.github.com/joe776/831762. My lead asked me to do it in a simple way: instead of executing the script line by line, it should be executed as a .SQL file.
import java.io.IOException;
import java.io.LineNumberReader;
import java.io.PrintWriter;
import java.io.Reader;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class ScriptRunner {
private static final String DEFAULT_DELIMITER = ";";
private static final String DELIMITER_LINE_REGEX = "(?i)DELIMITER.+";
private static final String DELIMITER_LINE_SPLIT_REGEX = "(?i)DELIMITER";
private final Connection connection;
private final boolean stopOnError;
private final boolean autoCommit;
private PrintWriter logWriter = new PrintWriter(System.out);
private PrintWriter errorLogWriter = new PrintWriter(System.err);
private String delimiter = DEFAULT_DELIMITER;
private boolean fullLineDelimiter = false;
/**
* Default constructor.
*
* @param connection
* @param autoCommit
* @param stopOnError
*/
public ScriptRunner(Connection connection, boolean autoCommit, boolean stopOnError) {
this.connection = connection;
this.autoCommit = autoCommit;
this.stopOnError = stopOnError;
}
/**
* @param delimiter
* @param fullLineDelimiter
*/
public void setDelimiter(String delimiter, boolean fullLineDelimiter) {
this.delimiter = delimiter;
this.fullLineDelimiter = fullLineDelimiter;
}
/**
* Setter for logWriter property.
*
* @param logWriter
* - the new value of the logWriter property
*/
public void setLogWriter(PrintWriter logWriter) {
this.logWriter = logWriter;
}
/**
* Setter for errorLogWriter property.
*
* @param errorLogWriter
* - the new value of the errorLogWriter property
*/
public void setErrorLogWriter(PrintWriter errorLogWriter) {
this.errorLogWriter = errorLogWriter;
}
/**
* Runs an SQL script (read in using the Reader parameter).
*
* @param reader
* - the source of the script
* @throws SQLException
* if any SQL errors occur
* @throws IOException
* if there is an error reading from the Reader
*/
public void runScript(Reader reader) throws IOException, SQLException {
try {
boolean originalAutoCommit = connection.getAutoCommit();
try {
if (originalAutoCommit != autoCommit) {
connection.setAutoCommit(autoCommit);
}
runScript(connection, reader);
} finally {
connection.setAutoCommit(originalAutoCommit);
}
} catch (IOException e) {
throw e;
} catch (SQLException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException("Error running script. Cause: " + e, e);
}
}
/**
* Runs an SQL script (read in using the Reader parameter) using the connection passed in.
*
* @param conn
* - the connection to use for the script
* @param reader
* - the source of the script
* @throws SQLException
* if any SQL errors occur
* @throws IOException
* if there is an error reading from the Reader
*/
private void runScript(Connection conn, Reader reader) throws IOException, SQLException {
StringBuffer command = null;
try {
LineNumberReader lineReader = new LineNumberReader(reader);
String line = null;
while ((line = lineReader.readLine()) != null) {
if (command == null) {
command = new StringBuffer();
}
String trimmedLine = line.trim();
if (trimmedLine.startsWith("--")) {
println(trimmedLine);
} else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) {
// Do nothing
} else if (trimmedLine.length() < 1 || trimmedLine.startsWith("--")) {
// Do nothing
} else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter())
|| fullLineDelimiter && trimmedLine.equals(getDelimiter())) {
Pattern pattern = Pattern.compile(DELIMITER_LINE_REGEX);
Matcher matcher = pattern.matcher(trimmedLine);
if (matcher.matches()) {
setDelimiter(trimmedLine.split(DELIMITER_LINE_SPLIT_REGEX)[1].trim(),
fullLineDelimiter);
line = lineReader.readLine();
if (line == null) {
break;
}
trimmedLine = line.trim();
}
command.append(line.substring(0, line.lastIndexOf(getDelimiter())));
command.append(" ");
Statement statement = conn.createStatement();
println(command);
boolean hasResults = false;
if (stopOnError) {
hasResults = statement.execute(command.toString());
} else {
try {
statement.execute(command.toString());
} catch (SQLException e) {
e.fillInStackTrace();
printlnError("Error executing: " + command);
printlnError(e);
}
}
if (autoCommit && !conn.getAutoCommit()) {
conn.commit();
}
ResultSet rs = statement.getResultSet();
if (hasResults && rs != null) {
ResultSetMetaData md = rs.getMetaData();
int cols = md.getColumnCount();
for (int i = 0; i < cols; i++) {
String name = md.getColumnLabel(i);
print(name + "\t");
}
println("");
while (rs.next()) {
for (int i = 1; i <= cols; i++) {
String value = rs.getString(i);
print(value + "\t");
}
println("");
}
}
command = null;
try {
if (rs != null) {
rs.close();
}
} catch (Exception e) {
e.printStackTrace();
}
try {
if (statement != null) {
statement.close();
}
} catch (Exception e) {
e.printStackTrace();
// Ignore to workaround a bug in Jakarta DBCP
}
} else {
Pattern pattern = Pattern.compile(DELIMITER_LINE_REGEX);
Matcher matcher = pattern.matcher(trimmedLine);
if (matcher.matches()) {
setDelimiter(trimmedLine.split(DELIMITER_LINE_SPLIT_REGEX)[1].trim(),
fullLineDelimiter);
line = lineReader.readLine();
if (line == null) {
break;
}
trimmedLine = line.trim();
}
command.append(line);
command.append(" ");
}
}
if (!autoCommit) {
conn.commit();
}
} catch (SQLException e) {
e.fillInStackTrace();
printlnError("Error executing: " + command);
printlnError(e);
throw e;
} catch (IOException e) {
e.fillInStackTrace();
printlnError("Error executing: " + command);
printlnError(e);
throw e;
} finally {
conn.rollback();
flush();
}
}
private String getDelimiter() {
return delimiter;
}
private void print(Object o) {
if (logWriter != null) {
logWriter.print(o);
}
}
private void println(Object o) {
if (logWriter != null) {
logWriter.println(o);
}
}
private void printlnError(Object o) {
if (errorLogWriter != null) {
errorLogWriter.println(o);
}
}
private void flush() {
if (logWriter != null) {
logWriter.flush();
}
if (errorLogWriter != null) {
errorLogWriter.flush();
}
}
}
Why did you tag "scala"?
To execute a whole sql file at once:
sql < myFile.sql
Edit: as Cyrille Corpet pointed out, you can run this directly from Scala. Note that sys.process does not interpret shell redirection in a plain command string, so express the < with the #< operator (or hand the whole command line to a shell):
import java.io.File
import sys.process._
("sql" #< new File("myfile.sql")).!
// or: Seq("sh", "-c", "sql < myfile.sql").!

JSON response Using Volley in a recycler view in Android app

I am trying to print a JSON response in a RecyclerView. My RecyclerView is like an expandable ListView, and my JSON response is like this:
{"Table1":
[
{"filedate":"Oct 26, 2016"},
{"filedate":"Oct 18, 2016"}
],
"Table2":
[{
"filedate":"Oct 18, 2016",
"file1":"12.txt"
},
{
"filedate":"Oct 26, 2016",
"file1":"acerinvoice.pdf"
}
]}
and I am trying to print this JSON response using this code:
private void prepareListData() {
// Volley's json array request object
StringRequest stringRequest = new StringRequest(Request.Method.POST, REGISTER_URL,
new Response.Listener<String>() {
// I think the problem is here
@Override
public void onResponse(String response) {
try {
JSONObject object = new JSONObject(response);
JSONArray jsonarray = object.getJSONArray("Table1");
JSONArray jsonarray1 = object.getJSONArray("Table2");
for (int i = 0; i < jsonarray.length(); i++) {
try {
JSONObject obj = jsonarray.getJSONObject(i);
String str = obj.optString("filedate").trim();
int lth = jsonarray1.length();
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.HEADER, str));
for (int j = 0; j < jsonarray1.length(); j++) {
try {
JSONObject obj1 = jsonarray1.getJSONObject(j);
String str1 = obj1.optString("filedate").trim();
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str1));
Toast.makeText(getApplicationContext(), str1, Toast.LENGTH_LONG).show();
//if condition
} catch (JSONException e) {
// Log.e(TAG, "JSON Parsing error: " + e.getMessage());
}
}
// adding movie to movies array
} catch (JSONException e) {
Log.e("gdshfsjkg", "JSON Parsing error: " + e.getMessage());
}
}
Log.d("test", String.valueOf(data));
Toast.makeText(getApplicationContext(), (CharSequence) data, Toast.LENGTH_LONG).show();
} catch (JSONException e) {
e.printStackTrace();
}
// notifying list adapter about data changes
// so that it renders the list view with updated data
recyclerview.setAdapter(new ExpandableListAdapter(data));
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
}
}){
@Override
protected Map<String, String> getParams() {
Map<String, String> params = new HashMap<String, String>();
params.put(CLIENT, "4");
return params;
}
};
// Adding request to request queue
MyApplication.getInstance().addToRequestQueue(stringRequest);
}
ExpandableListAdapter
public class ExpandableListAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
public static final int HEADER = 0;
public static final int CHILD = 1;
private List<Item> data;
public ExpandableListAdapter(List<Item> data) {
this.data = data;
}
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int type) {
View view = null;
Context context = parent.getContext();
float dp = context.getResources().getDisplayMetrics().density;
int subItemPaddingLeft = (int) (18 * dp);
int subItemPaddingTopAndBottom = (int) (5 * dp);
switch (type) {
case HEADER:
LayoutInflater inflater = (LayoutInflater) parent.getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
view = inflater.inflate(R.layout.list_header, parent, false);
ListHeaderViewHolder header = new ListHeaderViewHolder(view);
return header;
case CHILD:
TextView itemTextView = new TextView(context);
itemTextView.setPadding(subItemPaddingLeft, subItemPaddingTopAndBottom, 0, subItemPaddingTopAndBottom);
itemTextView.setTextColor(0x88000000);
itemTextView.setLayoutParams(
new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.WRAP_CONTENT));
return new RecyclerView.ViewHolder(itemTextView) {
};
}
return null;
}
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
final Item item = data.get(position);
switch (item.type) {
case HEADER:
final ListHeaderViewHolder itemController = (ListHeaderViewHolder) holder;
itemController.refferalItem = item;
itemController.header_title.setText(item.text);
if (item.invisibleChildren == null) {
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_minus);
} else {
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_plus);
}
itemController.btn_expand_toggle.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (item.invisibleChildren == null) {
item.invisibleChildren = new ArrayList<Item>();
int count = 0;
int pos = data.indexOf(itemController.refferalItem);
while (data.size() > pos + 1 && data.get(pos + 1).type == CHILD) {
item.invisibleChildren.add(data.remove(pos + 1));
count++;
}
notifyItemRangeRemoved(pos + 1, count);
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_plus);
} else {
int pos = data.indexOf(itemController.refferalItem);
int index = pos + 1;
for (Item i : item.invisibleChildren) {
data.add(index, i);
index++;
}
notifyItemRangeInserted(pos + 1, index - pos - 1);
itemController.btn_expand_toggle.setImageResource(R.drawable.circle_minus);
item.invisibleChildren = null;
}
}
});
break;
case CHILD:
TextView itemTextView = (TextView) holder.itemView;
itemTextView.setText(data.get(position).text);
break;
}
}
@Override
public int getItemViewType(int position) {
return data.get(position).type;
}
@Override
public int getItemCount() {
return data.size();
}
private static class ListHeaderViewHolder extends RecyclerView.ViewHolder {
public TextView header_title;
public ImageView btn_expand_toggle;
public Item refferalItem;
public ListHeaderViewHolder(View itemView) {
super(itemView);
header_title = (TextView) itemView.findViewById(R.id.header_title);
btn_expand_toggle = (ImageView) itemView.findViewById(R.id.btn_expand_toggle);
}
}
public static class Item {
public int type;
public String text;
public List<Item> invisibleChildren;
public Item() {
}
public Item(int type, String text) {
this.type = type;
this.text = text;
}
}
}
But this code prints nothing in my RecyclerView UI; it shows only an empty layout. I have also tried printing the response in my log, and it prints the whole response together. I want to print the Table1 contents as headers and the Table2 contents as children (items) under their respective headers. How can I do this? If any other information is needed, please ask.
You should probably check your response first of all. As per your requirements you should use a HashMap and an ArrayList. Follow these steps (a sketch follows below):
1. Take an ArrayList and store the child data of the 1st heading, and so on, with the index starting from 0.
2. Store the headers in the HashMap as keys.
ex: HashMap<String, ArrayList<Data>> hm;
hm.put("your first heading string", relatedArrayList);
Then use an ExpandableListView to arrange parent and child items.
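Here is a minimal sketch of that grouping for the response shown in the question, using org.json (ResponseGrouper and the variable names are made up for illustration; Data is simplified to the file-name string):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public class ResponseGrouper {
    // Group Table2 rows (children) under their Table1 filedate (header).
    static Map<String, ArrayList<String>> group(String response) throws JSONException {
        Map<String, ArrayList<String>> grouped = new LinkedHashMap<>();
        JSONObject root = new JSONObject(response);
        JSONArray headers = root.getJSONArray("Table1");
        JSONArray children = root.getJSONArray("Table2");
        for (int i = 0; i < headers.length(); i++) {
            String header = headers.getJSONObject(i).optString("filedate").trim();
            ArrayList<String> files = new ArrayList<>();
            for (int j = 0; j < children.length(); j++) {
                JSONObject child = children.getJSONObject(j);
                if (header.equals(child.optString("filedate").trim())) {
                    files.add(child.optString("file1"));
                }
            }
            grouped.put(header, files);
        }
        return grouped;
    }
}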
Please check your JSON response; if it is invalid JSON, first validate your JSON format, then try to decode it and add it to the RecyclerView.
Check your response in Logcat, and also the exception. I've tested the response you supplied; it contains some unnecessary "," characters that may cause the exception.
I have solved my problem by just changing the above code like this:
private void prepareListData() {
// Volley's json array request object
StringRequest stringRequest = new StringRequest(Request.Method.POST, REGISTER_URL,
new Response.Listener<String>() {
@Override
public void onResponse(String response) {
JSONObject object = null;
try {
object = new JSONObject(response);
} catch (JSONException e) {
e.printStackTrace();
}
JSONArray jsonarray = null;
JSONArray jsonarray1 = null;
try {
jsonarray = object.getJSONArray("Table1");
jsonarray1 = object.getJSONArray("Table2"); // children come from Table2
} catch (JSONException e) {
e.printStackTrace();
}
for (int i = 0; i < jsonarray.length(); i++) {
try {
JSONObject obj = jsonarray.getJSONObject(i);
//
String str = obj.optString("filedate").trim();
Log.d("test", str);
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.HEADER, str));
// Toast.makeText(getApplicationContext(), lth, Toast.LENGTH_LONG).show();
for (int j = 0; j < jsonarray1.length(); j++) {
try {
JSONObject obj1 = jsonarray1.getJSONObject(j);
String str1 = obj1.optString("filedate").trim();
String str3 = obj1.optString("category").trim();
String str2 = obj1.optString("file1").trim();
String str4 = obj1.optString("4").trim();
Toast.makeText(getApplicationContext(), "server data respone", Toast.LENGTH_LONG).show();
Toast.makeText(getApplicationContext(), "test"+str1, Toast.LENGTH_LONG).show();
if (str.equals(str1)) { // compare strings with equals(), not ==
// data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str1));
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str3));
data.add(new ExpandableListAdapter.Item(ExpandableListAdapter.CHILD, str2));
}
Toast.makeText(getApplicationContext(), str1, Toast.LENGTH_LONG).show();
//if condition
} catch (JSONException e) {
// Log.e(TAG, "JSON Parsing error: " + e.getMessage());
}
}
// adding movie to movies array
} catch (JSONException e) {
Log.e("gdshfsjkg", "JSON Parsing error: " + e.getMessage());
}
}
Log.d("test", String.valueOf(data));
// notifying list adapter about data changes
// so that it renders the list view with updated data
// adapterheader.notifyDataSetChanged();
recyclerview.setAdapter(new ExpandableListAdapter(data));
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
// VolleyLog.d(TAG, "Error: " + error.getMessage());
// hidePDialog();
}
}){
@Override
protected Map<String, String> getParams() {
Map<String, String> params = new HashMap<String, String>();
params.put(CLIENT, "4");
return params;
}
};
// Adding request to request queue
MyApplication.getInstance().addToRequestQueue(stringRequest);
}

With Lucene 4.3.1, how to get all terms which occur in a sub-range of all docs

Suppose a Lucene index with the fields date and content.
I want to get all term values and their frequencies from the docs whose date is yesterday. The date field is a keyword field; the content field is analyzed and indexed.
Please help me with sample code.
My solution source is as follows...
/**
*
*
* @param searcher
* @param fromDateTime
* - yyyymmddhhmmss
* @param toDateTime
* - yyyymmddhhmmss
* @return
*/
static public String top10(IndexSearcher searcher, String fromDateTime,
String toDateTime) {
String top10Query = "";
try {
Query query = new TermRangeQuery("tweetDate", new BytesRef(
fromDateTime), new BytesRef(toDateTime), true, false);
final BitSet bits = new BitSet(searcher.getIndexReader().maxDoc());
searcher.search(query, new Collector() {
private int docBase;
@Override
public void setScorer(Scorer scorer) throws IOException {
}
@Override
public void setNextReader(AtomicReaderContext context)
throws IOException {
this.docBase = context.docBase;
}
@Override
public void collect(int doc) throws IOException {
bits.set(doc + docBase);
}
@Override
public boolean acceptsDocsOutOfOrder() {
return false;
}
});
//
Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_43,
EnglishStopWords.getEnglishStopWords());
//
HashMap<String, Long> wordFrequency = new HashMap<>();
for (int wx = 0; wx < bits.length(); ++wx) {
if (bits.get(wx)) {
Document wd = searcher.doc(wx);
//
TokenStream tokenStream = analyzer.tokenStream("temp",
new StringReader(wd.get("content")));
// OffsetAttribute offsetAttribute = tokenStream
// .addAttribute(OffsetAttribute.class);
CharTermAttribute charTermAttribute = tokenStream
.addAttribute(CharTermAttribute.class);
tokenStream.reset();
while (tokenStream.incrementToken()) {
// int startOffset = offsetAttribute.startOffset();
// int endOffset = offsetAttribute.endOffset();
String term = charTermAttribute.toString();
if (term.length() < 2)
continue;
Long wl;
if ((wl = wordFrequency.get(term)) == null)
wordFrequency.put(term, 1L);
else {
wl += 1;
wordFrequency.put(term, wl);
}
}
tokenStream.end();
tokenStream.close();
}
}
analyzer.close();
// sort
List<String> occurterm = new ArrayList<String>();
for (String ws : wordFrequency.keySet()) {
occurterm.add(String.format("%06d\t%s", wordFrequency.get(ws),
ws));
}
Collections.sort(occurterm, Collections.reverseOrder());
// make query string by top 10 words
int topCount = 10;
for (String ws : occurterm) {
if (topCount-- == 0)
break;
String[] tks = ws.split("\\t");
top10Query += tks[1] + " ";
}
top10Query = top10Query.trim(); // trim() returns a new string; reassign it
} catch (IOException e) {
e.printStackTrace();
} finally {
}
// return top10 word string
return top10Query;
}
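As a side note on the sorting step above: the %06d string-formatting trick works, but the (term, count) entries can be sorted directly by count. A small sketch in the same pre-Java-8 style as the answer, assuming the same wordFrequency map:

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;

class TopTerms {
    // Returns the 10 most frequent terms, space-separated, highest count first.
    static String top10Terms(Map<String, Long> wordFrequency) {
        List<Map.Entry<String, Long>> entries =
                new ArrayList<>(wordFrequency.entrySet());
        Collections.sort(entries, new Comparator<Map.Entry<String, Long>>() {
            @Override
            public int compare(Map.Entry<String, Long> a, Map.Entry<String, Long> b) {
                return b.getValue().compareTo(a.getValue()); // descending count
            }
        });
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, Long> e : entries.subList(0, Math.min(10, entries.size()))) {
            sb.append(e.getKey()).append(' ');
        }
        return sb.toString().trim();
    }
}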