gzip --rsyncable does not zip when run from Java using Runtime.exec()

I am trying to run gzip with the --rsyncable option. It works fine when I run it in a terminal window (I am on macOS), but it does not work when I run it from Java with the following code.
Any idea what the problem could be?
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;

public class RsyncIssue {

    public static void printOutput(Process p) throws IOException {
        String ss;
        BufferedReader inReader = new BufferedReader(new InputStreamReader(p.getInputStream()));
        while ((ss = inReader.readLine()) != null) {
            System.out.println("[IN] " + ss);
        }
    }

    public static void main(String[] args) throws IOException {
        File f = new File("Test.ext");
        if (!f.exists()) {
            f.createNewFile();
        }
        String zipCommand = "gzip --rsyncable " + f.getCanonicalPath();
        System.out.println("Zipping file : " + f.getName());
        System.out.println("Zipping command: " + zipCommand);
        Process p = Runtime.getRuntime().exec(zipCommand);
        printOutput(p);
        File zipfile = new File(f.getCanonicalPath() + ".gz");
        if (!zipfile.exists()) {
            throw new RuntimeException("zip file does not exist " + zipfile.getAbsolutePath());
        }
    }
}

Things you should do:
Call p.waitFor() so that execution blocks until the child process has completed.
Then call p.exitValue() and compare it to 0 to see whether there was an error.
In printOutput, also read the process's getErrorStream() to see any error output (gzip writes its diagnostics to stderr).
Check that --rsyncable is available on your platform; it is not a universally available gzip option.
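A minimal sketch of those changes, reusing the zipCommand string from the question (draining stdout and then stderr sequentially is a simplification; for chattier processes you would read them concurrently):

Process p = Runtime.getRuntime().exec(zipCommand);

// Drain stdout and stderr so gzip cannot block on a full pipe buffer.
try (BufferedReader out = new BufferedReader(new InputStreamReader(p.getInputStream()));
     BufferedReader err = new BufferedReader(new InputStreamReader(p.getErrorStream()))) {
    String line;
    while ((line = out.readLine()) != null) {
        System.out.println("[OUT] " + line);
    }
    while ((line = err.readLine()) != null) {
        System.err.println("[ERR] " + line);
    }
}

try {
    // Block until gzip finishes, then check its exit code.
    int exitCode = p.waitFor();
    if (exitCode != 0) {
        throw new RuntimeException("gzip failed with exit code " + exitCode);
    }
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
}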

Related

Unable To Launch Chrome Browser Because Driver Is Null

I am executing my code via a shell script. Per the requirement, if I pass no parameter, the code should run on my local machine; if I pass parameters to the shell script, it should run on BrowserStack. When I execute with parameters, everything works fine. But when I run the shell script without parameters, execution enters the if branch and the Chrome browser fails to launch, because the driver variable is never initialized there; if I print the driver value, it shows null. This is my base class:
package com.epath.smoketest.tests;

/**
 * Class: Base
 * Author: D Albanese
 * Creation Date: 4/5/2017
 */
import org.junit.Rule;
import org.junit.rules.ExternalResource;
import org.openqa.selenium.WebDriver;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.net.URL;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.LocalFileDetector;
import org.openqa.selenium.remote.RemoteWebDriver;

public class Base {

    //-Browser capability passed by CLI argument.
    private static String sBrowser;
    //-Browser version passed by CLI argument.
    private static String sBversion;
    //-OS capability passed by CLI argument.
    private static String sOsName;
    //-OS version capability passed by CLI argument.
    private static String sOsVersion;
    //-Passing input folder name by CLI argument.
    public String sFolderName = "resources";

    public static String getExecutionPath;
    public static String getResourcePath;
    public static WebDriver driver;

    public Base(String sBrowser, String sBversion, String sOsName, String sOsVersion, String sFolderName) {
        this.sBrowser = sBrowser;
        this.sBversion = sBversion;
        this.sOsName = sOsName;
        this.sOsVersion = sOsVersion;
        if (null != sFolderName && !sFolderName.trim().equals("")) {
            this.sFolderName = sFolderName;
        }
    }

    //-Utilizing ExternalResource rule to preserve functionality of @Before and @After annotations in tests.
    //-ExternalResource rule has before and after methods that execute prior to methods annotated with @Before and @After.
    @Rule
    public ExternalResource resource = new ExternalResource() {
        @Override
        protected void before() throws Throwable {
            //-Use this for local testing
            DesiredCapabilities caps = new DesiredCapabilities();
            caps.setCapability("browser", sBrowser);
            caps.setCapability("browser_version", sBversion);
            caps.setCapability("os", sOsName);
            caps.setCapability("os_version", sOsVersion);
            caps.setCapability("folder_name", sFolderName);
            caps.setCapability("browserstack.local", "true");
            if (sBrowser.length() == 0 && sBversion.length() == 0 && sOsName.length() == 0 && sOsVersion.length() == 0) {
                System.out.println("Inside If Condition ");
                //-Load the properties
                Properties prop = new Properties();
                InputStream input = null;
                input = new FileInputStream(System.getProperty("user.home") +
                        "/epath/services/tests/resources/AutomationData.properties");
                prop.load(input);
                // getResourcePath=prop.getProperty("resources_path");
                getExecutionPath = prop.getProperty("local_resources_path");
                System.out.println("Print Execution Path :- " + getExecutionPath);
                System.out.println("Print Driver Path :- " + driver);
                System.setProperty("webdriver.chrome", "\\\\192.168.10.21\\volume1\\ngage_dev\\engineering\\ngage\testing\\automated\\chromedriver_win32\\chromedriver.exe");
                driver = new ChromeDriver();
            } else {
                //-Load the properties
                System.out.println("Inside else Condtions ");
                Properties prop = new Properties();
                InputStream input = null;
                input = new FileInputStream(System.getProperty("user.home") +
                        "/epath/services/tests/resources/AutomationData.properties");
                prop.load(input);
                getResourcePath = prop.getProperty("resources_path");
                getExecutionPath = prop.getProperty("resources_path") + sFolderName;
                //-Get USERNAME and AUTOMATE_KEY of browser stack
                String browserStackUsername = prop.getProperty("browser_stack_username");
                String browserStackAutomateKey = prop.getProperty("browser_stack_automate_key");
                String URL = "https://" + browserStackUsername + ":" +
                        browserStackAutomateKey + "@hub-cloud.browserstack.com/wd/hub";
                driver = new RemoteWebDriver(new URL(URL), caps);
                //-Load the URL to be tested
                driver.get(prop.getProperty("test_url"));
                //-For local file uploads
                ((RemoteWebDriver) driver).setFileDetector(new LocalFileDetector());
            }
        }

        @Override
        protected void after() {
            driver.quit();
        }
    };

    public String getAutomationInputDataPath() {
        return this.getExecutionPath;
    }

    public static String getResourcePathFromPropertiesfile() {
        return getResourcePath;
    }
}
And this is my test case class, where I call the base class (above):
public class AddRegisterLAs extends Base {

    private Login login;
    private Navigation go;
    private LearningActivityAdd addLa;
    private ImageAdd addImage;
    private DocumentAdd addDocument;
    private VideoAdd addVideo;
    private AudioAdd addAudio;
    private LinkAdd addLink;
    private CustomAdd addCustom;
    private AiccAdd addAicc;
    private ScormAdd addScorm;
    private RegistrationCreate createRegistration;
    private Utils utils;
    private GetVersion getVersion;

    public AddRegisterLAs() {
        super(System.getProperty("browser"), System.getProperty("browser_version"),
              System.getProperty("os"), System.getProperty("ov"), System.getProperty("folderName"));
    }

    @Before
    public void setUp() {
        login = new Login(driver);
        go = new Navigation(driver);
        addLa = new LearningActivityAdd(driver);
        addImage = new ImageAdd(driver);
        addDocument = new DocumentAdd(driver);
        addVideo = new VideoAdd(driver);
        addAudio = new AudioAdd(driver);
        addLink = new LinkAdd(driver);
        addCustom = new CustomAdd(driver);
        addAicc = new AiccAdd(driver);
        addScorm = new ScormAdd(driver);
        utils = new Utils();
        getVersion = new GetVersion(driver);
        createRegistration = new RegistrationCreate(driver);
    }

    @Test
    public void Shallow() throws Exception {
        //utils.logAndPrint("AddRegisterLAs");
        int maxLAs = 1000;
        int maxRegs = 1000;
        //-Print to screen to create log. Log can be copied and pasted to Word document or elsewhere as needed.
        System.out.println("\n" + "---------------------------------------------------------------------------------" + "\r\n");
        System.out.println("---------------------------------------------------------------------------------" + "\r\n");
        System.out.println("Adding and Registering Learning Activities Automation" + "\r\n");
        System.out.println("---------------------------------------------------------------------------------" + "\r\n");
        System.out.println("---------------------------------------------------------------------------------" + "\r\n");
        //-Load the properties
        System.out.println("\n" + "---------------------------------------------------------------------------------" + "\r\n");
        System.out.println("Read in PROPERTIES file" + "\r\n");
        System.out.println("---------------------------------------------------------------------------------" + "\r\n");
        Properties prop = new Properties();
        InputStream input = null;
First, check that you actually have access to the driver path you reference inside the if block.
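Beyond file access, two things in the if branch are worth checking. Selenium locates ChromeDriver through the webdriver.chrome.driver system property, and the code above sets webdriver.chrome instead, so new ChromeDriver() never sees the path; also note the single backslash in \testing, which Java reads as a tab character. A minimal sketch of the corrected lines (the UNC path itself is the asker's):

// Property name must be "webdriver.chrome.driver" (not "webdriver.chrome"),
// and every backslash in the path must be escaped ("\\testing", not "\testing").
System.setProperty("webdriver.chrome.driver",
        "\\\\192.168.10.21\\volume1\\ngage_dev\\engineering\\ngage\\testing\\automated\\chromedriver_win32\\chromedriver.exe");
driver = new ChromeDriver();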

CannotCompileException while Instrumenting Java code with using Java Assist, cannot find class

I'm trying to create a generic Java agent to instrument the methods of any Java application.
I followed this tutorial https://javapapers.com/core-java/java-instrumentation/ and created a Java agent.
The agent is supposed to look for a particular class (I'm restricting it to one class for now, since it's not working for me).
Once the class is found, I use the Javassist API to add a local variable at the beginning of each method that captures the current time. At the end of the method I'd like to simply print how long the method took to execute (pretty much following the typical examples about Java agents).
I run my test application (a web server using Vert.x) with the -javaagent flag pointing to the agent jar file I created (the code is down below).
This works just fine for methods that have no return value and no parameters, or that return/take a primitive type.
However, when a method returns or takes a parameter that is an object of another class (one that has not been loaded yet, I think), I get a CannotCompileException with a message saying that the class in the parameter list or return statement is not found.
For example, the instrumentation of this method works:
@Override
public void start() throws Exception {
    logger.debug("started thread {}", Thread.currentThread().getName());
    for (int port : ports) {
        HttpServer httpServer = getVertx().createHttpServer(httpServerOptions);
        Router router = setupRoutes();
        httpServer.requestHandler(router::accept);
        logger.info("Listening on port {}", port);
        httpServer.listen(port);
    }
}
However, for this method, which returns io.vertx.ext.web.Router:
private Router setupRoutes() {
    Router router = Router.router(getVertx());
    router.get(STATUS_PATH).handler(this::statusHandler);
    router.route().handler(BodyHandler.create());
    router.post().handler(this::handleBidRequest);
    router.put().handler(this::handleBidRequest);
    router.get(SLEEP_CONTROLLER_PATH).handler(this::sleepControllerHandler);
    return router;
}
I get an exception, and the output of my Java agent is:
Instrumenting method rubiconproject.com.WebServerVerticle.setupRoutes()
Could not instrument method setupRoutes error: cannot find io.vertx.ext.web.Router
This is the code for my Java agent:
import java.lang.instrument.Instrumentation;
import transformers.TimeMeasuringTransformer;

public class TimeCapturerAgent {

    public static void premain(String agentArgs, Instrumentation inst) {
        System.out.println(TimeCapturerAgent.class.getCanonicalName() + " is loaded...... ");
        inst.addTransformer(new TimeMeasuringTransformer());
    }
}
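One thing worth double-checking with any premain agent: the agent jar's MANIFEST.MF must declare the premain class, e.g. (assuming the class above, in the default package):
Premain-Class: TimeCapturerAgent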
package transformers;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.instrument.ClassFileTransformer;
import java.lang.instrument.IllegalClassFormatException;
import java.security.ProtectionDomain;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtMethod;

public class TimeMeasuringTransformer implements ClassFileTransformer {

    public TimeMeasuringTransformer() {
        System.out.println("TimeMeasuringTransformer added ");
    }

    @Override
    public byte[] transform(ClassLoader loader,
                            String className,
                            Class<?> classBeingRedefined,
                            ProtectionDomain protectionDomain,
                            byte[] classfileBuffer) throws IllegalClassFormatException {
        if (className != null && className.contains("WebServerVerticle")) {
            System.out.println("Instrumenting class " + className);
            return modifyClass(classfileBuffer);
        }
        return null;
    }

    private byte[] modifyClass(byte[] originalClassfileBuffer) {
        ClassPool classPool = ClassPool.getDefault();
        CtClass compiledClass;
        try {
            compiledClass = classPool.makeClass(new ByteArrayInputStream(originalClassfileBuffer));
            System.out.println("Created new compiled Class " + compiledClass.getName());
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
        instrumentMethods(compiledClass);
        byte[] newClassByteCode = createNewClassByteArray(compiledClass);
        compiledClass.detach();
        return newClassByteCode;
    }

    private byte[] createNewClassByteArray(CtClass compiledClass) {
        byte[] newClassByteArray = null;
        try {
            newClassByteArray = compiledClass.toBytecode();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (CannotCompileException e) {
            e.printStackTrace();
        } finally {
            return newClassByteArray;
        }
    }

    private void instrumentMethods(CtClass compiledClass) {
        CtMethod[] methods = compiledClass.getDeclaredMethods();
        System.out.println("Class has " + methods.length + " methods");
        for (CtMethod method : methods) {
            try {
                System.out.println("Instrumenting method " + method.getLongName());
                method.addLocalVariable("startTime", CtClass.longType);
                method.insertBefore("startTime = System.nanoTime();");
                method.insertAfter("System.out.println(\"Execution Duration "
                        + "(nano sec): \"+ (System.nanoTime() - startTime) );");
            } catch (CannotCompileException e) {
                System.out.println("Could not instrument method " + method.getName() + " error: " + e.getMessage());
                continue;
            }
        }
    }
}
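A likely cause of the "cannot find io.vertx.ext.web.Router" error: ClassPool.getDefault() only searches the JVM's own class path, so types visible only to the application's class loader cannot be resolved while Javassist compiles the inserted snippets. One common fix is to register the loader passed into transform() with the pool; a hedged sketch (javassist.LoaderClassPath is the relevant Javassist class, and the exact placement inside transform()/modifyClass is illustrative):

// Before building the CtClass: make the pool aware of the application's
// class loader so parameter and return types like io.vertx.ext.web.Router resolve.
ClassPool classPool = ClassPool.getDefault();
classPool.appendClassPath(new javassist.LoaderClassPath(loader));
CtClass compiledClass = classPool.makeClass(new ByteArrayInputStream(classfileBuffer));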

How can I merge files in a directory in HDFS without using the getmerge command?

I have gone through a program in Hadoop in Action for merging files on the fly while copying from the local FS to HDFS. But when executing the code in Eclipse, I get an ArrayIndexOutOfBoundsException, and when I created an external jar file and ran it from the Hadoop CLI, an empty file got created. If the code's logic is incorrect, is there any other way to merge multiple files in a directory in HDFS without using the getmerge command?
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PutMerge {

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);
        FileSystem local = FileSystem.getLocal(conf);
        Path inputDir = new Path(args[0]);
        Path hdfsFile = new Path(args[1]);
        try {
            FileStatus[] inputFiles = local.listStatus(inputDir);
            FSDataOutputStream out = hdfs.create(hdfsFile);
            for (int i = 0; i < inputFiles.length; i++) {
                System.out.println(inputFiles[i].getPath().getName());
                FSDataInputStream in = local.open(inputFiles[i].getPath());
                byte buffer[] = new byte[256];
                int bytesRead = 0;
                while ((bytesRead = in.read(buffer)) > 0) {
                    out.write(buffer, 0, bytesRead);
                }
                in.close();
            }
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
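Note that running this in Eclipse without program arguments makes args[0] throw exactly the ArrayIndexOutOfBoundsException you describe. And if you just want the files merged, you can skip Java entirely and stream through the shell; the following concatenates everything in [dir] into a single HDFS file: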
hadoop fs -cat [dir]/* | hadoop fs -put - [destination file]
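If you need to do it programmatically for files that are already in HDFS (the PutMerge code above reads from the local file system), here is a hedged sketch using the same FileSystem API and the imports from the question's code; the paths are illustrative:

// Sketch: concatenate all files in one HDFS directory into a single HDFS file.
Configuration conf = new Configuration();
FileSystem hdfs = FileSystem.get(conf);
Path srcDir = new Path("/input/dir");      // hypothetical source directory
Path dstFile = new Path("/output/merged"); // hypothetical destination file
FSDataOutputStream out = hdfs.create(dstFile);
for (FileStatus status : hdfs.listStatus(srcDir)) {
    if (status.isFile()) {
        FSDataInputStream in = hdfs.open(status.getPath());
        org.apache.hadoop.io.IOUtils.copyBytes(in, out, conf, false); // false: keep out open
        in.close();
    }
}
out.close();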

Splitting PDF document into multiple documents

I'm trying to split a PDF document into multiple documents, where each document contains the maximum number of pages it can hold while its file size stays below a given maximum.
My code currently works when run from Eclipse, but when I launch the .jar file, the static method in a Java class seems to crash (I can't seem to catch an exception, however).
The code that isn't working is:
myListOfDocuments=mysplitter.split(document);
Somehow the JVM bails out of the static method when the above line is called. The load seems to work fine, as follows:
PDDocument document=PDDocument.load(aFile);
Any ideas?
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import org.apache.pdfbox.multipdf.Splitter;
import org.apache.pdfbox.pdmodel.PDDocument;

public class PDFMaxSizeSplitter {

    public static void main(String[] args) {
    }

    public static ArrayList<File> splitTheFile(File aFile, long maxSize) {
        ArrayList<File> resultFiles = new ArrayList<File>();
        //Checks to see if file is already small enough
        if (aFile.length() <= maxSize) {
            resultFiles.add(aFile);
            return resultFiles;
        }
        //Checks to see if it's a directory
        if (aFile.isDirectory()) {
            resultFiles.add(aFile);
            return resultFiles;
        }
        try {
            PDDocument document = PDDocument.load(aFile);
            Splitter mysplitter = new Splitter();
            List<PDDocument> myListOfDocuments = mysplitter.split(document);
            int docNumber = 0;
            while (myListOfDocuments.size() > 0) {
                long theResults = 0;
                theResults = getChunk(myListOfDocuments, 0, (long) (myListOfDocuments.size() - 1), maxSize);
                PDDocument newPDFDoc = new PDDocument();
                for (long pageindex = 0; pageindex <= theResults; pageindex++) {
                    newPDFDoc.addPage(myListOfDocuments.get((int) pageindex).getPage(0));
                }
                File newFile = new File(aFile.getParentFile() +
                        File.separator +
                        aFile.getName().replace(".pdf", "") +
                        "Part" +
                        String.format("%03d", docNumber) +
                        ".pdf");
                //System.out.println(newFile.getCanonicalFile());
                newPDFDoc.save(newFile);
                resultFiles.add(newFile);
                myListOfDocuments = myListOfDocuments.subList((int) (theResults) + 1, (myListOfDocuments.size()));
                newPDFDoc.close();
                docNumber++;
            }
            document.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return resultFiles;
    }

    private static long getChunk(List<PDDocument> thePages, long lowPage, long highPage, long maxSize) throws IOException {
        //System.out.println("low " + lowPage + " high page: " + highPage);
        if ((highPage - lowPage) <= 1) {
            if (PDFMaxSizeSplitter.testSize(thePages, 0, highPage) <= maxSize) {
                return highPage;
            } else {
                return lowPage;
            }
        } else if (PDFMaxSizeSplitter.testSize(thePages, 0, lowPage + (highPage - lowPage) / 2) <= maxSize) {
            return PDFMaxSizeSplitter.getChunk(thePages, lowPage + (highPage - lowPage) / 2, highPage, maxSize);
        } else {
            return PDFMaxSizeSplitter.getChunk(thePages, lowPage, lowPage + (highPage - lowPage) / 2, maxSize);
        }
    }

    private static long testSize(List<PDDocument> thePages, long start, long stop) throws IOException {
        //System.out.println("Trying: " + (new Long(start)).toString() + " to " + (new Long(stop)).toString());
        PDDocument testerdocument = new PDDocument();
        //Path tempPath = Files.createTempFile((new Long(start)).toString(), (new Long(stop)).toString());
        //System.out.println("Creating tempPath " + tempPath.toString());
        //File tempFile = new File(tempPath.toString());
        ByteArrayOutputStream tempFile = new ByteArrayOutputStream();
        for (long pageindex = start; pageindex <= stop; pageindex++) {
            testerdocument.addPage(thePages.get((int) pageindex).getPage(0));
        }
        testerdocument.save(tempFile);
        long thefilesize = tempFile.size();
        //long thefilesize = (tempFile.length());
        //Files.deleteIfExists(tempPath);
        tempFile.reset();
        testerdocument.close();
        return thefilesize;
    }
}
-----------edit--------------
It turns out the JVM was running out of memory. I added a JVM argument to increase the heap, and I switched to the 64-bit JVM by passing the -d64 argument. I have also been using PDFBox's disk-backed memory management, e.g. PDDocument.load(aFile, MemoryUsageSetting.setupTempFileOnly()).
With these settings, I can handle several gigabytes of files. In the code, I now try to load the document into memory directly and catch the out-of-memory error to switch to a low-memory mode; in the low-memory mode I use MemoryUsageSetting.setupTempFileOnly() to avoid using too much of the heap.
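A hedged sketch of that fallback, using the PDFBox 2.x overloads mentioned above (catching OutOfMemoryError like this mirrors the approach described, though it is generally a last resort):

// Try to load fully in memory first; fall back to a temp-file-backed
// load if the heap cannot hold the document.
// Requires org.apache.pdfbox.io.MemoryUsageSetting.
PDDocument document;
try {
    document = PDDocument.load(aFile, MemoryUsageSetting.setupMainMemoryOnly());
} catch (OutOfMemoryError e) {
    document = PDDocument.load(aFile, MemoryUsageSetting.setupTempFileOnly());
}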

Use Drools 6.0's new PHREAK algorithm via the 5.5 legacy API

Is it possible to try out the new PHREAK algorithm while still using the conventional API from Drools 5.5?
* EDIT: to make the question more precise *
The conventional 5.5 API:
import org.drools.KnowledgeBase;
import org.drools.KnowledgeBaseFactory;
import org.drools.builder.KnowledgeBuilder;
import org.drools.builder.KnowledgeBuilderFactory;
import org.drools.builder.ResourceType;
import org.drools.io.Resource;
import org.drools.io.ResourceFactory;
import org.drools.runtime.StatefulKnowledgeSession;
instead of the new Drools 6 API:
import org.kie.api.KieBase;
import org.kie.api.KieBaseConfiguration;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieScanner;
import org.kie.api.builder.Message;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.conf.ClockTypeOption;
It's also possible with the legacy 5 knowledge API in Drools 6.0+ by setting a rule-engine property:
drools.ruleEngine = phreak
Here's how you set it in the 5 knowledge API:
knowledgeBaseConfiguration.setProperty("drools.ruleEngine", "phreak");
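In context, a minimal end-to-end sketch using the legacy 5.5 classes imported above (the classpath resource name simple.drl is illustrative, and KnowledgeBaseConfiguration comes from org.drools):

// Build rules with the legacy API.
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add(ResourceFactory.newClassPathResource("simple.drl"), ResourceType.DRL);

// Ask the 6.x engine for PHREAK while staying on the 5.5 API.
KnowledgeBaseConfiguration knowledgeBaseConfiguration = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
knowledgeBaseConfiguration.setProperty("drools.ruleEngine", "phreak");

KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(knowledgeBaseConfiguration);
kbase.addKnowledgePackages(kbuilder.getKnowledgePackages());

StatefulKnowledgeSession session = kbase.newStatefulKnowledgeSession();
session.fireAllRules();
session.dispose();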
I'm copy-pasting the simplest Java code for launching a 6.0 session. Everything else (fact insertion, global definitions, and so on) works using the same API, only with KieSession.
package simple;

import java.io.FileInputStream;
import org.kie.api.KieServices;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.Results;
import org.kie.api.builder.Message;
import org.kie.api.KieBase;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.KieContainer;

public class Main {

    private KieSession kieSession;

    public void build() throws Exception {
        KieServices kieServices = KieServices.Factory.get();
        KieFileSystem kfs = kieServices.newKieFileSystem();
        FileInputStream fis = new FileInputStream( "simple/simple.drl" );
        kfs.write( "src/main/resources/simple.drl",
                   kieServices.getResources().newInputStreamResource( fis ) );
        KieBuilder kieBuilder = kieServices.newKieBuilder( kfs ).buildAll();
        Results results = kieBuilder.getResults();
        if( results.hasMessages( Message.Level.ERROR ) ){
            System.out.println( results.getMessages() );
            throw new IllegalStateException( "### errors ###" );
        }
        KieContainer kieContainer =
            kieServices.newKieContainer( kieServices.getRepository().getDefaultReleaseId() );
        KieBase kieBase = kieContainer.getKieBase();
        kieSession = kieContainer.newKieSession();
    }

    public void exec(){
        kieSession.fireAllRules();
    }

    public static void main( String[] args ) throws Exception {
        Main m = new Main();
        m.build();
        m.exec();
    }
}
Yes, most of the API is still supported. You will need to add the knowledge-api jar to your classpath (https://github.com/droolsjbpm/drools/tree/master/knowledge-api-legacy5-adapter).
The main difference is that Drools 6 no longer uses PKG files for deployment; it uses mavenized kjars now. If you are not using PKG files (built by Guvnor in Drools 5), you should be fine.
Here is working code that preserves rules and facts by reusing the same KieSession while deploying jars; it is adapted from https://github.com/droolsjbpm/drools/blob/master/drools-compiler/src/test/java/org/drools/compiler/integrationtests/IncrementalCompilationTest.java.
package test.drools;

import java.io.UnsupportedEncodingException;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.kie.api.KieBaseConfiguration;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieModule;
import org.kie.api.builder.ReleaseId;
import org.kie.api.io.Resource;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;

public class Playground4 {

    private String newline = System.getProperty("line.separator");

    public static void main(String[] args) {
        Playground4 pg = new Playground4();
        try {
            pg.doRules();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
    }

    private String generateDRLString(int start, int end) {
        StringBuilder sb;
        sb = new StringBuilder();
        sb.append("package performance.drools;" + newline);
        for (int i = start; i <= end; i++) {
            sb.append("rule \"R" + i + "\"" + newline);
            sb.append("when" + newline);
            sb.append("then" + newline);
            sb.append("System.out.println(\"rule" + i + " fired!\");" + newline);
            sb.append("end" + newline);
        }
        return sb.toString();
    }

    int count = 1;

    public void doRules() throws UnsupportedEncodingException {
        KieServices kieServices = KieServices.Factory.get();
        KieBaseConfiguration kieBaseConf = kieServices.newKieBaseConfiguration();
        kieBaseConf.setProperty("drools.ruleEngine", "phreak");
        ReleaseId releaseId;
        KieModule kieModule;
        releaseId = kieServices.newReleaseId("performance.drools",
                "test-upgrade", "1.$v.0".replace("$v", String.valueOf(count)));
        // create session without rules for version 1.1.0
        kieModule = createAndDeployJar(releaseId);
        KieContainer kieContainer = kieServices.newKieContainer(kieModule.getReleaseId());
        kieContainer.newKieBase(kieBaseConf);
        KieSession kieSession = kieContainer.newKieSession();
        // Create an in-memory jar for version 1.2.0
        count++;
        releaseId = kieServices.newReleaseId("performance.drools",
                "test-upgrade", "1.$v.0".replace("$v", String.valueOf(count)));
        kieModule = createAndDeployJar(releaseId, generateDRLString(1, 3));
        kieContainer.updateToVersion(releaseId);
        kieSession.insert(new Object());
        // Create an in-memory jar for version 1.3.0
        count++;
        releaseId = kieServices.newReleaseId("performance.drools",
                "test-upgrade", "1.$v.0".replace("$v", String.valueOf(count)));
        kieModule = createAndDeployJar(releaseId, generateDRLString(4, 12));
        kieContainer.updateToVersion(releaseId);
        kieSession.fireAllRules();
        System.out.println(kieSession.getFactCount());
    }

    public KieModule createAndDeployJar(ReleaseId releaseId, String... drls) {
        KieServices kieServices = KieServices.Factory.get();
        byte[] jar = createKJar(kieServices, releaseId, null, drls);
        return deployJar(kieServices, jar);
    }

    KieFileSystem kfs;

    public byte[] createKJar(KieServices ks, ReleaseId releaseId,
                             String pom, String... drls) {
        if (kfs == null) kfs = ks.newKieFileSystem();
        if (pom != null) {
            kfs.write("pom.xml", pom);
        } else {
            kfs.generateAndWritePomXML(releaseId);
        }
        for (int i = 0; i < drls.length; i++) {
            if (drls[i] != null) {
                kfs.write("src/main/resources/r" + 10 * count + i + ".drl", drls[i]);
            }
        }
        KieBuilder kb = ks.newKieBuilder(kfs).buildAll();
        if (kb.getResults().hasMessages(org.kie.api.builder.Message.Level.ERROR)) {
            for (org.kie.api.builder.Message result : kb.getResults().getMessages()) {
                System.out.println(result.getText());
            }
            return null;
        }
        InternalKieModule kieModule = (InternalKieModule) ks.getRepository()
                .getKieModule(releaseId);
        byte[] jar = kieModule.getBytes();
        return jar;
    }

    public KieModule deployJar(KieServices ks, byte[] jar) {
        // Deploy jar into the repository
        Resource jarRes = ks.getResources().newByteArrayResource(jar);
        KieModule km = ks.getRepository().addKieModule(jarRes);
        return km;
    }
}