SonarQube Runner throws NullPointerException during analysis

UPDATE:
I am running it on Fedora 21.
SonarQube - 5.0.
SonarQube Runner - 2.4
UPDATE2: Findbugs v3.1, Java Plugin v2.8
UPDATE3:
The analyzer fails on the following file:
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class MyManager extends ParentManager {
    public MyManager(int id, int var1, int var2, int var3) {
        super(id, var1, var2, var3);
    }

    public void load() {
        Connection con = null;
        Statement st;
        ResultSet res;
        try {
            con = DatabaseManager.getDataSource().getConnection();
            st = con.createStatement();
            int currentId = 0;
            CurrentData currentData = null;
            res = st.executeQuery("SELECT subjects.subjectname AS name, " +
                    " subjects.subjectcode AS code, " +
                    " groups.groupname AS gname, " +
                    " groups.groupID AS gID , " +
                    " subjects.sID AS sID, " +
                    " groups.isMain AS ismain " +
                    " FROM joinGroup LEFT JOIN groups ON joinGroup.groupID=groups.groupID " +
                    " LEFT JOIN subjects ON subjects.sID=groups.sID " +
                    " WHERE joinGroup.sID=" + id + " " +
                    " AND groups.var1=" + var1 + " AND groups.var2=" + var2 + " " +
                    " ORDER BY name, subjects.subjectcode, subjects.sID ");
            while (res.next()) {
                if (currentId != res.getInt("sID")) {
                    currentData = new CurrentData();
                    data.subjects.add(currentData);
                    currentData.subjectName = res.getString("name");
                    currentData.subjectID = res.getInt("sID");
                }
                CurrentGroupData groupData = new CurrentGroupData();
                groupData.name = res.getString("gname");
                groupData.id = res.getInt("gID");
                currentId = res.getInt("sID");
                if (res.getBoolean("ismain")) {
                    assert currentData != null;
                    currentData.groupID = res.getInt("gID");
                }
                if (currentData != null) {
                    currentData.groups.add(groupData);
                }
            }
            loadSubjectsData(st, res);
        } catch (Exception exc) {
            Log.writeLog(exc);
        } finally {
            try {
                // con may still be null if getConnection() failed
                if (con != null) {
                    con.close();
                }
            } catch (SQLException e) {
                Log.writeLog(e);
            }
        }
    }
}
I installed SonarQube following the tutorials on the Sonar website.
I am able to browse to localhost:9000 and it shows the start page.
Then I try to analyze the project with the sonar-runner command.
It fails.
Running sonar-runner -X displays the following:
ERROR: Error during Sonar runner execution
org.sonar.runner.impl.RunnerException: Unable to execute Sonar
at org.sonar.runner.impl.BatchLauncher$1.delegateExecution(BatchLauncher.java:91)
at org.sonar.runner.impl.BatchLauncher$1.run(BatchLauncher.java:75)
at java.security.AccessController.doPrivileged(Native Method)
at org.sonar.runner.impl.BatchLauncher.doExecute(BatchLauncher.java:69)
at org.sonar.runner.impl.BatchLauncher.execute(BatchLauncher.java:50)
at org.sonar.runner.api.EmbeddedRunner.doExecute(EmbeddedRunner.java:102)
at org.sonar.runner.api.Runner.execute(Runner.java:100)
at org.sonar.runner.Main.executeTask(Main.java:70)
at org.sonar.runner.Main.execute(Main.java:59)
at org.sonar.runner.Main.main(Main.java:53)
Caused by: org.sonar.squidbridge.api.AnalysisException: SonarQube is unable to analyze file : '/path/to/my/file/MyFile.java'
at org.sonar.java.ast.AstScanner.simpleScan(AstScanner.java:114)
at org.sonar.java.ast.AstScanner.scan(AstScanner.java:75)
at org.sonar.java.JavaSquid.scanSources(JavaSquid.java:122)
at org.sonar.java.JavaSquid.scan(JavaSquid.java:115)
at org.sonar.plugins.java.JavaSquidSensor.analyse(JavaSquidSensor.java:91)
at org.sonar.batch.phases.SensorsExecutor.executeSensor(SensorsExecutor.java:79)
at org.sonar.batch.phases.SensorsExecutor.execute(SensorsExecutor.java:70)
at org.sonar.batch.phases.PhaseExecutor.execute(PhaseExecutor.java:122)
at org.sonar.batch.scan.ModuleScanContainer.doAfterStart(ModuleScanContainer.java:222)
at org.sonar.api.platform.ComponentContainer.startComponents(ComponentContainer.java:93)
at org.sonar.api.platform.ComponentContainer.execute(ComponentContainer.java:78)
at org.sonar.batch.scan.ProjectScanContainer.scan(ProjectScanContainer.java:235)
at org.sonar.batch.scan.ProjectScanContainer.scanRecursively(ProjectScanContainer.java:230)
at org.sonar.batch.scan.ProjectScanContainer.doAfterStart(ProjectScanContainer.java:223)
at org.sonar.api.platform.ComponentContainer.startComponents(ComponentContainer.java:93)
at org.sonar.api.platform.ComponentContainer.execute(ComponentContainer.java:78)
at org.sonar.batch.scan.ScanTask.scan(ScanTask.java:65)
at org.sonar.batch.scan.ScanTask.execute(ScanTask.java:52)
at org.sonar.batch.bootstrap.TaskContainer.doAfterStart(TaskContainer.java:128)
at org.sonar.api.platform.ComponentContainer.startComponents(ComponentContainer.java:93)
at org.sonar.api.platform.ComponentContainer.execute(ComponentContainer.java:78)
at org.sonar.batch.bootstrap.BootstrapContainer.executeTask(BootstrapContainer.java:171)
at org.sonar.batch.bootstrapper.Batch.executeTask(Batch.java:95)
at org.sonar.batch.bootstrapper.Batch.execute(Batch.java:67)
at org.sonar.runner.batch.IsolatedLauncher.execute(IsolatedLauncher.java:48)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.sonar.runner.impl.BatchLauncher$1.delegateExecution(BatchLauncher.java:87)
... 9 more
Caused by: java.lang.NullPointerException
at org.sonar.java.checks.AbstractInjectionChecker.isIdentifierDynamicString(AbstractInjectionChecker.java:67)
at org.sonar.java.checks.AbstractInjectionChecker.isDynamicString(AbstractInjectionChecker.java:54)
at org.sonar.java.checks.AbstractInjectionChecker.isDynamicString(AbstractInjectionChecker.java:49)
at org.sonar.java.checks.AbstractInjectionChecker.isDynamicString(AbstractInjectionChecker.java:57)
at org.sonar.java.checks.AbstractInjectionChecker.isDynamicString(AbstractInjectionChecker.java:49)
at org.sonar.java.checks.AbstractInjectionChecker.isDynamicString(AbstractInjectionChecker.java:57)
at org.sonar.java.checks.AbstractInjectionChecker.isDynamicString(AbstractInjectionChecker.java:49)
at org.sonar.java.checks.AbstractInjectionChecker.isDynamicString(AbstractInjectionChecker.java:57)
at org.sonar.java.checks.SQLInjectionCheck.visitNode(SQLInjectionCheck.java:48)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:95)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visitChildren(SubscriptionVisitor.java:113)
at org.sonar.java.ast.visitors.SubscriptionVisitor.visit(SubscriptionVisitor.java:97)
at org.sonar.java.ast.visitors.SubscriptionVisitor.scanTree(SubscriptionVisitor.java:73)
at org.sonar.java.ast.visitors.SubscriptionVisitor.scanFile(SubscriptionVisitor.java:67)
at org.sonar.java.model.VisitorsBridge.visitFile(VisitorsBridge.java:122)
at com.sonar.sslr.impl.ast.AstWalker.walkAndVisit(AstWalker.java:67)
at org.sonar.java.ast.AstScanner.simpleScan(AstScanner.java:107)
... 38 more
I didn't do anything "exotic"; I just followed the official tutorials, like this one:
http://docs.sonarqube.org/display/SONAR/Analyzing+with+SonarQube+Runner
I am running it locally.
Any ideas?
Thanks

I got a similar problem with ReturnEmptyArrayyNotNullCheck. As a workaround I disabled that rule in the SonarQube web GUI.
Log in as an admin, click Rules in the top menu, and search for SQL, Inject, or whatever other word finds the rule you think is causing the analysis failure. Then select the rule and disable it in the quality profile attached to your project. Be aware that the rule will then be ignored in every project attached to that profile.
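Independent of the analyzer bug, the string concatenation that trips SQLInjectionCheck is worth fixing anyway. Here is a sketch of the query from load() rewritten with a PreparedStatement so that no value is concatenated into the SQL (same schema and fields as in the question assumed; untested):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class SubjectQuerySketch {
    // Sketch only: id, var1 and var2 correspond to the fields used in the question's query.
    public static void loadSubjects(Connection con, int id, int var1, int var2) throws SQLException {
        String sql = "SELECT subjects.subjectname AS name, subjects.subjectcode AS code, "
                + "groups.groupname AS gname, groups.groupID AS gID, subjects.sID AS sID, "
                + "groups.isMain AS ismain "
                + "FROM joinGroup LEFT JOIN groups ON joinGroup.groupID = groups.groupID "
                + "LEFT JOIN subjects ON subjects.sID = groups.sID "
                + "WHERE joinGroup.sID = ? AND groups.var1 = ? AND groups.var2 = ? "
                + "ORDER BY name, subjects.subjectcode, subjects.sID";
        try (PreparedStatement ps = con.prepareStatement(sql)) {
            // Values are bound as parameters instead of being concatenated into the SQL.
            ps.setInt(1, id);
            ps.setInt(2, var1);
            ps.setInt(3, var2);
            try (ResultSet res = ps.executeQuery()) {
                while (res.next()) {
                    // ... same row handling as in load() ...
                }
            }
        }
    }
}

With the parameters bound this way the rule sees no dynamic string, so it should no longer need to be disabled for this file.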

I have no solution, but I ran into the same problem and found this reference on the Sonar website; could it be the same problem you have?
http://sonarqube.15.x6.nabble.com/NullPointerException-analysing-file-with-SQLInjectionCheck-td5032155.html (scroll to the end)

Related

javax.cache.CacheException: Failed to find SQL table for type: XXXXXX

I am using Ignite version 2.7 and getting the exception below while querying the Ignite cache:
19/10/30 05:33:14 ERROR yarn.ApplicationMaster: User class threw exception: javax.cache.CacheException: Failed to find SQL table for type: CanonicalXXXXX
javax.cache.CacheException: Failed to find SQL table for type: CanonicalXXXXX
This is my code to start Ignite:
def getIgnite(): Ignite = {
  val clusterName = clusterProperties.getProperty("XXXXXXXX")
  Ignition.setClientMode(true)
  try {
    val ignite = Ignition.ignite(clusterName)
    if (clusterName == ignite.name()) {
      logInfo("#### Found and returning client for cluster: " + clusterName)
      return ignite
    }
  } catch {
    case e: Exception => e.printStackTrace()
  }
  val configFilePath = clusterProperties.getProperty("XXXXXXXX")
  logInfo("#### configFilePath: " + configFilePath)
  val configInputStream = FileSystem.get(new Configuration()).open(new Path(configFilePath))
  logInfo("#### Starting Ignite Client")
  Ignition.start(configInputStream)
}
I load data into the cache; it loads successfully, and the cache size is printed as below:
INFO dataloader.IgniteDataLoader: #### OFFICIAL_NAME_CACHE SIZE => 51016471
The code below creates and configures the cache:
def createXXXXXXCache: IgniteCache[String, CanonicalXXXXX] = {
  val orgCacheCfg: CacheConfiguration[String, CanonicalXXXXX] =
    new CacheConfiguration[String, CanonicalXXXXX](OFFICIAL_NAME_CACHE)
  orgCacheCfg.setIndexedTypes(classOf[String], classOf[CanonicalXXXXX])
  orgCacheCfg.setCacheMode(CacheMode.PARTITIONED)
  orgCacheCfg.setAtomicityMode(CacheAtomicityMode.ATOMIC)
  orgCacheCfg.setBackups(3)
  getIgnite().getOrCreateCache(orgCacheCfg)
}
But when I try to query the cache I get the exception. The code below queries the cache:
val companyName = "SUFFOLK CONST CO"
val queryString = "orgName = '" + companyName + "'"
val companyNameQuery =
  new SqlQuery[String, CanonicalXXXXX](classOf[CanonicalXXXXX], queryString)
val queryCursor = igniteXXXXXX.createXXXXXXCache.query(companyNameQuery)
val queryResults = Future {
  queryCursor.getAll()
}
try {
  val companyResults = extractXXXXXVO(queryResults)
  logInfo(s"Ignite Results returned - $companyResults")
} catch {
  case exe: Exception =>
    logInfo(s"OfficialXXXX exact search timed out")
    queryCursor.close()
    Vector.empty
}
The code throws the exception at the query() method, on the line below:
val queryCursor = igniteXXXXXX.createXXXXXXCache.query(companyNameQuery)
19/10/30 05:33:14 INFO dataloader.IgniteServerXXXXXX: Examplelogger1 - 'SqlQuery [type= CanonicalXXXXX, alias=null, sql=orgName = 'SUFFOLK CONSTRUCTION CO INC', args=null, timeout=0, distributedJoins=false, replicatedOnly=false]'
19/10/30 05:33:14 INFO dataloader.igniteXXXXXX: #### Found and returning client for cluster: JalaDalaXXXXX
19/10/30 05:33:14 ERROR yarn.ApplicationMaster: User class threw exception: javax.cache.CacheException: Failed to find SQL table for type: CanonicalXXXXX
javax.cache.CacheException: Failed to find SQL table for type: CanonicalXXXXX
at org.apache.ignite.internal.processors.cache.IgniteCacheProxyImpl.query(IgniteCacheProxyImpl.java:697)
at org.apache.ignite.internal.processors.cache.GatewayProtectedCacheProxy.query(GatewayProtectedCacheProxy.java:376)
at xx.xx.dataloader.IgniteServerXXXXXX$.loadOAData(IgniteServerXXXXXX.scala:70)
at xx.xx.StartXXXXXXX$.startIgniteAndDataloading(StartXXXXXXX.scala:49)
at xx.xx.StartXXXXXXX$delayedInit$body.apply(StartXXXXXXX.scala:13)
at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:71)
at scala.App$$anonfun$main$1.apply(App.scala:71)
at scala.collection.immutable.List.foreach(List.scala:318)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:32)
at scala.App$class.main(App.scala:71)
at xx.xx.StartXXXXXXX$.main(StartXXXXXXX.scala:12)
at xx.xx.StartXXXXXXX.main(StartXXXXXXX.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:567)
Caused by: class org.apache.ignite.internal.processors.query.IgniteSQLException: Failed to find SQL table for type: CanonicalNameOAVO
at org.apache.ignite.internal.processors.query.h2.IgniteH2Indexing.queryDistributedSql(IgniteH2Indexing.java:1843)
at org.apache.ignite.internal.processors.query.GridQueryProcessor$7.applyx(GridQueryProcessor.java:2289)
at org.apache.ignite.internal.processors.query.GridQueryProcessor$7.applyx(GridQueryProcessor.java:2287)
at org.apache.ignite.internal.util.lang.IgniteOutClosureX.apply(IgniteOutClosureX.java:36)
at org.apache.ignite.internal.processors.query.GridQueryProcessor.executeQuery(GridQueryProcessor.java:2707)
at org.apache.ignite.internal.processors.query.GridQueryProcessor.queryDistributedSql(GridQueryProcessor.java:2286)
at org.apache.ignite.internal.processors.query.GridQueryProcessor.querySql(GridQueryProcessor.java:2267)
at org.apache.ignite.internal.processors.cache.IgniteCacheProxyImpl.query(IgniteCacheProxyImpl.java:682)
Could someone please suggest how to resolve this issue?
Thanks,
Chandan
I'm not sure about the root cause of the issue, but first of all you should replace
val queryString = "orgName = '" + companyName + "'"
val companyNameQuery = new SqlQuery[String, CanonicalXXXXX](classOf[CanonicalXXXXX], queryString)
with
val queryString = "orgName = ?"
val companyNameQuery = new SqlQuery[String, CanonicalXXXXX](classOf[CanonicalXXXXX], queryString)
companyNameQuery.setArgs(companyName)
That's the correct way to use this API. By the way, SqlQuery is effectively deprecated; prefer SqlFieldsQuery instead.
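For completeness, a minimal sketch of the SqlFieldsQuery variant (in Java; the cache name string and value type are taken from the question, everything else is illustrative):

import java.util.List;

import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.query.SqlFieldsQuery;

public class FieldsQuerySketch {
    public static List<List<?>> findByOrgName(Ignite ignite, String companyName) {
        // The question uses the OFFICIAL_NAME_CACHE constant; the literal here is illustrative.
        IgniteCache<String, Object> cache = ignite.cache("OFFICIAL_NAME_CACHE");

        // Arguments are bound with setArgs, never concatenated into the SQL.
        SqlFieldsQuery qry = new SqlFieldsQuery(
                "select _key, orgName from CanonicalXXXXX where orgName = ?")
                .setArgs(companyName);

        return cache.query(qry).getAll();
    }
}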

mapreduce job on yarn exited with exitCode: -1000 because of resource changed on src filesystem

Application application_1552978163044_0016 failed 5 times due to AM Container for appattempt_1552978163044_0016_000005 exited with exitCode: -1000
Diagnostics:
java.io.IOException: Resource
abfs://xxx#xxx.dfs.core.windows.net/hdp/apps/2.6.5.3006-29/mapreduce/mapreduce.tar.gz
changed on src filesystem (expected 1552949440000, was 1552978240000
Failing this attempt. Failing the application.
Based on the exception message alone, this seems to be caused by Azure Storage not preserving the original timestamp of the copied file. I found a workaround that recommends changing the source code of yarn-common to disable the timestamp check when copying files, so that the exception is not thrown and the MR job can keep working.
Here is the source code in the latest version of yarn-common that checks the timestamp of the copied file and throws the exception; a sketch of the patched version follows it.
/**
 * Localize files.
 * @param destination destination directory
 * @throws IOException cannot read or write file
 * @throws YarnException subcommand returned an error
 */
private void verifyAndCopy(Path destination)
    throws IOException, YarnException {
  final Path sCopy;
  try {
    sCopy = resource.getResource().toPath();
  } catch (URISyntaxException e) {
    throw new IOException("Invalid resource", e);
  }
  FileSystem sourceFs = sCopy.getFileSystem(conf);
  FileStatus sStat = sourceFs.getFileStatus(sCopy);
  if (sStat.getModificationTime() != resource.getTimestamp()) {
    throw new IOException("Resource " + sCopy +
        " changed on src filesystem (expected " + resource.getTimestamp() +
        ", was " + sStat.getModificationTime());
  }
  if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
    if (!isPublic(sourceFs, sCopy, sStat, statCache)) {
      throw new IOException("Resource " + sCopy +
          " is not publicly accessible and as such cannot be part of the" +
          " public cache.");
    }
  }
  downloadAndUnpack(sCopy, destination);
}
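And here is a sketch of the workaround applied: the timestamp check is downgraded to a warning instead of an IOException. This is a local patch to yarn-common (it assumes the class's existing LOG field), not an official configuration option, so rebuild and redeploy at your own risk:

private void verifyAndCopy(Path destination)
    throws IOException, YarnException {
  final Path sCopy;
  try {
    sCopy = resource.getResource().toPath();
  } catch (URISyntaxException e) {
    throw new IOException("Invalid resource", e);
  }
  FileSystem sourceFs = sCopy.getFileSystem(conf);
  FileStatus sStat = sourceFs.getFileStatus(sCopy);
  if (sStat.getModificationTime() != resource.getTimestamp()) {
    // Workaround: Azure Storage does not preserve the source timestamp,
    // so log the mismatch instead of failing localization.
    LOG.warn("Resource " + sCopy + " changed on src filesystem (expected "
        + resource.getTimestamp() + ", was " + sStat.getModificationTime()
        + "); continuing anyway");
  }
  if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
    if (!isPublic(sourceFs, sCopy, sStat, statCache)) {
      throw new IOException("Resource " + sCopy +
          " is not publicly accessible and as such cannot be part of the" +
          " public cache.");
    }
  }
  downloadAndUnpack(sCopy, destination);
}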

Unloading data from Amazon Redshift to Amazon S3

I am trying to use the following code to unload data into an S3 bucket. It works, but after unloading it throws an error.
Properties props = new Properties();
props.setProperty("user", MasterUsername);
props.setProperty("password", MasterUserPassword);
conn = DriverManager.getConnection(dbURL, props);
stmt = conn.createStatement();
String sql;
sql = "unload('select * from part where p_partkey in (select p_partkey from part limit 10)') to"
        + " 's3://redshiftdump.****' "
        + " DELIMITER AS ',' "
        + "ADDQUOTES "
        + "NULL AS '' "
        + "credentials 'aws_access_key_id=****;aws_secret_access_key=***' "
        + "parallel off"
        + ";";
boolean i = stmt.execute(sql);
stmt.close();
conn.close();
The unload works and creates a file in the bucket, but it gives me this error:
java.sql.SQLException: dataengine.impl.DSISimpleRowCountResult cannot be cast to com.amazon.dsi.dataengine.interfaces.IResultSet
at com.amazon.redshift.core.jdbc42.PGJDBC42Statement.createResultSet(Unknown Source)
at com.amazon.jdbc.common.SStatement.executeQuery(Unknown Source)
What is this error and how can I avoid it? Also, is there any way to dump the table in CSV format? Right now it is dumping the file in FILE format.
You say the UNLOAD works but you receive this error; that suggests to me that you are connecting successfully, but there is a problem in the way your code interacts with the JDBC driver when the query completes.
We provide an example that may be helpful in our documentation, on the page "Connect to Your Cluster Programmatically".
Regarding the output file format: you will get whatever is specified in your UNLOAD SQL, but the filename will have a suffix (for example "000" or "6411_part_00") to indicate which part of the UNLOAD it is.
Use executeUpdate:
def runQuery(sql: String) = {
  Class.forName("com.amazon.redshift.jdbc.Driver")
  val connection = DriverManager.getConnection(url, username, password)
  var statement: Statement = null
  try {
    statement = connection.createStatement()
    statement.setQueryTimeout(redshiftTimeoutInSeconds)
    val result = statement.executeUpdate(sql)
    logger.info(s"statement response code : ${result}")
  } catch {
    case e: Exception => {
      logger.error(s"statement.isCloseOnCompletion :${e.getMessage} ::: ${e.printStackTrace()}")
      throw new IngestionException(e.getMessage)
    }
  } finally {
    if (statement != null) statement.close()
    connection.close()
  }
}
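Applied to the question's own Java snippet, the same fix is a one-line change (a sketch; my reading of the stack trace is that execute() fails when the driver tries to wrap UNLOAD's row-count result as a ResultSet):

// Instead of: boolean i = stmt.execute(sql);
int rows = stmt.executeUpdate(sql); // UNLOAD returns a row count, not a ResultSet
System.out.println("UNLOAD finished, driver reported: " + rows);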

Extract image and metadata from a DICOM image using Java

I am trying to extract the image and metadata from a DICOM image using Java on Windows. I am new to dcm4che.
When I run my code, I get this error:
Exception in thread "main" java.lang.NoClassDefFoundError: org/dcm4che2/image/PartialComponentSampleModel
at org.dcm4che2.imageioimpl.plugins.dcm.DicomImageReaderSpi.createReaderInstance(DicomImageReaderSpi.java:146)
at javax.imageio.spi.ImageReaderSpi.createReaderInstance(ImageReaderSpi.java:320)
at javax.imageio.ImageIO$ImageReaderIterator.next(ImageIO.java:529)
at javax.imageio.ImageIO$ImageReaderIterator.next(ImageIO.java:513)
at miec.extraction.extraction.extractionD(extraction.java:32)
at miec.MIEC.main(MIEC.java:46)
Caused by: java.lang.ClassNotFoundException: org.dcm4che2.image.PartialComponentSampleModel
at java.net.URLClassLoader$1.run(URLClassLoader.java:372)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:360)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 6 more
Please help me... which jar files should I include?
Use PixelMed. Here is an example:
import com.pixelmed.dicom.Attribute;
import com.pixelmed.dicom.AttributeList;
import com.pixelmed.dicom.AttributeTag;
import com.pixelmed.dicom.TagFromName;
import com.pixelmed.display.SourceImage;

public class DisplayImageTagsToConsole {
    private static AttributeList list = new AttributeList();

    public static void readDcmFile() {
        String dicomFile = "E:\\eye\\demo12.dcm";
        try {
            list.read(dicomFile);
            System.out.println("Patient Name:" + getTagInformation(TagFromName.PatientName));
            System.out.println("Patient ID:" + getTagInformation(TagFromName.PatientID));
            System.out.println("Transfer Syntax:" + getTagInformation(TagFromName.TransferSyntaxUID));
            System.out.println("SOP Class:" + getTagInformation(TagFromName.SOPClassUID));
            System.out.println("Modality:" + getTagInformation(TagFromName.Modality));
            System.out.println("Samples Per Pixel:" + getTagInformation(TagFromName.SamplesPerPixel));
            System.out.println("Photometric Interpretation:" + getTagInformation(TagFromName.PhotometricInterpretation));
            System.out.println("Pixel Spacing:" + getTagInformation(TagFromName.PixelSpacing));
            System.out.println("Bits Allocated:" + getTagInformation(TagFromName.BitsAllocated));
            System.out.println("Bits Stored:" + getTagInformation(TagFromName.BitsStored));
            System.out.println("High Bit:" + getTagInformation(TagFromName.HighBit));
            SourceImage img = new com.pixelmed.display.SourceImage(list);
            System.out.println("Number of frames " + img.getNumberOfFrames());
            System.out.println("Width " + img.getWidth());   // all frames will have the same width
            System.out.println("Height " + img.getHeight()); // all frames will have the same height
            System.out.println("Is Grayscale? " + img.isGrayscale());
            System.out.println("Pixel Data present:" + (list.get(TagFromName.PixelData) != null));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static String getTagInformation(AttributeTag attrTag) {
        return Attribute.getDelimitedStringValuesOrEmptyString(list, attrTag);
    }
}
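To run the example, a tiny driver like this should do (the class name here is hypothetical; readDcmFile() is the static method from the answer above):

public class ReadDcmMain {
    public static void main(String[] args) {
        // Reads E:\eye\demo12.dcm and prints the DICOM tags to the console.
        DisplayImageTagsToConsole.readDcmFile();
    }
}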

Lucene - Highlighter throwing exception when using * on search

I'm using Lucene 4.6.1 and Highlighter 4.6.0. Since indexing is working properly, I'm just gonna show my search code:
... code to get all the fields' name/values, numDocs, etc.
...
// Create Query and search
try {
    TopScoreDocCollector collector = TopScoreDocCollector.create(numDocs, true);
    Query q = MultiFieldQueryParser.parse(Version.LUCENE_40, searchTerms, fields, analyzer);
    searcher.search(q, collector);
    ScoreDoc[] hits = collector.topDocs().scoreDocs;
    Highlighter highlighter = new Highlighter(new QueryScorer(q));
    highlighter.setTextFragmenter(new SimpleFragmenter(40));
    int maxNumFragmentsRequired = 2;
    System.out.println("Found " + hits.length + " hits.");
    for (int i = 0; i < hits.length; ++i) {
        int docId = hits[i].doc;
        Document d = searcher.doc(docId);
        for (int j = 0; j < fields.length; j++) {
            if (d.get(fields[j]) != null) {
                String fieldText = d.get(fields[j]).trim();
                TokenStream tokenStream = analyzer.tokenStream(fields[j], new StringReader(fieldText));
                // Create String without the highlighted term
                String unhighlighted = (i + 1) + ". " + fields[j] + " " + d.get(fields[j]).trim() + "<br>";
                // Create the highlighted term
                String highlighted = highlighter.getBestFragments(tokenStream, fieldText, maxNumFragmentsRequired, "...");
                // If the highlighted term really exists
                if (!highlighted.equals(""))
                    unhighlighted = (i + 1) + ". " + fields[j] + " " + highlighted + "<br>";
                response += unhighlighted;
            }
        }
    }
} catch (Exception e) {
    System.out.println("Error searching " + searchTerm + " : " + e.getMessage());
}
System.out.println(response);
}
For example, my index contains many documents named "Process 001", "Process 002", "Process 003", and so on. If I search for Process, I can retrieve all of them (this works perfectly!). The problem happens when I search for proc*, or pr*, or something like that. The errors are here:
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/lucene/queries/CommonTermsQuery
at org.apache.lucene.search.highlight.WeightedSpanTermExtractor.extract(WeightedSpanTermExtractor.java:149)
at org.apache.lucene.search.highlight.WeightedSpanTermExtractor.extract(WeightedSpanTermExtractor.java:99)
at org.apache.lucene.search.highlight.WeightedSpanTermExtractor.getWeightedSpanTerms(WeightedSpanTermExtractor.java:474)
at org.apache.lucene.search.highlight.QueryScorer.initExtractor(QueryScorer.java:217)
at org.apache.lucene.search.highlight.QueryScorer.init(QueryScorer.java:186)
at org.apache.lucene.search.highlight.Highlighter.getBestTextFragments(Highlighter.java:197)
at org.apache.lucene.search.highlight.Highlighter.getBestFragments(Highlighter.java:156)
at org.apache.lucene.search.highlight.Highlighter.getBestFragments(Highlighter.java:460)
at freedom.lucene.service.LuceneTestApplication.search(LuceneTestApplication.java:406)
at freedom.lucene.service.LuceneTestApplication.main(LuceneTestApplication.java:75)
Caused by: java.lang.ClassNotFoundException: org.apache.lucene.queries.CommonTermsQuery
at java.net.URLClassLoader$1.run(Unknown Source)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(Unknown Source)
at java.lang.ClassLoader.loadClass(Unknown Source)
at sun.misc.Launcher$AppClassLoader.loadClass(Unknown Source)
at java.lang.ClassLoader.loadClass(Unknown Source)
... 10 more
The exception occurs on this line:
String highlighted = highlighter.getBestFragments(tokenStream, fieldText, maxNumFragmentsRequired, "...");
If I remove the Highlighter code, the search works properly with *.
Add lucene-queries-4.6.1.jar to your classpath.
CommonTermsQuery is not included in the lucene-core jar.
Add lucene-memory-XX.jar to your External Libraries.