Simple Hive write not working

I'm trying to write a simple POC using Apache Beam and Hive:
public static void main(String[] args) {
    PipelineOptions options = PipelineOptionsFactory
            .fromArgs(args)
            .withValidation()
            .as(PVAOptions.class);

    Pipeline p = Pipeline.create(options);
    p
        .apply(TextIO.read().from("src/test/resources/words.txt"))
        .apply(ParDo.of(new PukeHive()))
        .apply(HCatalogIO.write()
                .withBatchSize(100)
                .withConfigProperties(getHiveConfigProperties())
                .withTable(getHiveTable())
        );

    p.run().waitUntilFinish();
}
static class PukeHive extends DoFn<String, HCatRecord> {
    @ProcessElement
    public void processElement(ProcessContext c) throws IOException {
        DefaultHCatRecord rec = new DefaultHCatRecord(1);
        rec.set(0, c.element());
        c.output(rec);
    }
}
This results in the exception below. Debugging reveals that it happens because Beam's WritableCoder tries to call newInstance() on the abstract class HCatRecord (a minimal standalone repro follows the trace).
org.apache.beam.sdk.Pipeline$PipelineExecutionException: java.lang.RuntimeException: org.apache.beam.sdk.coders.CoderException: unable to deserialize record
at org.apache.beam.runners.direct.DirectRunner$DirectPipelineResult.waitUntilFinish (DirectRunner.java:349)
at org.apache.beam.runners.direct.DirectRunner$DirectPipelineResult.waitUntilFinish (DirectRunner.java:319)
at org.apache.beam.runners.direct.DirectRunner.run (DirectRunner.java:210)
at org.apache.beam.runners.direct.DirectRunner.run (DirectRunner.java:66)
at org.apache.beam.sdk.Pipeline.run (Pipeline.java:311)
at org.apache.beam.sdk.Pipeline.run (Pipeline.java:297)
at com.comp.beam.Main.main (Main.java:48)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:498)
at org.codehaus.mojo.exec.ExecJavaMojo$1.run (ExecJavaMojo.java:282)
at java.lang.Thread.run (Thread.java:748)
Caused by: java.lang.RuntimeException: org.apache.beam.sdk.coders.CoderException: unable to deserialize record
at org.apache.beam.runners.direct.ImmutabilityCheckingBundleFactory$ImmutabilityEnforcingBundle.add (ImmutabilityCheckingBundleFactory.java:114)
at org.apache.beam.runners.direct.ParDoEvaluator$BundleOutputManager.output (ParDoEvaluator.java:242)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.outputWindowedValue (SimpleDoFnRunner.java:219)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.access$700 (SimpleDoFnRunner.java:69)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner$DoFnProcessContext.output (SimpleDoFnRunner.java:517)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner$DoFnProcessContext.output (SimpleDoFnRunner.java:505)
at com.comp.beam.Main$PukeHive.processElement (Main.java:61)
Caused by: org.apache.beam.sdk.coders.CoderException: unable to deserialize record
at org.apache.beam.sdk.io.hadoop.WritableCoder.decode (WritableCoder.java:92)
at org.apache.beam.sdk.io.hadoop.WritableCoder.decode (WritableCoder.java:54)
at org.apache.beam.sdk.coders.Coder.decode (Coder.java:170)
at org.apache.beam.sdk.util.CoderUtils.decodeFromSafeStream (CoderUtils.java:122)
at org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray (CoderUtils.java:105)
at org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray (CoderUtils.java:99)
at org.apache.beam.sdk.util.CoderUtils.clone (CoderUtils.java:148)
at org.apache.beam.sdk.util.MutationDetectors$CodedValueMutationDetector.<init> (MutationDetectors.java:117)
at org.apache.beam.sdk.util.MutationDetectors.forValueWithCoder (MutationDetectors.java:46)
at org.apache.beam.runners.direct.ImmutabilityCheckingBundleFactory$ImmutabilityEnforcingBundle.add (ImmutabilityCheckingBundleFactory.java:112)
at org.apache.beam.runners.direct.ParDoEvaluator$BundleOutputManager.output (ParDoEvaluator.java:242)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.outputWindowedValue (SimpleDoFnRunner.java:219)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.access$700 (SimpleDoFnRunner.java:69)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner$DoFnProcessContext.output (SimpleDoFnRunner.java:517)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner$DoFnProcessContext.output (SimpleDoFnRunner.java:505)
at com.comp.beam.Main$PukeHive.processElement (Main.java:61)
at com.comp.beam.Main$PukeHive$DoFnInvoker.invokeProcessElement (Unknown Source)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.invokeProcessElement (SimpleDoFnRunner.java:185)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.processElement (SimpleDoFnRunner.java:149)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimplePushbackSideInputDoFnRunner.processElementInReadyWindows (SimplePushbackSideInputDoFnRunner.java:78)
at org.apache.beam.runners.direct.ParDoEvaluator.processElement (ParDoEvaluator.java:189)
at org.apache.beam.runners.direct.DoFnLifecycleManagerRemovingTransformEvaluator.processElement (DoFnLifecycleManagerRemovingTransformEvaluator.java:55)
at org.apache.beam.runners.direct.DirectTransformExecutor.processElements (DirectTransformExecutor.java:161)
at org.apache.beam.runners.direct.DirectTransformExecutor.run (DirectTransformExecutor.java:125)
at java.util.concurrent.Executors$RunnableAdapter.call (Executors.java:511)
at java.util.concurrent.FutureTask.run (FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker (ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run (ThreadPoolExecutor.java:624)
at java.lang.Thread.run (Thread.java:748)
Caused by: java.lang.InstantiationException
at sun.reflect.InstantiationExceptionConstructorAccessorImpl.newInstance (InstantiationExceptionConstructorAccessorImpl.java:48)
at java.lang.reflect.Constructor.newInstance (Constructor.java:423)
at org.apache.beam.sdk.io.hadoop.WritableCoder.decode (WritableCoder.java:85)
at org.apache.beam.sdk.io.hadoop.WritableCoder.decode (WritableCoder.java:54)
at org.apache.beam.sdk.coders.Coder.decode (Coder.java:170)
at org.apache.beam.sdk.util.CoderUtils.decodeFromSafeStream (CoderUtils.java:122)
at org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray (CoderUtils.java:105)
at org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray (CoderUtils.java:99)
at org.apache.beam.sdk.util.CoderUtils.clone (CoderUtils.java:148)
at org.apache.beam.sdk.util.MutationDetectors$CodedValueMutationDetector.<init> (MutationDetectors.java:117)
at org.apache.beam.sdk.util.MutationDetectors.forValueWithCoder (MutationDetectors.java:46)
at org.apache.beam.runners.direct.ImmutabilityCheckingBundleFactory$ImmutabilityEnforcingBundle.add (ImmutabilityCheckingBundleFactory.java:112)
at org.apache.beam.runners.direct.ParDoEvaluator$BundleOutputManager.output (ParDoEvaluator.java:242)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.outputWindowedValue (SimpleDoFnRunner.java:219)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.access$700 (SimpleDoFnRunner.java:69)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner$DoFnProcessContext.output (SimpleDoFnRunner.java:517)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner$DoFnProcessContext.output (SimpleDoFnRunner.java:505)
at com.comp.beam.Main$PukeHive.processElement (Main.java:61)
at com.comp.beam.Main$PukeHive$DoFnInvoker.invokeProcessElement (Unknown Source)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.invokeProcessElement (SimpleDoFnRunner.java:185)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimpleDoFnRunner.processElement (SimpleDoFnRunner.java:149)
at org.apache.beam.repackaged.beam_runners_direct_java.runners.core.SimplePushbackSideInputDoFnRunner.processElementInReadyWindows (SimplePushbackSideInputDoFnRunner.java:78)
at org.apache.beam.runners.direct.ParDoEvaluator.processElement (ParDoEvaluator.java:189)
at org.apache.beam.runners.direct.DoFnLifecycleManagerRemovingTransformEvaluator.processElement (DoFnLifecycleManagerRemovingTransformEvaluator.java:55)
at org.apache.beam.runners.direct.DirectTransformExecutor.processElements (DirectTransformExecutor.java:161)
at org.apache.beam.runners.direct.DirectTransformExecutor.run (DirectTransformExecutor.java:125)
at java.util.concurrent.Executors$RunnableAdapter.call (Executors.java:511)
at java.util.concurrent.FutureTask.run (FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker (ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run (ThreadPoolExecutor.java:624)
at java.lang.Thread.run (Thread.java:748)
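For illustration, here is a minimal standalone sketch (plain Java, independent of Beam and Hive) that reproduces the InstantiationException at the bottom of the trace: reflectively instantiating an abstract class, which is what WritableCoder ends up doing here, always fails this way.
abstract class AbstractRecord {
}

public class Repro {
    public static void main(String[] args) throws Exception {
        // Throws java.lang.InstantiationException: abstract classes cannot be instantiated.
        Object rec = AbstractRecord.class.getDeclaredConstructor().newInstance();
        System.out.println(rec);
    }
}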
How can I feed my data into Hive using Beam?

I believe you need to register the coder for HCatRecord, which would be:
Pipeline p = Pipeline.create(options);
p.getCoderRegistry()
    .registerCoderForClass(HCatRecord.class, WritableCoder.of(DefaultHCatRecord.class));
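Alternatively (a sketch, untested against your exact setup), you can set the coder directly on the PCollection produced by the ParDo instead of registering it globally. Note the unchecked cast: WritableCoder.of(DefaultHCatRecord.class) is typed to the concrete subclass, while the PCollection carries the abstract HCatRecord.
PCollection<HCatRecord> records = p
    .apply(TextIO.read().from("src/test/resources/words.txt"))
    .apply(ParDo.of(new PukeHive()));

// Coder<DefaultHCatRecord> cast to Coder<HCatRecord>; safe here because
// PukeHive only ever emits DefaultHCatRecord instances.
@SuppressWarnings({"unchecked", "rawtypes"})
Coder<HCatRecord> coder = (Coder) WritableCoder.of(DefaultHCatRecord.class);
records.setCoder(coder);

records.apply(HCatalogIO.write()
    .withBatchSize(100)
    .withConfigProperties(getHiveConfigProperties())
    .withTable(getHiveTable()));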
To test the registration approach, I added the following to the HCatalogIOTest class of the Beam project. It uses a different schema, but should demonstrate a complete example:
@Test
@NeedsEmptyTestTables
public void testSOKricket() {
    // Register the coder
    defaultPipeline
        .getCoderRegistry()
        .registerCoderForClass(HCatRecord.class, WritableCoder.of(DefaultHCatRecord.class));

    defaultPipeline
        .apply(TextIO.read().from("/tmp/words.txt"))
        .apply(ParDo.of(new PukeHive()))
        .apply(
            HCatalogIO.write()
                .withConfigProperties(getConfigPropertiesAsMap(service.getHiveConf()))
                .withDatabase(TEST_DATABASE)
                .withTable(TEST_TABLE)
                .withPartition(new java.util.HashMap<>())
                .withBatchSize(1L));

    defaultPipeline.run();
}
static class PukeHive extends DoFn<String, HCatRecord> {
    @ProcessElement
    public void processElement(ProcessContext c) throws Exception {
        // our test schema is (mycol1 string, mycol2 int)
        DefaultHCatRecord rec = new DefaultHCatRecord(2);
        rec.set(0, c.element());
        rec.set(1, 1);
        c.output(rec);
    }
}

Related

java.lang.NoSuchMethodError: com.google.flatbuffers.FlatBufferBuilder.createString(Ljava/lang/CharSequence;)I

While running PySpark 3 with pandas 1.1.5 and pyarrow 2.0.0, I get the error below.
Spark Code:
import pyarrow
import pandas as pd

df = pd.DataFrame({'col1': [1, 2, 3], 'col2': [4, 5, 6]})
df_sp = spark.createDataFrame(df)
df_sp.cache().count()
schema = df_sp.schema

def dummy_udf(data):
    return data

res = df_sp.groupby('col1').applyInPandas(dummy_udf, schema=schema)
print(res.cache().count())
print(res.toPandas())
Exception:
21/09/17 07:28:10 ERROR util.Utils: Uncaught exception in thread stdout writer for python3
java.lang.NoSuchMethodError: com.google.flatbuffers.FlatBufferBuilder.createString(Ljava/lang/CharSequence;)I
at org.apache.arrow.vector.types.pojo.Field.getField(Field.java:204)
at org.apache.arrow.vector.types.pojo.Schema.getSchema(Schema.java:178)
at org.apache.arrow.vector.ipc.message.MessageSerializer.serializeMetadata(MessageSerializer.java:187)
at org.apache.arrow.vector.ipc.message.MessageSerializer.serialize(MessageSerializer.java:165)
at org.apache.arrow.vector.ipc.ArrowWriter.ensureStarted(ArrowWriter.java:159)
at org.apache.arrow.vector.ipc.ArrowWriter.start(ArrowWriter.java:112)
at org.apache.spark.sql.execution.python.ArrowPythonRunner$$anon$1.$anonfun$writeIteratorToStream$1(ArrowPythonRunner.scala:86)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
at org.apache.spark.sql.execution.python.ArrowPythonRunner$$anon$1.writeIteratorToStream(ArrowPythonRunner.scala:103)
at org.apache.spark.api.python.BasePythonRunner$WriterThread.$anonfun$run$1(PythonRunner.scala:397)
at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)
at org.apache.spark.api.python.BasePythonRunner$WriterThread.run(PythonRunner.scala:232)
21/09/17 07:28:10 ERROR util.SparkUncaughtExceptionHandler: Uncaught exception in thread Thread[stdout writer for python3,5,main]
java.lang.NoSuchMethodError: com.google.flatbuffers.FlatBufferBuilder.createString(Ljava/lang/CharSequence;)I
at org.apache.arrow.vector.types.pojo.Field.getField(Field.java:204)
at org.apache.arrow.vector.types.pojo.Schema.getSchema(Schema.java:178)
at org.apache.arrow.vector.ipc.message.MessageSerializer.serializeMetadata(MessageSerializer.java:187)
at org.apache.arrow.vector.ipc.message.MessageSerializer.serialize(MessageSerializer.java:165)
at org.apache.arrow.vector.ipc.ArrowWriter.ensureStarted(ArrowWriter.java:159)
at org.apache.arrow.vector.ipc.ArrowWriter.start(ArrowWriter.java:112)
at org.apache.spark.sql.execution.python.ArrowPythonRunner$$anon$1.$anonfun$writeIteratorToStream$1(ArrowPythonRunner.scala:86)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
at org.apache.spark.sql.execution.python.ArrowPythonRunner$$anon$1.writeIteratorToStream(ArrowPythonRunner.scala:103)
at org.apache.spark.api.python.BasePythonRunner$WriterThread.$anonfun$run$1(PythonRunner.scala:397)
at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)
at org.apache.spark.api.python.BasePythonRunner$WriterThread.run(PythonRunner.scala:232)
21/09/17 07:28:10 WARN storage.BlockManager: Putting block rdd_25_69 failed due to exception org.apache.spark.SparkException: Python worker exited unexpectedly (crashed).
21/09/17 07:28:10 INFO memory.MemoryStore: MemoryStore cleared
21/09/17 07:28:10 INFO storage.BlockManager: BlockManager stopped
21/09/17 07:28:10 INFO util.ShutdownHookManager: Shutdown hook called
Checking the createString() implementations in the FlatBufferBuilder.java class, there are two overloads: one accepting a CharSequence and another accepting a ByteBuffer as the argument.
public int createString(CharSequence s) { ... }
public int createString(ByteBuffer s) { ... }
Checking the getField() implementation in the Field.java class, a String value is passed:
public class Field {
    private final String name;

    public int getField(FlatBufferBuilder builder) {
        int nameOffset = name == null ? -1 : builder.createString(name);
    }
}
To fix this issue, we need to pass either a CharSequence or a ByteBuffer as the argument to createString() inside the getField() method.
Solution:
public int getField(FlatBufferBuilder builder) {
    // Wrap only after the null check, and encode explicitly as UTF-8.
    int nameOffset = name == null
        ? -1
        : builder.createString(java.nio.ByteBuffer.wrap(name.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    .......
}

Back-End (JVM) internal error. Why do I get this IDE error but others with the same code do not?

I'm part of a group that is developing a program in Kotlin. I recently pulled fresh code off the development branch. The problem is I get this strange error. I am the only person who gets it; my groupmates have the same code and it runs fine for them.
I've tried googling the error, but didn't find any help, as it is quite a specific one. Plus, like I said, my groupmates do not get this error, so it is probably not related to the code.
The error I get is this:
Error:Kotlin: [Internal Error] java.lang.IllegalStateException: Backend Internal error: Exception during code generation
Cause: Back-end (JVM) Internal error: Error type encountered: [ERROR : For SuccessOrFailure] (ErrorType).
Cause: Error type encountered: [ERROR : For SuccessOrFailure] (ErrorType).
File being compiled at position: (32,28) in C:/Users/Gebruiker/Desktop/Repo/game/src/main/kotlin/nl/han/asd/a1/network/networkstates/EndRoundState.kt
The root cause was thrown at: KotlinTypeMapper.java:116
File being compiled at position: file://C:/Users/Gebruiker/Desktop/Repo/game/src/main/kotlin/nl/han/asd/a1/network/networkstates/EndRoundState.kt
The root cause was thrown at: ExpressionCodegen.java:322
at org.jetbrains.kotlin.codegen.CompilationErrorHandler.lambda$static$0(CompilationErrorHandler.java:24)
at org.jetbrains.kotlin.codegen.PackageCodegenImpl.generate(PackageCodegenImpl.java:74)
at org.jetbrains.kotlin.codegen.DefaultCodegenFactory.generatePackage(CodegenFactory.kt:97)
at org.jetbrains.kotlin.codegen.DefaultCodegenFactory.generateModule(CodegenFactory.kt:68)
at org.jetbrains.kotlin.codegen.KotlinCodegenFacade.doGenerateFiles(KotlinCodegenFacade.java:47)
at org.jetbrains.kotlin.codegen.KotlinCodegenFacade.compileCorrectFiles(KotlinCodegenFacade.java:39)
at org.jetbrains.kotlin.cli.jvm.compiler.KotlinToJVMBytecodeCompiler.generate(KotlinToJVMBytecodeCompiler.kt:446)
at org.jetbrains.kotlin.cli.jvm.compiler.KotlinToJVMBytecodeCompiler.compileModules$cli(KotlinToJVMBytecodeCompiler.kt:142)
at org.jetbrains.kotlin.cli.jvm.K2JVMCompiler.doExecute(K2JVMCompiler.kt:161)
at org.jetbrains.kotlin.cli.jvm.K2JVMCompiler.doExecute(K2JVMCompiler.kt:57)
at org.jetbrains.kotlin.cli.common.CLICompiler.execImpl(CLICompiler.java:96)
at org.jetbrains.kotlin.cli.common.CLICompiler.execImpl(CLICompiler.java:52)
at org.jetbrains.kotlin.cli.common.CLITool.exec(CLITool.kt:93)
at org.jetbrains.kotlin.daemon.CompileServiceImpl$compile$$inlined$ifAlive$lambda$1.invoke(CompileServiceImpl.kt:402)
at org.jetbrains.kotlin.daemon.CompileServiceImpl$compile$$inlined$ifAlive$lambda$1.invoke(CompileServiceImpl.kt:101)
at org.jetbrains.kotlin.daemon.CompileServiceImpl$doCompile$$inlined$ifAlive$lambda$2.invoke(CompileServiceImpl.kt:937)
at org.jetbrains.kotlin.daemon.CompileServiceImpl$doCompile$$inlined$ifAlive$lambda$2.invoke(CompileServiceImpl.kt:101)
at org.jetbrains.kotlin.daemon.common.DummyProfiler.withMeasure(PerfUtils.kt:137)
at org.jetbrains.kotlin.daemon.CompileServiceImpl.checkedCompile(CompileServiceImpl.kt:977)
at org.jetbrains.kotlin.daemon.CompileServiceImpl.doCompile(CompileServiceImpl.kt:936)
at org.jetbrains.kotlin.daemon.CompileServiceImpl.compile(CompileServiceImpl.kt:400)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at java.rmi/sun.rmi.server.UnicastServerRef.dispatch(UnicastServerRef.java:359)
at java.rmi/sun.rmi.transport.Transport$1.run(Transport.java:200)
at java.rmi/sun.rmi.transport.Transport$1.run(Transport.java:197)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.rmi/sun.rmi.transport.Transport.serviceCall(Transport.java:196)
at java.rmi/sun.rmi.transport.tcp.TCPTransport.handleMessages(TCPTransport.java:562)
at java.rmi/sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run0(TCPTransport.java:796)
at java.rmi/sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.lambda$run$0(TCPTransport.java:677)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.rmi/sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run(TCPTransport.java:676)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: org.jetbrains.kotlin.codegen.CompilationException: Back-end (JVM) Internal error: Error type encountered: [ERROR : For SuccessOrFailure] (ErrorType).
Cause: Error type encountered: [ERROR : For SuccessOrFailure] (ErrorType).
File being compiled at position: (32,28) in C:/Users/Gebruiker/Desktop/Repo/game/src/main/kotlin/nl/han/asd/a1/network/networkstates/EndRoundState.kt
The root cause was thrown at: KotlinTypeMapper.java:116
at org.jetbrains.kotlin.codegen.ExpressionCodegen.genQualified(ExpressionCodegen.java:322)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.genQualified(ExpressionCodegen.java:281)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.gen(ExpressionCodegen.java:354)
at org.jetbrains.kotlin.codegen.CallGenerator$DefaultCallGenerator.genValueAndPut(CallGenerator.kt:68)
at org.jetbrains.kotlin.codegen.CallBasedArgumentGenerator.generateExpression(CallBasedArgumentGenerator.java:58)
at org.jetbrains.kotlin.codegen.ArgumentGenerator.generate(ArgumentGenerator.kt:68)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.invokeMethodWithArguments(ExpressionCodegen.java:2461)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.invokeMethodWithArguments(ExpressionCodegen.java:2433)
at org.jetbrains.kotlin.codegen.Callable$invokeMethodWithArguments$1.invoke(Callable.kt:41)
at org.jetbrains.kotlin.codegen.Callable$invokeMethodWithArguments$1.invoke(Callable.kt:13)
at org.jetbrains.kotlin.codegen.OperationStackValue.putSelector(StackValue.kt:79)
at org.jetbrains.kotlin.codegen.StackValueWithLeaveTask.putSelector(StackValue.kt:67)
at org.jetbrains.kotlin.codegen.StackValue.put(StackValue.java:112)
at org.jetbrains.kotlin.codegen.StackValue.put(StackValue.java:101)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.putStackValue(ExpressionCodegen.java:378)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.gen(ExpressionCodegen.java:363)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.gen(ExpressionCodegen.java:358)
at org.jetbrains.kotlin.codegen.MemberCodegen.generateInitializers(MemberCodegen.java:493)
at org.jetbrains.kotlin.codegen.ConstructorCodegen.generatePrimaryConstructorImpl(ConstructorCodegen.java:213)
at org.jetbrains.kotlin.codegen.ConstructorCodegen.access$000(ConstructorCodegen.java:41)
at org.jetbrains.kotlin.codegen.ConstructorCodegen$1.doGenerateBody(ConstructorCodegen.java:97)
at org.jetbrains.kotlin.codegen.FunctionGenerationStrategy$CodegenBased.generateBody(FunctionGenerationStrategy.java:84)
at org.jetbrains.kotlin.codegen.FunctionCodegen.generateMethodBody(FunctionCodegen.java:674)
at org.jetbrains.kotlin.codegen.FunctionCodegen.generateMethodBody(FunctionCodegen.java:435)
at org.jetbrains.kotlin.codegen.FunctionCodegen.generateMethod(FunctionCodegen.java:266)
at org.jetbrains.kotlin.codegen.ConstructorCodegen.generatePrimaryConstructor(ConstructorCodegen.java:93)
at org.jetbrains.kotlin.codegen.ImplementationBodyCodegen.generateConstructors(ImplementationBodyCodegen.java:462)
at org.jetbrains.kotlin.codegen.ClassBodyCodegen.generateBody(ClassBodyCodegen.java:83)
at org.jetbrains.kotlin.codegen.MemberCodegen.generate(MemberCodegen.java:128)
at org.jetbrains.kotlin.codegen.MemberCodegen.genClassOrObject(MemberCodegen.java:302)
at org.jetbrains.kotlin.codegen.MemberCodegen.genClassOrObject(MemberCodegen.java:286)
at org.jetbrains.kotlin.codegen.PackageCodegenImpl.generateClassOrObject(PackageCodegenImpl.java:161)
at org.jetbrains.kotlin.codegen.PackageCodegenImpl.generateClassesAndObjectsInFile(PackageCodegenImpl.java:86)
at org.jetbrains.kotlin.codegen.PackageCodegenImpl.generateFile(PackageCodegenImpl.java:119)
at org.jetbrains.kotlin.codegen.PackageCodegenImpl.generate(PackageCodegenImpl.java:66)
... 36 more
Caused by: java.lang.IllegalStateException: Error type encountered: [ERROR : For SuccessOrFailure] (ErrorType).
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper$1.processErrorType(KotlinTypeMapper.java:116)
at org.jetbrains.kotlin.load.kotlin.TypeSignatureMappingKt.mapType(typeSignatureMapping.kt:91)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.mapType(KotlinTypeMapper.java:512)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.writeParameterType(KotlinTypeMapper.java:1518)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.writeParameter(KotlinTypeMapper.java:1488)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.writeParameter(KotlinTypeMapper.java:1477)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.lambda$mapSignatureWithCustomParameters$4(KotlinTypeMapper.java:1295)
at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:183)
at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
at java.base/java.util.Collections$2.tryAdvance(Collections.java:4745)
at java.base/java.util.Collections$2.forEachRemaining(Collections.java:4753)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150)
at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173)
at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.base/java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:497)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.mapSignatureWithCustomParameters(KotlinTypeMapper.java:1293)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.mapSignature(KotlinTypeMapper.java:1212)
at org.jetbrains.kotlin.codegen.state.KotlinTypeMapper.mapSignatureWithGeneric(KotlinTypeMapper.java:1171)
at org.jetbrains.kotlin.codegen.FunctionGenerationStrategy.mapMethodSignature(FunctionGenerationStrategy.java:46)
at org.jetbrains.kotlin.codegen.FunctionCodegen.generateMethod(FunctionCodegen.java:204)
at org.jetbrains.kotlin.codegen.FunctionCodegen.generateMethod(FunctionCodegen.java:183)
at org.jetbrains.kotlin.codegen.coroutines.CoroutineCodegenForLambda.generateResumeImpl(CoroutineCodegen.kt:421)
at org.jetbrains.kotlin.codegen.coroutines.CoroutineCodegenForLambda.generateClosureBody(CoroutineCodegen.kt:234)
at org.jetbrains.kotlin.codegen.ClosureCodegen.generateBody(ClosureCodegen.java:166)
at org.jetbrains.kotlin.codegen.coroutines.CoroutineCodegenForLambda.generateBody(CoroutineCodegen.kt:242)
at org.jetbrains.kotlin.codegen.MemberCodegen.generate(MemberCodegen.java:128)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.genClosure(ExpressionCodegen.java:1022)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.genClosure(ExpressionCodegen.java:992)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.visitLambdaExpression(ExpressionCodegen.java:983)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.visitLambdaExpression(ExpressionCodegen.java:111)
at org.jetbrains.kotlin.psi.KtLambdaExpression.accept(KtLambdaExpression.java:39)
at org.jetbrains.kotlin.codegen.ExpressionCodegen.genQualified(ExpressionCodegen.java:299)
... 70 more
I'm sure this has to do with my IDE or some local setting. Again, my groupmates don't get this error. The file mentioned in the error, EndRoundState.kt, looks like this, in case it helps clarify my problem:
package nl.han.asd.a1.network.networkstates

import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import nl.han.asd.a1.network.NetworkLogic
import nl.han.asd.a1.network.Player
import nl.han.asd.a1.network.networkmessages.NetworkMessage
import nl.han.asd.a1.network.networkmessages.NetworkMessageTypes.*
import nl.han.asd.a1.network.networkmessages.messagetypes.GameData
import nl.han.asd.a1.utilities.gameclock.IClock

class EndRoundState(networkLogic: NetworkLogic, var players: MutableList<Player>, hash: String, val clock: IClock) : NetworkState(networkLogic) {
    private var rightstate = true
    private val endRoundDuration = 5000 //How long should the program be in this state? default = 5000
    private val checkTimerInterval = 500L //How often should we check if the timer has expired. default = 500L

    init {
        var hashes: MutableList<String> = mutableListOf()
        players.forEach {
            if (it.hash != null) {
                hashes.add(it.hash.toString())
            }
        }
        val rightHash = getRightHash(hashes)
        if (hash == rightHash) {
            this.networkLogic.correctGameState(hash)
        } else {
            rightstate = false
        }
        GlobalScope.launch {
            //launch a coroutine that will run alongside the other code. Think of it as a thread-lite. This will change the state after endRoundDuration expires
            val endTime: Long = clock.getCurrentTime() + endRoundDuration
            while (true) {
                delay(checkTimerInterval)
                if (clock.getCurrentTime() >= endTime) {
                    networkLogic.startNewRound()
                    return@launch
                }
            }
        }
    }

    override fun handleMessage(message: NetworkMessage, ip: String) {
        when (message.networkMessageType) {
            ROUND_IS_OVER -> ignore()
            CONNECT_REQUEST -> ignore()
            CONNECT_RESPONSE -> ignore()
            GAME_ANNOUNCE -> ignore()
            GAME_DATA -> {
                if (!rightstate) {
                    val gameData = message as GameData
                    this.networkLogic.setGameState(gameData.data.game)
                }
            }
            INITIATOR_MESSAGE -> ignore()
            MOVE -> TODO()
            RECONNECT_REQUEST -> TODO()
        }
    }

    private fun ignore() {
    }

    private fun getRightHash(hashes: MutableList<String>): String {
        val frequenciesByHash = hashes.groupingBy { it }.eachCount()
        var highestCount = 0
        var rightHash: String? = null
        frequenciesByHash.forEach {
            if (it.value > highestCount) {
                highestCount = it.value
                rightHash = it.key
            }
        }
        return run { if (rightHash.isNullOrEmpty()) "" else rightHash!! }
    }
}
I just want the code to compile on my machine, like it does for my colleagues. It does compile through Maven, just not through IntelliJ.
Thank you very much!
I fixed it by just upgrading the Kotlin plugin. My plugin version is currently 1.3.41-release-IJ2018.2-1.
I resolved this issue by uninstalling IntelliJ including all settings/plugins and reinstalling.
Uninstalling the IDE without removing settings/plugins did not work.
I've encountered this in one of our test cases where we had a function with a long name.
Granted, this development machine was a Windows 10 machine, and Windows has a history of problems with long filenames: https://community.spiceworks.com/topic/2006950-file-path-too-long-shortening-names-is-only-the-solution.
Try to see if the filename or the function name is too long, and shorten it. It certainly helped me to reduce the 93-character test function name to 70 characters.
Also check whether you are using weird characters or emojis; sometimes they can mess up the file generation.
Have fun and be safe out there.
In my case, it was caused by using my custom suspend operator fun plusAssign and calling it via +=. When I replace the += with an explicit plusAssign call, it compiles fine. I can also use the same += elsewhere just fine. No idea what's going on. A minimal sketch of the workaround follows.
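For illustration, a minimal sketch of that workaround (the Counter class here is hypothetical, not from our project):
import kotlinx.coroutines.runBlocking

class Counter {
    var value = 0

    // A custom suspending plusAssign operator, analogous to the one that triggered the error.
    suspend operator fun plusAssign(n: Int) {
        value += n
    }
}

fun main() = runBlocking {
    val counter = Counter()
    // counter += 1       // the operator form that hit the back-end error in my setup
    counter.plusAssign(1) // the explicit call that compiled fine
    println(counter.value)
}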

Apache Ignite: Failed to load job class [class=org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job]

I am trying to start a server using the ignite.sh script and am getting the above error (failed to load HadoopV2Job). "config/default-config.xml" is being passed to CommandLineStartup and hasn't been changed.
Has anyone come across this issue, or does anyone know how to fix it?
My Ignite version is 1.4.0 and here is the full stack trace:
class org.apache.ignite.IgniteException: Failed to start processor: HadoopProcessor [idCtr=0]
at org.apache.ignite.internal.util.IgniteUtils.convertException(IgniteUtils.java:881)
at org.apache.ignite.Ignition.start(Ignition.java:349)
at org.apache.ignite.startup.cmdline.CommandLineStartup.main(CommandLineStartup.java:302)
Caused by: class org.apache.ignite.IgniteCheckedException: Failed to start processor: HadoopProcessor [idCtr=0]
at org.apache.ignite.internal.IgniteKernal.startProcessor(IgniteKernal.java:1504)
at org.apache.ignite.internal.IgniteKernal.start(IgniteKernal.java:888)
at org.apache.ignite.internal.IgnitionEx$IgniteNamedInstance.start0(IgnitionEx.java:1617)
at org.apache.ignite.internal.IgnitionEx$IgniteNamedInstance.start(IgnitionEx.java:1484)
at org.apache.ignite.internal.IgnitionEx.start0(IgnitionEx.java:965)
at org.apache.ignite.internal.IgnitionEx.startConfigurations(IgnitionEx.java:892)
at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:784)
at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:705)
at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:576)
at org.apache.ignite.internal.IgnitionEx.start(IgnitionEx.java:546)
at org.apache.ignite.Ignition.start(Ignition.java:346)
... 1 more
Caused by: class org.apache.ignite.IgniteCheckedException: Failed to load job class [class=org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job]
at org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobTracker.start(HadoopJobTracker.java:167)
at org.apache.ignite.internal.processors.hadoop.HadoopProcessor.start(HadoopProcessor.java:103)
at org.apache.ignite.internal.IgniteKernal.startProcessor(IgniteKernal.java:1501)
... 11 more
Caused by: java.lang.IllegalArgumentException
at org.objectweb.asm.ClassReader.<init>(Unknown Source)
at org.objectweb.asm.ClassReader.<init>(Unknown Source)
at org.objectweb.asm.ClassReader.<init>(Unknown Source)
at org.apache.ignite.internal.processors.hadoop.HadoopClassLoader.hasExternalDependencies(HadoopClassLoader.java:288)
at org.apache.ignite.internal.processors.hadoop.HadoopClassLoader.loadClass(HadoopClassLoader.java:162)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobTracker.start(HadoopJobTracker.java:163)
... 13 more
Shane, did you compile the Ignite code with JDK 8?
It looks like the org.objectweb.asm library failed to parse the bytecode of the class "org.apache.ignite.internal.processors.hadoop.v2.HadoopV2Job" because the bytecode version is higher than 1.7:
/**
 * Constructs a new {@link ClassReader} object.
 *
 * @param b   the bytecode of the class to be read.
 * @param off the start offset of the class data.
 * @param len the length of the class data.
 */
public ClassReader(final byte[] b, final int off, final int len) {
    this.b = b;
    // checks the class version
    if (readShort(off + 6) > Opcodes.V1_7) {
        throw new IllegalArgumentException();
    }
Please try to build Ignite with JDK 1.7, or specify target level = 1.7 with JDK 8. Does that solve the problem?
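If it helps to verify, here is a small standalone sketch (my own helper, not part of Ignite) that prints a class file's bytecode version. Major version 51 is Java 7 and 52 is Java 8, and the ClassReader constructor quoted above rejects anything greater than Opcodes.V1_7 (51):
import java.io.DataInputStream;
import java.io.FileInputStream;

public class ClassVersionCheck {
    public static void main(String[] args) throws Exception {
        try (DataInputStream in = new DataInputStream(new FileInputStream(args[0]))) {
            int magic = in.readInt();           // should be 0xCAFEBABE
            int minor = in.readUnsignedShort();
            int major = in.readUnsignedShort(); // 51 = Java 7, 52 = Java 8
            System.out.printf("magic=%08X, major=%d, minor=%d%n", magic, major, minor);
        }
    }
}
Running it against HadoopV2Job.class extracted from your build should show whether the class was compiled for a level higher than 1.7.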

SonarQube 4.5.1 custom rules plugin is not working

I have created a custom rule plugin using the custom rule code snippet. Now I have added another rule which reports an issue when an anonymous class has more than 30 lines.
I built the custom rule plugin jar using the mvn package command, put it in the Sonar server's extensions/plugins directory, and restarted the server.
My rule can be viewed from the Sonar server, but it does not flag the code, even though there is a class containing an anonymous class with more than 30 lines. My custom rule is given below.
@Rule(
    key = AnonymousClassesTooBigCheck.KEY,
    name = "Avoid too big classes",
    description = "avoid too big classes",
    tags = {"brain-overload"},
    priority = Priority.MAJOR)
@BelongsToProfile(title = "Sonar way", priority = Priority.MAJOR)
public class AnonymousClassesTooBigCheck extends BaseTreeVisitor implements JavaFileScanner {

    public static final String KEY = "AD0001";
    private static final RuleKey RULE_KEY = RuleKey.of(MyJavaRulesDefinition.REPOSITORY_KEY, KEY);
    private static final int DEFAULT_MAX = 30;

    @RuleProperty(defaultValue = "" + DEFAULT_MAX)
    public int max = DEFAULT_MAX;

    private JavaFileScannerContext context;

    /**
     * Flag to skip check for class bodies of EnumConstants.
     */
    private boolean isEnumConstantBody;

    @Override
    public void scanFile(JavaFileScannerContext context) {
        this.context = context;
        isEnumConstantBody = false;
        scan(context.getTree());
    }

    @Override
    public void visitNewClass(NewClassTree tree) {
        if (tree.classBody() != null && !isEnumConstantBody) {
            int lines = getNumberOfLines(tree.classBody());
            if (lines > max) {
                context.addIssue(tree, RULE_KEY, "Reduce this anonymous class number of lines from " + lines + " to at most " + max + ", or make it a named class.");
            }
        }
        isEnumConstantBody = false;
        super.visitNewClass(tree);
    }

    @Override
    public void visitEnumConstant(EnumConstantTree tree) {
        isEnumConstantBody = true;
        super.visitEnumConstant(tree);
    }

    @Override
    public void visitLambdaExpression(LambdaExpressionTree lambdaExpressionTree) {
        int lines = getNumberOfLines(((JavaTree) lambdaExpressionTree.body()).getAstNode());
        if (lines > max) {
            context.addIssue(lambdaExpressionTree, RULE_KEY, "Reduce this lambda expression number of lines from " + lines + " to at most " + max + ".");
        }
        super.visitLambdaExpression(lambdaExpressionTree);
    }

    private int getNumberOfLines(ClassTree classTree) {
        int startLine = ((InternalSyntaxToken) classTree.openBraceToken()).getLine();
        int endline = ((InternalSyntaxToken) classTree.closeBraceToken()).getLine();
        return endline - startLine + 1;
    }

    private int getNumberOfLines(AstNode node) {
        return node.getLastToken().getLine() - node.getTokenLine() + 1;
    }
}
Now, do I need to do anything more to get the rule to trigger?
One more thing: after putting the plugin jar in the plugins folder, if I run the analyzer using mvn sonar:sonar, an error occurs:
Embedded error: org/sonar/java/model/InternalSyntaxToken
org.sonar.java.model.InternalSyntaxToken
[INFO] ------------------------------------------------------------------------
[INFO] Trace
org.apache.maven.lifecycle.LifecycleExecutionException: Can not execute Sonar
at org.apache.maven.lifecycle.DefaultLifecycleExecutor.executeGoals(DefaultLifecycleExecutor.java:719)
at org.apache.maven.lifecycle.DefaultLifecycleExecutor.executeStandaloneGoal(DefaultLifecycleExecutor.java:569)
at org.apache.maven.lifecycle.DefaultLifecycleExecutor.executeGoal(DefaultLifecycleExecutor.java:539)
at org.apache.maven.lifecycle.DefaultLifecycleExecutor.executeGoalAndHandleFailures(DefaultLifecycleExecutor.java:387)
at org.apache.maven.lifecycle.DefaultLifecycleExecutor.executeTaskSegments(DefaultLifecycleExecutor.java:284)
at org.apache.maven.lifecycle.DefaultLifecycleExecutor.execute(DefaultLifecycleExecutor.java:180)
at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:328)
at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:138)
at org.apache.maven.cli.MavenCli.main(MavenCli.java:362)
at org.apache.maven.cli.compat.CompatibleMain.main(CompatibleMain.java:60)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.codehaus.classworlds.Launcher.launchEnhanced(Launcher.java:315)
at org.codehaus.classworlds.Launcher.launch(Launcher.java:255)
at org.codehaus.classworlds.Launcher.mainWithExitCode(Launcher.java:430)
at org.codehaus.classworlds.Launcher.main(Launcher.java:375)
Caused by: org.apache.maven.plugin.MojoExecutionException: Can not execute Sonar
at org.codehaus.mojo.sonar.Bootstraper.executeMojo(Bootstraper.java:103)
at org.codehaus.mojo.sonar.Bootstraper.start(Bootstraper.java:79)
at org.codehaus.mojo.sonar.SonarMojo.execute(SonarMojo.java:88)
at org.apache.maven.plugin.DefaultPluginManager.executeMojo(DefaultPluginManager.java:490)
at org.apache.maven.lifecycle.DefaultLifecycleExecutor.executeGoals(DefaultLifecycleExecutor.java:694)
... 17 more
Caused by: org.apache.maven.plugin.MojoExecutionException: org/sonar/java/model/InternalSyntaxToken
at org.sonar.maven.ExceptionHandling.handle(ExceptionHandling.java:37)
at org.sonar.maven.SonarMojo.execute(SonarMojo.java:175)
at org.apache.maven.plugin.DefaultPluginManager.executeMojo(DefaultPluginManager.java:490)
at org.codehaus.mojo.sonar.Bootstraper.executeMojo(Bootstraper.java:98)
... 21 more
Caused by: java.lang.NoClassDefFoundError: org/sonar/java/model/InternalSyntaxToken
at org.sonar.samples.java.AnonymousClassesTooBigCheck.getNumberOfLines(AnonymousClassesTooBigCheck.java:78)
at org.sonar.samples.java.AnonymousClassesTooBigCheck.visitNewClass(AnonymousClassesTooBigCheck.java:53)
at org.sonar.java.model.expression.NewClassTreeImpl.accept(NewClassTreeImpl.java:126)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:42)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.visitVariable(BaseTreeVisitor.java:291)
at org.sonar.java.model.declaration.VariableTreeImpl.accept(VariableTreeImpl.java:180)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:42)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:36)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.visitClass(BaseTreeVisitor.java:69)
at org.sonar.java.model.declaration.ClassTreeImpl.accept(ClassTreeImpl.java:201)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:42)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:36)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.visitClass(BaseTreeVisitor.java:69)
at org.sonar.java.model.declaration.ClassTreeImpl.accept(ClassTreeImpl.java:201)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:42)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:36)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.visitCompilationUnit(BaseTreeVisitor.java:55)
at org.sonar.java.model.JavaTree$CompilationUnitTreeImpl.accept(JavaTree.java:202)
at org.sonar.plugins.java.api.tree.BaseTreeVisitor.scan(BaseTreeVisitor.java:42)
at org.sonar.samples.java.AnonymousClassesTooBigCheck.scanFile(AnonymousClassesTooBigCheck.java:47)
at org.sonar.java.model.VisitorsBridge.visitFile(VisitorsBridge.java:123)
at com.sonar.sslr.impl.ast.AstWalker.walkAndVisit(AstWalker.java:67)
at org.sonar.java.ast.AstScanner.simpleScan(AstScanner.java:107)
at org.sonar.java.ast.AstScanner.scan(AstScanner.java:75)
at org.sonar.java.JavaSquid.scanSources(JavaSquid.java:122)
at org.sonar.java.JavaSquid.scan(JavaSquid.java:115)
at org.sonar.plugins.java.JavaSquidSensor.analyse(JavaSquidSensor.java:91)
at org.sonar.batch.phases.SensorsExecutor.executeSensor(SensorsExecutor.java:79)
at org.sonar.batch.phases.SensorsExecutor.execute(SensorsExecutor.java:70)
at org.sonar.batch.phases.PhaseExecutor.execute(PhaseExecutor.java:119)
at org.sonar.batch.scan.ModuleScanContainer.doAfterStart(ModuleScanContainer.java:194)
at org.sonar.api.platform.ComponentContainer.startComponents(ComponentContainer.java:92)
at org.sonar.api.platform.ComponentContainer.execute(ComponentContainer.java:77)
at org.sonar.batch.scan.ProjectScanContainer.scan(ProjectScanContainer.java:233)
at org.sonar.batch.scan.ProjectScanContainer.scanRecursively(ProjectScanContainer.java:228)
at org.sonar.batch.scan.ProjectScanContainer.doAfterStart(ProjectScanContainer.java:221)
at org.sonar.api.platform.ComponentContainer.startComponents(ComponentContainer.java:92)
at org.sonar.api.platform.ComponentContainer.execute(ComponentContainer.java:77)
at org.sonar.batch.scan.ScanTask.scan(ScanTask.java:64)
at org.sonar.batch.scan.ScanTask.execute(ScanTask.java:51)
at org.sonar.batch.bootstrap.TaskContainer.doAfterStart(TaskContainer.java:125)
at org.sonar.api.platform.ComponentContainer.startComponents(ComponentContainer.java:92)
at org.sonar.api.platform.ComponentContainer.execute(ComponentContainer.java:77)
at org.sonar.batch.bootstrap.BootstrapContainer.executeTask(BootstrapContainer.java:173)
at org.sonar.batch.bootstrapper.Batch.executeTask(Batch.java:95)
at org.sonar.batch.bootstrapper.Batch.execute(Batch.java:67)
at org.sonar.runner.batch.IsolatedLauncher.execute(IsolatedLauncher.java:48)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.sonar.runner.impl.BatchLauncher$1.delegateExecution(BatchLauncher.java:87)
at org.sonar.runner.impl.BatchLauncher$1.run(BatchLauncher.java:75)
at java.security.AccessController.doPrivileged(Native Method)
at org.sonar.runner.impl.BatchLauncher.doExecute(BatchLauncher.java:69)
at org.sonar.runner.impl.BatchLauncher.execute(BatchLauncher.java:50)
at org.sonar.runner.api.EmbeddedRunner.doExecute(EmbeddedRunner.java:102)
at org.sonar.runner.api.Runner.execute(Runner.java:100)
at org.sonar.maven.SonarMojo.execute(SonarMojo.java:173)
... 23 more
Caused by: java.lang.ClassNotFoundException: org.sonar.java.model.InternalSyntaxToken
at org.codehaus.plexus.classworlds.strategy.SelfFirstStrategy.loadClass(SelfFirstStrategy.java:50)
at org.codehaus.plexus.classworlds.realm.ClassRealm.unsynchronizedLoadClass(ClassRealm.java:259)
at org.codehaus.plexus.classworlds.realm.ClassRealm.loadClass(ClassRealm.java:235)
at org.codehaus.plexus.classworlds.realm.ClassRealm.loadClass(ClassRealm.java:227)
... 82 more
The reason you encounter this error is that the code of AnonymousClassesTooBigCheck uses internal APIs: InternalSyntaxToken is not supposed to be used directly, and it thus fails at runtime (you might notice that none of the examples in the custom rule code snippet use this class).
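A sketch of a fix, assuming the public SyntaxToken interface in your target sonar-java version exposes line(): compute the line count without casting to the internal class.
private int getNumberOfLines(ClassTree classTree) {
    // Use the public SyntaxToken API instead of casting to the internal InternalSyntaxToken.
    int startLine = classTree.openBraceToken().line();
    int endLine = classTree.closeBraceToken().line();
    return endLine - startLine + 1;
}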

Spark App exception running in IntelliJ IDEA

Can someone help me out?
I am running a Spark app in IntelliJ IDEA.
import org.apache.spark.{SparkConf, SparkContext}

object MainDriver {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("Spark Sentiment Analysis").setMaster("local[2]")
    val sc = new SparkContext(conf)

    val posWords = sc.textFile("src/main/resources/Hu_Liu_positive_word_list.txt")
    val negWords = sc.textFile("src/main/resources/Hu_Liu_negative_word_list.txt")
    val nltkStopWords = sc.textFile("src/main/resources/stopwords/english")
    val moreStopWds = sc.parallelize(List("cant", "didnt", "doesnt", "dont", "goes", "isnt", "hes",
      "shes", "thats", "theres", "theyre", "wont", "youll", "youre",
      "youve", "br", "ve", "re", "vs", "dick", "ginger", "hollywood",
      "jack", "jill", "john", "karloff", "kudrow", "orson", "peter", "tcm",
      "tom", "toni", "welles", "william", "wolheim", "nikita"))
    val stopWordsRDD = (nltkStopWords union moreStopWds).filter(_ != "").cache()
    val stopWordsList = sc.broadcast(stopWordsRDD.collect())

    val inTrainUnsup = sc.wholeTextFiles("src/main/resources/reviews/train/unsup")
    val parsedTrainUnsup = inTrainUnsup mapValues (
      _ map {
        case c: Char if Character.isLetterOrDigit(c) => c
        case _ => ' '
      }
        split (" ")
        filter (_.trim() != "")
        filter (_.length() > 1)
        map (_.toLowerCase())
        filter (!stopWordsList.value.contains(_))
      )

    val wordFreqDist = parsedTrainUnsup flatMap {
      case (x, y) => y
    } map (w => (w, 1)) reduceByKey (_ + _)

    val posItems = (posWords map ((_, -1))) join wordFreqDist mapValues { case (x, y) => y }
    val sortedPosItems = posItems map (_.swap) sortByKey (false) map (_.swap) //This is not useful now...
    val negItems = (negWords map ((_, -1))) join wordFreqDist mapValues { case (x, y) => y }
    val sortedNegItems = negItems map (_.swap) sortByKey (false) map (_.swap) //This is not useful now...

    //Get the top 25 hot items
    //implicit val is for top(25), defining sort on the 2nd element
    implicit val pairSortByValue = new Ordering[(String, Int)] {
      override def compare(a: (String, Int), b: (String, Int)) = a._2 compare b._2
    }

    println("Top 25 positive words in unsup dataset")
    posItems.top(25).foreach(println)
    println("Top 25 negative words in unsup dataset")
    negItems.top(25).foreach(println)

    sc.stop()
  }
}
This code runs well if I use spark-submit.
But it throws exceptions when I run it directly in IntelliJ IDEA (Menu: Run > Run...). After looking into it, it seems something is wrong with val inTrainUnsup = sc.wholeTextFiles("src/main/resources/reviews/train/unsup"), because when I simply do inTrainUnsup.saveAsTextFile("test file"), it throws the same exception.
14/11/11 10:21:07 ERROR executor.Executor: Exception in task 0.0 in stage 1.0 (TID 4)
java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
at org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.initNextRecordReader(CombineFileRecordReader.java:164)
at org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.<init>(CombineFileRecordReader.java:126)
at org.apache.spark.input.WholeTextFileInputFormat.createRecordReader(WholeTextFileInputFormat.scala:44)
at org.apache.spark.rdd.NewHadoopRDD$$anon$1.<init>(NewHadoopRDD.scala:115)
at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:103)
at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:65)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
at org.apache.spark.rdd.MappedRDD.compute(MappedRDD.scala:31)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
at org.apache.spark.scheduler.Task.run(Task.scala:54)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
at java.lang.Thread.run(Thread.java:695)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
at org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.initNextRecordReader(CombineFileRecordReader.java:155)
... 16 more
Caused by: java.lang.IncompatibleClassChangeError: Found class org.apache.hadoop.mapreduce.TaskAttemptContext, but interface was expected
at org.apache.spark.input.WholeTextFileRecordReader.<init>(WholeTextFileRecordReader.scala:40)
... 21 more
14/11/11 10:21:07 ERROR executor.Executor: Exception in task 1.0 in stage 1.0 (TID 5)
java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
at org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.initNextRecordReader(CombineFileRecordReader.java:164)
at org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.<init>(CombineFileRecordReader.java:126)
at org.apache.spark.input.WholeTextFileInputFormat.createRecordReader(WholeTextFileInputFormat.scala:44)
at org.apache.spark.rdd.NewHadoopRDD$$anon$1.<init>(NewHadoopRDD.scala:115)
at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:103)
at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:65)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
at org.apache.spark.rdd.MappedRDD.compute(MappedRDD.scala:31)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
at org.apache.spark.scheduler.Task.run(Task.scala:54)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
at java.lang.Thread.run(Thread.java:695)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
at org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.initNextRecordReader(CombineFileRecordReader.java:155)
... 16 more
Caused by: java.lang.IncompatibleClassChangeError: Found class org.apache.hadoop.mapreduce.TaskAttemptContext, but interface was expected
at org.apache.spark.input.WholeTextFileRecordReader.<init>(WholeTextFileRecordReader.scala:40)
... 21 more
14/11/11 10:21:07 WARN scheduler.TaskSetManager: Lost task 1.0 in stage 1.0 (TID 5, localhost): java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.initNextRecordReader(CombineFileRecordReader.java:164)
org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.<init>(CombineFileRecordReader.java:126)
org.apache.spark.input.WholeTextFileInputFormat.createRecordReader(WholeTextFileInputFormat.scala:44)
org.apache.spark.rdd.NewHadoopRDD$$anon$1.<init>(NewHadoopRDD.scala:115)
org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:103)
org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:65)
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
org.apache.spark.rdd.MappedRDD.compute(MappedRDD.scala:31)
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
org.apache.spark.scheduler.Task.run(Task.scala:54)
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177)
java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
java.lang.Thread.run(Thread.java:695)
14/11/11 10:21:07 ERROR scheduler.TaskSetManager: Task 1 in stage 1.0 failed 1 times; aborting job
14/11/11 10:21:07 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 1.0, whose tasks have all completed, from pool
14/11/11 10:21:07 INFO scheduler.TaskSetManager: Lost task 0.0 in stage 1.0 (TID 4) on executor localhost: java.lang.RuntimeException (java.lang.reflect.InvocationTargetException) [duplicate 1]
14/11/11 10:21:07 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 1.0, whose tasks have all completed, from pool
14/11/11 10:21:07 INFO scheduler.TaskSchedulerImpl: Cancelling stage 1
14/11/11 10:21:07 INFO scheduler.DAGScheduler: Failed to run saveAsTextFile at MainDriver.scala:30
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 1.0 failed 1 times, most recent failure: Lost task 1.0 in stage 1.0 (TID 5, localhost): java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.initNextRecordReader(CombineFileRecordReader.java:164)
org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader.<init>(CombineFileRecordReader.java:126)
org.apache.spark.input.WholeTextFileInputFormat.createRecordReader(WholeTextFileInputFormat.scala:44)
org.apache.spark.rdd.NewHadoopRDD$$anon$1.<init>(NewHadoopRDD.scala:115)
org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:103)
org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:65)
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
org.apache.spark.rdd.MappedRDD.compute(MappedRDD.scala:31)
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
org.apache.spark.scheduler.Task.run(Task.scala:54)
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177)
java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
java.lang.Thread.run(Thread.java:695)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
at scala.Option.foreach(Option.scala:236)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1391)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
at akka.actor.ActorCell.invoke(ActorCell.scala:456)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
at akka.dispatch.Mailbox.run(Mailbox.scala:219)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)