Caused by: java.lang.ClassCastException Wrapper cannot be cast to - glassfish

I am trying to call a remote EJB running on a GlassFish 3.1 container from a JavaFX 2.2 client, and an exception is thrown when I look up the remote EJB.
The purpose of my application is to get/put, from the JavaFX client, objects which are saved/retrieved as XML files on the server side.
On the server side the EJB is packaged into an EAR.
The stateless session EJB lives in the "scrubber_S_Controller" package:
package scrubber_S_Controller;
import java.io.Serializable;
import javax.ejb.Stateless;
import javax.xml.bind.JAXBException;
import scrubber_S_Model.SimpleObject;
/**
* Session Bean implementation class Session
*/
@Stateless
public class Session implements SessionRemote, Serializable {
/**
*
*/
private static final long serialVersionUID = -5718452084852474986L;
/**
* Default constructor.
*/
public Session() {
// TODO Auto-generated constructor stub
}
@Override
public SimpleObject getSimpleObject() throws JAXBException {
SimpleObject simpleobjet = new SimpleObject();
return simpleobjet.retrieveSimpleObject();
}
@Override
public void setSimpleObject(SimpleObject simpleobject) throws JAXBException {
simpleobject.saveSimpleObject(simpleobject);
}
}
The remote interface used is
package scrubber_S_Controller;
import javax.ejb.Remote;
import javax.xml.bind.JAXBException;
import scrubber_S_Model.SimpleObject;
@Remote
public interface SessionRemote {
public SimpleObject getSimpleObject() throws JAXBException;
public void setSimpleObject(SimpleObject simpleobject) throws JAXBException;
}
SimpleObject is managed in the scrubber_S_Model package:
package scrubber_S_Model;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement(name = "SimpleObject")
public class SimpleObject implements java.io.Serializable {
/**
*
*/
private static final long serialVersionUID = 306212289216139111L;
/**
* Used to define a simpleObject Value Object
*/
@XmlElement(name = "scrubberValveValue")
private int scrubberValveValue;
@XmlElement(name = "bypassValveValue")
private int bypassValveValue;
@XmlElement(name = "exhaustState")
private boolean exhaustState;
@XmlElement(name = "layoutColor")
private String layoutColor;
@XmlElement(name = "textValue")
private String textValue;
@XmlElement(name = "textColor")
private String textColor;
@XmlElement(name = "pressureThreshold")
private int pressureThreshold;
public SimpleObject(int bypassvalvevalue, int scrubbervalvevalue,
boolean exhauststate, String layoutcolor, String textvalue,
String textcolor, int pressurethreshold) {
this.bypassValveValue = bypassvalvevalue;
this.scrubberValveValue = scrubbervalvevalue;
this.exhaustState = exhauststate;
this.layoutColor = layoutcolor;
this.textValue = textvalue;
this.textColor = textcolor;
this.pressureThreshold = pressurethreshold;
}
/**
* Empty constructor, just to enable JAXB.
*/
public SimpleObject() {
}
/**
* Gets the value of the scrubberValveValue property.
*
*/
public int getScrubberValveValue() {
return this.scrubberValveValue;
}
/**
* Sets the value of the scrubberValveValue property.
*
*/
public void setScrubberValveValue(int value) {
this.scrubberValveValue = value;
}
/**
* Gets the value of the bypassValveValue property.
*
*/
public int getBypassValveValue() {
return this.bypassValveValue;
}
/**
* Sets the value of the bypassValveValue property.
*
*/
public void setBypassValveValue(int value) {
this.bypassValveValue = value;
}
/**
* Gets the value of the exhaustState property.
*
*/
public boolean isExhaustState() {
return this.exhaustState;
}
/**
* Sets the value of the exhaustState property.
*
*/
public void setExhaustState(boolean value) {
this.exhaustState = value;
}
/**
* Gets the value of the layoutColor property.
*
* @return possible object is {@link String }
*
*/
public String getLayoutColor() {
return this.layoutColor;
}
/**
* Sets the value of the layoutColor property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setLayoutColor(String value) {
this.layoutColor = value;
}
/**
* Gets the value of the textValue property.
*
* @return possible object is {@link String }
*
*/
public String getTextValue() {
return this.textValue;
}
/**
* Sets the value of the textValue property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setTextValue(String value) {
this.textValue = value;
}
/**
* Gets the value of the textColor property.
*
* @return possible object is {@link String }
*
*/
public String getTextColor() {
return this.textColor;
}
/**
* Sets the value of the textColor property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setTextColor(String value) {
this.textColor = value;
}
/**
* Gets the value of the pressureThreshold property.
*
*/
public int getPressureThreshold() {
return this.pressureThreshold;
}
/**
* Sets the value of the pressureThreshold property.
*
*/
public void setPressureThreshold(int value) {
this.pressureThreshold = value;
}
public void saveSimpleObject(SimpleObject simpleobjet) throws JAXBException {
FileOutputStream fileout = null;
JAXBContext jc = JAXBContext.newInstance(SimpleObject.class);
Marshaller marshaller = jc.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
try {
fileout = new FileOutputStream("simpleobjectfile.xml");
} catch (FileNotFoundException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
marshaller.marshal(simpleobjet, fileout);
try {
fileout.flush();
fileout.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public SimpleObject retrieveSimpleObject() throws JAXBException {
FileInputStream fileinput = null;
JAXBContext jc = JAXBContext.newInstance(SimpleObject.class);
Unmarshaller unmarshaller = jc.createUnmarshaller();
try {
fileinput = new FileInputStream("simpleobjectfile.xml");
} catch (FileNotFoundException e) {
e.printStackTrace();
}
SimpleObject simpleobjet = (SimpleObject)unmarshaller.unmarshal(fileinput);
try {
fileinput.close();
} catch (IOException e) {
e.printStackTrace();
}
return simpleobjet;
}
}
JUnit tests of the marshalling/unmarshalling work fine.
Deployment of the EJB gives the following JNDI names:
INFO: EJB5181:Portable JNDI names for EJB Session: [java:global/Scrubber_S_EAR/Scrubber_S/Session, java:global/Scrubber_S_EAR/Scrubber_S/Session!scrubber_S_Controller.SessionRemote]
INFO: CORE10010: Loading application Scrubber_S_EAR done in 4 406 ms
On the client side, the JavaFX application is as follows:
package ScrubberView;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.xml.bind.JAXBException;
import scrubber_CView_Model.SimpleObject;
import session.SessionRemote;
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.layout.BorderPane;
import javafx.scene.paint.Color;
import javafx.stage.Stage;
public class scrubberView extends Application {
@Override
public void start(Stage primaryStage) throws JAXBException {
try {
Properties propriétés = new Properties();
propriétés.setProperty("org.omg.CORBA.ORBInitialHost", "localhost");
Context ctx = new InitialContext(propriétés);
SessionRemote mySession = (SessionRemote)ctx.lookup("java:global/Scrubber_S_EAR/Scrubber_S/Session");
//Create an object to exchange
SimpleObject simpleObject = new SimpleObject(1, 2, true, "layoutcolor", "text", "textcolor", 10 );
mySession.setSimpleObject(simpleObject);
@SuppressWarnings("unused")
SimpleObject simpleObject2 = new SimpleObject();
simpleObject2 = mySession.getSimpleObject();
} catch (NamingException e) {
// TODO Auto-generated catch block
System.out.println(e.toString() );
e.printStackTrace();
}
//compose the scrubberview scene and show it
primaryStage.setTitle("scrubberView");
BorderPane borderpane = new BorderPane();
Scene scene = new Scene(borderpane, 350, 80, Color.GREY);
primaryStage.setScene(scene);
scene.getStylesheets().add("./CleanRoomControl.css");
primaryStage.show();
}
public static void main(String[] args) {
launch(args);
}
}
The following JARs are on the build path of the application:
C:\glassfish3\glassfish\modules\auto-depends.jar
C:\glassfish3\glassfish\modules\common-util.jar
C:\glassfish3\glassfish\modules\config-api.jar
C:\glassfish3\glassfish\modules\config-types.jar
C:\glassfish3\glassfish\modules\config.jar
C:\glassfish3\glassfish\modules\deployment-common.jar
C:\glassfish3\glassfish\modules\dol.jar
C:\glassfish3\glassfish\modules\ejb-container.jar
C:\glassfish3\glassfish\modules\ejb.security.jar
C:\glassfish3\glassfish\modules\glassfish-api.jar
C:\glassfish3\glassfish\modules\glassfish-corba-asm.jar
C:\glassfish3\glassfish\modules\glassfish-corba-codegen.jar
C:\glassfish3\glassfish\modules\glassfish-corba-csiv2-idl.jar
C:\glassfish3\glassfish\modules\glassfish-corba-newtimer.jar
C:\glassfish3\glassfish\modules\glassfish-corba-omgapi.jar
C:\glassfish3\glassfish\modules\glassfish-corba-orb.jar
C:\glassfish3\glassfish\modules\glassfish-corba-orbgeneric.jar
C:\glassfish3\glassfish\modules\glassfish-naming.jar
C:\glassfish3\glassfish\modules\gmbal.jar
C:\glassfish3\glassfish\modules\hk2-core.jar
C:\glassfish3\glassfish\modules\internal-api.jar
C:\glassfish3\glassfish\modules\javax.ejb.jar
C:\glassfish3\glassfish\modules\kernel.jar
C:\glassfish3\glassfish\modules\management-api.jar
C:\glassfish3\glassfish\modules\orb-connector.jar
C:\glassfish3\glassfish\modules\orb-iiop.jar
C:\glassfish3\glassfish\modules\security.jar
C:\glassfish3\glassfish\modules\transaction-internal-api.jar
When running the
SessionRemote mySession = (SessionRemote)ctx.lookup("java:global/Scrubber_S_EAR/Scrubber_S/Session");
line, it raises the following exception:
Exception in Application start method
Exception in thread "main" java.lang.RuntimeException: Exception in Application start method
at com.sun.javafx.application.LauncherImpl.launchApplication1(LauncherImpl.java:403)
at com.sun.javafx.application.LauncherImpl.access$000(LauncherImpl.java:47)
at com.sun.javafx.application.LauncherImpl$1.run(LauncherImpl.java:115)
at java.lang.Thread.run(Thread.java:722)
Caused by: java.lang.ClassCastException: scrubber_S_Controller._SessionRemote_Wrapper cannot be cast to session.SessionRemote
at ScrubberView.scrubberView.start(scrubberView.java:27)
at com.sun.javafx.application.LauncherImpl$5.run(LauncherImpl.java:319)
at com.sun.javafx.application.PlatformImpl$5.run(PlatformImpl.java:215)
at com.sun.javafx.application.PlatformImpl$4$1.run(PlatformImpl.java:179)
at com.sun.javafx.application.PlatformImpl$4$1.run(PlatformImpl.java:176)
at java.security.AccessController.doPrivileged(Native Method)
at com.sun.javafx.application.PlatformImpl$4.run(PlatformImpl.java:176)
at com.sun.glass.ui.win.WinApplication._runLoop(Native Method)
at com.sun.glass.ui.win.WinApplication.access$100(WinApplication.java:29)
at com.sun.glass.ui.win.WinApplication$3$1.run(WinApplication.java:73)
... 1 more
If I try with the other naming:
SessionRemote mySession = (SessionRemote)ctx.lookup("java:global/Scrubber_S_EAR/Scrubber_S/Session!scrubber_S_Controller.SessionRemote");
It raises the same exception.
It would be great if somebody could help me fix this issue.
Many thanks in advance for your help.
I hope that my English is not too bad for understanding.

I fixed this issue with the following changes:
Remove the JAR files from the build path and put only gf-client.jar on the classpath, as explained in
How do I access a Remote EJB component from a stand-alone java client?
Rename "scrubber_S_Controller" to "session" (to match the corresponding package on the client side).
Remove "retrieveSimpleObject()" and "saveSimpleObject(SimpleObject simpleobjet)" from the "SimpleObject" class and move them to a new "SimpleObjectPersist" class (a sketch of such a class is shown below).
Use "SimpleObjectPersist" to save and retrieve a SimpleObject.
Phew! After this, it runs well.
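For reference, here is a minimal sketch of what such a SimpleObjectPersist class could look like. This is my reconstruction from the description above rather than the exact class I ended up with; it simply reuses the JAXB calls and the file name from the original SimpleObject methods.
package scrubber_S_Model;
import java.io.File;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
// Persistence helper: keeps the JAXB marshalling out of the data class so that
// SimpleObject stays a plain serializable DTO for the remote interface.
public class SimpleObjectPersist {
    private static final File STORE = new File("simpleobjectfile.xml");
    public void saveSimpleObject(SimpleObject simpleObject) throws JAXBException {
        JAXBContext jc = JAXBContext.newInstance(SimpleObject.class);
        Marshaller marshaller = jc.createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        marshaller.marshal(simpleObject, STORE);
    }
    public SimpleObject retrieveSimpleObject() throws JAXBException {
        JAXBContext jc = JAXBContext.newInstance(SimpleObject.class);
        Unmarshaller unmarshaller = jc.createUnmarshaller();
        return (SimpleObject) unmarshaller.unmarshal(STORE);
    }
}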

Related

GRPC Java load-balancing - start method is not being called from NameResolver

We are trying to implement gRPC load balancing in Java with Consul Service Discovery.
Version info: grpc-java v1.30.0
The problem is that when the app runs, the start method of our custom NameResolver class is not being called!
Here is our code:
Here is the custom NameResolver class (the start method here is not being called).
I have put a breakpoint at the start method to check, and it's not being called!
package com.bht.saigonparking.common.loadbalance;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.logging.log4j.Level;
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import com.bht.saigonparking.common.util.LoggingUtil;
import io.grpc.Attributes;
import io.grpc.EquivalentAddressGroup;
import io.grpc.NameResolver;
import lombok.Getter;
/**
*
* @author bht
*/
@Getter
public final class SaigonParkingNameResolver extends NameResolver {
private final URI consulURI;
private final String serviceId;
private final DiscoveryClient discoveryClient;
private Listener listener;
private List<ServiceInstance> serviceInstances;
public SaigonParkingNameResolver(DiscoveryClient discoveryClient,
URI consulURI,
String serviceId,
int pauseInSeconds) {
this.consulURI = consulURI;
this.serviceId = serviceId;
this.discoveryClient = discoveryClient;
/* run connection check timer */
ConnectionCheckTimer connectionCheckTimer = new ConnectionCheckTimer(this, pauseInSeconds);
connectionCheckTimer.runTimer();
}
@Override
public String getServiceAuthority() {
return consulURI.getAuthority();
}
@Override
public void start(Listener2 listener) {
this.listener = listener;
loadServiceInstances();
}
@Override
public void shutdown() {
// implement shutdown...
}
void loadServiceInstances() {
List<EquivalentAddressGroup> addressList = new ArrayList<>();
serviceInstances = discoveryClient.getInstances(serviceId);
if (serviceInstances == null || serviceInstances.isEmpty()) {
LoggingUtil.log(Level.WARN, "loadServiceInstances", "Warning",
String.format("no serviceInstances of %s", serviceId));
return;
}
serviceInstances.forEach(serviceInstance -> {
String host = serviceInstance.getHost();
int port = serviceInstance.getPort();
LoggingUtil.log(Level.INFO, "loadServiceInstances", serviceId, String.format("%s:%d", host, port));
List<SocketAddress> socketAddressList = new ArrayList<>();
socketAddressList.add(new InetSocketAddress(host, port));
addressList.add(new EquivalentAddressGroup(socketAddressList));
});
if (!addressList.isEmpty()) {
listener.onAddresses(addressList, Attributes.EMPTY);
}
}
}
Here is the custom NameResolverProvider class
package com.bht.saigonparking.common.loadbalance;
import java.net.URI;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import io.grpc.NameResolver;
import io.grpc.NameResolverProvider;
import lombok.AllArgsConstructor;
/**
* @author bht
*/
@AllArgsConstructor
public final class SaigonParkingNameResolverProvider extends NameResolverProvider {
private final String serviceId;
private final DiscoveryClient discoveryClient;
private final int pauseInSeconds;
@Override
protected boolean isAvailable() {
return true;
}
@Override
protected int priority() {
return 5;
}
@Override
public String getDefaultScheme() {
return "consul";
}
@Override
public NameResolver newNameResolver(URI targetUri, NameResolver.Args args) {
return new SaigonParkingNameResolver(discoveryClient, targetUri, serviceId, pauseInSeconds);
}
}
Here is a class from Client
package com.bht.saigonparking.service.auth.configuration;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.context.annotation.Bean;
import org.springframework.stereotype.Component;
import com.bht.saigonparking.api.grpc.user.UserServiceGrpc;
import com.bht.saigonparking.common.interceptor.SaigonParkingClientInterceptor;
import com.bht.saigonparking.common.loadbalance.SaigonParkingNameResolverProvider;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import lombok.AllArgsConstructor;
/**
*
* @author bht
*/
@Component
@AllArgsConstructor(onConstructor = @__(@Autowired))
public final class ChannelConfiguration {
private final SaigonParkingClientInterceptor clientInterceptor;
@Bean("userResolver")
public SaigonParkingNameResolverProvider userServiceNameResolverProvider(@Value("${connection.user-service.id}") String serviceId,
@Value("${connection.refresh-period-in-seconds}") int refreshPeriod,
@Autowired DiscoveryClient discoveryClient) {
return new SaigonParkingNameResolverProvider(serviceId, discoveryClient, refreshPeriod);
}
/**
*
* channel is the abstraction to connect to a service endpoint
*
* note for gRPC service stub:
* .newStub(channel) --> nonblocking/asynchronous stub
* .newBlockingStub(channel) --> blocking/synchronous stub
*/
@Bean
public ManagedChannel managedChannel(@Value("${spring.cloud.consul.host}") String host,
@Value("${spring.cloud.consul.port}") int port,
@Value("${connection.idle-timeout}") int timeout,
@Value("${connection.max-inbound-message-size}") int maxInBoundMessageSize,
@Value("${connection.max-inbound-metadata-size}") int maxInBoundMetadataSize,
@Value("${connection.load-balancing-policy}") String loadBalancingPolicy,
@Qualifier("userResolver") SaigonParkingNameResolverProvider nameResolverProvider) {
return ManagedChannelBuilder
.forTarget("consul://" + host + ":" + port) // build channel to server with server's address
.keepAliveWithoutCalls(false) // Close channel when client has already received response
.idleTimeout(timeout, TimeUnit.MILLISECONDS) // 10000 milliseconds / 1000 = 10 seconds --> request time-out
.maxInboundMetadataSize(maxInBoundMetadataSize * 1024 * 1024) // 2KB * 1024 = 2MB --> max message header size
.maxInboundMessageSize(maxInBoundMessageSize * 1024 * 1024) // 10KB * 1024 = 10MB --> max message size to transfer together
.defaultLoadBalancingPolicy(loadBalancingPolicy) // set load balancing policy for channel
.nameResolverFactory(nameResolverProvider) // using Consul service discovery for DNS querying
.intercept(clientInterceptor) // add internal credential authentication
.usePlaintext() // use plain-text to communicate internally
.build(); // Build channel to communicate over gRPC
}
/* asynchronous user service stub */
@Bean
public UserServiceGrpc.UserServiceStub userServiceStub(@Autowired ManagedChannel channel) {
return UserServiceGrpc.newStub(channel);
}
/* synchronous user service stub */
@Bean
public UserServiceGrpc.UserServiceBlockingStub userServiceBlockingStub(@Autowired ManagedChannel channel) {
return UserServiceGrpc.newBlockingStub(channel);
}
}
Is there anything wrong in our code?
We are looking forward to hearing from you soon!
We thought that start would be called as soon as the channel was created. That's wrong!
Sorry, we misunderstood gRPC load balancing.
start is actually called when a new service call is made!
Thanks!
Saigon Parking team.
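For anyone hitting the same confusion, here is a minimal sketch of that behaviour. The target string is a placeholder and the provider is assumed to be the SaigonParkingNameResolverProvider wired in ChannelConfiguration above; building the channel does not invoke NameResolver.start(), it is only invoked once the channel actually needs addresses, i.e. on the first RPC or when a connection is explicitly requested.
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.NameResolverProvider;
public class NameResolverStartDemo {
    // The provider is passed in exactly as it is wired via nameResolverFactory(...) above.
    static ManagedChannel buildChannel(NameResolverProvider provider) {
        // Building the channel alone does NOT call NameResolver.start().
        return ManagedChannelBuilder
                .forTarget("consul://localhost:8500") // placeholder target
                .nameResolverFactory(provider)
                .defaultLoadBalancingPolicy("round_robin")
                .usePlaintext()
                .build();
    }
    static void triggerResolution(ManagedChannel channel) {
        // Option 1: the first RPC made through any stub created from this channel
        // moves the channel out of idle, which is when NameResolver.start() runs.
        // Option 2: explicitly request a connection; this also kicks off name
        // resolution without sending an RPC.
        channel.getState(true /* requestConnection */);
    }
}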

AbstractStringBuilder.ensureCapacityInternal get NullPointerException in storm bolt

In our online system, the Storm bolt gets a NullPointerException, though I think I check for it before line 61; it gets the NullPointerException once in a while.
import ***.KeyUtils;
import ***.redis.PipelineHelper;
import ***.redis.PipelinedCacheClusterClient;
import **.redis.R2mClusterClient;
import org.apache.commons.lang3.StringUtils;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichBolt;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Tuple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import java.util.Map;
/**
* RedisBolt batch operate
*/
public class RedisBolt implements IRichBolt {
static final long serialVersionUID = 737015318988609460L;
private static ApplicationContext applicationContext;
private static long logEmitNumber = 0;
private static StringBuffer totalCmds = new StringBuffer();
private Logger logger = LoggerFactory.getLogger(getClass());
private OutputCollector _collector;
private R2mClusterClient r2mClusterClient;
@Override
public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
_collector = outputCollector;
if (applicationContext == null) {
applicationContext = new ClassPathXmlApplicationContext("spring/spring-config-redisbolt.xml");
}
if (r2mClusterClient == null) {
r2mClusterClient = (R2mClusterClient) applicationContext.getBean("r2mClusterClient");
}
}
@Override
public void execute(Tuple tuple) {
String log = tuple.getString(0);
String lastCommands = tuple.getString(1);
try {
//log count
if (StringUtils.isNotEmpty(log)) {
logEmitNumber++;
}
if (StringUtils.isNotEmpty(lastCommands)) {
if(totalCmds==null){
totalCmds = new StringBuffer();
}
totalCmds.append(lastCommands);//line 61
}
//log volume control
int numberLimit = 1;
String flow_log_limit = r2mClusterClient.get(KeyUtils.KEY_PIPELINE_LIMIT);
if (StringUtils.isNotEmpty(flow_log_limit)) {
try {
numberLimit = Integer.parseInt(flow_log_limit);
} catch (Exception e) {
numberLimit = 1;
logger.error("error", e);
}
}
if (logEmitNumber >= numberLimit) {
StringBuffer _totalCmds = new StringBuffer(totalCmds);
try {
//pipeline submit
PipelinedCacheClusterClient pip = r2mClusterClient.pipelined();
String[] commandArray = _totalCmds.toString().split(KeyUtils.REDIS_CMD_SPILT);
PipelineHelper.cmd(pip, commandArray);
pip.sync();
pip.close();
totalCmds = new StringBuffer();
} catch (Exception e) {
logger.error("error", e);
}
logEmitNumber = 0;
}
} catch (Exception e) {
logger.error(new StringBuffer("====RedisBolt error for log=[ ").append(log).append("] \n commands=[").append(lastCommands).append("]").toString(), e);
_collector.reportError(e);
_collector.fail(tuple);
}
_collector.ack(tuple);
}
@Override
public void cleanup() {
}
@Override
public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
}
@Override
public Map<String, Object> getComponentConfiguration() {
return null;
}
}
exception info:
java.lang.NullPointerException
at java.lang.AbstractStringBuilder.ensureCapacityInternal(AbstractStringBuilder.java:113)
at java.lang.AbstractStringBuilder.append(AbstractStringBuilder.java:415)
at java.lang.StringBuffer.append(StringBuffer.java:237)
at com.jd.jr.dataeye.storm.bolt.RedisBolt.execute(RedisBolt.java:61)
at org.apache.storm.daemon.executor$fn__5044$tuple_action_fn__5046.invoke(executor.clj:727)
at org.apache.storm.daemon.executor$mk_task_receiver$fn__4965.invoke(executor.clj:459)
at org.apache.storm.disruptor$clojure_handler$reify__4480.onEvent(disruptor.clj:40)
at org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:472)
at org.apache.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:451)
at org.apache.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:73)
at org.apache.storm.daemon.executor$fn__5044$fn__5057$fn__5110.invoke(executor.clj:846)
at org.apache.storm.util$async_loop$fn__557.invoke(util.clj:484)
at clojure.lang.AFn.run(AFn.java:22)
at java.lang.Thread.run(Thread.java:745)
Can anyone give me some advice to find the reason?
That is a really odd thing to happen. Please read the code for these two classes:
https://github.com/openjdk-mirror/jdk7u-jdk/blob/master/src/share/classes/java/lang/AbstractStringBuilder.java
https://github.com/openjdk-mirror/jdk7u-jdk/blob/master/src/share/classes/java/lang/StringBuffer.java
AbstractStringBuilder has a no-args constructor which doesn't allocate the field 'value', and accessing the 'value' field then throws an NPE. None of the constructors in StringBuffer use that constructor. So maybe some odd thing happens in serialization/deserialization and unfortunately the 'value' field in AbstractStringBuilder ends up being null.
Maybe initializing totalCmds in prepare() would be better, and you also need to consider synchronization (thread-safety) between bolts: prepare() is called per bolt instance, so instance fields are thread-safe, but class (static) fields are not.
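A minimal sketch of that suggestion, with the Redis/pipeline logic elided: make the buffer and counter instance fields and create them in prepare(), so each bolt instance (and therefore each executor thread) works on its own state and the buffer can never be observed half-constructed.
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichBolt;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Tuple;
public class RedisBoltSafe implements IRichBolt {
    static final long serialVersionUID = 1L;
    // instance fields instead of static class fields: each bolt instance gets its own copy
    private transient StringBuffer totalCmds;
    private transient long logEmitNumber;
    private transient OutputCollector collector;
    @Override
    public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
        collector = outputCollector;
        totalCmds = new StringBuffer(); // created in prepare(), so never null in execute()
        logEmitNumber = 0;
    }
    @Override
    public void execute(Tuple tuple) {
        String log = tuple.getString(0);
        String lastCommands = tuple.getString(1);
        if (StringUtils.isNotEmpty(log)) {
            logEmitNumber++;
        }
        if (StringUtils.isNotEmpty(lastCommands)) {
            totalCmds.append(lastCommands); // only this executor's thread touches totalCmds
        }
        // ... flush to Redis via pipeline when logEmitNumber reaches the limit, as in the original bolt ...
        collector.ack(tuple);
    }
    @Override
    public void cleanup() {
    }
    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
    }
    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}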
I think I have found the problem, maybe;
the key point is
"StringBuffer _totalCmds = new StringBuffer(totalCmds);" and " totalCmds.append(lastCommands);//line 61"
when new-ing an object, it takes several steps:
(1) allocate memory and return the reference
(2) initialize
if the append happens after (1) and before (2), then, since StringBuffer.java extends AbstractStringBuilder.java, which declares
/**
* The value is used for character storage.
*/
char[] value;
the value field is not yet initialized, so this will get null:
@Override
public synchronized void ensureCapacity(int minimumCapacity) {
if (minimumCapacity > value.length) {
expandCapacity(minimumCapacity);
}
}
This bolt has another problem: some data may be lost in a multithreaded environment.

How do I configure spring-kafka to ignore messages in the wrong format?

We have an issue with one of our Kafka topics which is consumed by the DefaultKafkaConsumerFactory & ConcurrentMessageListenerContainer combination described here with a JsonDeserializer used by the Factory. Unfortunately someone got a little enthusiastic and published some invalid messages onto the topic. It appears that spring-kafka silently fails to process past the first of these messages. Is it possible to have spring-kafka log an error and continue? Looking at the error messages which are logged it seems that perhaps the Apache kafka-clients library should deal with the case that when iterating a batch of messages one or more of them may fail to parse?
The below code is an example test case illustrating this issue:
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.listener.config.ContainerProperties;
import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.util.concurrent.ListenableFuture;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.springframework.kafka.test.hamcrest.KafkaMatchers.hasKey;
import static org.springframework.kafka.test.hamcrest.KafkaMatchers.hasValue;
/**
* @author jfreedman
*/
public class TestSpringKafka {
private static final String TOPIC1 = "spring.kafka.1.t";
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, 1, TOPIC1);
@Test
public void submitMessageThenGarbageThenAnotherMessage() throws Exception {
final BlockingQueue<ConsumerRecord<String, JsonObject>> records = createListener(TOPIC1);
final KafkaTemplate<String, JsonObject> objectTemplate = createPublisher("json", new JsonSerializer<JsonObject>());
sendAndVerifyMessage(records, objectTemplate, "foo", new JsonObject("foo"), 0L);
// push some garbage text to Kafka which cannot be marshalled, this should not interrupt processing
final KafkaTemplate<String, String> garbageTemplate = createPublisher("garbage", new StringSerializer());
final SendResult<String, String> garbageResult = garbageTemplate.send(TOPIC1, "bar","bar").get(5, TimeUnit.SECONDS);
assertEquals(1L, garbageResult.getRecordMetadata().offset());
sendAndVerifyMessage(records, objectTemplate, "baz", new JsonObject("baz"), 2L);
}
private <T> KafkaTemplate<String, T> createPublisher(final String label, final Serializer<T> serializer) {
final Map<String, Object> producerProps = new HashMap<>();
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafka.getBrokersAsString());
producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, "TestPublisher-" + label);
producerProps.put(ProducerConfig.ACKS_CONFIG, "all");
producerProps.put(ProducerConfig.RETRIES_CONFIG, 2);
producerProps.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1);
producerProps.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
producerProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 5000);
producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, serializer.getClass());
final DefaultKafkaProducerFactory<String, T> pf = new DefaultKafkaProducerFactory<>(producerProps);
pf.setValueSerializer(serializer);
return new KafkaTemplate<>(pf);
}
private BlockingQueue<ConsumerRecord<String, JsonObject>> createListener(final String topic) throws Exception {
final Map<String, Object> consumerProps = new HashMap<>();
consumerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafka.getBrokersAsString());
consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "TestConsumer");
consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
consumerProps.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
consumerProps.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
final DefaultKafkaConsumerFactory<String, JsonObject> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
cf.setValueDeserializer(new JsonDeserializer<>(JsonObject.class));
final KafkaMessageListenerContainer<String, JsonObject> container = new KafkaMessageListenerContainer<>(cf, new ContainerProperties(topic));
final BlockingQueue<ConsumerRecord<String, JsonObject>> records = new LinkedBlockingQueue<>();
container.setupMessageListener((MessageListener<String, JsonObject>) records::add);
container.setBeanName("TestListener");
container.start();
ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
return records;
}
private void sendAndVerifyMessage(final BlockingQueue<ConsumerRecord<String, JsonObject>> records,
final KafkaTemplate<String, JsonObject> template,
final String key, final JsonObject value,
final long expectedOffset) throws InterruptedException, ExecutionException, TimeoutException {
final ListenableFuture<SendResult<String, JsonObject>> future = template.send(TOPIC1, key, value);
final ConsumerRecord<String, JsonObject> record = records.poll(5, TimeUnit.SECONDS);
assertThat(record, hasKey(key));
assertThat(record, hasValue(value));
assertEquals(expectedOffset, future.get(5, TimeUnit.SECONDS).getRecordMetadata().offset());
}
public static final class JsonObject {
private String value;
public JsonObject() {}
JsonObject(final String value) {
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(final String value) {
this.value = value;
}
@Override
public boolean equals(final Object o) {
if (this == o) { return true; }
if (o == null || getClass() != o.getClass()) { return false; }
final JsonObject that = (JsonObject) o;
return Objects.equals(value, that.value);
}
@Override
public int hashCode() {
return Objects.hash(value);
}
@Override
public String toString() {
return "JsonObject{" +
"value='" + value + '\'' +
'}';
}
}
}
I have a solution, but I don't know if it's the best one. I extended JsonDeserializer as follows, which results in a null value being consumed by spring-kafka and requires the necessary downstream changes to handle that case.
class SafeJsonDeserializer[A >: Null](targetType: Class[A], objectMapper: ObjectMapper) extends JsonDeserializer[A](targetType, objectMapper) with Logging {
override def deserialize(topic: String, data: Array[Byte]): A = try {
super.deserialize(topic, data)
} catch {
case e: Exception =>
logger.error("Failed to deserialize data [%s] from topic [%s]".format(new String(data), topic), e)
null
}
}
Starting from spring-kafka 2.x.x, we now have the comfort of declaring beans in the config class for the KafkaListenerErrorHandler interface, with an implementation something like:
@Bean
public ConsumerAwareListenerErrorHandler listen3ErrorHandler() {
return (m, e, c) -> {
this.listen3Exception = e;
MessageHeaders headers = m.getHeaders();
c.seek(new org.apache.kafka.common.TopicPartition(
headers.get(KafkaHeaders.RECEIVED_TOPIC, String.class),
headers.get(KafkaHeaders.RECEIVED_PARTITION_ID, Integer.class)),
headers.get(KafkaHeaders.OFFSET, Long.class));
return null;
};
}
More resources can be found at https://docs.spring.io/spring-kafka/reference/htmlsingle/#annotation-error-handling. There are also other links with similar issues: Spring Kafka error handling - v1.1.x and How to handle SerializationException after deserialization.
Use ErrorHandlingDeserializer2. This is a delegating key/value deserializer that catches exceptions, returning them in the headers as serialized java objects.
Under consumer configuration, add/update the below lines:
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer2
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
classOf[ErrorHandlingDeserializer2[JsonDeserializer]].getName)
configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[ErrorHandlingDeserializer2[StringDeserializer]].getName)
configProps.put(ErrorHandlingDeserializer2.KEY_DESERIALIZER_CLASS, classOf[StringDeserializer].getName)
configProps.put(ErrorHandlingDeserializer2.VALUE_DESERIALIZER_CLASS, classOf[JsonDeserializer].getName)
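The snippet above is Scala; an equivalent consumer factory in plain Java might look roughly like this. JsonObject is the value type from the test case earlier in this question, and the trusted-packages setting is just a permissive example, so treat this as a sketch rather than a drop-in configuration.
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer2;
import org.springframework.kafka.support.serializer.JsonDeserializer;
public class SafeConsumerFactory {
    public static DefaultKafkaConsumerFactory<String, TestSpringKafka.JsonObject> consumerFactory(String brokers) {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        configProps.put(ConsumerConfig.GROUP_ID_CONFIG, "TestConsumer");
        // key and value both go through the delegating error-handling deserializer
        configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer2.class);
        configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer2.class);
        // the real deserializers are configured as delegates
        configProps.put(ErrorHandlingDeserializer2.KEY_DESERIALIZER_CLASS, StringDeserializer.class);
        configProps.put(ErrorHandlingDeserializer2.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class);
        // tell the JSON delegate which type to instantiate and which packages to trust
        configProps.put(JsonDeserializer.VALUE_DEFAULT_TYPE, TestSpringKafka.JsonObject.class);
        configProps.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
        return new DefaultKafkaConsumerFactory<>(configProps);
    }
}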

How to serialize a Predicate<T> from Nashorn engine in java 8

How can I serialize a predicate obtained from the Java ScriptEngine Nashorn? Or how can I cast jdk.nashorn.javaadapters.java.util.function.Predicate to Serializable?
Here is the case:
I have this class
import java.io.Serializable;
import java.util.function.Predicate;
public class Filter implements Serializable {
private Predicate<Object> filter;
public Predicate<Object> getFilter() {
return filter;
}
public void setFilter(Predicate<Object> filter) {
this.filter = filter;
}
public boolean evaluate(int value) {
return filter.test(value);
}
}
and
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.function.Predicate;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
public class TestFilterSer {
public static void main(String[] args) throws ScriptException {
Filter f = new Filter();
//This works
//f.setFilter(getCastedPred());
// But I want this to work
f.setFilter(getScriptEnginePred());
System.out.println(f.evaluate(6));
try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(new File("pred.ser")))) {
oos.writeObject(f);
} catch (IOException e) {
e.printStackTrace();
}
f= null;
try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File("pred.ser")))) {
f= (Filter)ois.readObject();
} catch (IOException | ClassNotFoundException e) {
e.printStackTrace();
}
System.out.println(f.evaluate(7));
}
public static Predicate<Object> getCastedPred() {
Predicate<Object> isEven = (Predicate<Object> & Serializable)(i) -> (Integer)i%2 == 0;
return isEven;
}
public static Predicate<Object> getScriptEnginePred() throws ScriptException {
ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");
Predicate<Object> p = (Predicate<Object> & Serializable)engine.eval(
String.format("new java.util.function.Predicate(%s)", "function(i) i%2==0")
);
return p;
}
}
Requirement: to be able to serialize the Predicate obtained from the Nashorn engine.
Observation: when I get the Predicate from the method getCastedPred(), it works because it is a java.util.function.Predicate; it does serialize after casting to Serializable. But when I get the Predicate from the Nashorn engine, internally it returns a jdk.nashorn.javaadapters.java.util.function.Predicate, and this one doesn't serialize; casting to Serializable doesn't work.
Any idea how I can serialize this type of Predicate?
The problem is your API uses Predicate, not AggregateFilter. So the target type for the lambda in
setAggregatePredicate(x -> true)
will be Predicate, not AggregateFilter -- and the compiler won't know to make it serializable. If you change your API to use the more specific functional interface, serializable lambdas will be generated.
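To make that concrete (AggregateFilter above is just an example name, not a type from the question), here is a minimal sketch: declare a functional interface that extends both Predicate and Serializable and use it as the parameter type, and the compiler will generate a serializable lambda without any intersection cast.
import java.io.Serializable;
import java.util.function.Predicate;
// A more specific, serializable functional interface used as the lambda's target type.
interface SerializablePredicate<T> extends Predicate<T>, Serializable {
}
class SerializableFilter {
    private SerializablePredicate<Object> filter;
    // Because the parameter type is SerializablePredicate, any lambda passed in
    // is compiled as a serializable lambda.
    public void setFilter(SerializablePredicate<Object> filter) {
        this.filter = filter;
    }
    public boolean evaluate(int value) {
        return filter.test(value);
    }
}
class SerializableFilterDemo {
    public static void main(String[] args) {
        SerializableFilter f = new SerializableFilter();
        f.setFilter(i -> (Integer) i % 2 == 0); // no (Predicate & Serializable) cast needed
        System.out.println(f.evaluate(6));
    }
}
Note that this only helps for lambdas written in Java; the adapter object returned by the Nashorn engine is a different class entirely and is not covered by this trick.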

Spring restController: how to error when unknown #RequestParam is in url

I'm using Spring 4.2 to create some RESTful web services.
But we realized that when a user mistypes one of the non-mandatory @RequestParam names, we do not get an error saying that the param he passed is unknown.
For example, we have @RequestParam(required=false, value="valueA") String valueA, and in the call he uses '?valuueA=AA' -> we want an error.
But I do not seem to find a way to do this; the value is just ignored and the user is unaware of this.
One possible solution would be to create an implementation of HandlerInterceptor which will verify that all request parameters passed to the handler method are declared in its @RequestParam annotated parameters.
However, you should consider the disadvantages of such a solution. There might be situations where you want to allow certain parameters to be passed in and not be declared as request params. For instance, if you have a request like GET /foo?page=1&offset=0 and a handler with the following signature:
@RequestMapping
public List<Foo> listFoos(PagingParams page);
and PagingParams is a class containing page and offset properties, it will normally be mapped from the request parameters. An implementation of the solution you want would interfere with this Spring MVC functionality.
That being said, here is a sample implementation I had in mind:
public class UndeclaredParamsHandlerInterceptor extends HandlerInterceptorAdapter {
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response,
Object handler) throws Exception {
if (handler instanceof HandlerMethod) {
HandlerMethod handlerMethod = (HandlerMethod) handler;
checkParams(request, getDeclaredRequestParams(handlerMethod));
}
return true;
}
private void checkParams(HttpServletRequest request, Set<String> allowedParams) {
request.getParameterMap().entrySet().forEach(entry -> {
String param = entry.getKey();
if (!allowedParams.contains(param)) {
throw new UndeclaredRequestParamException(param, allowedParams);
}
});
}
private Set<String> getDeclaredRequestParams(HandlerMethod handlerMethod) {
Set<String> declaredRequestParams = new HashSet<>();
MethodParameter[] methodParameters = handlerMethod.getMethodParameters();
ParameterNameDiscoverer parameterNameDiscoverer = new DefaultParameterNameDiscoverer();
for (MethodParameter methodParameter : methodParameters) {
if (methodParameter.hasParameterAnnotation(RequestParam.class)) {
RequestParam requestParam = methodParameter.getParameterAnnotation(RequestParam.class);
if (StringUtils.hasText(requestParam.value())) {
declaredRequestParams.add(requestParam.value());
} else {
methodParameter.initParameterNameDiscovery(parameterNameDiscoverer);
declaredRequestParams.add(methodParameter.getParameterName());
}
}
}
return declaredRequestParams;
}
}
Basically this will do what I described above. You can then add an exception handler for the exception it throws and translate it to an HTTP 400 response; a sketch of such a handler is shown below. I've put a more complete sample on GitHub, which includes a way to selectively enable this behavior for individual handler methods via an annotation.
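A minimal sketch of such an exception handler, assuming the UndeclaredRequestParamException from the interceptor above exposes the offending parameter name through a getParamName() accessor (that accessor is an assumption, it is not shown in the code above):
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
@ControllerAdvice
public class UndeclaredParamsExceptionHandler {
    // Translate the interceptor's exception into a 400 Bad Request with a short message.
    @ExceptionHandler(UndeclaredRequestParamException.class)
    public ResponseEntity<String> handleUndeclaredParam(UndeclaredRequestParamException ex) {
        return ResponseEntity
                .status(HttpStatus.BAD_REQUEST)
                .body("Parameter not expected: " + ex.getParamName()); // hypothetical accessor
    }
}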
I translated Bohuslav Burghardt's solution for Spring WebFlux applications.
I dropped the @DisallowUndeclaredRequestParams annotation class from the GitHub sample because I didn't need it; it just applies the filter to all HandlerMethods. But someone else could update this answer and put it back.
package com.example.springundeclaredparamerror;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.DefaultParameterNameDiscoverer;
import org.springframework.core.MethodParameter;
import org.springframework.core.ParameterNameDiscoverer;
import org.springframework.http.HttpStatus;
import org.springframework.http.server.reactive.ServerHttpRequest;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.reactive.result.method.annotation.RequestMappingHandlerMapping;
import org.springframework.web.server.ServerWebExchange;
import org.springframework.web.server.WebFilter;
import org.springframework.web.server.WebFilterChain;
import reactor.core.publisher.Mono;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
/**
* Handler interceptor used for ensuring that no request params other than those explicitly
* declared via {@link RequestParam} parameters of the handler method are passed in.
*/
// Implementation translated into WebFlux WebFilter from:
// https://github.com/bohuslav-burghardt/spring-sandbox/tree/master/handler-interceptors/src/main/java/handler_interceptors
@Component
public class DisallowUndeclaredParamsFilter implements WebFilter {
private static final Logger LOGGER = LoggerFactory.getLogger(DisallowUndeclaredParamsFilter.class);
@Autowired
@Qualifier("requestMappingHandlerMapping")
RequestMappingHandlerMapping mapping;
@Autowired
ObjectMapper mapper;
@Override
public Mono<Void> filter(ServerWebExchange serverWebExchange, WebFilterChain webFilterChain) {
Object o = mapping.getHandler(serverWebExchange).toFuture().getNow(null);
Optional<String> undeclaredParam = Optional.empty();
if (o != null && o instanceof HandlerMethod) {
var handlerMethod = (HandlerMethod) o;
undeclaredParam = checkParams(serverWebExchange.getRequest(),
getDeclaredRequestParams(handlerMethod));
}
return undeclaredParam.map((param) -> RespondWithError(serverWebExchange, param))
.orElseGet(() -> webFilterChain.filter(serverWebExchange));
}
/** Responds to the request with an error message for the given undeclared parameter. */
private Mono<Void> RespondWithError(ServerWebExchange serverWebExchange, String undeclaredParam) {
final HttpStatus status = HttpStatus.BAD_REQUEST;
serverWebExchange.getResponse().setStatusCode(status);
serverWebExchange.getResponse().getHeaders().add(
"Content-Type", "application/json");
UndeclaredParamErrorResponse response = new UndeclaredParamErrorResponse();
response.message = "Parameter not expected: " + undeclaredParam;
response.statusCode = status.value();
String error = null;
try {
error = mapper.writeValueAsString(response);
} catch (JsonProcessingException e) {
error = "Parameter not expected; error generating JSON response";
LOGGER.warn("Error generating JSON response for undeclared argument", e);
}
return serverWebExchange.getResponse().writeAndFlushWith(
Mono.just(Mono.just(serverWebExchange.getResponse().bufferFactory().wrap(
error.getBytes(StandardCharsets.UTF_8)))));
}
/** Structure for generating error JSON. */
static class UndeclaredParamErrorResponse {
public String message;
public int statusCode;
}
/**
* Check that all of the request params of the specified request are contained within the specified set of allowed
* parameters.
*
* @param request Request whose params to check.
* @param allowedParams Set of allowed request parameters.
* @return Name of a param in the request that is not allowed, or empty if all params in the request are allowed.
*/
private Optional<String> checkParams(ServerHttpRequest request, Set<String> allowedParams) {
return request.getQueryParams().keySet().stream().filter(param ->
!allowedParams.contains(param)
).findFirst();
}
/**
* Extract all request parameters declared via {@link RequestParam} for the specified handler method.
*
* @param handlerMethod Handler method to extract declared params for.
* @return Set of declared request parameters.
*/
private Set<String> getDeclaredRequestParams(HandlerMethod handlerMethod) {
Set<String> declaredRequestParams = new HashSet<>();
MethodParameter[] methodParameters = handlerMethod.getMethodParameters();
ParameterNameDiscoverer parameterNameDiscoverer = new DefaultParameterNameDiscoverer();
for (MethodParameter methodParameter : methodParameters) {
if (methodParameter.hasParameterAnnotation(RequestParam.class)) {
RequestParam requestParam = methodParameter.getParameterAnnotation(RequestParam.class);
if (StringUtils.hasText(requestParam.value())) {
declaredRequestParams.add(requestParam.value());
} else {
methodParameter.initParameterNameDiscovery(parameterNameDiscoverer);
declaredRequestParams.add(methodParameter.getParameterName());
}
}
}
return declaredRequestParams;
}
}
Here's the unit test I wrote for it. I recommend checking it into your codebase as well.
package com.example.springundeclaredparamerror;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.reactive.WebFluxTest;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Mono;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
@RunWith(SpringRunner.class)
@WebFluxTest(controllers = {DisallowUndeclaredParamFilterTest.TestController.class})
public class DisallowUndeclaredParamFilterTest {
private static final String TEST_ENDPOINT = "/disallowUndeclaredParamFilterTest";
@Rule
public final WireMockRule wireMockRule = new WireMockRule(wireMockConfig().dynamicPort());
@Autowired
private WebTestClient webClient;
@Configuration
@Import({TestController.class, DisallowUndeclaredParamsFilter.class})
static class TestConfig {
}
@RestController
static class TestController {
@GetMapping(TEST_ENDPOINT)
public Mono<String> retrieveEntity(@RequestParam(name = "a", required = false) final String a) {
return Mono.just("ok");
}
}
@Test
public void testAllowsNoArgs() {
webClient.get().uri(TEST_ENDPOINT).exchange().expectBody(String.class).isEqualTo("ok");
}
@Test
public void testAllowsDeclaredArg() {
webClient.get().uri(TEST_ENDPOINT + "?a=1").exchange().expectBody(String.class).isEqualTo("ok");
}
@Test
public void testDisallowsUndeclaredArg() {
webClient.get().uri(TEST_ENDPOINT + "?b=1").exchange().expectStatus().is4xxClientError();
}
}