How to assert objects are equal using Hamcrest - testing

I want to write a test that checks whether two objects are the same. When the assertion fails, I want to know which fields match and which do not. I could assert on each field individually, but I was wondering if there is a way to compare the objects as a whole.
Truck Class:
import java.util.List;
public class Truck {
private String model;
private String make;
private int year;
private List<TruckPartsObject> parts;
public Truck(String model, String make, int year) {
super();
this.model = model;
this.make = make;
this.year = year;
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public String getMake() {
return make;
}
public void setMake(String make) {
this.make = make;
}
public int getYear() {
return year;
}
public void setYear(int year) {
this.year = year;
}
public List<TruckPartsObject> getParts() {
return parts;
}
public void setParts(List<TruckPartsObject> parts) {
this.parts = parts;
}
@Override
public String toString() {
return "Truck{" + "model='" + model + '\'' + ", make='" + make + '\''
+ ", year=" + year + ", parts=" + parts + '}';
}
}
TruckPartsObject Class:
import java.util.HashMap;
public class TruckPartsObject {
private String name;
private String price;
private HashMap<String, String> partsHashMap;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getPrice() {
return price;
}
public void setPrice(String price) {
this.price = price;
}
public HashMap<String, String> getPartsHashMap() {
return partsHashMap;
}
public void setPartsHashMap(HashMap<String, String> partsHashMap) {
this.partsHashMap = partsHashMap;
}
@Override
public String toString() {
return "TruckPartsObject{" + "name='" + name + '\'' + ", price='"
+ price + '\'' + ", partsHashMap=" + partsHashMap + '}';
}
}
Test:
@Test
public void truckTest() {
Truck pickupTruck1 = new Truck("Big 10", "Chevy", 1976);
Truck pickupTruck2 = new Truck("Big 10", "Chevy", 1976);
List<TruckPartsObject> parts1 = new ArrayList<TruckPartsObject>();
TruckPartsObject truckPartsObject1 = new TruckPartsObject();
truckPartsObject1.setName("part1");
parts1.add(truckPartsObject1);
pickupTruck1.setParts(parts1);
List<TruckPartsObject> parts2 = new ArrayList<TruckPartsObject>();
TruckPartsObject truckPartsObject2 = new TruckPartsObject();
truckPartsObject2.setName("part1");
parts2.add(truckPartsObject2);
pickupTruck2.setParts(parts2);
System.out.println(pickupTruck1);
System.out.println(pickupTruck2);
// how to assert pickupTruck1 and pickupTruck2 are the same
}
How do I assert that pickupTruck1 and pickupTruck2 contain the same values?

Thanks to a co-worker for the below solution:
Use Shazamcrest (https://github.com/shazam/shazamcrest)
assertThat(pickupTruck1, sameBeanAs(pickupTruck2));
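For reference, a minimal sketch of what the complete test could look like, assuming the com.shazam.shazamcrest entry points from the project's README:
import static com.shazam.shazamcrest.MatcherAssert.assertThat;
import static com.shazam.shazamcrest.matcher.Matchers.sameBeanAs;
import org.junit.Test;
public class TruckTest {
    @Test
    public void trucksMatchFieldByField() {
        Truck pickupTruck1 = new Truck("Big 10", "Chevy", 1976);
        Truck pickupTruck2 = new Truck("Big 10", "Chevy", 1976);
        // On failure, sameBeanAs reports a field-by-field diff of the two beans.
        assertThat(pickupTruck1, sameBeanAs(pickupTruck2));
    }
}
Plain Hamcrest also ships samePropertyValuesAs (in org.hamcrest.beans), but it compares only top-level bean properties, so nested structures like the parts list are better served by sameBeanAs.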

Related

Delete duplicate items from my ArrayList of MyModel

I have tried everything to remove the duplicate elements from my array without losing the JSON data in my SharedPreferences. If I don't add the merged data to my HashSet, I get an array without duplicate elements. This is what I want: to add my model elements to my array without duplications while loading the elements from the JSON.
**Here is my MyModel:**
public class MyModel {
Integer imageRes;
String textRes;
public MyModel(Integer imageRes, String textRes) {
this.imageRes = imageRes;
this.textRes = textRes;
}
public Integer getImageRes() {
return imageRes;
}
public void setImageRes(Integer imageRes) {
this.imageRes = imageRes;
}
public String getTextRes() {
return textRes;
}
public void setTextRes(String textRes) {
this.textRes = textRes;
}
}
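One thing worth noting before going further: HashSet and LinkedHashSet can only detect duplicate MyModel instances if the class overrides equals and hashCode; as written, every instance (including ones reloaded from JSON) counts as distinct. A minimal sketch of the overrides, based on the two fields above:
import java.util.Objects;
public class MyModel {
    Integer imageRes;
    String textRes;
    // constructor and getters/setters as above...

    // Two models are duplicates when both fields match.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof MyModel)) return false;
        MyModel other = (MyModel) o;
        return Objects.equals(imageRes, other.imageRes)
                && Objects.equals(textRes, other.textRes);
    }

    @Override
    public int hashCode() {
        return Objects.hash(imageRes, textRes);
    }
}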
And here is my Holder:
public class MyAdapter extends RecyclerView.Adapter<MyAdapter.myViewHolder> {
public MyAdapter(ArrayList<MyModel> models) {
this.models = models;
}
private final ArrayList<MyModel> models;
@NonNull
@Override
public myViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.items, parent,false);
return new myViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull myViewHolder holder, int position) {
holder.setIsRecyclable(false);
holder.imageItem.setImageResource(models.get(position).getImageRes());
holder.textItem.setText(models.get(position).getTextRes());
}
public ArrayList<MyModel> getModels() {
return models;
}
@Override
public long getItemId(int position) {
return super.getItemId(position);
}
@Override
public int getItemViewType(int position) {
return super.getItemViewType(position);
}
@Override
public int getItemCount() {
return (models == null) ? 0 : models.size();
}
@Override
public void setHasStableIds(boolean hasStableIds) {
super.setHasStableIds(hasStableIds);
}
static class myViewHolder extends RecyclerView.ViewHolder{
ImageView imageItem;
TextView textItem;
public myViewHolder(@NonNull View itemView){
super(itemView);
imageItem = itemView.findViewById(R.id.ItemMarker);
textItem = itemView.findViewById(R.id.itemTxt);
}
}
}
**And here is my problem:**
if(models != null){
try {
try{
SharedPreferences ListDatas = getSharedPreferences("SavedList", MODE_PRIVATE);
Gson gson = new Gson();
String json = ListDatas.getString("ListKey", null);
Type type = new TypeToken<ArrayList<MyModel>>(){}.getType();
models = gson.fromJson(json, type);
list.setHasFixedSize(true);
if(adapter != null){
adapter.setHasStableIds(true);
}
list.setAdapter(adapter);
list.setBackgroundColor(R.color.items);
list.setDuplicateParentStateEnabled(false);
models.ensureCapacity(100);
adapter = new MyAdapter(models);
NewModel = models;
Set<MyModel> DelItems = new HashSet<>();
DelItems.add(new MyModel(R.drawable.marker, "Commande en cours..." + "\n" + "Date : " + dateFormat + "\n" + "Nettoyage enregistré à : " + heure + "H" + minutes));
Set<MyModel> DelItems2 = new HashSet<>(NewModel);
NewModel.clear();
Set<MyModel> DelItemsFinal = new HashSet<>(DelItems2);
DelItemsFinal.addAll(DelItems2);
DelItemsFinal.addAll(DelItems);
Set<MyModel> DelItemsFinal2 = new LinkedHashSet<>(DelItemsFinal);
NewModel.clear();
NewModel.addAll(DelItemsFinal2);
DelItems.clear();
DelItems2.clear();
adapter.notifyDataSetChanged();
lastPosition = NewModel.size() -1;
layoutManager.scrollToPosition(lastPosition);
InfosHisto.setText("Votre historique de commande");
} catch (JsonSyntaxException e) {
e.printStackTrace();
}
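As an aside, once MyModel overrides equals/hashCode (see the sketch after the model class above), the DelItems/DelItems2/DelItemsFinal shuffling could collapse to a few lines. A condensed sketch of just the merge-and-dedup step, reusing the variable names from the snippet:
// Requires MyModel.equals/hashCode; LinkedHashSet preserves insertion order.
Set<MyModel> unique = new LinkedHashSet<>(models);
unique.add(newEntry); // the freshly created "Commande en cours..." MyModel
NewModel.clear();
NewModel.addAll(unique);
adapter.notifyDataSetChanged();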

Kafka consumer receives null value when sending custom object

So I want to implement an application which reads data from JSON format files. I have created a custom object for the data in the JSON, and I want to send these objects through a Kafka topic. So far I have successfully sent String messages from producer to consumer. But when I try to send an object and call .value().toString() on the consumer side, I get a null value. The following is the code I have used:
This is the producer:
public class MyProducer {
public static void main(String[] args) throws Exception {
Properties properties = new Properties();
properties.put("bootstrap.servers", "kafka.kafka-cluster-shared.non-prod-5-az-scus.prod.us.walmart.net:9092");
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer", "xxxxxxxxx.KafkaJsonSerializer");
properties.put("acks", "1");
properties.put("retries", "2");
properties.put("batch.size", "16384");
properties.put("linger.ms", "1");
properties.put("buffer.memory", "33554432");
KafkaProducer<String, pharmacyData> kafkaProducer = new KafkaProducer<String, pharmacyData>(
properties);
String topic = "insights";
//try {
Gson gson = new Gson();
Reader reader = Files.newBufferedReader(Paths.get("......./part.json"));
List<pharmacyData> pdata = new Gson().fromJson(reader, new TypeToken<List<pharmacyData>>() {}.getType());
//pdata.forEach(System.out::println);
reader.close();
//} catch (Exception e) {
//e.printStackTrace();
//}
for (pharmacyData data : pdata) {
kafkaProducer.send(new ProducerRecord<String, pharmacyData>(topic, data), new Callback() {
@Override
public void onCompletion(RecordMetadata recordMetadata, Exception e) {
if (e == null) {
System.out.println(recordMetadata.partition() + "--" + recordMetadata.serializedValueSize());
} else {
e.printStackTrace();
}
}
});
}
kafkaProducer.close();
}
}
This is the custom object class:
public class pharmacyData {
private String load_date;
private String store_nbr;
private String state;
private String pmp_flag;
private String zero_flag;
private String submit_ts;
public pharmacyData(String load_date, String store_nbr, String state, String pmp_flag, String zero_flag, String submit_ts) {
this.load_date = load_date;
this.store_nbr = store_nbr;
this.state = state;
this.pmp_flag = pmp_flag;
this.zero_flag = zero_flag;
this.submit_ts = submit_ts;
}
public String getLoad_date() {
return load_date;
}
public void setLoad_date(String load_date) {
this.load_date = load_date;
}
public String getStore_nbr() {
return store_nbr;
}
public void setStore_nbr(String store_nbr) {
this.store_nbr = store_nbr;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getPmp_flag() {
return pmp_flag;
}
public void setPmp_flag(String pmp_flag) {
this.pmp_flag = pmp_flag;
}
public String getZero_flag() {
return zero_flag;
}
public void setZero_flag(String zero_flag) {
this.zero_flag = zero_flag;
}
public String getSubmit_ts() {
return submit_ts;
}
public void setSubmit_ts(String submit_ts) {
this.submit_ts = submit_ts;
}
@Override
public String toString() {
return "pharmacyData{" +
"load_date='" + load_date + '\'' +
", store_nbr='" + store_nbr + '\'' +
", state='" + state + '\'' +
", pmp_flag='" + pmp_flag + '\'' +
", zero_flag='" + zero_flag + '\'' +
", submit_ts='" + submit_ts + '\'' +
'}';
}
}
This is the custom Serializer:
public class KafkaJsonSerializer implements Serializer {
private Logger logger = LogManager.getLogger(this.getClass());
@Override
public void configure(Map map, boolean b) {
}
@Override
public byte[] serialize(String s, Object o) {
byte[] retVal = null;
ObjectMapper objectMapper = new ObjectMapper();
try {
retVal = objectMapper.writeValueAsBytes(o);
} catch (Exception e) {
logger.error(e.getMessage());
}
return retVal;
}
@Override
public void close() {
}
}
This is the custom Deserializer:
public class KafkaJsonDeserializer implements Deserializer {
@Override
public void configure(Map map, boolean b) {
}
@Override
public Object deserialize(String s, byte[] bytes) {
ObjectMapper mapper = new ObjectMapper();
pharmacyData pdata = null;
try {
pdata = mapper.readValue(bytes, pharmacyData.class);
} catch (Exception e) {
e.printStackTrace();
}
return pdata;
}
@Override
public void close() {
}
}
This is the consumer:
public class MyConsumer {
public static void main(String[] args) {
Properties properties = new Properties();
properties.put("bootstrap.servers", "kafka.kafka-cluster-shared.non-prod-5-az-scus.prod.us.walmart.net:9092");
properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
properties.put("value.deserializer", "xxxxxxxx.KafkaJsonDeserializer");
properties.put("group.id", "consumer-group-1");
properties.put("enable.auto.commit", "true");
properties.put("auto.commit.interval.ms", "1000");
properties.put("auto.offset.reset", "earliest");
properties.put("session.timeout.ms", "30000");
KafkaConsumer<String, pharmacyData> consumer = new KafkaConsumer<>(properties);
String topic = "insights";
consumer.subscribe(Collections.singletonList(topic));
while (true) {
ConsumerRecords<String, pharmacyData> consumerRecords = consumer.poll(100);
for (ConsumerRecord<String, pharmacyData> consumerRecord : consumerRecords) {
System.out.println(consumerRecord.key() + "--" + consumerRecord.toString());
//System.out.println(consumerRecord.offset() + "--" + consumerRecord.partition());
}
}
}
}
Can someone please help me with the issues? Thank you very much!
Problem solved:
The solution to this issue was simply to add a default constructor, as below:
public pharmacyData() {
}
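This works because Jackson's ObjectMapper instantiates the target class through a no-argument constructor before populating its fields; without one, readValue throws inside KafkaJsonDeserializer, the catch block prints the stack trace, and null is returned as the record value. If you would rather keep the parameterized constructor as the only way to build the object, annotating it is a common alternative; a sketch using Jackson's @JsonCreator/@JsonProperty (not from the original post, only one field shown):
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class pharmacyData {
    private final String load_date;
    // ... remaining fields and getters as above ...

    // Tells Jackson to use this constructor instead of a default one.
    @JsonCreator
    public pharmacyData(@JsonProperty("load_date") String load_date) {
        this.load_date = load_date;
    }
}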
See this page for more details.

Delta Propagation in GemFire

When I was trying to implement Delta Propagation in GemFire, I was getting the exception shown below.
Exception...
com.gemstone.gemfire.pdx.PdxSerializationException: Could not create an instance of a class DeltaTesting with root cause
java.lang.ClassNotFoundException: DeltaTesting
at org.apache.geode.internal.ClassPathLoader.forName(ClassPathLoader.java:437)
at org.apache.geode.internal.InternalDataSerializer.getCachedClass(InternalDataSerializer.java:4010)
at org.apache.geode.pdx.internal.PdxType.getPdxClass(PdxType.java:235)
at org.apache.geode.pdx.internal.PdxReaderImpl.basicGetObject(PdxReaderImpl.java:687)
at org.apache.geode.pdx.internal.PdxReaderImpl.getObject(PdxReaderImpl.java:682)
Code...
@Region("deltaTesting")
public class DeltaTesting implements Delta,Serializable {
private static final long serialVersionUID = 1L;
public DeltaTesting(){
}
public DeltaTesting(String id, String firstName, String lastName){
this.id = id;
this.firstName = firstName;
this.lastName = lastName;
}
@Id
private String id;
private String firstName;
private String lastName;
private transient boolean stringId = false;
private transient boolean stringFirstName = false;
private transient boolean stringLastName = false;
public String getId() {
return id;
}
public void setId(String id) {
this.stringId = true;
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.stringFirstName = true;
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.stringLastName = true;
this.lastName = lastName;
}
@Override
public void fromDelta(DataInput in) throws IOException, InvalidDeltaException {
// TODO Auto-generated method stub
System.out.println("Applying delta to " + this.toString());
// For each field, read whether there is a change
if (in.readBoolean()) {
// Read the change and apply it to the object
this.id = in.readUTF(); // readUTF pairs with writeUTF in toDelta
System.out.println(" Applied delta to field 'id' = "
+ this.id);
}
if (in.readBoolean()) {
// Read the change and apply it to the object
this.firstName = in.readUTF();
System.out.println(" Applied delta to field 'firstname' = "
+ this.firstName);
}
if (in.readBoolean()) {
// Read the change and apply it to the object
this.lastName = in.readUTF();
System.out.println(" Applied delta to field 'lastname' = "
+ this.lastName);
}
}
@Override
public boolean hasDelta() {
// TODO Auto-generated method stub
return this.stringId || this.stringFirstName || this.stringLastName;
}
@Override
public void toDelta(DataOutput out) throws IOException {
out.writeBoolean(stringId);
// TODO Auto-generated method stub
if (stringId) {
out.writeUTF(this.id); // writeUTF/readUTF keep the stream aligned (writeBytes/readLine do not)
this.stringId = false;
System.out.println(" Extracted delta from field 'id' = " + this.id);
}
out.writeBoolean(stringFirstName);
if (stringFirstName) {
out.writeUTF(this.firstName);
this.stringFirstName = false;
System.out.println(" Extracted delta from field 'firstName' = " + this.firstName);
}
out.writeBoolean(stringLastName);
if (stringLastName) {
out.writeUTF(this.lastName);
this.stringLastName = false;
System.out.println(" Extracted delta from field 'lastName' = " + this.lastName);
}
}
}
**cache.xml**
<pdx>
<pdx-serializer>
<class-name>com.gemstone.gemfire.pdx.ReflectionBasedAutoSerializer</class-name>
<parameter name="classes">
<string>com\.xxx\..+</string>
</parameter>
</pdx-serializer>
</pdx>
@Vigneshwara -
Delta Propagation is not supported by PDX Serialization in GemFire. You must use the GemFire Data Serialization framework (i.e. DataSerializable and DataSerializers) with Deltas instead.
In short... if you use PDX, then you cannot use Deltas. If you use Deltas, then you cannot use PDX.
Sorry,
-j
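To make that concrete, a minimal sketch of the Data Serialization route (assuming the org.apache.geode APIs from the stack trace; older GemFire releases expose the same interfaces under com.gemstone.gemfire, and fields are assumed non-null for brevity):
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.geode.DataSerializable;
import org.apache.geode.Delta;
import org.apache.geode.InvalidDeltaException;
// Full-object writes go through DataSerializable; incremental updates go
// through Delta. PDX is not involved at all.
public class DeltaTesting implements DataSerializable, Delta {
    private String id;
    private String firstName;
    private String lastName;
    private transient boolean idChanged;

    public DeltaTesting() {} // required for deserialization

    public void setId(String id) {
        this.idChanged = true;
        this.id = id;
    }

    @Override
    public void toData(DataOutput out) throws IOException {
        out.writeUTF(id);
        out.writeUTF(firstName);
        out.writeUTF(lastName);
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
        this.id = in.readUTF();
        this.firstName = in.readUTF();
        this.lastName = in.readUTF();
    }

    @Override
    public boolean hasDelta() {
        return idChanged;
    }

    @Override
    public void toDelta(DataOutput out) throws IOException {
        out.writeBoolean(idChanged);
        if (idChanged) {
            out.writeUTF(id);
            idChanged = false;
        }
    }

    @Override
    public void fromDelta(DataInput in) throws IOException, InvalidDeltaException {
        if (in.readBoolean()) {
            this.id = in.readUTF();
        }
    }
}
It may also be worth excluding the class from the ReflectionBasedAutoSerializer pattern in cache.xml so that PDX never claims it.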

Embedded Neo4j delete node and Lucene legacy indexing - node_auto_indexing out of sync issue

I'm trying to delete a node whose fields are in node_auto_indexing.
When I try to delete the node using repository.delete(id) and right after that fetch the deleted node by its id, I get the following exception:
java.lang.IllegalStateException: This index (Index[__rel_types__,Relationship]) has been marked as deleted in this transaction
at org.neo4j.index.impl.lucene.LuceneTransaction$DeletedTxDataBoth.illegalStateException(LuceneTransaction.java:475)
at org.neo4j.index.impl.lucene.LuceneTransaction$DeletedTxDataBoth.removed(LuceneTransaction.java:470)
at org.neo4j.index.impl.lucene.LuceneTransaction.remove(LuceneTransaction.java:112)
at org.neo4j.index.impl.lucene.LuceneXaConnection.remove(LuceneXaConnection.java:116)
at org.neo4j.index.impl.lucene.LuceneIndex.remove(LuceneIndex.java:215)
at org.springframework.data.neo4j.support.typerepresentation.AbstractIndexBasedTypeRepresentationStrategy.remove(AbstractIndexBasedTypeRepresentationStrategy.java:113)
at org.springframework.data.neo4j.support.typerepresentation.AbstractIndexBasedTypeRepresentationStrategy.preEntityRemoval(AbstractIndexBasedTypeRepresentationStrategy.java:100)
at org.springframework.data.neo4j.support.mapping.EntityRemover.removeRelationship(EntityRemover.java:63)
at org.springframework.data.neo4j.support.mapping.EntityRemover.removeNode(EntityRemover.java:51)
at org.springframework.data.neo4j.support.mapping.EntityRemover.removeNodeEntity(EntityRemover.java:45)
at org.springframework.data.neo4j.support.mapping.EntityRemover.remove(EntityRemover.java:85)
at org.springframework.data.neo4j.support.Neo4jTemplate.delete(Neo4jTemplate.java:267)
at org.springframework.data.neo4j.repository.AbstractGraphRepository.delete(AbstractGraphRepository.java:276)
at org.springframework.data.neo4j.repository.AbstractGraphRepository.delete(AbstractGraphRepository.java:282)
Also, when I try to delete the node via a Cypher query,
@Query("MATCH ()-[r]-(p:Product) WHERE id(p) = {productId} DELETE r, p")
void deleteProduct(@Param("productId") Long productId);
I get another exception after looking up the deleted node by its id:
java.lang.IllegalStateException: No primary SDN label exists .. (i.e one starting with _)
at org.springframework.data.neo4j.support.typerepresentation.LabelBasedNodeTypeRepresentationStrategy.readAliasFrom(LabelBasedNodeTypeRepresentationStrategy.java:126)
at org.springframework.data.neo4j.support.typerepresentation.LabelBasedNodeTypeRepresentationStrategy.readAliasFrom(LabelBasedNodeTypeRepresentationStrategy.java:39)
at org.springframework.data.neo4j.support.mapping.TRSTypeAliasAccessor.readAliasFrom(TRSTypeAliasAccessor.java:36)
at org.springframework.data.neo4j.support.mapping.TRSTypeAliasAccessor.readAliasFrom(TRSTypeAliasAccessor.java:26)
at org.springframework.data.convert.DefaultTypeMapper.readType(DefaultTypeMapper.java:102)
at org.springframework.data.convert.DefaultTypeMapper.getDefaultedTypeToBeUsed(DefaultTypeMapper.java:165)
at org.springframework.data.convert.DefaultTypeMapper.readType(DefaultTypeMapper.java:142)
at org.springframework.data.neo4j.support.mapping.Neo4jEntityConverterImpl.read(Neo4jEntityConverterImpl.java:78)
at org.springframework.data.neo4j.support.mapping.Neo4jEntityPersister$CachedConverter.read(Neo4jEntityPersister.java:170)
at org.springframework.data.neo4j.support.mapping.Neo4jEntityPersister.createEntityFromState(Neo4jEntityPersister.java:189)
at org.springframework.data.neo4j.support.Neo4jTemplate.createEntityFromState(Neo4jTemplate.java:224)
at org.springframework.data.neo4j.repository.AbstractGraphRepository.createEntity(AbstractGraphRepository.java:62)
at org.springframework.data.neo4j.repository.AbstractGraphRepository.findOne(AbstractGraphRepository.java:127)
at org.springframework.data.neo4j.repository.AbstractGraphRepository.delete(AbstractGraphRepository.java:282)
How do I correctly delete a node that participates in legacy Lucene indexing (node_auto_indexing)? How do I remove this node from the Lucene index?
UPDATED:
This is my Neo4jConfig:
@Configuration
@EnableNeo4jRepositories(basePackages = "com.example")
@EnableTransactionManagement
public class Neo4jConfig extends Neo4jConfiguration implements BeanFactoryAware {
@Resource
private Environment environment;
private BeanFactory beanFactory;
public Neo4jConfig() {
setBasePackage("com.example");
}
@Bean(destroyMethod = "shutdown")
public GraphDatabaseService graphDatabaseService() {
GraphDatabaseService graphDb = new GraphDatabaseFactory()
.newEmbeddedDatabaseBuilder("target/example-test-db")
.setConfig(GraphDatabaseSettings.node_keys_indexable, "name,description")
.setConfig(GraphDatabaseSettings.node_auto_indexing, "true")
.newGraphDatabase();
return graphDb;
}
/**
* Hook into the application lifecycle and register listeners that perform
* behaviour across types of entities during this life cycle
*
*/
@Bean
protected ApplicationListener<BeforeSaveEvent<BaseEntity>> beforeSaveEventApplicationListener() {
return new ApplicationListener<BeforeSaveEvent<BaseEntity>>() {
@Override
public void onApplicationEvent(BeforeSaveEvent<BaseEntity> event) {
BaseEntity entity = event.getEntity();
if (entity.getCreateDate() == null) {
entity.setCreateDate(new Date());
} else {
entity.setUpdateDate(new Date());
}
}
};
}
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
this.beanFactory = beanFactory;
}
public BeanFactory getBeanFactory() {
return beanFactory;
}
}
Base entity for entities in the project:
public class BaseEntity {
private Date createDate;
private Date updateDate;
public BaseEntity() {
}
public Date getCreateDate() {
return createDate;
}
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
public Date getUpdateDate() {
return updateDate;
}
public void setUpdateDate(Date updateDate) {
this.updateDate = updateDate;
}
}
and the Vote entity that I tried to delete:
@NodeEntity
public class Vote extends BaseEntity {
private static final String VOTED_ON = "VOTED_ON";
private final static String VOTED_FOR = "VOTED_FOR";
private static final String CREATED_BY = "CREATED_BY";
@GraphId
private Long id;
@RelatedTo(type = VOTED_FOR, direction = Direction.OUTGOING)
private Decision decision;
@RelatedTo(type = VOTED_ON, direction = Direction.OUTGOING)
private Criterion criterion;
@RelatedTo(type = CREATED_BY, direction = Direction.OUTGOING)
private User author;
private double weight;
private String description;
public Vote() {
}
public Vote(Decision decision, Criterion criterion, User author, double weight, String description) {
this.decision = decision;
this.criterion = criterion;
this.author = author;
this.weight = weight;
this.description = description;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Decision getDecision() {
return decision;
}
public void setDecision(Decision decision) {
this.decision = decision;
}
public Criterion getCriterion() {
return criterion;
}
public void setCriterion(Criterion criterion) {
this.criterion = criterion;
}
public User getAuthor() {
return author;
}
public void setAuthor(User author) {
this.author = author;
}
public double getWeight() {
return weight;
}
public void setWeight(double weight) {
this.weight = weight;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Vote vote = (Vote) o;
if (id == null)
return super.equals(o);
return id.equals(vote.id);
}
@Override
public int hashCode() {
return id != null ? id.hashCode() : super.hashCode();
}
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
}
}
Thanks to @MichaelHunger and Neo4j, this issue has been fixed in Neo4j 2.2.2 and SDN 3.4.0.M1.

Orika property expression mapping

Given
classA {
long fahr;
....
and
classB {
long cels;
....
how can I map the following in Orika?
A.fahr <-> (B.cels*9)/5
Do I need a customised Mapper or Filter?
I would suggest a field-level converter if the two fields were of different data types, but since they are of the same data type we have to use a custom converter for the entire class.
This is a sample converter suitable for this use case.
import ma.glasnost.orika.BoundMapperFacade;
import ma.glasnost.orika.MapperFactory;
import ma.glasnost.orika.converter.ConverterFactory;
import ma.glasnost.orika.impl.DefaultMapperFactory;
public class EntryClass {
public static void main(String[] args) {
EntryClass ec = new EntryClass();
BoundMapperFacade<A, B> facade = getMapperFactory().getMapperFacade(A.class, B.class);
A fahr = new A(455);
B cels = facade.map(fahr);
System.out.println(cels);
A revFahr = facade.mapReverse(cels);
System.out.println(revFahr);
}
private static MapperFactory getMapperFactory() {
MapperFactory factory = new DefaultMapperFactory.Builder()
.build();
ConverterFactory cfactory = factory.getConverterFactory();
cfactory.registerConverter(new FahrCelsConverter());
factory.classMap(A.class, B.class)
.field("fahr", "cels")
.byDefault()
.register();
return factory;
}
}
public class A {
long fahr;
public A(long fahr) {
this.fahr = fahr;
}
public long getFahr() {
return fahr;
}
public void setFahr(long fahr) {
this.fahr = fahr;
}
@Override
public String toString() {
return "A [fahr=" + fahr + "]";
}
}
public class B {
long cels;
public B(long cels) {
this.cels = cels;
}
public long getCels() {
return cels;
}
public void setCels(long cels) {
this.cels = cels;
}
@Override
public String toString() {
return "B [cels=" + cels + "]";
}
}
import ma.glasnost.orika.MappingContext;
import ma.glasnost.orika.converter.BidirectionalConverter;
import ma.glasnost.orika.metadata.Type;
public class FahrCelsConverter extends BidirectionalConverter<A, B>
{
@Override
public B convertTo(A source, Type<B> destinationType, MappingContext mappingContext) {
if(source != null)
{
return new B((source.fahr - 32) * 5 / 9);
}
return null;
}
@Override
public A convertFrom(B source, Type<A> destinationType, MappingContext mappingContext) {
if(source != null)
{
return new A((source.cels * 9) / 5 + 32); // multiply before dividing to avoid integer-division truncation
}
return null;
}
}
It would be more suitable to use a converter registered by id (a field-level converter).
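For completeness, a minimal sketch of that id-based, field-level registration (assuming Orika's registerConverter(String, Converter) and fieldMap(...).converter(id) APIs; shown for the A-to-B direction only, since with two long fields a bidirectional converter cannot infer direction from the types, and it assumes B is given a no-arg constructor plus setter so Orika can instantiate it):
import ma.glasnost.orika.CustomConverter;
import ma.glasnost.orika.MapperFactory;
import ma.glasnost.orika.MappingContext;
import ma.glasnost.orika.impl.DefaultMapperFactory;
import ma.glasnost.orika.metadata.Type;
public class FieldConverterExample {
    public static void main(String[] args) {
        MapperFactory factory = new DefaultMapperFactory.Builder().build();
        // Registered under an id, so the converter applies only where referenced.
        factory.getConverterFactory().registerConverter("fahrToCels",
                new CustomConverter<Long, Long>() {
                    @Override
                    public Long convert(Long source, Type<? extends Long> destinationType,
                                        MappingContext mappingContext) {
                        return (source - 32) * 5 / 9; // Fahrenheit -> Celsius
                    }
                });
        factory.classMap(A.class, B.class)
                .fieldMap("fahr", "cels").converter("fahrToCels").add()
                .register();
        System.out.println(factory.getMapperFacade().map(new A(455), B.class)); // B [cels=235]
    }
}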