Convert String to Date with null safety - kotlin

I'm using @TypeConverter in Room to convert a String to a Date (datetime). Here is the code:
public class DateTimeConverter {
    @TypeConverter
    public static Date stringToDate(String value) {
        DateFormat df = new SimpleDateFormat(Constants.SQLITE_DATE_TIMEFORMAT, Locale.US);
        if (value != null) {
            try {
                return df.parse(value);
            } catch (ParseException e) {
                e.printStackTrace();
            }
        }
        return null;
    }

    @TypeConverter
    public static String dateToString(Date value) {
        DateFormat df = new SimpleDateFormat(Constants.SQLITE_DATE_TIMEFORMAT, Locale.US);
        if (value != null) {
            return df.format(value);
        } else {
            return null;
        }
    }
}
@Entity
@TypeConverters(DateTimeConverter::class)
data class Entity(
    var writeDate: Date = Date() // java.util.Date
)
My current issue is:
stringToDate receives value = null, which results in Entity.writeDate being null, which causes a runtime exception.
Question
How to convert string to Date with null safety? The value of writeDate in the table is never null, but stringToDate still receives value = null.
Note:
Using SDK > 23, so I can't use DateTimeFormatter.ofPattern (it needs API 26+).

The value of writeDate in the table is never null, but stringToDate still receives value = null.
Your issue would be to determine why writeDate is being extracted as null; that would be within the functions in classes annotated with @Dao.
However, you could use the following to ensure that nulls are never returned:
public class DateTimeConverter {
    @TypeConverter
    public static Date stringToDate(String value) {
        Date defaultDate = new Date(0); // 1970-01-01 00:00:00
        DateFormat df = new SimpleDateFormat(Constants.SQLITE_DATE_TIMEFORMAT, Locale.US);
        if (value != null) {
            try {
                return df.parse(value);
            } catch (ParseException e) {
                e.printStackTrace();
            }
        }
        return defaultDate;
    }

    @TypeConverter
    public static String dateToString(Date value) {
        DateFormat df = new SimpleDateFormat(Constants.SQLITE_DATE_TIMEFORMAT, Locale.US);
        if (value != null) {
            return df.format(value);
        } else {
            return "1970-01-01 00:00:00";
        }
    }
}
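Since the question is tagged Kotlin, the same null-safe converters can also be written as a Kotlin object. This is a minimal sketch, assuming Constants.SQLITE_DATE_TIMEFORMAT is a pattern like "yyyy-MM-dd HH:mm:ss" (swap in the real constant):

import androidx.room.TypeConverter
import java.text.ParseException
import java.text.SimpleDateFormat
import java.util.Date
import java.util.Locale

object DateTimeConverter {
    // assumed value of Constants.SQLITE_DATE_TIMEFORMAT
    private const val PATTERN = "yyyy-MM-dd HH:mm:ss"

    @JvmStatic
    @TypeConverter
    fun stringToDate(value: String?): Date =
        value?.let {
            try {
                SimpleDateFormat(PATTERN, Locale.US).parse(it)
            } catch (e: ParseException) {
                null
            }
        } ?: Date(0) // fall back to 1970-01-01 00:00:00 rather than null

    @JvmStatic
    @TypeConverter
    fun dateToString(value: Date?): String =
        value?.let { SimpleDateFormat(PATTERN, Locale.US).format(it) }
            ?: "1970-01-01 00:00:00"
}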
Here's a demonstration that uses the modified converters in conjunction with the @Dao class AllDao:
@Dao
abstract class AllDao {
    @Insert(onConflict = IGNORE)
    abstract fun insert(entity: Entity)

    /* Delete all rows */
    @Query("DELETE FROM entity")
    abstract fun clear()

    /* Get inserted data */
    @Query("SELECT * FROM entity")
    abstract fun getAllFromEntity(): List<Entity>

    /* Purposefully get 2 invalid dates (1 rubbish date, 1 NULL) */
    @Query("SELECT -999 AS id,'invaliddate' AS writeDate UNION SELECT -123 AS id, NULL as writeDate")
    abstract fun getMessedUpDate(): List<Entity>
}
getMessedUpDate is designed to do as it says: it gets dates that would be turned into nulls by the TypeConverters in the question, but not by the modified TypeConverters in this answer.
and then using:
db = TheDatabase.getInstance(this)
dao = db.getAllDao()
dao.clear()
// note: this demo assumes Entity also declares a primary key, e.g. @PrimaryKey(autoGenerate = true) var id: Long = 0
dao.insert(Entity())
val dateInt = System.currentTimeMillis()
dao.insert(Entity(writeDate = Date(dateInt)))
dao.insert(Entity(1000, Date(System.currentTimeMillis() - (100L /*days*/ * 24 /*hours*/ * 60 /*mins*/ * 60 /*secs*/ * 1000))))
for (e: Entity in dao.getAllFromEntity()) {
    Log.d("DBINFO", "Date is ${e.writeDate} ID is ${e.id}")
}
for (e: Entity in dao.getMessedUpDate()) {
    Log.d("DBINFO", "Date is ${e.writeDate} ID is ${e.id}")
}
The log includes:
2021-11-06 07:40:38.063 D/DBINFO: Date is Sat Nov 06 07:40:38 GMT+11:00 2021 ID is 1
2021-11-06 07:40:38.064 D/DBINFO: Date is Sat Nov 06 07:40:38 GMT+11:00 2021 ID is 2
2021-11-06 07:40:38.064 D/DBINFO: Date is Fri Nov 05 17:46:12 GMT+11:00 2021 ID is 1000
2021-11-06 07:40:38.069 D/DBINFO: Date is Thu Jan 01 10:00:00 GMT+10:00 1970 ID is -999
2021-11-06 07:40:38.069 D/DBINFO: Date is Thu Jan 01 10:00:00 GMT+10:00 1970 ID is -123
i.e. the 4th and 5th lines have returned the "default date" rather than null.


Kotlin: create a list from another list

I want to create a list in Kotlin that contains items of another list, grouped into chains where one item's endDate equals the next item's startDate, and so on.
Example:
listOf(
    {id1, startDate=1, endDate=3},
    {id3, startDate=5, endDate=6},
    {id2, startDate=3, endDate=5},
    {id4, startDate=10, endDate=12},
    {id5, startDate=12, endDate=13},
    {id6, startDate=13, endDate=16})
Result: listOf([{id1}, {id2}, {id3}], [{id4}, {id5}, {id6}]) // these are two items
With the given dataset this problem looks innocent at first glance, but it can quickly grow into a more complex one. Imagine a dataset with the potential for multiple possible results: should the longest possible chains be preferred, or a result with balanced chain sizes?
A naive implementation might look like this (written inside a Kotest spec).
data class ListItem(
    val id: String,
    val startDate: Int,
    val endDate: Int
)

given("another StackOverflow issue") {
    val coll = listOf(
        ListItem("id1", startDate = 1, endDate = 3),
        ListItem("id3", startDate = 5, endDate = 6),
        ListItem("id2", startDate = 3, endDate = 5),
        ListItem("id4", startDate = 10, endDate = 12),
        ListItem("id5", startDate = 12, endDate = 13),
        ListItem("id6", startDate = 13, endDate = 16)
    )
    `when`("linking chain") {
        /** final result ends up here */
        val chains: MutableList<MutableList<ListItem>> = mutableListOf()
        /** populate deque with items ordered by startDate */
        val arrayDeque = ArrayDeque(coll.sortedBy { it.startDate })
        /** loop is iterated at least once, hence do/while */
        do {
            /** add a new chain */
            chains.add(mutableListOf())
            /** get first element for chain */
            var currentItem: ListItem = arrayDeque.removeFirst()
            /** add first element to current chain */
            chains.last().add(currentItem)
            /** add items to current chain until chain is broken */
            while (arrayDeque.any { it.startDate == currentItem.endDate }) {
                /** get next element to add to chain and remove it from deque */
                currentItem = arrayDeque
                    .first { it.startDate == currentItem.endDate }
                    .also { arrayDeque.remove(it) }
                chains.last().add(currentItem)
            }
        } while (arrayDeque.any())
        then("result should be as expected") {
            chains.size shouldBe 2
            chains.first().size shouldBe 3
            chains.last().size shouldBe 3
            chains.flatMap { it.map { innerItem -> innerItem.id } } shouldBe listOf(
                "id1",
                "id2",
                "id3",
                "id4",
                "id5",
                "id6",
            )
        }
    }
}
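Since each chain in this example is contiguous once the items are sorted by startDate, a more compact variant could fold over the sorted list. A sketch under that assumption (it does not handle chains that interleave after sorting, which the deque version above does):

fun chain(items: List<ListItem>): List<List<ListItem>> =
    items.sortedBy { it.startDate }
        .fold(mutableListOf<MutableList<ListItem>>()) { acc, item ->
            // extend the last chain if it links up, otherwise start a new chain
            if (acc.isNotEmpty() && acc.last().last().endDate == item.startDate) {
                acc.last().add(item)
            } else {
                acc.add(mutableListOf(item))
            }
            acc
        }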

Springboot - Weekdays count query

My use-cases:
We have an installation schedule entity (see the code below) with an installation date.
Once installation has completed, after 4 weekdays we verify the installation status with customers.
Note: the 4-weekday count is configurable, so really it's 'X' weekdays.
Weekdays means Monday to Friday; we don't care about other holidays.
I have a scheduler that will retrieve these orders after 'X' weekdays - this is where I'm stuck.
I don't know how to write a query for "after 'X' weekdays".
My code:
@Entity
@Table(schema = "myschema", name = "installation_dates")
@Getter
@Setter
@NoArgsConstructor
public class InstallDates extends TransEntity implements Serializable {
    // other columns
    @Column(name = "installation_schedule_datetime")
    private LocalDateTime installationScheduleDatetime; // I use this column for calculation

    @Formula("getWeekDaysCount(installationScheduleDatetime)")
    private int weekDaysCount;

    public int getWeekDaysCount(LocalDateTime installationScheduleDatetime) {
        int totalWeekDays = 0;
        LocalDateTime todayDate = LocalDateTime.now();
        while (!installationScheduleDatetime.isAfter(todayDate)) {
            switch (installationScheduleDatetime.getDayOfWeek()) {
                case SATURDAY: // skip weekend days (weekdays are Monday to Friday)
                case SUNDAY:
                    break;
                default:
                    totalWeekDays++;
                    break;
            }
            installationScheduleDatetime = installationScheduleDatetime.plusDays(1);
        }
        return totalWeekDays;
    }
}
Question:
How do I write a SQL, JPQL, or JPA query for weekdays?
I know it's a very basic question. I'm a mobile app developer who recently joined the Spring Boot team, and it's really hard for me :(
Feel free to give your valuable feedback!
I have the following suggestion, if I've understood the problem correctly.
Java:
Take the current date.
Find the date of interest: the current date minus 4 workdays (so if today is Friday, subtract 4 days; if it is Monday, subtract 2 days for the weekend plus 4 more for the weekdays).
Then write a query that selects all installations that were done on the date of interest.
In pseudo code:
select * from installations where installation_date = <date of interest>;
Date of interest Java code:
public LocalDateTime getDateOfInterest(int workdays) {
    LocalDateTime currentDate = LocalDateTime.now();
    if (workdays < 1) {
        return currentDate;
    }
    // subtract 'X' working days from the current date
    LocalDateTime result = currentDate;
    int subtractedDays = 0;
    while (subtractedDays < workdays) {
        result = result.minusDays(1);
        if (!(result.getDayOfWeek() == DayOfWeek.SATURDAY ||
              result.getDayOfWeek() == DayOfWeek.SUNDAY)) { // weekend days don't count
            ++subtractedDays;
        }
    }
    return result;
}
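To make this concrete, the scheduler could combine getDateOfInterest with a Spring Data JPA repository query that matches the whole day of interest. A sketch with assumed repository/method names and an assumed Long id type:

import java.time.LocalDateTime;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

public interface InstallDatesRepository extends JpaRepository<InstallDates, Long> {
    // rows whose schedule datetime falls anywhere on the given day
    @Query("SELECT i FROM InstallDates i "
         + "WHERE i.installationScheduleDatetime >= :dayStart "
         + "AND i.installationScheduleDatetime < :dayEnd")
    List<InstallDates> findScheduledOn(@Param("dayStart") LocalDateTime dayStart,
                                       @Param("dayEnd") LocalDateTime dayEnd);
}

The scheduler would then do something like:

LocalDateTime dayStart = getDateOfInterest(4).toLocalDate().atStartOfDay();
List<InstallDates> toVerify = repository.findScheduledOn(dayStart, dayStart.plusDays(1));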
First, let's answer your questions:
No, you cannot call a method from @Formula.
You probably could do it in native SQL (see here), but that might depend on your database.
The fact that you use an entity and JPA doesn't mean everything has to be a JPA property.
You could:
Write a get method that calculates it on the fly.
Write a getter which sets it lazily.
Use @PostLoad to always set it.
@Entity
@Table(schema = "myschema", name = "installation_dates")
@Getter
@Setter
@NoArgsConstructor
public class InstallDates extends TransEntity implements Serializable {
    // other columns
    @Column(name = "installation_schedule_datetime")
    private LocalDateTime installationScheduleDatetime; // I use this column for calculation

    public int getWeekDaysCount() {
        int totalWeekDays = 0;
        LocalDateTime isdt = this.installationScheduleDatetime;
        LocalDateTime todayDate = LocalDateTime.now();
        while (!isdt.isAfter(todayDate)) {
            switch (isdt.getDayOfWeek()) {
                case SATURDAY: // skip weekend days (weekdays are Monday to Friday)
                case SUNDAY:
                    break;
                default:
                    totalWeekDays++;
                    break;
            }
            isdt = isdt.plusDays(1);
        }
        return totalWeekDays;
    }
}
Or if you really want it to be a property, you could use the getter to set it lazily.
@Entity
@Table(schema = "myschema", name = "installation_dates")
@Getter
@Setter
@NoArgsConstructor
public class InstallDates extends TransEntity implements Serializable {
    // other columns
    @Column(name = "installation_schedule_datetime")
    private LocalDateTime installationScheduleDatetime; // I use this column for calculation

    private int weekDaysCount = -1;

    public int getWeekDaysCount() {
        if (weekDaysCount == -1) {
            int totalWeekDays = 0;
            LocalDateTime isdt = this.installationScheduleDatetime;
            LocalDateTime todayDate = LocalDateTime.now();
            while (!isdt.isAfter(todayDate)) {
                switch (isdt.getDayOfWeek()) {
                    case SATURDAY: // skip weekend days (weekdays are Monday to Friday)
                    case SUNDAY:
                        break;
                    default:
                        totalWeekDays++;
                        break;
                }
                isdt = isdt.plusDays(1);
            }
            weekDaysCount = totalWeekDays;
        }
        return weekDaysCount;
    }
}
Or, if you always want that value calculated, you could initialize it from a method annotated with @PostLoad (you could even reuse the lazy getter above for this), or move the init code into the @PostLoad-annotated method.
@PostLoad
private void initValues() {
    getWeekDaysCount();
}
@Formula specifies an expression written in native SQL that is used to read the value of an attribute instead of storing the value in a Column. (https://docs.jboss.org/hibernate/orm/current/javadocs/org/hibernate/annotations/Formula.html)
As for your case, it doesn't look like there's much use in storing weekDaysCount in the DB if it's derived from installationScheduleDatetime. I'd just mark weekDaysCount as @Transient and be done with it (the @Formula should be removed).
Another solution would be to leave weekDaysCount non-transient and put your calculations in a @PreUpdate/@PrePersist method. See https://www.baeldung.com/jpa-entity-lifecycle-events for more info on that.
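A minimal sketch of the @Transient approach, reusing the entity and the weekday loop from above (javax.persistence imports for @Entity, @Transient, and @PostLoad assumed):

@Entity
@Table(schema = "myschema", name = "installation_dates")
public class InstallDates extends TransEntity implements Serializable {

    @Column(name = "installation_schedule_datetime")
    private LocalDateTime installationScheduleDatetime;

    @Transient // derived value, never stored in the DB
    private int weekDaysCount;

    @PostLoad
    private void computeWeekDaysCount() {
        int totalWeekDays = 0;
        LocalDateTime isdt = this.installationScheduleDatetime;
        LocalDateTime todayDate = LocalDateTime.now();
        while (!isdt.isAfter(todayDate)) {
            DayOfWeek dow = isdt.getDayOfWeek();
            if (dow != DayOfWeek.SATURDAY && dow != DayOfWeek.SUNDAY) {
                totalWeekDays++;
            }
            isdt = isdt.plusDays(1);
        }
        this.weekDaysCount = totalWeekDays;
    }
}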

How to use a Spark SQL UDAF to implement window counting with a condition?

I have a table with columns timestamp, id, and condition, and I want to count the number of each id per interval, such as every 10 seconds.
If condition is true, the count increments; otherwise it returns the previous value.
The UDAF code looks like:
public class MyCount extends UserDefinedAggregateFunction {
    @Override
    public StructType inputSchema() {
        return DataTypes.createStructType(
            Arrays.asList(
                DataTypes.createStructField("condition", DataTypes.BooleanType, true),
                DataTypes.createStructField("timestamp", DataTypes.LongType, true),
                DataTypes.createStructField("interval", DataTypes.IntegerType, true)
            )
        );
    }

    @Override
    public StructType bufferSchema() {
        return DataTypes.createStructType(
            Arrays.asList(
                DataTypes.createStructField("timestamp", DataTypes.LongType, true),
                DataTypes.createStructField("count", DataTypes.LongType, true)
            )
        );
    }

    @Override
    public DataType dataType() {
        return DataTypes.LongType;
    }

    @Override
    public boolean deterministic() {
        return true;
    }

    @Override
    public void initialize(MutableAggregationBuffer mutableAggregationBuffer) {
        mutableAggregationBuffer.update(0, 0L);
        mutableAggregationBuffer.update(1, 0L);
    }

    @Override
    public void update(MutableAggregationBuffer mutableAggregationBuffer, Row row) {
        long timestamp = mutableAggregationBuffer.getLong(0);
        long count = mutableAggregationBuffer.getLong(1);
        long event_time = row.getLong(1);
        int interval = row.getInt(2);
        if (event_time > timestamp + interval) {
            timestamp = event_time - event_time % interval;
            count = 0;
        }
        if (row.getBoolean(0)) {
            count++;
        }
        mutableAggregationBuffer.update(0, timestamp);
        mutableAggregationBuffer.update(1, count);
    }

    @Override
    public void merge(MutableAggregationBuffer mutableAggregationBuffer, Row row) {
    }

    @Override
    public Object evaluate(Row row) {
        return row.getLong(1);
    }
}
Then I submit a SQL query like:
select timestamp, id, MyCount(true, timestamp, 10) over(PARTITION BY id ORDER BY timestamp) as count from xxx.xxx
The result is:
timestamp   id  count
1642760594  0   1
1642760596  0   2
1642760599  0   3
1642760610  0   2  -- duplicate
1642760610  0   2
1642760613  0   3
1642760594  1   1
1642760597  1   2
1642760600  1   1
1642760603  1   2
1642760606  1   4  -- duplicate
1642760606  1   4
1642760608  1   5
When the timestamp is repeated, I get 1,2,4,4,5 instead of 1,2,3,4,5.
How can I fix it?
Another question: when is the merge method of a UDAF executed? I left it empty, yet everything runs normally. I tried adding a log line in the method but never saw it. Is merge really necessary?
There is a similar question: Apache Spark SQL UDAF over window showing odd behaviour with duplicate input
However, row_number() does not have this problem. row_number() is a Hive UDAF, so I tried to create a Hive UDAF of my own, but I hit the same problem... Why does the Hive UDAF row_number()'s terminate() return an ArrayList? I created my own row_number2() by copying its code, and I also got a list back.
Finally, I solved it with Spark's AggregateWindowFunction:

case class Count(condition: Expression) extends AggregateWindowFunction with Logging {
  override def prettyName: String = "myCount"
  override def dataType: DataType = LongType
  override def children: Seq[Expression] = Seq(condition)

  private val zero = Literal(0L)
  private val one = Literal(1L)
  private val count = AttributeReference("count", LongType, nullable = false)()
  private val increaseCount = If(condition, Add(count, one), count)

  override val initialValues: Seq[Expression] = zero :: Nil
  override val updateExpressions: Seq[Expression] = increaseCount :: Nil
  override val evaluateExpression: Expression = count
  override val aggBufferAttributes: Seq[AttributeReference] = count :: Nil
}
Then use spark_session.functionRegistry.registerFunction to register it.
"select myCount(true) over(partition by window(timestamp, '10 seconds'), id order by timestamp) as count from xxx"

Get count for rolling date value using Apache Pig

How can we achieve the following using Apache Pig?
File:
A 2014/10/01
A 2014/09/01
A 2014/08/01
A 2014/02/01
The result should be A count 3, since I want to count the number of records using a rolling window of 30 days between records, grouped by A.
Please find the solution below; I hope you can enhance it further if required. Try executing it with your input and let me know how it works.
input.txt
A 2014/12/01
A 2014/11/01
A 2014/10/01
A 2014/07/01
A 2014/05/01
A 2014/04/01
B 2014/09/01
B 2014/07/01
B 2014/06/01
B 2014/02/01
C 2014/09/01
C 2014/07/01
C 2014/05/01
Expected output
A 5
B 2
C 0
PigScript:
REGISTER rollingCount.jar;
A = LOAD 'input.txt' USING PigStorage(' ') AS (f1:chararray, f2:chararray);
B = GROUP A BY f1;
C = FOREACH B GENERATE group, mypackage.ROLLINGCOUNT(BagToString($1)) AS rollingCnt;
DUMP C;
OutPut from the Script:
(A,5)
(B,2)
(C,0)
Java code:
1. Compile the below Java code and create a jar file named rollingCount.jar.
2. I wrote this code quickly, so you can optimize it if required.
ROLLINGCOUNT.java
package mypackage;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.Tuple;

public class ROLLINGCOUNT extends EvalFunc<Integer> {

    public Integer exec(Tuple input) throws IOException {
        // Get the input String from the request
        String inputString = (String) input.get(0);
        Date[] arrayOfDates = getArrayOfDate(inputString);
        long diffDays[] = getDaysBetweenList(arrayOfDates);
        int rollingCount = getRollingCount(diffDays);
        return rollingCount;
    }

    // Convert the delimited string to an array of dates
    static protected Date[] getArrayOfDate(String inputString) {
        // The 1st column is the Id
        String ID = inputString.split("_")[0];
        // Remove all the Ids, because they are duplicated in every column
        String modifiedString = inputString.replace(ID + "_", "");
        // Split the string into multiple columns using '_' as the delimiter
        String list[] = modifiedString.split("_");
        // Convert the strings to an array of dates
        Date[] dateList = new Date[list.length];
        int index = 0;
        for (String dateString : list) {
            try {
                // Parse the date string in the given format
                SimpleDateFormat dFormat = new SimpleDateFormat("yyyy/MM/dd");
                dateList[index++] = dFormat.parse(dateString);
            } catch (Exception e) {
                // error handling goes here
            }
        }
        return dateList;
    }

    // Get the difference in days between consecutive dates
    static protected long[] getDaysBetweenList(Date[] arrayOfDate) {
        long diffDays[] = new long[arrayOfDate.length - 1];
        int cnt = 0;
        for (int index = 0; index < arrayOfDate.length - 1; index++) {
            long diff = Math.abs(arrayOfDate[index + 1].getTime() - arrayOfDate[index].getTime());
            long days = TimeUnit.DAYS.convert(diff, TimeUnit.MILLISECONDS);
            diffDays[cnt++] = days;
        }
        return diffDays;
    }

    // Get the total rolling count
    static protected int getRollingCount(long diffDays[]) {
        int result = 0;
        for (int index = 0; index < diffDays.length; index++) {
            int cnt = 0;
            // hardcoded to 30 and 31 days; may need to handle February's 28 or 29 days
            while ((index < diffDays.length) && ((diffDays[index] == 30) || (diffDays[index] == 31))) {
                cnt++;
                index++;
            }
            if (cnt > 0) {
                result = result + cnt + 1;
            }
        }
        return result;
    }
}
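As a quick local sanity check of the UDF outside Pig (with the pig jar on the classpath), something like this should print 5 for the 'A' group above; the input string mirrors what BagToString produces with its default '_' delimiter:

import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

public class RollingCountTest {
    public static void main(String[] args) throws Exception {
        Tuple t = TupleFactory.getInstance().newTuple(1);
        t.set(0, "A_2014/12/01_A_2014/11/01_A_2014/10/01_A_2014/07/01_A_2014/05/01_A_2014/04/01");
        System.out.println(new ROLLINGCOUNT().exec(t)); // expected: 5
    }
}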

Create a Gson TypeAdapter for a Guava Range

I am trying to serialize Guava Range objects to JSON using Gson; however, the default serialization fails, and I'm unsure how to correctly implement a TypeAdapter for this generic type.
Gson gson = new Gson();
Range<Integer> range = Range.closed(10, 20);
String json = gson.toJson(range);
System.out.println(json);
Range<Integer> range2 = gson.fromJson(json,
        new TypeToken<Range<Integer>>(){}.getType());
System.out.println(range2);
assertEquals(range2, range);
This fails like so:
{"lowerBound":{"endpoint":10},"upperBound":{"endpoint":20}}
PASSED: typeTokenInterface
FAILED: range
java.lang.RuntimeException: Unable to invoke no-args constructor for
com.google.common.collect.Cut<java.lang.Integer>. Register an
InstanceCreator with Gson for this type may fix this problem.
at com.google.gson.internal.ConstructorConstructor$12.construct(
ConstructorConstructor.java:210)
...
Note that the default serialization actually loses information - it fails to report whether the endpoints are open or closed. I would prefer to see it serialized similar to its toString(), e.g. [10‥20]; however, simply calling toString() won't work for generic Range instances, as the elements of the range may not be primitives (Joda-Time LocalDate instances, for example). For the same reason, implementing a custom TypeAdapter seems difficult, as we don't know how to deserialize the endpoints.
I've implemented most of a TypeAdapterFactory based on the template provided for Multimap, which ought to work, but now I'm stuck on the generics. Here's what I have so far:
public class RangeTypeAdapterFactory implements TypeAdapterFactory {
    public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> typeToken) {
        Type type = typeToken.getType();
        if (typeToken.getRawType() != Range.class
                || !(type instanceof ParameterizedType)) {
            return null;
        }
        Type elementType = ((ParameterizedType) type).getActualTypeArguments()[0];
        TypeAdapter<?> elementAdapter = (TypeAdapter<?>) gson.getAdapter(TypeToken.get(elementType));
        // Bound mismatch: The generic method newRangeAdapter(TypeAdapter<E>) of type
        // GsonUtils.RangeTypeAdapterFactory is not applicable for the arguments
        // (TypeAdapter<capture#4-of ?>). The inferred type capture#4-of ? is not a valid
        // substitute for the bounded parameter <E extends Comparable<?>>
        return (TypeAdapter<T>) newRangeAdapter(elementAdapter);
    }

    private <E extends Comparable<?>> TypeAdapter<Range<E>> newRangeAdapter(final TypeAdapter<E> elementAdapter) {
        return new TypeAdapter<Range<E>>() {
            @Override
            public void write(JsonWriter out, Range<E> value) throws IOException {
                if (value == null) {
                    out.nullValue();
                    return;
                }
                String repr = (value.hasLowerBound() && value.lowerBoundType() == BoundType.CLOSED ? "[" : "(") +
                    (value.hasLowerBound() ? elementAdapter.toJson(value.lowerEndpoint()) : "-\u221e") +
                    '\u2025' +
                    (value.hasUpperBound() ? elementAdapter.toJson(value.upperEndpoint()) : "+\u221e") +
                    (value.hasUpperBound() && value.upperBoundType() == BoundType.CLOSED ? "]" : ")");
                out.value(repr);
            }

            @Override
            public Range<E> read(JsonReader in) throws IOException {
                if (in.peek() == JsonToken.NULL) {
                    in.nextNull();
                    return null;
                }
                String[] endpoints = in.nextString().split("\u2025");
                E lower = elementAdapter.fromJson(endpoints[0].substring(1));
                E upper = elementAdapter.fromJson(endpoints[1].substring(0, endpoints[1].length() - 1));
                return Range.range(lower, endpoints[0].charAt(0) == '[' ? BoundType.CLOSED : BoundType.OPEN,
                    upper, endpoints[1].charAt(endpoints[1].length() - 1) == ']' ? BoundType.CLOSED : BoundType.OPEN);
            }
        };
    }
}
However, the return (TypeAdapter<T>) newRangeAdapter(elementAdapter); line has a compilation error, and I'm now at a loss.
What's the best way to resolve this error? Is there a better way to serialize Range objects that I'm missing? What about if I want to serialize RangeSets?
It's rather frustrating that the Google utility library and the Google serialization library seem to require so much glue to work together :(
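For reference, the usual way past that bound mismatch is an unchecked cast through the raw type, confined to the factory's create method; a sketch of just the changed lines (not from the original post):

// unchecked: the element type of a Range is known to be Comparable at runtime
@SuppressWarnings({"unchecked", "rawtypes"})
TypeAdapter<T> result = (TypeAdapter<T>) newRangeAdapter((TypeAdapter) elementAdapter);
return result;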
This feels somewhat like reinventing the wheel, but it was a lot quicker to put together and test than the time I'd spent trying to get Gson to behave, so at least for the present I'll be using the following Converters to serialize Range and RangeSet*, rather than Gson.
/**
 * Converter between Range instances and Strings, essentially a custom serializer.
 * Ideally we'd let Gson or Guava do this for us, but presently this is cleaner.
 */
public static <T extends Comparable<? super T>> Converter<Range<T>, String> rangeConverter(final Converter<T, String> elementConverter) {
    final String NEG_INFINITY = "-\u221e";
    final String POS_INFINITY = "+\u221e";
    final String DOTDOT = "\u2025";
    return new Converter<Range<T>, String>() {
        @Override
        protected String doForward(Range<T> range) {
            return (range.hasLowerBound() && range.lowerBoundType() == BoundType.CLOSED ? "[" : "(") +
                (range.hasLowerBound() ? elementConverter.convert(range.lowerEndpoint()) : NEG_INFINITY) +
                DOTDOT +
                (range.hasUpperBound() ? elementConverter.convert(range.upperEndpoint()) : POS_INFINITY) +
                (range.hasUpperBound() && range.upperBoundType() == BoundType.CLOSED ? "]" : ")");
        }

        @Override
        protected Range<T> doBackward(String range) {
            String[] endpoints = range.split(DOTDOT);
            Range<T> ret = Range.all();
            if (!endpoints[0].substring(1).equals(NEG_INFINITY)) {
                T lower = elementConverter.reverse().convert(endpoints[0].substring(1));
                ret = ret.intersection(Range.downTo(lower, endpoints[0].charAt(0) == '[' ? BoundType.CLOSED : BoundType.OPEN));
            }
            if (!endpoints[1].substring(0, endpoints[1].length() - 1).equals(POS_INFINITY)) {
                T upper = elementConverter.reverse().convert(endpoints[1].substring(0, endpoints[1].length() - 1));
                ret = ret.intersection(Range.upTo(upper, endpoints[1].charAt(endpoints[1].length() - 1) == ']' ? BoundType.CLOSED : BoundType.OPEN));
            }
            return ret;
        }
    };
}
/**
 * Converter between RangeSet instances and Strings, essentially a custom serializer.
 * Ideally we'd let Gson or Guava do this for us, but presently this is cleaner.
 */
public static <T extends Comparable<? super T>> Converter<RangeSet<T>, String> rangeSetConverter(final Converter<T, String> elementConverter) {
    return new Converter<RangeSet<T>, String>() {
        private final Converter<Range<T>, String> rangeConverter = rangeConverter(elementConverter);

        @Override
        protected String doForward(RangeSet<T> rs) {
            ArrayList<String> ls = new ArrayList<>();
            for (Range<T> range : rs.asRanges()) {
                ls.add(rangeConverter.convert(range));
            }
            return Joiner.on(", ").join(ls);
        }

        @Override
        protected RangeSet<T> doBackward(String rs) {
            Iterable<String> parts = Splitter.on(",").trimResults().split(rs);
            ImmutableRangeSet.Builder<T> build = ImmutableRangeSet.builder();
            for (String range : parts) {
                build.add(rangeConverter.reverse().convert(range));
            }
            return build.build();
        }
    };
}
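Usage might look like this, borrowing Guava's Ints.stringConverter() (reversed, since rangeConverter wants a Converter<T, String>):

Converter<Range<Integer>, String> conv = rangeConverter(Ints.stringConverter().reverse());
String s = conv.convert(Range.closed(10, 20));    // "[10‥20]"
Range<Integer> back = conv.reverse().convert(s);  // equals Range.closed(10, 20)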
*For inter-process communication, Java serialization would likely work just fine, as both classes implement Serializable. However, I'm serializing to disk for more permanent storage, meaning I need a format I can trust not to change over time. Guava's serialization doesn't provide that guarantee.
Here is a Gson JsonSerializer and JsonDeserializer that generically supports a Range: https://github.com/jamespedwards42/Fava/wiki/Range-Marshaller
@Override
public JsonElement serialize(final Range src, final Type typeOfSrc, final JsonSerializationContext context) {
    final JsonObject jsonObject = new JsonObject();
    if (src.hasLowerBound()) {
        jsonObject.add("lowerBoundType", context.serialize(src.lowerBoundType()));
        jsonObject.add("lowerBound", context.serialize(src.lowerEndpoint()));
    } else
        jsonObject.add("lowerBoundType", context.serialize(BoundType.OPEN));
    if (src.hasUpperBound()) {
        jsonObject.add("upperBoundType", context.serialize(src.upperBoundType()));
        jsonObject.add("upperBound", context.serialize(src.upperEndpoint()));
    } else
        jsonObject.add("upperBoundType", context.serialize(BoundType.OPEN));
    return jsonObject;
}

@Override
public Range<? extends Comparable<?>> deserialize(final JsonElement json, final Type typeOfT, final JsonDeserializationContext context) throws JsonParseException {
    if (!(typeOfT instanceof ParameterizedType))
        throw new IllegalStateException("typeOfT must be a parameterized Range.");
    final JsonObject jsonObject = json.getAsJsonObject();
    final JsonElement lowerBoundTypeJsonElement = jsonObject.get("lowerBoundType");
    final JsonElement upperBoundTypeJsonElement = jsonObject.get("upperBoundType");
    if (lowerBoundTypeJsonElement == null || upperBoundTypeJsonElement == null)
        throw new IllegalStateException("Range " + json
            + " was not serialized with this serializer! The default serialization does not store the boundary types, therefore we can not deserialize.");
    final Type type = ((ParameterizedType) typeOfT).getActualTypeArguments()[0];
    final BoundType lowerBoundType = context.deserialize(lowerBoundTypeJsonElement, BoundType.class);
    final JsonElement lowerBoundJsonElement = jsonObject.get("lowerBound");
    final Comparable<?> lowerBound = lowerBoundJsonElement == null ? null : context.deserialize(lowerBoundJsonElement, type);
    final BoundType upperBoundType = context.deserialize(upperBoundTypeJsonElement, BoundType.class);
    final JsonElement upperBoundJsonElement = jsonObject.get("upperBound");
    final Comparable<?> upperBound = upperBoundJsonElement == null ? null : context.deserialize(upperBoundJsonElement, type);
    if (lowerBound == null && upperBound != null)
        return Range.upTo(upperBound, upperBoundType);
    else if (lowerBound != null && upperBound == null)
        return Range.downTo(lowerBound, lowerBoundType);
    else if (lowerBound == null && upperBound == null)
        return Range.all();
    return Range.range(lowerBound, lowerBoundType, upperBound, upperBoundType);
}
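Registration would then look something like this, assuming the two methods live in a class named RangeMarshaller (the name here is hypothetical; the linked wiki has the actual wrapper):

Gson gson = new GsonBuilder()
    .registerTypeHierarchyAdapter(Range.class, new RangeMarshaller())
    .create();
Range<Integer> r = gson.fromJson(
    "{\"lowerBoundType\":\"CLOSED\",\"lowerBound\":10,\"upperBoundType\":\"CLOSED\",\"upperBound\":20}",
    new TypeToken<Range<Integer>>() {}.getType());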
Here is a straightforward solution that works very well.
import com.google.common.collect.BoundType;
import com.google.common.collect.Range;
import com.google.gson.*;
import java.lang.reflect.Type;

public class GoogleRangeAdapter implements JsonSerializer, JsonDeserializer {
    public static String TK_hasLowerBound = "hasLowerBound";
    public static String TK_hasUpperBound = "hasUpperBound";
    public static String TK_lowerBoundType = "lowerBoundType";
    public static String TK_upperBoundType = "upperBoundType";
    public static String TK_lowerBound = "lowerBound";
    public static String TK_upperBound = "upperBound";

    @Override
    public Object deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
        JsonObject jsonObject = (JsonObject) json;
        boolean hasLowerBound = jsonObject.get(TK_hasLowerBound).getAsBoolean();
        boolean hasUpperBound = jsonObject.get(TK_hasUpperBound).getAsBoolean();
        if (!hasLowerBound && !hasUpperBound) {
            return Range.all();
        } else if (!hasLowerBound && hasUpperBound) {
            double upperBound = jsonObject.get(TK_upperBound).getAsDouble();
            BoundType upperBoundType = BoundType.valueOf(jsonObject.get(TK_upperBoundType).getAsString());
            if (upperBoundType == BoundType.OPEN)
                return Range.lessThan(upperBound);
            else
                return Range.atMost(upperBound);
        } else if (hasLowerBound && !hasUpperBound) {
            double lowerBound = jsonObject.get(TK_lowerBound).getAsDouble();
            BoundType lowerBoundType = BoundType.valueOf(jsonObject.get(TK_lowerBoundType).getAsString());
            if (lowerBoundType == BoundType.OPEN)
                return Range.greaterThan(lowerBound);
            else
                return Range.atLeast(lowerBound);
        } else {
            double lowerBound = jsonObject.get(TK_lowerBound).getAsDouble();
            double upperBound = jsonObject.get(TK_upperBound).getAsDouble();
            BoundType upperBoundType = BoundType.valueOf(jsonObject.get(TK_upperBoundType).getAsString());
            BoundType lowerBoundType = BoundType.valueOf(jsonObject.get(TK_lowerBoundType).getAsString());
            if (lowerBoundType == BoundType.OPEN && upperBoundType == BoundType.OPEN)
                return Range.open(lowerBound, upperBound);
            else if (lowerBoundType == BoundType.OPEN && upperBoundType == BoundType.CLOSED)
                return Range.openClosed(lowerBound, upperBound);
            else if (lowerBoundType == BoundType.CLOSED && upperBoundType == BoundType.OPEN)
                return Range.closedOpen(lowerBound, upperBound);
            else
                return Range.closed(lowerBound, upperBound);
        }
    }

    @Override
    public JsonElement serialize(Object src, Type typeOfSrc, JsonSerializationContext context) {
        JsonObject jsonObject = new JsonObject();
        Range<Double> range = (Range<Double>) src;
        boolean hasLowerBound = range.hasLowerBound();
        boolean hasUpperBound = range.hasUpperBound();
        jsonObject.addProperty(TK_hasLowerBound, hasLowerBound);
        jsonObject.addProperty(TK_hasUpperBound, hasUpperBound);
        if (hasLowerBound) {
            jsonObject.addProperty(TK_lowerBound, range.lowerEndpoint());
            jsonObject.addProperty(TK_lowerBoundType, range.lowerBoundType().name());
        }
        if (hasUpperBound) {
            jsonObject.addProperty(TK_upperBound, range.upperEndpoint());
            jsonObject.addProperty(TK_upperBoundType, range.upperBoundType().name());
        }
        return jsonObject;
    }
}
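Note that this adapter hardcodes Double endpoints. A usage sketch:

Gson gson = new GsonBuilder()
    .registerTypeHierarchyAdapter(Range.class, new GoogleRangeAdapter())
    .create();
String json = gson.toJson(Range.closed(1.5, 2.5), Range.class);
Range<Double> range = gson.fromJson(json, Range.class);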