OK, I'm doing what looks like a simple Dapper query,
but if I pass in my studId parameter, it blows up with this low-level networking exception:
{"A request to send or receive data was disallowed because the socket is not connected and (when sending on a datagram socket using a sendto call) no address was supplied."}
If I comment out the line of SQL that uses it, fix the where clause, and comment out the line where it's added to the parameters object, it retrieves rows as expected.
I've spent the last 2.5 days trying everything I could think of: changing the names to match common naming patterns, changing the type to a string (which just gave an error converting string to number), yadda yadda yadda...
I'm at a complete loss as to why it doesn't like that parameter; I look at other code that works and it passes IDs just fine...
At this point I figure it has to be an ID-10-T error that's staring me in the face and I'm just assuming my way right past it without seeing it.
Any help is appreciated.
public List<StudDistLearnSchedRawResponse> GetStudDistanceLearningScheduleRaw( StudDistLearnSchedQueryParam inputs )
{
var aseSqlConnectionString = Configuration.GetConnectionString( "SybaseDBDapper" );
string mainSql = " SELECT " +
" enrollment.stud_id, " +
" sched_start_dt, " +
" sched_end_dt, " +
" code.code_desc, " +
" student_schedule_dl.enrtype_id, " +
" student_schedule_dl.stud_sched_dl_id, " +
" dl_correspond_cd, " +
" course.course_name, " +
" stud_course_sched_dl.sched_hours, " +
" actual_hours, " +
" course_comments as staff_remarks, " + // note this column rename - EWB
" stud_course_sched_dl.sched_item_id , " +
" stud_course_sched_dl.stud_course_sched_dl_id " +
" from stud_course_sched_dl " +
" join student_schedule_dl on student_schedule_dl.stud_sched_dl_id = stud_course_sched_dl.stud_sched_dl_id " +
" join course on stud_course_sched_dl.sched_item_id = course.sched_item_id " +
" left join code on student_schedule_dl.dl_correspond_cd = code.code_id " +
" join enrollment_type on student_schedule_dl.enrtype_id = enrollment_type.enrtype_id " +
" join enrollment on enrollment_type.enr_id = enrollment.enr_id " +
" where enrollment.stud_id = @studId " +
" and sched_start_dt >= @startOfWeek" +
" and sched_end_dt <= @startOfNextWeek";
DapperTools.DapperCustomMapping<StudDistLearnSchedRawResponse>();
//string sql = query.ToString();
DateTime? startOfWeek = StartOfWeek( inputs.weekStartDateTime, DayOfWeek.Monday );
DateTime? startOfNextWeek = StartOfWeek( inputs.weekStartDateTime.Value.AddDays( 7 ) , DayOfWeek.Monday );
try
{
using ( IDbConnection db = new AseConnection( aseSqlConnectionString ) )
{
db.Open();
var arguments = new
{
studId = inputs.StudId, // it chokes and gives a low level networking error - EWB
startOfWeek = startOfWeek.Value.ToShortDateString(),
startOfNextWeek = startOfNextWeek.Value.ToShortDateString(),
};
List<StudDistLearnSchedRawResponse> list = new List<StudDistLearnSchedRawResponse>();
list = db.Query<StudDistLearnSchedRawResponse>( mainSql, arguments ).ToList();
return list;
}
}
catch (Exception ex)
{
Trace.WriteLine(ex.ToString());
return null;
}
}
Here is the input object
public class StudDistLearnSchedQueryParam
{
public Int64 StudId;
public DateTime? weekStartDateTime;
}
Here is the dapper tools object which just abstracts some ugly code to look nicer.
namespace EricSandboxVue.Utilities
{
public interface IDapperTools
{
string ASEConnectionString { get; }
AseConnection _aseconnection { get; }
void ReportSqlError( ILogger DalLog, string sql, Exception errorFound );
void DapperCustomMapping< T >( );
}
public class DapperTools : IDapperTools
{
public readonly string _aseconnectionString;
public string ASEConnectionString => _aseconnectionString;
public AseConnection _aseconnection
{
get
{
return new AseConnection( _aseconnectionString );
}
}
public DapperTools( )
{
_aseconnectionString = Environment.GetEnvironmentVariable( "EIS_ASESQL_CONNECTIONSTRING" );
}
public void ReportSqlError( ILogger DalLog, string sql, Exception errorFound )
{
DalLog.LogError( "Error in Sql" );
DalLog.LogError( errorFound.Message );
//if (env.IsDevelopment())
//{
DalLog.LogError( sql );
//}
throw errorFound;
}
public void DapperCustomMapping< T >( )
{
// custom mapping
var map = new CustomPropertyTypeMap(
typeof( T ),
( type, columnName ) => type.GetProperties( ).FirstOrDefault( prop => GetDescriptionFromAttribute( prop ) == columnName )
);
SqlMapper.SetTypeMap( typeof( T ), map );
}
private string GetDescriptionFromAttribute( System.Reflection.MemberInfo member )
{
if ( member == null ) return null;
var attrib = (Dapper.ColumnAttribute) Attribute.GetCustomAttribute( member, typeof(Dapper.ColumnAttribute), false );
return attrib == null ? null : attrib.Name;
}
}
}
If I change the SQL string building to this (below), but leave everything else the same (including StudId in the args object)... it doesn't crash and retrieves rows, so it's clearly about the substitution of @studId...
// " where enrollment.stud_id = @studId " +
" where sched_start_dt >= @startOfWeek" +
" and sched_end_dt <= @startOfNextWeek";
You named your data members wrong. I had no idea starting a variable name with @ was possible.
The problem is here:
var arguments = new
{
@studId = inputs.StudId, // it chokes and gives a low level networking error - EWB
@startOfWeek = startOfWeek.Value.ToShortDateString(),
@startOfNextWeek = startOfNextWeek.Value.ToShortDateString(),
};
It should have been:
var arguments = new
{
studId = inputs.StudId, // it chokes and gives a low level networking error - EWB
startOfWeek = startOfWeek.Value.ToShortDateString(),
startOfNextWeek = startOfNextWeek.Value.ToShortDateString(),
};
The @ in the SQL is just a hint to Dapper that it should substitute the value of the correspondingly named member.
@@ has special meaning in some SQL dialects; that's probably what makes the trouble.
So here's what I found out.
The Sybase implementation has a hard time with arguments.
It especially has a hard time with arguments of type Int64 (this existed way before .NET Core).
So if you change the type of the passed-in argument from Int64 to Int32, everything works fine.
You can cast it, or just change the type of the method parameter.
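For example, a minimal sketch of the cast-at-the-call-site workaround, reusing the names from the question (and assuming StudId always fits in 32 bits):
var arguments = new
{
    // Narrow the Int64 to Int32 before handing it to Dapper; the Sybase ASE driver chokes on the 64-bit parameter.
    studId = (int)inputs.StudId,
    startOfWeek = startOfWeek.Value.ToShortDateString(),
    startOfNextWeek = startOfNextWeek.Value.ToShortDateString(),
};
var list = db.Query<StudDistLearnSchedRawResponse>( mainSql, arguments ).ToList();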
I want to pass a list of email address strings from Borland C++ to my C# library. Below is my C#-side code. I can call the PrintName() method and it works. Now I want to print email addresses, but if I call the PrintEmails() function nothing happens. Could you please suggest how I can pass multiple email addresses to the C# lib?
[ComVisible(true)]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
[Guid("4A686AB1-8425-43D9-BD89-B696BB5F6A18")]
public interface ITestConnector
{
void PrintEmails(string[] emails);
void PrintName(string name);
}
[ComVisible(true)]
[ClassInterface(ClassInterfaceType.None)]
[ComDefaultInterface(typeof(ITestConnector))]
[Guid("7D818287-298A-41BF-A224-5EAC9C581BD0")]
public class TestConnector : ITestConnector
{
public void PrintEmails(string[] emails)
{
System.IO.File.WriteAllLines(@"c:\temp\emails.txt", emails);
}
public void PrintName(string name)
{
System.IO.File.WriteAllText(@"c:\temp\name.txt", name);
}
}
I have imported the TLB file of the above C# library into RAD Studio, and my C++-side code is as below.
interface ITestConnector : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE PrintEmails(LPSAFEARRAY* emails/*[in,out]*/) = 0; // [-1]
virtual HRESULT STDMETHODCALLTYPE PrintName(WideString name/*[in]*/) = 0; // [-1]
};
TTestConnector *connector = new TTestConnector(this);
SAFEARRAYBOUND bounds[] = {{2, 0}}; //Array Contains 2 Elements starting from Index '0'
LPSAFEARRAY pSA = SafeArrayCreate(VT_VARIANT,1,bounds); //Create a one-dimensional SafeArray of variants
long lIndex[1];
VARIANT var;
lIndex[0] = 0; // index of the element being inserted in the array
var.vt = VT_BSTR; // type of the element being inserted
var.bstrVal = ::SysAllocStringLen( L"abc@xyz.com", 11 ); // the value of the element being inserted
HRESULT hr= SafeArrayPutElement(pSA, lIndex, &var); // insert the element
// repeat the insertion for one more element (at index 1)
lIndex[0] = 1;
var.vt = VT_BSTR;
var.bstrVal = ::SysAllocStringLen( L"pqr@xyz.com", 11 );
hr = SafeArrayPutElement(pSA, lIndex, &var);
connector->PrintEmails(pSA);
delete connector;
The C++-side code below worked in my case.
SAFEARRAYBOUND saBound[1];
saBound[0].cElements = nElements;
saBound[0].lLbound = 0;
SAFEARRAY *pSA = SafeArrayCreate(VT_BSTR, 1, saBound);
if (pSA == NULL)
{
return NULL;
}
for (int ix = 0; ix < nElements; ix++)
{
BSTR pData = SysAllocString(elements[ix]);
long rgIndicies[1];
rgIndicies[0] = saBound[0].lLbound + ix;
HRESULT hr = SafeArrayPutElement(pSA, rgIndicies, pData);
_tprintf(TEXT("%d"), hr);
}
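For completeness, the C# side can also state the expected element type explicitly with a MarshalAs attribute, so the interop layer marshals the managed string[] as a SAFEARRAY of BSTRs to match the array built above. This is only a sketch using the standard System.Runtime.InteropServices attributes, not something taken from the original code:
using System.Runtime.InteropServices;
[ComVisible(true)]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
[Guid("4A686AB1-8425-43D9-BD89-B696BB5F6A18")]
public interface ITestConnector
{
    // Marshal the managed string[] as SAFEARRAY(VT_BSTR), matching the C++ side.
    void PrintEmails([MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)] string[] emails);
    void PrintName(string name);
}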
Objective
I'm doing dynamic bytecode instrumentation using a JVMTI agent. I have to instrument the methods that are "hot", that is, the methods that trigger the JIT compiler. To do so I listen for the CompiledMethodLoad event and, inside its callback, call RetransformClasses. This in turn invokes ClassFileLoadHook on the class containing the "hot" function, and the actual instrumentation begins.
Problem Premises
Currently I'm instrumenting my class to spawn some threads. I also listen for thread starts and print them from my agent. With a simple ClassFileLoadHook at class load time (without RetransformClasses), my instrumentation works perfectly and spawns new threads. I get the following output when ClassFileLoadHook instruments at class load time:
Running Thread: Signal Dispatcher, Priority: 9, context class loader:Not Null
Running Thread: main, Priority: 5, context class loader:Not Null
Running Thread: Thread-0, Priority: 5, context class loader:Not Null
Running Thread: Thread-1, Priority: 5, context class loader:Not Null
Running Thread: Thread-2, Priority: 5, context class loader:Not Null
Running Thread: Thread-3, Priority: 5, context class loader:Not Null
Running Thread: Thread-4, Priority: 5, context class loader:Not Null
Running Thread: Thread-6, Priority: 5, context class loader:Not Null
Running Thread: Thread-5, Priority: 5, context class loader:Not Null
Running Thread: Thread-7, Priority: 5, context class loader:Not Null
Running Thread: DestroyJavaVM, Priority: 5, context class loader:: NULL
When I instrument the class file by invoking RetransformClasses and then ClassFileLoadHook, everything appears to work, but no threads are spawned and hence no effective instrumentation takes place. The VM takes a long time even to execute the original code.
I double-checked both instrumentations using -XX:+TraceClassLoading. All the retransformed classes are loaded in both cases. Even the class I'm generating at runtime gets loaded, but no instrumentation happens. Below is the output of the class-loading trace:
[Loaded Test from __VM_RedefineClasses__]
[Loaded Test_Worker_main_0 from file:/home/saqib/workspace/test/bin]
I'm generating the second class at runtime and it loads into the VM, but I don't get any thread spawning.
Questions
Given my understanding of the problem (there is a high probability that I'm wrong), why does ClassFileLoadHook retransform the class successfully at load time, but somehow not behave correctly when the JIT is invoked?
Just calling RetransformClasses, with an empty ClassFileLoadHook callback, also takes a lot of time without incurring any sort of error. What could be taking so long?
Agent Code
Compiled Load Event Call Back
static int x = 1;
void JNICALL
compiled_method_load(jvmtiEnv *jvmti, jmethodID method, jint code_size,
const void* code_addr, jint map_length, const jvmtiAddrLocationMap* map,
const void* compile_info) {
jvmtiError err;
jclass klass;
char* name = NULL;
char* signature = NULL;
char* generic_ptr = NULL;
err = (*jvmti)->RawMonitorEnter(jvmti, lock);
check_jvmti_error(jvmti, err, "raw monitor enter");
err = (*jvmti)->GetMethodName(jvmti, method, &name, &signature,
&generic_ptr);
check_jvmti_error(jvmti, err, "Get Method Name");
printf("\nCompiled method load event\n");
printf("Method name %s %s %s\n\n", name, signature,
generic_ptr == NULL ? "" : generic_ptr);
if (strstr(name, "main") != NULL && x == 1) {
x++;
err = (*jvmti)->GetMethodDeclaringClass(jvmti, method, &klass);
check_jvmti_error(jvmti, err, "Get Declaring Class");
err = (*jvmti)->RetransformClasses(jvmti, 1, &klass);
check_jvmti_error(jvmti, err, "Retransform class");
}
if (name != NULL) {
err = (*jvmti)->Deallocate(jvmti, (unsigned char*) name);
check_jvmti_error(jvmti, err, "deallocate name");
}
if (signature != NULL) {
err = (*jvmti)->Deallocate(jvmti, (unsigned char*) signature);
check_jvmti_error(jvmti, err, "deallocate signature");
}
if (generic_ptr != NULL) {
err = (*jvmti)->Deallocate(jvmti, (unsigned char*) generic_ptr);
check_jvmti_error(jvmti, err, "deallocate generic_ptr");
}
err = (*jvmti)->RawMonitorExit(jvmti, lock);
check_jvmti_error(jvmti, err, "raw monitor exit");
}
Class File Load Hook
void JNICALL
Class_File_Load_Hook(jvmtiEnv *jvmti_env, JNIEnv* jni_env,
jclass class_being_redefined, jobject loader, const char* name,
jobject protection_domain, jint class_data_len,
const unsigned char* class_data, jint* new_class_data_len,
unsigned char** new_class_data) {
jvmtiError err;
unsigned char* jvmti_space = NULL;
if (strstr(name, "Test") != NULL && x == 2) {
char* args = "op";
javab_main(2, args, class_data, class_data_len);
err = (*jvmti_env)->Allocate(jvmti_env, (jlong)global_pos, &jvmti_space);
check_jvmti_error(jvmti_env, err, "Allocate new class Buffer.");
(void)memcpy((void*)jvmti_space, (void*)new_class_ptr, (int)global_pos);
*new_class_data_len = (jint)global_pos;
*new_class_data = jvmti_space;
if ( new_class_ptr != NULL ) {
(void)free((void*)new_class_ptr);
}
#if DEBUG
printf("Size of the class is: %d\n", class_data_len);
for (int i = 0; i < class_data_len; i += 4) {
if (i % 16 == 0)
printf("\n");
printf("%02x%02x %02x%02x ", (*new_class_data)[i],
(*new_class_data)[i + 1], (*new_class_data)[i + 2],
(*new_class_data)[i + 3]); // dereference the out parameter to dump the new class bytes
}
printf("\n");
system("javap -c -v Test_debug");
#endif
x++;
}
}
Here javab_main returns the instrumented char* array, which is correct. The instrumented array is stored in a global variable, new_class_ptr, which is copied into new_class_data. To debug the output of the instrumentation, I also write the instrumented class to a file called Test_debug, and invoking javap on it produces the desired result.
The complete agent file is given here:
Agent.c
Original Code:
for (int i = 0; i < s; i++)
for (int j = 0; j < s; j++) {
c2[i][j] = 0;
for (int k = 0; k < s; k++)
c2[i][j] += a[i][k] * b[k][j];
}
Instrumented Code: (Equivalent)
Thread[] threads = new Thread[NTHREADS];
for (int i = 0; i < NTHREADS ; i++) {
final int lb = i * SIZE/NTHREADS;
final int ub = (i+1) * SIZE/NTHREADS;
threads[i] = new Thread(new Runnable() {
public void run() {
for (int i = lb; i < ub; i++)
for (int j = 0; j < SIZE; j++) {
c2[i][j] = 0;
for (int k = 0; k < SIZE; k++)
c2[i][j] += a[i][k] * b[k][j];
}
}
});
threads[i].start();
}
// wait for completion
for (int i = 0; i < NTHREADS; i++) {
try {
threads[i].join();
} catch (InterruptedException ignore) {
}
}
Java Version
openjdk version "1.8.0-internal-debug"
OpenJDK Runtime Environment (build 1.8.0-internal-debug-saqib_2016_12_26_10_52-b00)
OpenJDK 64-Bit Server VM (build 25.71-b00-debug, mixed mode)
I'm constructing this answer mainly from the comments. There are still some riddles unsolved, but the main question has been resolved. Bytecode instrumentation does not fail in my case; it actually never happens. According to the theory,
Dynamic bytecode instrumentation of an executing function takes effect at a subsequent call of that function. If a function has only one invocation, it cannot be instrumented while executing using the current hotswap techniques in the JVM.
I was trying to instrument a class that had only one function, i.e. main, and I was trying to instrument it at runtime. This was the main problem. To check the validity of this argument, I put my code in another function and called it from main in a loop. It got instrumented and everything worked. Nothing had to be changed in the agent.
I have a sample Hive table created as:
CREATE TABLE union_test(foo UNIONTYPE<int, double, array<string>, struct<a:int,b:string>>);
The data can be viewed as
SELECT foo FROM union_test;
The output is
{0:1}
{1:2.0}
{2:["three","four"]}
{3:{"a":5,"b":"five"}}
{2:["six","seven"]}
{3:{"a":8,"b":"eight"}}
{0:9}
{1:10.0}
The first field (the tag) denotes the type of the union (0 for int, 1 for double, 2 for array, etc.).
My problem is: if I want to select only those records where the union tag is 2 (array), how should I frame my query?
There is no built-in function in Hive to read data from a UNIONTYPE, so I wrote two UDFs: one to get the union tag (what you are trying to do) and a second to get the struct from the union, as an example.
get_union_tag() function:
package HiveUDF;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@Description(name = "get_union_tag", value = "_FUNC_(unionObject)"
+ " - Returns union object Tag", extended = "Example:\n" + " > SELECT _FUNC_(unionObject) FROM src LIMIT 1;\n one")
public class GetUnionTag extends GenericUDF {
// Global variables that inspect the input.
// These are set up during the initialize() call, and are then used during the
// calls to evaluate()
private transient UnionObjectInspector uoi;
@Override
// This is what we do in the initialize() method:
// Verify that the input is of the type expected
// Set up the ObjectInspectors for the input in global variables
// Return the ObjectInspector for the output
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
// Verify the input is of the required type.
// Set the global variables (the various ObjectInspectors) while we're doing this
// Exactly one input argument
if( arguments.length != 1 ){
throw new UDFArgumentLengthException("_FUNC_(unionObject) accepts exactly one argument.");
}
// Is the input a union<>?
if( arguments[0].getCategory() != ObjectInspector.Category.UNION ){
throw new UDFArgumentTypeException(0,"The single argument to get_union_tag should be "
+ "Union<>"
+ " but " + arguments[0].getTypeName() + " is found");
}
// Store the ObjectInspectors for use later in the evaluate() method
uoi = ((UnionObjectInspector)arguments[0]);
// Set up the object inspector for the output, and return it
return PrimitiveObjectInspectorFactory.javaByteObjectInspector;
}
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
byte tag = uoi.getTag(arguments[0].get());
return tag;
}
@Override
public String getDisplayString(String[] children) {
StringBuilder sb = new StringBuilder();
sb.append("get_union_tag(");
for (int i = 0; i < children.length; i++) {
if (i > 0) {
sb.append(',');
}
sb.append(children[i]);
}
sb.append(')');
return sb.toString();
}
}
get_union_struct() UDF:
package HiveUDF;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
@Description(name = "get_union_struct", value = "_FUNC_(unionObject)"
+ " - Returns struct ", extended = "Example:\n" + " > _FUNC_(unionObject).value \n 90.0121")
public class GetUnionStruct extends GenericUDF {
// Global variables that inspect the input.
// These are set up during the initialize() call, and are then used during the
// calls to evaluate()
//
// ObjectInspector for the list (input array<>)
// ObjectInspector for the struct<>
// ObjectInspectors for the elements of the struct<>, target, quantity and price
private UnionObjectInspector unionObjectInspector;
private StructObjectInspector structObjectInspector;
@Override
// This is what we do in the initialize() method:
// Verify that the input is of the type expected
// Set up the ObjectInspectors for the input in global variables
// Return the ObjectInspector for the output
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
// Verify the input is of the required type.
// Set the global variables (the various ObjectInspectors) while we're doing this
// Exactly one input argument
if( arguments.length != 1 ){
throw new UDFArgumentLengthException("_FUNC_(unionObject) accepts exactly one argument.");
}
// Is the input a union<>?
if( arguments[0].getCategory() != ObjectInspector.Category.UNION ){
throw new UDFArgumentTypeException(0,"The single argument to get_union_struct should be "
+ "Union<Struct>"
+ " but " + arguments[0].getTypeName() + " is found");
}
// Store the union's ObjectInspector and pick out its struct alternative's ObjectInspector
// (this assumes exactly one of the union's alternatives is a struct)
unionObjectInspector = (UnionObjectInspector) arguments[0];
for (ObjectInspector oi : unionObjectInspector.getObjectInspectors()) {
if (oi.getCategory() == ObjectInspector.Category.STRUCT) {
structObjectInspector = (StructObjectInspector) oi;
}
}
// Set up the object inspector for the output, and return it
return structObjectInspector;
}
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
return ((UnionObjectInspector) unionObjectInspector).getField(arguments[0].get());
}
@Override
public String getDisplayString(String[] children) {
StringBuilder sb = new StringBuilder();
sb.append("get_union_vqtstruct(");
for (int i = 0; i < children.length; i++) {
if (i > 0) {
sb.append(',');
}
sb.append(children[i]);
}
sb.append(')');
return sb.toString();
}
}
To use these UDFs, compile them and create a jar file, then upload it into Hive (in my case HDInsight). Then just run
add jar wasb:///hive/HiveGUDF.jar;
CREATE TEMPORARY FUNCTION get_union_struct AS 'HiveUDF.GetUnionStruct';
before you run, e.g.,
SELECT get_union_tag(exposed) FROM test;
I use Json.Net.
When I serialize a Department2 object and WriteJson() is invoked, I want it to be recursively invoked for each of the Telephone2 objects, like it is in ReadJson().
How do I do that?
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
public interface ISimpleDatabag
{
string Databag { get; set; }
}
[JsonConverter(typeof(JsonDataBagCreationConverter<Department2>))]
public class Department2
{
public Telephone2[] Phones { get; set; }
}
[JsonConverter(typeof(JsonDataBagCreationConverter<Telephone2>))]
public class Telephone2
{
public string Name { get; set; }
public string AreaCode { get; set; }
public string Number { get; set; }
}
public class JsonDataBagCreationConverter<T> : JsonConverter where T : new()
{
// Json.Net version 4.5.7.15008
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
// When I serialize Department and this function is invoked
// I want it to recursively invoke WriteJson with each of the Telephone objects
// Like I do in ReadJson
// How do I do that?
T t = (T)value;
serializer.Serialize(writer, t.GetType());
}
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
var jsonObject = JObject.Load(reader);
var target = Create(objectType, jsonObject);
serializer.Populate(jsonObject.CreateReader(), target); // Will call this function recursively for any objects that have JsonDataBagCreationConverter as attribute
return target;
}
protected T Create(Type objectType, JObject jsonObject)
{
return new T();
}
public override bool CanConvert(Type objectType)
{
return typeof(T).IsAssignableFrom(objectType);
}
}
private void Form1_Load(object sender, EventArgs e)
{
string jsonInput = "{\"Name\": \"Seek4\" , \"CustomDepartmentData\": \"This is custom department data\", \"Phones\":[ {\"Name\": \"A\", \"AreaCode\":444, \"Number\":11111111} ,{\"Name\": \"B\", \"AreaCode\":555, \"Number\":987987987}, {\"Name\": \"C\", \"AreaCode\":222, \"Number\":123123123, \"CustomPhoneData\": \"This is custom phone data\"} ] }";
Department2 objDepartment2 = JsonConvert.DeserializeObject<Department2>(jsonInput); // Yes, it works well
Array.Reverse(objDepartment2.Phones);
string jsonNoDatabag = JsonConvert.SerializeObject(objDepartment2);
}
I ended up controlling the entire process myself, using this huge (not refactored) function.
I basically inspect each of the properties of the object to serialize and then serialize it property by property.
That way I can do custom things with each property.
/// <summary>
/// Serializes an object by merging its current values into its databag and returns the databag
/// </summary>
/// <param name="objectToSerialize"></param>
/// <returns>the current values merged into the original databag</returns>
/// <remarks>Jan Nielsen, 01-10-2012</remarks>
internal static string SerializeObjectToDatabag(object objectToSerialize)
{
// You have to do it property by property instead of just serializing the entire object and merge it into the original
// because the object might contain lists of objects with custom data and these list might have been sorted differently from when they were loaded
// So you cannot merge them properly unless you do it on a per listitem basis.
// Which is what I do here.
try
{
if (objectToSerialize == null) // If you ie serialize an empty object in an array
{
return null;
}
string updatedDatabag = "";
bool isIDataBag = objectToSerialize is IDataBag;
if (isIDataBag)
{
updatedDatabag = ((IDataBag)objectToSerialize).Data == null ? "" : ((IDataBag)objectToSerialize).Data.ToString();
// updatedDatabag = ((IDataBag)objectToSerialize).Data.ToString(); // Save original data in a local variable. This is the one we will merge new values into
}
string result = "";
// Now iterate through the objects properties
// Possible properties:
// Simple types: string, int, bool etc: their current value should be overwritten in the databag
// types that implement IDatabag: they should be sent to this function recursively so their possible customdata is not overwritten
// but instead their simple values are merged into their own databag. Then the result of this single property merge is overwritten in the outer objects databag
// Types that are not simple and don't implement IDatabag but have properties that implement IDatabag
// types that are not simple and don't implement IDatabag and don't have any properties in any depth that implement IDatabag: They are overwritten in the databag
// Types that are arrays:
// If the types in the array are simple types (string, bool etc) the entire array property is overwritten in the databag
// If the types in the array implement IDatabag each object is sent recursively to this function and their databag is updated via merge
// Then the entire array is overwritten in the outer objects databag
// Types that are generic list are treated like arrays
var properties = objectToSerialize.GetType().GetProperties();
// In order to be able to deserialize abstract classes and interfaces, we need to serialize the classname with the class
// the deserializer recognizes the word $type followed by a type, when its is invoked with a serializerSettings of
// serializerSettings.TypeNameHandling = TypeNameHandling.Objects;
string name = objectToSerialize.GetType().AssemblyQualifiedName;
string shortName = RemoveAssemblyDetails(name);
bool addedType = false;
foreach (var propertyInfo in properties)
{
if (propertyInfo.Name.ToLower() != "data") // Just skip Databag. Databag is not a "real" property but the contents of all the properties when the object was loaded + possible custom data
{
if (!addedType)
{
string jsonSingleProperty = "{ " + ToCustomJson("$type") + " : " + ToCustomJson(shortName) + " }";
// Merge the current value (jsonSingleProperty) into the databag (that might already have been updated with the values of other properties)
// and update the current result with the new values. Ie "Name" : "Seek4" is updated to "Name" : "Seek4Cars" in the databag
// and the function will now use the updated databag to merge the other properties into
updatedDatabag = MergeDefault(jsonSingleProperty, updatedDatabag, true);
addedType = true;
}
// propertyInfo.Name.ToLower().Contains("struct")
var value = propertyInfo.GetValue(objectToSerialize, null); // This gets the value of the specified property in the current object
isIDataBag = value is IDataBag; // Update for the current object. Note that ie an array of IDatabag will return false here, because array is not IsimpleDatabag
// Basically we should just check if the property implements IDatabag
// But the simpletype check is faster because I don't have to check for the interfaces on ie a string, int etc.
// This branch takes care of 3 cases:
// 1) it is a simple type, ie int
// 2) value is null
// 3) it is an array with a value of null
// If an array with values enters this branch of code the values of the array will be appended, overwritten
// Therefore arrays are treated below in a special case. Unless they are null
// GeneralFunctions.IsExtendedSimpleType_AllTypes(propertyInfo.PropertyType) returns true on ie string[], but only arrays with a value of null should be handled here
// This first check originally just checked for simple types
// Then it became extended simple types ie non-simple types that only contains simple types ie List<int,int>
// But not arrays that must be handled separately
// Then it also handled null values
// And then a special case was made for arrays that are null
if ((GeneralFunctions.IsExtendedSimpleType_AllTypes(propertyInfo.PropertyType) || value == null) && (!propertyInfo.PropertyType.IsArray || (propertyInfo.PropertyType.IsArray && value == null)))
{
// You have to merge even though it is default value.
// If you have ie a bool that has an initial value of true and you deliberately sets it to false
// You want the defaultvalue of false to be merged into the json.
string jsonSingleProperty = "{" + ToCustomJson(propertyInfo.Name) + " : " + ToCustomJson(value) + "}"; // ie {"Name" : "Seek4Cars"}
// Merge the current value (jsonSingleProperty) into the databag (that might already have been updated with the values of other properties)
// and update the current result with the new values. Ie "Name" : "Seek4" is updated to "Name" : "Seek4Cars" in the databag
// and the function will now use the updated databag to merge the other properties into
updatedDatabag = MergeDefault(jsonSingleProperty, updatedDatabag, true);
continue;
}
if (isIDataBag) // ie PhoneSingle. A single property of type IDataBag
{
// Invoke recursively
// First check if this is an object with all null values
bool allPropertiesAreNull = true; // Maybe this should in the future be expanded with a check on if the property has its default value ie an int property with a value of 0
foreach (var propertyInfoLocal in value.GetType().GetProperties())
{
var valueLocal = propertyInfoLocal.GetValue(value, null);
if (valueLocal != null)
{
allPropertiesAreNull = false;
break;
}
}
var testjson = "";
if (allPropertiesAreNull)
{
result = "{" + ToCustomJson(propertyInfo.Name) + " : " + " { } }";
}
else
{
testjson = ToCustomJson(value);
result = "{" + ToCustomJson(propertyInfo.Name) + " : " + SerializeObjectToDatabag(value) + "}";
}
updatedDatabag = MergeDefault(result, updatedDatabag, true);
continue;
}
bool containsIDataBag = CheckForDatabagInterfaces.ImplementsInterface(propertyInfo.PropertyType, "idatabag"); // Check if anything inside the property implements IDatabag ie an array of IDatabag
if (containsIDataBag)
{
// Check if it is somekind of generic list (List<T>, Dictionary<T,T) etc) and if it is a type of ignoreTypes ie List<entity>)
if (value.GetType().IsGenericType && value.GetType().GetGenericArguments().Length > 0)
{
string listValuesAsJson = "";
if (value is IEnumerable)
{
listValuesAsJson += "{ " + ToCustomJson(propertyInfo.Name) + " : [";
bool containsItems = false;
foreach (var element in (IEnumerable)value)
{
containsItems = true;
var current = SerializeObjectToDatabag(element);
if (current != null) // If you serialize an empty array element it is null
{
listValuesAsJson += current + ", "; // Add , between each element
}
}
if (containsItems)
{
listValuesAsJson = listValuesAsJson.Substring(0, listValuesAsJson.Length - 2) + "] }"; // remove last , and add ending ] for the array and add a } because this property is flowing in the free
}
else // No items in value
{
listValuesAsJson += "] }"; // add ending ] for the array and add a } because this property is flowing in the free
}
}
else // A single, generic KeyValuePair property
{
listValuesAsJson += "{ " + ToCustomJson(propertyInfo.Name) + " : ";
listValuesAsJson += SerializeObjectToDatabag(value);
listValuesAsJson += " }";
}
updatedDatabag = MergeDefault(listValuesAsJson, updatedDatabag, false);
}
else if (value.GetType().IsArray)
{
string arrayValuesAsJson = "{ " + ToCustomJson(propertyInfo.Name) + " : [";
bool containsItems = false;
foreach (var element in (Array)value)
{
// Treat them the same way you treat any other object
var current = SerializeObjectToDatabag(element);
if (current != null) // If you serialize an empty array element it is null
{
containsItems = true;
arrayValuesAsJson += current + ", ";
}
}
if (containsItems)
{
arrayValuesAsJson = arrayValuesAsJson.Substring(0, arrayValuesAsJson.Length - 2) + "] }"; // remove last , and add ending ] for the array and add a } because this property is flowing in the free
}
else // No items in value
{
arrayValuesAsJson += "] }"; // add ending ] for the array and add a } because this property is flowing in the free
}
updatedDatabag = MergeDefault(arrayValuesAsJson, updatedDatabag, false);
}
else if ( value.GetType().BaseType != null && value.GetType().BaseType.FullName.ToLower().Contains("system.collections.objectmodel"))
{
// This branch was made specifically to take care of the Media collection of a Seek4.Entities.V2.Media.MediaCollection
var genericList = (IList)value;
int counter = genericList.Count;
string listAsJson = "{ " + ToCustomJson(propertyInfo.Name) + " : [";
if (counter == 0)
{
listAsJson += "] }"; // Ie { "Media": [] }
}
else
{
foreach (var obj in genericList)
{
var current = SerializeObjectToDatabag(obj);
listAsJson += current + ", ";
}
listAsJson = listAsJson.Substring(0, listAsJson.Length -2) + " ] }" ;
}
updatedDatabag = MergeDefault(listAsJson, updatedDatabag, true); // how does Json.NET do this by default?
}
else // a single Non-IDatabag property that contains Idatabag properties
{
string tempResult = "{ " + ToCustomJson(propertyInfo.Name) + " : ";
tempResult += SerializeObjectToDatabag(value) + " }";
updatedDatabag = MergeDefault(tempResult, updatedDatabag, true);
}
}
else
{
if (value.GetType().IsArray) // This is an array of simple types so just overwrite
{
string arrayAsJson = "{ " + ToCustomJson(propertyInfo.Name) + " : ";
arrayAsJson += ToCustomJson(value) + "}";
updatedDatabag = MergeDefault(arrayAsJson, updatedDatabag, false);
}
else // ie an object that is not simpledatabag and doesn't contain simple databag
{
string jsonSingleProperty = "{" + ToCustomJson(propertyInfo.Name) + " : " + ToCustomJson(value) + "}";
updatedDatabag = MergeDefault(jsonSingleProperty, updatedDatabag, true);
}
}
}
}
return updatedDatabag;
}
catch (Exception ex)
{
string message = ex.Message;
string stack = ex.StackTrace;
throw;
}
}
internal static string ToCustomJson(object objectToConvertToJson)
{
try
{
// Distinguished from Mongodb.Bson.ToJson() extensionmethod by Custom name
JsonSerializerSettings serializerSettings = new JsonSerializerSettings();
serializerSettings.TypeNameHandling = TypeNameHandling.Objects; // Adds a $type on all objects which we need when it is abstract classes and interfaces
IgnoreDataMemberContractResolver contractResolver = new IgnoreDataMemberContractResolver(null, true, true);
serializerSettings.ContractResolver = contractResolver;
serializerSettings.DefaultValueHandling = DefaultValueHandling.Ignore;
IsoDateTimeConverter converter = new IsoDateTimeConverter();
serializerSettings.Converters.Add(converter);
string result = JsonConvert.SerializeObject(objectToConvertToJson, Formatting.None, serializerSettings);
return result;
}
catch (Exception ex)
{
throw new Exception("Error in ToCustomJson: " + ex.Message, ex);
}
}
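For illustration, a call site could then look like this (a sketch reusing the Department2 example from the question; SerializeObjectToDatabag replaces the plain JsonConvert.SerializeObject call):
Department2 department = JsonConvert.DeserializeObject<Department2>(jsonInput); // load from the original JSON databag
Array.Reverse(department.Phones); // mutate the object in some way
string mergedDatabag = SerializeObjectToDatabag(department); // merge the current values back into the databag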