I am integrating data from Salesforce into another API. The source has one object with a related child object, but the target API needs a payload in the form shown below: each related Salesforce child record needs to become an entry in the "rows" array inside "subforms". The POST example below is the ultimate desired form for the payload.
I suppose I need to iterate in DataWeave, or use a For Each over each child record from Salesforce, but I'm not quite sure where to start.
Here is the incoming JSON from the Salesforce push topic:
{
"LastModifiedDate": "2016-09-22T14:19:08.000+0000",
"Engagement__c": "1234",
"Product_CoC__c": null,
"Name": "Prod-22880",
"Year_1_Approved_Production_Hectares__c": 100.0,
"Year_1_Approved_Production_Volume__c": 1000.0,
"Product_Variety__c": null,
"CreatedById": "1234",
"Year_2_Estimated_Production_Volume__c": 1001.0,
"Product__c": "1234",
"Engagement_Decision_Date__c": null,
"Year_1_Harvest_Period__c": "2016",
"CPH_Number__c": null,
"Product_Type__c": null,
"Certificate_Product__c": "1234",
"IsDeleted": false,
"Crop_Yield__c": 10.0,
"Year_2_Estimated_Production_Hectares__c": 100.0,
"Valid_Period_End_Date__c": null,
"Intercrop__c": false,
"SystemModstamp": "2016-09-22T14:19:08.000+0000",
"Year_2_Harvest_Period__c": null,
"Crop_Production_Unit__c": "kg",
"Certificate__c": "1234",
"Year__c": 1.0,
"Harvest_Frequency__c": "Single",
"CreatedDate": "2016-07-28T15:44:18.000+0000",
"Valid_Period_Start_Date__c": null,
"Id": "1234",
"LastModifiedById": "1234",
"RA_Id__c": null
}
Here is the Transform Message I have for the child object:
%dw 1.0
%output application/java
---
{
acv_crop3:flowVars.ChainpointCropID default "",
acv_cph3:payload.Name default "",
acv_harvest_period2:payload.Year_1_Harvest_Period__c default "",
acv_frequency2:flowVars.frequency default "",
acv_aq2:payload.Year_1_Approved_Production_Volume__c default "",
acv_cph4:payload.Year_1_Approved_Production_Hectares__c default "",
acv_yield2:payload.Crop_Yield__c default "",
acv_start_date2:payload.Valid_Period_Start_Date__c default "",
acv_end_date2:payload.Valid_Period_End_Date__c default "",
acv_uom2:flowVars.uom default ""
}
Here is an example of a desired POST payload:
{
"ownerId": 24695,
"Q_CERT_SFGUID": "a00G000000WR3xxxxx",
"Q_CERT": "467",
"Q_CERTCROP2": [
"1199"
],
"textId": "auto",
"resultDate": null,
"authorId": null,
"Q_CERTNO": "RA-G-",
"Q_CERTHL": 24695,
"Q_CERTCB": 43986,
"Q_CERTST": "NO_STATUS",
"Q_CERTSUB": "",
"Q_CERTSTDATE": "2016-06-30",
"Q_CERT_expiration_date": "2019-06-29",
"Q_CERTENDATE": "",
"Q_CERT_redate": "2016-06-30",
"Q_CERT_CHA": "8.0",
"Q_CERT_TPH": "8.0",
"Q_CERT_CONTACT_EMAIL": "m#gmail.com",
"Q_CERT_CONTACT_NAME": 1300,
"subforms": [
{
"textId": "acv2",
"rows": [
{
"acv_crop3": "1199",
"acv_cph3": "Prod-22880",
"acv_harvest_period2": "2016",
"acv_frequency2": 668,
"acv_aq2": 1000,
"acv_cph4": 100,
"acv_yield2": 10,
"acv_start_date2": "2099-12-31",
"acv_end_date2": "2099-12-31",
"acv_uom2": 557
},
{
"acv_crop3": "1100",
"acv_cph3": "Prod-22881",
"acv_harvest_period2": "2017",
"acv_frequency2": 668,
"acv_aq2": 1000,
"acv_cph4": 100,
"acv_yield2": 1000,
"acv_start_date2": "2017-12-31",
"acv_end_date2": "2018-12-31",
"acv_uom2": 557
}
]
}
]
}
Here is the flow I have so far with the Transform message...
<flow name="prepareEP">
<set-variable variableName="API_Path" value="v0/forms/FR_CERT" doc:name="engagementProduct API_URL"/>
<set-variable variableName="entityType" value="engagementProduct" doc:name="set Entity"/>
<component class="com.ww.mule.reporting.et.ObjectStoreController" doc:name="load countryDiccionary"/>
<set-variable variableName="originalPayloadEP" value="#[payload]" doc:name="original payload"/>
<json:json-to-object-transformer returnClass="java.util.Map" doc:name="JSON to Object"/>
<set-variable variableName="originalPayloadEPMap" value="#[payload]" doc:name="original payload map"/>
<logger message="#['About to query certificate ' + flowVars.originalPayloadEPMap.Certificate__c + ' with engagement product Id ' + flowVars.originalPayloadEPMap.Id]" level="INFO" doc:name="Logger"/>
<sfdc:query config-ref="Salesforce__Basic_Authentication" doc:name="Get Engagement Products By Cert" query="dsql:SELECT Id,Name,Year_1_Harvest_Period__c,Harvest_Frequency__c,Year_1_Approved_Production_Volume__c,Year_1_Approved_Production_Hectares__c,Crop_Yield__c,Valid_Period_Start_Date__c,Valid_Period_End_Date__c FROM Engagement_Product__c WHERE Certificate__c = '#[flowVars.originalPayloadEPMap.Certificate__c]'"/>
<foreach doc:name="For Each">
<json:object-to-json-transformer doc:name="Object to JSON"/>
<objectstore:retrieve config-ref="CountryStoreObjectStore" key="#[payload.Cert_Country__c == empty?'UNKNOWN_COUNTRY':payload.Cert_Country__c]" targetProperty="countryISO" doc:name="ObjectStore" defaultValue-ref="#['XX']"/>
<set-payload value="#[flowVars.originalPayloadEP]" mimeType="application/json" doc:name="Set Payload"/>
<set-variable variableName="frequency" value="#[originalPayloadEPMap.Harvest_Frequency__c == 'Single' ? 668 : originalPayloadEPMap.Harvest_Frequency__c == 'Multiple' ? 669 : originalPayloadEPMap.Harvest_Frequency__c == 'Permanent/Continuous' ? 667 : originalPayloadEPMap.Harvest_Frequency__c == 'Occasional' ? 1910 : null]" doc:name="Set Harvest Frequency Variable"/>
<set-variable variableName="uom" value="#[originalPayloadEPMap.Crop_Production_Unit__c == 'kg' ? 557 : originalPayloadEPMap.Crop_Production_Unit__c == 'Liters' ? 596 : originalPayloadEPMap.Crop_Production_Unit__c == 'Number of Stems' ? 1802 : originalPayloadEPMap.Crop_Production_Unit__c == 'mts2' ? 556 : null]" doc:name="Set UOM Variable"/>
<!--
<set-payload doc:name="Set Payload"/>
-->
<dw:transform-message doc:name="Transform Message">
<dw:set-payload><![CDATA[%dw 1.0
%output application/java
---
{
acv_crop3:flowVars.ChainpointCropID default "",
acv_cph3:payload.Name default "",
acv_harvest_period2:payload.Year_1_Harvest_Period__c default "",
acv_frequency2:flowVars.frequency default "",
acv_aq2:payload.Year_1_Approved_Production_Volume__c default "",
acv_cph4:payload.Year_1_Approved_Production_Hectares__c default "",
acv_yield2:payload.Crop_Yield__c default "",
acv_start_date2:payload.Valid_Period_Start_Date__c default "",
acv_end_date2:payload.Valid_Period_End_Date__c default "",
acv_uom2:flowVars.uom default ""
}]]>
</dw:set-payload>
</dw:transform-message>
<object-to-string-transformer mimeType="application/json" doc:name="Object to String"/>
<set-variable variableName="acv2_info" value="#[flowVars.acv2_info == null ? payload : flowVars.acv2_info + ',' + payload]" doc:name="acv2_info"/>
<logger message="#['acv2_info = ' + flowVars.acv2_info]" level="INFO" doc:name="Logger"/>
<set-variable variableName="transformedPayload" value="#[payload]" doc:name="save payload"/>
<logger message="#['transformedPayload = ' + flowVars.transformedPayload]" level="INFO" doc:name="Print transformedPayload"/>
</foreach>
<flow-ref name="sendAuthenticatedChainPointRequest" doc:name="sendAuthenticatedChainPointRequest"/>
</flow>
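One way to approach this, rather than concatenating JSON strings inside the For Each, is to run a single DataWeave transform over the list returned by the Salesforce query and let map build the "rows" array. Below is a minimal DataWeave 1.0 sketch, not a verified implementation; it reuses only the field names and flowVars already shown above, assumes flowVars.ChainpointCropID, flowVars.frequency and flowVars.uom are set before the transform runs, and omits the top-level Q_CERT* fields because their source isn't shown in the question.
%dw 1.0
%output application/json
---
{
    // one "subforms" entry whose "rows" holds one object per Engagement_Product__c record
    subforms: [{
        textId: "acv2",
        rows: payload map ((ep, index) -> {
            acv_crop3: flowVars.ChainpointCropID default "",
            acv_cph3: ep.Name default "",
            acv_harvest_period2: ep.Year_1_Harvest_Period__c default "",
            acv_frequency2: flowVars.frequency default "",
            acv_aq2: ep.Year_1_Approved_Production_Volume__c default "",
            acv_cph4: ep.Year_1_Approved_Production_Hectares__c default "",
            acv_yield2: ep.Crop_Yield__c default "",
            acv_start_date2: ep.Valid_Period_Start_Date__c default "",
            acv_end_date2: ep.Valid_Period_End_Date__c default "",
            acv_uom2: flowVars.uom default ""
        })
    }]
}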
I am facing the runtime error below after upgrading from Mule Runtime 4.2.0 to 4.2.1.
Below are my codebase, the short error message, and the detailed error logs for review. Has anyone faced the same problem, and can you suggest a fix?
I referred to a similar Stack Overflow question and answer, Mule ESB : Cannot copy message with a stream payload, but it does not solve the error below.
Runtime error on Mule 4.2.1:
Cannot copy message with a stream payload. Payload can be transformed by using an <object-to-byte-array-transformer> in order to be able to copy the message. Payload type is "org.mule.runtime.core.internal.streaming.bytes.ManagedCursorStreamProvider"
Detailed Error Log
INFO 2019-08-05 11:29:15,388 [[MuleRuntime].cpuLight.11: [jda-demand-adapter].Get_BY_Status_n_Update_Part_Flow.CPU_LITE #7ed56bee] [event: 20e0d4f1-b746-11e9-afb2-0a0027000005] org.mule.runtime.core.internal.processor.LoggerMessageProcessor: BY status: <?xml version='1.0' encoding='UTF-8'?>
<DeliveryStatus>
<DeliveryID>46658</DeliveryID>
<Username>user_di_test</Username>
<StartDelivery>2019-08-05T05:24:34.886502+00:00</StartDelivery>
<LastProcessed>2019-08-05T05:46:16.508861+00:00</LastProcessed>
<Status>LOADED</Status>
<ParameterInfo>
<Category>Events</Category>
<Version>1.14.17</Version>
<File>Events.gz</File>
</ParameterInfo>
<Statistics>
<DeliveredRecords>323</DeliveredRecords>
<PendingRecords>323</PendingRecords>
<ProcessedRecords>0</ProcessedRecords>
<ErroneousRecords>0</ErroneousRecords>
</Statistics>
</DeliveryStatus>
INFO 2019-08-05 11:29:15,397 [[MuleRuntime].cpuIntensive.09: [jda-demand-adapter].Get_BY_Status_n_Update_Part_Flow.CPU_INTENSIVE #6b417a92] [event: 20e0d4f1-b746-11e9-afb2-0a0027000005] org.mule.runtime.core.internal.processor.LoggerMessageProcessor: payload : {
"DeliveryStatus": {
"DeliveryID": "46658",
"Username": "user_di_test",
"StartDelivery": "2019-08-05T05:24:34.886502+00:00",
"LastProcessed": "2019-08-05T05:46:16.508861+00:00",
"Status": "LOADED",
"ParameterInfo": {
"Category": "Events",
"Version": "1.14.17",
"File": "Events.gz"
},
"Statistics": {
"DeliveredRecords": "323",
"PendingRecords": "323",
"ProcessedRecords": "0",
"ErroneousRecords": "0"
}
}
}
INFO 2019-08-05 11:29:15,410 [[MuleRuntime].cpuIntensive.09: [jda-demand-adapter].Get_BY_Status_n_Update_Part_Flow.CPU_INTENSIVE #6b417a92] [event: 20e0d4f1-b746-11e9-afb2-0a0027000005] org.mule.runtime.core.internal.processor.LoggerMessageProcessor: Delivery Status summary: "46658","LOADED", {
"DeliveredRecords": "323",
"PendingRecords": "323",
"ProcessedRecords": "0",
"ErroneousRecords": "0"
}
INFO 2019-08-05 11:29:17,030 [[MuleRuntime].io.110: [jda-demand-adapter].Update_Part_Status_Recordcount_Flow.BLOCKING #502d3ddf] [event: 20e0d4f1-b746-11e9-afb2-0a0027000005] org.mule.runtime.core.internal.processor.LoggerMessageProcessor: Final response for Job without Errors, jobDetails: null
INFO 2019-08-05 11:29:17,032 [[MuleRuntime].io.110: [jda-demand-adapter].Update_Part_Status_Recordcount_Flow.BLOCKING #502d3ddf] [event: 20e0d4f1-b746-11e9-afb2-0a0027000005] org.mule.runtime.core.internal.processor.LoggerMessageProcessor: Inside Aggregation complete - For loop payload:[org.mule.runtime.api.metadata.TypedValue#fc4ba33d]
INFO 2019-08-05 11:29:17,052 [[MuleRuntime].io.110: [jda-demand-adapter].Update_Part_Status_Recordcount_Flow.BLOCKING #502d3ddf] [event: 20e0d4f1-b746-11e9-afb2-0a0027000005] org.mule.runtime.core.internal.processor.LoggerMessageProcessor: After Reporting Transformation: {
"Summary": {
"DataPostingStatus": "",
"DataPostingDetailedStatus": "",
"JobCreatedDate": "",
"JobRestartDate": "",
"JobLastModifiedTime": "",
"JobExecutionDuration": "",
"JobExecutionDuration_in_Secs": "",
"RecordsProcessed": "",
"DeliveredRecords": "323",
"PendingRecords": "323",
"ProcessedRecords": "0",
"ErrorRecords": "0",
"Categories": [
"Events"
]
},
"CategorySummary": [
{
"Category": "Events",
"DeliveredRecords": "323",
"PendingRecords": "323",
"ProcessedRecords": "0",
"ErrorRecords": "0",
"DeliveryId": [
"46658"
]
}
],
"Errors": {
},
"Details": [
{
"DeliveryStatus": {
"DeliveryID": "46658",
"Username": "user_di_test",
"StartDelivery": "2019-08-05T05:24:34.886502+00:00",
"LastProcessed": "2019-08-05T05:46:16.508861+00:00",
"Status": "LOADED",
"ParameterInfo": {
"Category": "Events",
"Version": "1.14.17",
"File": "Events.gz"
},
"Statistics": {
"DeliveredRecords": "323",
"PendingRecords": "323",
"ProcessedRecords": "0",
"ErroneousRecords": "0"
}
}
}
]
}
ERROR 2019-08-05 11:29:17,076 [[MuleRuntime].cpuLight.11: [jda-demand-adapter].Ingestion_Status_Main_Flow.CPU_LITE #7132a277] [event: 20e0d4f1-b746-11e9-afb2-0a0027000005] org.mule.runtime.core.internal.exception.OnErrorContinueHandler:
********************************************************************************
Message : Cannot copy message with a stream payload. Payload can be transformed by using an <object-to-byte-array-transformer> in order to be able to copy the message. Payload type is "org.mule.runtime.core.internal.streaming.bytes.ManagedCursorStreamProvider".
Element : Get_Job_Status_Details_Flow/processors/1 # jda-demand-adapter:com/jda/cd/common/flow/ingestion-status.xml:257 (Scatter-Gather)
Element XML : <scatter-gather doc:name="Scatter-Gather">
<route>
<logger level="INFO" doc:name="Logger" message="Calculating the Job Satus ..."></logger>
<flow-ref name="GetDetailedJobStatus_SubFlow" doc:name="GetDetailedJobStatus_SubFlow"></flow-ref>
<set-payload value="#[{
"detailedStatus": vars.detailedStatus
}]" mimeType="application/java" doc:name="Set Payload" doc:id="5ea3c7bf-b1b1-4231-9af3-c8e39f241866"></set-payload>
</route>
<route>
<flow-ref name="Get_Job_Details_Flow_Enricher_0" doc:name="Get_Job_Details_Flow_Enricher_0"></flow-ref>
<set-payload value="#[{
"jobStatus": vars.jobStatus
}]" mimeType="application/java" doc:name="Set Payload" doc:id="e0a78803-a93c-4523-9551-4cf9218f059b"></set-payload>
<logger level="INFO" doc:name="Logger Job Details" doc:id="8b31e869-5630-4e01-902d-3e259ba2734e" message="Scatter-Gather Get_Job_Details_Flow_Enricher_0 payload:#[payload], jobStatus:#[vars.jobStatus]"></logger>
<choice doc:name="CalcJobStatus_IfNeeded-Choice">
<when expression="#[vars.jobStatus == 'IN_PROGRESS']">
<async doc:name="Async">
<flow-ref name="CalcBatchStatus_Flow" doc:name="CalcBatchStatus_Flow"></flow-ref>
</async>
</when>
<otherwise>
<logger message="job status is final" level="INFO" doc:name="Logger"></logger>
</otherwise>
</choice>
</route>
<route>
<flow-ref name="Get_Records_By_Status_SubFlow_Enricher_1" doc:name="Get_Records_By_Status_SubFlow_Enricher_1"></flow-ref>
<set-payload value="#[{
"recordsByStatus": vars.recordsByStatus
}]" mimeType="application/java" doc:name="Set Payload"></set-payload>
</route>
<route>
<logger message="#["jobExecution Duration : "]" level="INFO" doc:name="Logger"></logger>
<flow-ref name="Get_Job_Status_Details_Flow_Enricher_2" doc:name="Get_Job_Status_Details_Flow_Enricher_2"></flow-ref>
<logger message="payload last_mod_date #[vars] #[vars.lastModDate]" level="INFO" doc:name="Logger"></logger>
<flow-ref name="Get_Job_Status_Details_Flow_Enricher_6" target="jobCreatedDate" targetValue="#[payload[0].CREATED_DATE]" doc:name="Get_Job_Status_Details_Flow_Enricher_6"></flow-ref>
<set-variable value="#[now()]" doc:name="Current time" doc:id="547a11d2-b9dc-479e-a0c4-36e5b367bcee" variableName="currentTime"></set-variable>
<set-variable value="#[%dw 2.0 import * from dw::util::Timer output application/java --- ((toMilliseconds(vars.lastModDate) - toMilliseconds(vars.jobCreatedDate))/1000) as Number as String {format: ".##"}]" doc:name="Set Variable jobExecutionDurationSeconds" doc:id="499b7eda-f565-4a7c-94bd-9aeaf7306941" variableName="jobExecutionDurationSeconds"></set-variable>
<set-variable value="#[%dw 2.0 import * from dw::util::Timer output application/java --- ((toMilliseconds(vars.currentTime) - toMilliseconds(vars.lastModDate))/1000) as Number as String {format: ".##"}]" doc:name="TimeSinceLastMod" doc:id="0f30996f-1c89-4d45-90e9-915f3d2440bb" variableName="timeSinceLastMod"></set-variable>
<set-variable value="#[%dw 2.0 output application/json --- (((vars.jobExecutionDurationSeconds / 3600) as Number) as String {format: ".##"}) ++ ":" ++ ((((vars.jobExecutionDurationSeconds mod 3600) as Number) / 60 as Number) as String)as String {format: ".##"} ++ ":" ++((((vars.jobExecutionDurationSeconds mod 60) as Number))as String) as String {format: ".##"}]" doc:name="Set Variable jobExecutionDuration" doc:id="eaa22fce-6085-4290-8a31-46fc0bdd90c0" variableName="jobExecutionDuration"></set-variable>
<set-variable value="#[%dw 2.0 output application/json --- (((vars.timeSinceLastMod / 3600) as Number) as String) as String {format: ".##"} ++ ":" ++ ((((vars.timeSinceLastMod mod 3600) as Number) / 60 as Number) as String) as String {format: ".##"} ++ ":" ++((((vars.timeSinceLastMod mod 60) as Number))as String) as String {format: ".##"}]" doc:name="Set Variable TimeSinceLastModString" doc:id="15942ccf-d219-40f1-a783-3e4466bdfbbf" variableName="timeSinceLastModString"></set-variable>
<logger message="job_create date #[vars]" level="INFO" doc:name="Logger"></logger>
<set-payload value="#[%dw 2.0 output application/json --- {"jobExecutionDuration": vars.jobExecutionDuration, "jobExecutionDurationSeconds": vars.jobExecutionDurationSeconds, "jobCreatedDate":vars.jobCreatedDate,"lastModDate": vars.lastModDate,"currentTime": vars.currentTime,"timeSinceLastMod": vars.timeSinceLastMod}]" doc:name="Set Payload" doc:id="a97e5020-9d64-411e-87bf-ab325f29cd7f" mimeType="application/json"></set-payload>
</route>
</scatter-gather>
Error type : MULE:UNKNOWN
--------------------------------------------------------------------------------
Root Exception stack trace:
org.mule.runtime.api.exception.MuleRuntimeException: Cannot copy message with a stream payload. Payload can be transformed by using an <object-to-byte-array-transformer> in order to be able to copy the message. Payload type is "org.mule.runtime.core.internal.streaming.bytes.ManagedCursorStreamProvider"
********************************************************************************
The codebase below works fine on Mule 4.2.0. After upgrading the Mule runtime to 4.2.1 (and moving to Anypoint Studio 7.3.4) I get the runtime error above, and I have no clue how to fix it.
The same code, which works on Mule 4.2.0:
<flow name="Ingestion_Status_Main_Flow" doc:description="Ingestion_Status_Main_Flow will read BY_delivery_IDs and try to read the status of the delivery from BY. If BY service is not available, then it provides the status of the status tables. In addition to the BY response details, it shows the job's current status, Number of records as per the status. ">
<http:listener path="${secure::ingestion.status.resource}/{jobId}" allowedMethods="GET" doc:name="/status" config-ref="HTTP_Listener_Config">
<http:response statusCode="#[migration::HttpListener::httpListenerResponseSuccessStatusCode(vars)]"/>
<http:error-response statusCode="#[vars.statusCode default migration::HttpListener::httpListenerResponseErrorStatusCode(vars)]"/>
</http:listener>
<logger message='#[attributes.uriParams.jobId]' level="INFO" doc:name="Job ID from input" />
<set-variable variableName="jobId" value="#[attributes.uriParams.jobId]" doc:name="jobId" />
<set-variable value="#[attributes.queryParams.notify]" doc:name="notifyFlag" doc:id="c2c8a869-4941-4f72-9cd3-3c9ea7441e65" variableName="notify"/>
<flow-ref name="Validate_Input_JobID_Sub_Flow" doc:name="Validate_Input_JobID_Sub_Flow" />
<set-variable variableName="correlationId" value="#[vars.jobId]" doc:name="Session Variable"/>
<flow-ref name="Fetch_Parts_for_Job_SubFlow" doc:name="Fetch_Parts_for_Job_SubFlow" />
<set-variable value="#[sizeOf(payload)]" doc:name="sizeOfPayload" doc:id="0ab10eae-0b50-4b37-bd89-0ac614f548db" variableName="sizeOfPayload" />
<logger message='Delivery ID for Job: #[payload.BY_DELIVERY_ID]' level="INFO" doc:name="Deliver IDs" />
<choice doc:name="Choice">
<when expression="#[sizeOf(payload) > 0]">
<foreach doc:name="For Each" doc:id="742d1496-9be5-409a-a42c-e0da8a1a696d" >
<logger level="INFO" doc:name="Logger For Each" doc:id="e9aeff5c-e00c-4fd4-ada3-09b22f4161cd" message="inside for each #[payload] "/>
<flow-ref doc:name="Get_BY_Status_n_Update_Part_Flow" doc:id="6e00a6b0-8d83-4df6-b22a-ce6666fed36a" name="Get_BY_Status_n_Update_Part_Flow"/>
<aggregators:group-based-aggregator doc:name="groupByDeliveryId" doc:id="b4b76fac-3f35-4812-9c1d-81b23ddd1eb9" name="groupByDeliveryId" groupSize="#[vars.sizeOfPayload]" >
<aggregators:incremental-aggregation >
<logger level="INFO" doc:name="Logger" doc:id="f3e2bdd1-d660-4074-92b0-4c04ea421706" message="Incremental size #[sizeOf(payload)]" />
</aggregators:incremental-aggregation>
<aggregators:aggregation-complete >
<logger message="Final response for Job without Errors, jobDetails: #[vars.detailedStatus]" level="INFO" doc:name="Response without Errors included" />
<logger level="INFO" doc:name="Logger" doc:id="eb7c7626-7091-481a-939c-926f938d84ae" message="Inside Aggregation complete - For loop payload:#[payload]" />
<ee:transform doc:name="Reporting Transformation" doc:id="421fb819-c58f-4f46-8d6d-e5dfa43df133">
<ee:message>
<ee:set-payload resource="ingestion\ingestion-status-resp.dwl" />
</ee:message>
</ee:transform>
<logger level="INFO" doc:name="Logger" doc:id="f6acf89c-5b84-4cbc-9cab-bf4b9e48da4f" message="After Reporting Transformation: #[payload]"/>
<set-variable variableName="aggregatedPayload" value="#[payload]" doc:name="Set Variable" doc:id="eded53f3-18ac-4a84-8fcf-916e89d7a185"/>
</aggregators:aggregation-complete>
</aggregators:group-based-aggregator>
</foreach>
</when>
<otherwise>
<logger message="No Delivery IDs found." level="INFO" doc:name="Logger" />
<set-payload value="#[{
"Summary": {
"DataPostingStatus": '',
"DataPostingDetailedStatus": '',
"JobCreatedDate": '',
"JobRestartDate": '',
"JobLastModifiedTime": '',
"JobExecutionDuration": '',
"JobExecutionDuration_in_Secs": '',
"RecordsProcessed": ''
}
}]" mimeType="application/json" doc:name="Set Payload" doc:id="066c7871-dd4b-4bde-8663-e0558a5847df"/>
</otherwise>
</choice>
<set-payload value="#[vars.aggregatedPayload]" doc:name="Set Payload" doc:id="337a0771-458c-4511-920c-ca38fcb8c6d9" />
<logger message='getting db details #[payload]' level="INFO" doc:name="Logger"/>
<flow-ref name="Ingestion_Status_Main_Flow_Enricher_22" doc:name="Ingestion_Status_Main_Flow_Enricher_22"/>
<set-payload value="#[vars.aggregatedPayload]" doc:name="Set Payload" doc:id="7ef033ba-17d6-4597-9b4c-96fb73212e63" />
<ee:transform doc:name="Transform Message" doc:id="794cfad2-e368-4b44-9c7e-746e0e692338" >
<ee:message >
<ee:set-payload ><![CDATA[%dw 2.0
output application/json
---
{
Summary: {
DataPostingStatus: vars.jobStatus,
DataPostingDetailedStatus: vars.detailedStatus,
JobCreatedDate: vars.jobCreatedDate,
JobRestartDate: "",
JobLastModifiedTime: vars.lastModDate,
JobExecutionDuration: vars.jobExecutionDuration,
JobExecutionDuration_in_Secs: vars.jobExecutionDurationSeconds,
RecordsProcessed: "",
DeliveredRecords: payload.Summary.DeliveredRecords,
PendingRecords: payload.Summary.PendingRecords,
ProcessedRecords: payload.Summary.ProcessedRecords,
ErrorRecords: payload.Summary.ErrorRecords,
Categories: payload.Summary.Categories,
},
Errors:{},
CategorySummary: payload.CategorySummary,
Details: payload.Details
}]]></ee:set-payload>
</ee:message>
</ee:transform>
<logger message='Getting errors details...' level="INFO" doc:name="Logger"/>
<flow-ref name="Add_Errors_to_Payload_Sub_Flow_Enricher_0" doc:name="Add_Errors_to_Payload_Sub_Flow_Enricher_0"/>
<ee:transform doc:name="Transform Message" doc:id="5b0518ac-139a-42f8-ae12-a42cce2449c1" >
<ee:message >
<ee:set-payload ><![CDATA[%dw 2.0
output application/json
---
payload
]]></ee:set-payload>
</ee:message>
</ee:transform>
<async doc:name="Async">
<choice doc:name="Choice">
<when expression="#[vars.notify == 'true']">
<flow-ref name="Send_Status_Mail_Flow" doc:name="Send_Status_Mail_Flow" />
</when>
<otherwise>
<logger message="Notification flag is not provided as query parameter." level="INFO" doc:name="Logger" />
</otherwise>
</choice>
</async>
<logger message='Final response: #[payload] jobId: #[vars.jobId]' level="DEBUG" doc:name="Response with Errors" />
<logger message="Ingestion status request done" level="INFO" doc:name="Status request completed" />
<error-handler ref="Global_Errorflow_Choice_Exception_Strategy" doc:name="Global Exception Strategy" />
</flow>
<sub-flow name="Ingestion_Status_Main_Flow_Enricher_22">
<flow-ref name="Get_Job_Status_Details_Flow" doc:description="Job status retrieved from status tables."/>
</sub-flow>
<sub-flow name="Get_Job_Status_Details_Flow" doc:description="Job status retrieved from status tables.
For now, different stages of the job processing has been reported.
1. Batches not yet created.
2. Batches created, and parts are getting creating.
3. Parts are created, and processing.
4. Some part uploads are failed.
5. Some parts processing is failed i.e. payload is not created. ">
<scatter-gather doc:name="Scatter-Gather">
<route>
<logger level="INFO" doc:name="Logger" message="Calculating the Job Satus ..." />
<flow-ref name="GetDetailedJobStatus_SubFlow" doc:name="GetDetailedJobStatus_SubFlow" />
<set-payload value="#[{
"detailedStatus": vars.detailedStatus
}]" mimeType="application/java" doc:name="Set Payload" doc:id="5ea3c7bf-b1b1-4231-9af3-c8e39f241866"/>
</route>
<route>
<flow-ref name="Get_Job_Details_Flow_Enricher_0" doc:name="Get_Job_Details_Flow_Enricher_0"/>
<set-payload value="#[{
"jobStatus": vars.jobStatus
}]" mimeType="application/java" doc:name="Set Payload" doc:id="e0a78803-a93c-4523-9551-4cf9218f059b"/>
<logger level="INFO" doc:name="Logger Job Details" doc:id="8b31e869-5630-4e01-902d-3e259ba2734e" message="Scatter-Gather Get_Job_Details_Flow_Enricher_0 payload:#[payload], jobStatus:#[vars.jobStatus]"/>
<choice doc:name="CalcJobStatus_IfNeeded-Choice">
<when expression="#[vars.jobStatus == 'IN_PROGRESS']">
<async doc:name="Async">
<flow-ref name="CalcBatchStatus_Flow" doc:name="CalcBatchStatus_Flow" />
</async>
</when>
<otherwise>
<logger message="job status is final" level="INFO" doc:name="Logger" />
</otherwise>
</choice>
</route>
<route>
<flow-ref name="Get_Records_By_Status_SubFlow_Enricher_1" doc:name="Get_Records_By_Status_SubFlow_Enricher_1"/>
<set-payload value="#[{
"recordsByStatus": vars.recordsByStatus
}]" mimeType="application/java" doc:name="Set Payload" />
</route>
<route>
<logger message='#["jobExecution Duration : "]' level="INFO" doc:name="Logger" />
<flow-ref name="Get_Job_Status_Details_Flow_Enricher_2" doc:name="Get_Job_Status_Details_Flow_Enricher_2"/>
<logger message='payload last_mod_date #[vars] #[vars.lastModDate]' level="INFO" doc:name="Logger" />
<flow-ref name="Get_Job_Status_Details_Flow_Enricher_6" target="jobCreatedDate" targetValue="#[payload[0].CREATED_DATE]" doc:name="Get_Job_Status_Details_Flow_Enricher_6"/>
<set-variable value="#[now()]" doc:name="Current time" doc:id="547a11d2-b9dc-479e-a0c4-36e5b367bcee" variableName="currentTime"/>
<set-variable value='#[%dw 2.0
import * from dw::util::Timer
output application/java
---
((toMilliseconds(vars.lastModDate) - toMilliseconds(vars.jobCreatedDate))/1000) as Number as String {format: ".##"}]' doc:name="Set Variable jobExecutionDurationSeconds" doc:id="499b7eda-f565-4a7c-94bd-9aeaf7306941" variableName="jobExecutionDurationSeconds"/>
<set-variable value='#[%dw 2.0
import * from dw::util::Timer
output application/java
---
((toMilliseconds(vars.currentTime) - toMilliseconds(vars.lastModDate))/1000) as Number as String {format: ".##"}]' doc:name="TimeSinceLastMod" doc:id="0f30996f-1c89-4d45-90e9-915f3d2440bb" variableName="timeSinceLastMod"/>
<set-variable value='#[%dw 2.0
output application/json
---
(((vars.jobExecutionDurationSeconds / 3600) as Number) as String {format: ".##"}) ++ ":" ++ ((((vars.jobExecutionDurationSeconds mod 3600) as Number) / 60 as Number) as String)as String {format: ".##"} ++ ":" ++((((vars.jobExecutionDurationSeconds mod 60) as Number))as String) as String {format: ".##"}]' doc:name="Set Variable jobExecutionDuration" doc:id="eaa22fce-6085-4290-8a31-46fc0bdd90c0" variableName="jobExecutionDuration"/>
<set-variable value='#[%dw 2.0
output application/json
---
(((vars.timeSinceLastMod / 3600) as Number) as String) as String {format: ".##"} ++ ":" ++ ((((vars.timeSinceLastMod mod 3600) as Number) / 60 as Number) as String) as String {format: ".##"} ++ ":" ++((((vars.timeSinceLastMod mod 60) as Number))as String) as String {format: ".##"}]' doc:name="Set Variable TimeSinceLastModString" doc:id="15942ccf-d219-40f1-a783-3e4466bdfbbf" variableName="timeSinceLastModString"/>
<logger message='job_create date #[vars]' level="INFO" doc:name="Logger" />
<set-payload value='#[%dw 2.0 output application/json --- {"jobExecutionDuration": vars.jobExecutionDuration, "jobExecutionDurationSeconds": vars.jobExecutionDurationSeconds, "jobCreatedDate":vars.jobCreatedDate,"lastModDate": vars.lastModDate,"currentTime": vars.currentTime,"timeSinceLastMod": vars.timeSinceLastMod}]' doc:name="Set Payload" doc:id="a97e5020-9d64-411e-87bf-ab325f29cd7f" mimeType="application/json"/>
</route>
</scatter-gather>
<logger message="Flowvars used to invoke the restart-flows: payload: #[payload]" level="INFO" doc:name="RestartFlags - Logger" />
</sub-flow>
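For what it is worth, one commonly suggested mitigation for this class of error (a sketch only, not verified against this exact 4.2.1 regression) is to make sure the payload entering the Scatter-Gather is a fully materialized in-memory value rather than a cursor stream, for example by adding a transform just before the scatter-gather in Get_Job_Status_Details_Flow. The sketch assumes the payload is JSON at that point, as the logs above suggest.
<!-- Hypothetical mitigation: convert the streamed JSON payload into an in-memory
     Java map so the Scatter-Gather routes copy a plain value instead of a
     ManagedCursorStreamProvider. Place this right before the <scatter-gather>. -->
<ee:transform doc:name="Materialize payload">
    <ee:message>
        <ee:set-payload><![CDATA[%dw 2.0
output application/java
---
payload]]></ee:set-payload>
    </ee:message>
</ee:transform>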
I am trying to create an APERAK EDIFACT file with the Mule DataWeave component.
Here is my piece of code:
<edifact-edi:config name="EDIFACT_EDI" delimiterUsage="USE_SPECIFIED_FOR_WRITES" doc:name="EDIFACT EDI" interchangeIdPartner="YYYY" interchangeIdSelf="XXXX">
<edifact-edi:schemas>
<edifact-edi:schema>/edifact/d98b/APERAK.esl</edifact-edi:schema>
</edifact-edi:schemas>
</edifact-edi:config>
<flow name="new1Flow">
<file:inbound-endpoint path="C:\Users\Desktop" responseTimeout="10000" doc:name="File">
<file:filename-regex-filter pattern="aperak.xml" caseSensitive="true"/>
</file:inbound-endpoint>
<dw:transform-message doc:name="Transform Message">
<dw:set-payload><![CDATA[%dw 1.0
%output application/java
---
{
Messages: {
D98B: {
APERAK: [{
Interchange: {
UNB0201: "XXX",
UNB0301: "YYYY"
},
MessageHeader: {
UNH01: "1"
},
Heading: {
"0020_BGM": {
BGM0101: "7"
}
}
}]
}
}
}]]></dw:set-payload>
</dw:transform-message>
<edifact-edi:write config-ref="EDIFACT_EDI" doc:name="EDIFACT EDI"/>
<file:outbound-endpoint path="C:\Users\Desktop" outputPattern="out.json" responseTimeout="10000" doc:name="File"/>
</flow>
Output
UNB+UNOB:4+XXX+YYY+05042016:0948+1'UNH+2+APERAK:D:98B:UN'BGM+7'UNT+3+2'UNZ+1+1'
But the output is created all on one line, whereas I want each segment to appear on its own line. How can I introduce a new line into the EDIFACT output?
Any help is appreciated.
Expected Output
UNB+UNOB:4+XXX+YYY+05042016:0948+1'
UNH+2+APERAK:D:98B:UN'
BGM+7'
UNT+3+2'UNZ+1+1'
Set the lineEnding configuration attribute of edifact-edi:config to output a CRLF.
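For example, on the config from the question (a sketch; the exact accepted values depend on the connector version, with CRLF assumed here):
<edifact-edi:config name="EDIFACT_EDI" delimiterUsage="USE_SPECIFIED_FOR_WRITES"
    doc:name="EDIFACT EDI" interchangeIdPartner="YYYY" interchangeIdSelf="XXXX"
    lineEnding="CRLF">
    <edifact-edi:schemas>
        <edifact-edi:schema>/edifact/d98b/APERAK.esl</edifact-edi:schema>
    </edifact-edi:schemas>
</edifact-edi:config>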
Friends,
I have two inputs, A and B, both JSON.
I need to generate, with DataMapper, a single custom output, also in JSON format.
Example:
My Input A:
{
"name": "John Doe",
"age": "40"
}
My Input B:
{
"country": "Brazil"
"city": "Rio de Janeiro"
}
My Custom Output:
{
"customerName": "John Doe",
"customerAge": "40",
"customerCountry": "Brazil",
"customerCity": "Rio de Janeiro"
}
Can I generate this output using the Mule DataMapper? How?
So far I only know how to use a single input with DataMapper.
You need to take one of the inputs as the payload and the other as a flow variable.
My Payload was
{
"name": "John Doe",
"age": "40"
}
My Variable was
{
"country": "Brazil"
"city": "Rio de Janeiro"
}
Please find below a sample that I tested and that works.
<flow name="combineFlow">
<http:listener config-ref="HTTP_Listener_Configuration"
path="/testings" doc:name="HTTP" />
<logger message="Coming here " level="INFO" doc:name="Logger" />
<set-variable variableName="mypayload"
value="{"country": "Brazil","city": "Rio de Janeiro"}"
doc:name="Variable" mimeType="application/json" />
<logger level="INFO" doc:name="Logger" />
<dw:transform-message doc:name="Transform Message">
<dw:set-payload><![CDATA[%dw 1.0
%output application/json
---
{
"customerName": payload.name,
"customerAge": payload.age,
"customerCountry": flowVars.mypayload.country,
"customerCity": flowVars.mypayload.city
}]]></dw:set-payload>
</dw:transform-message>
</flow>
Hope this helps
Similar to the previous answer: you need to take one input as the payload and the other as a variable.
Following is an implementation using DataMapper (not DataWeave/Transform Message).
XML:
<flow name="genericFlow">
<http:listener config-ref="HTTP_Listener_Configuration" path="/dm" doc:name="HTTP" allowedMethods="POST"/>
<set-variable variableName="inputB"
value="#['{"country": "Brazil", "city": "Rio de Janeiro"}']" mimeType="application/json"
doc:name="Input B" />
<data-mapper:transform config-ref="JSON_To_JSON" doc:name="JSON To JSON">
<data-mapper:input-arguments>
<data-mapper:input-argument key="inputBjson">#[new org.json.JSONObject(flowVars.inputB)]</data-mapper:input-argument>
</data-mapper:input-arguments>
</data-mapper:transform>
<echo-component doc:name="Echo" />
</flow>
GRF:
...
<Dictionary>
<Entry id="DictionaryEntry0" input="true" name="inputPayload" output="false" type="object"/>
<Entry id="DictionaryEntry1" input="false" name="outputPayload" output="true" type="object"/>
<Entry dictval.__javaType="org.json.JSONObject" id="DictionaryEntry2" input="true" name="inputBjson" output="false" type="object"/>
</Dictionary>
...
<attr name="melScript"><![CDATA[//MEL
//START -> DO NOT REMOVE
output.__id = input.__id;
//END -> DO NOT REMOVE
output.customerName = input.name;
output.customerAge = input.age;
output.customerCountry = inputArguments.inputBjson.getString("country");
output.customerCity = inputArguments.inputBjson.getString("city");]]></attr>
Note: input B is registered in DataMapper as an Input Argument with Type = Object (MEL only), and its class is org.json.JSONObject.
Give one input as the payload, and use message properties to store the second input's values as a variable. Then, inside DataMapper, use that variable in the input argument field and map it. Simple! Hope it helps.
How can I do the equivalent of a SQL join in DataWeave? I'm basically implementing a lookup. I have CSV data and JSON data in flow variables. I'm able to use both of them, but I can't figure out how to connect them. Let's say the CSV has two columns:
Name,ExternalId
Foo,1
Bar,2
Baz,2
The JSON data is:
{
ExternalEntities: [
{ "Id": 1, "Name": "One", "Description": "Thing One" }
, { "Id": 2, "Name": "Two", "Description": "Thing Two" }
]
}
In the end, I'd like a List<HashMap> with the following (expressed here in JSON format for convenience):
[
{ "Name": "Foo", "ExternalName": "One", "ExternalDescription": "Thing One" }
, { "Name": "Bar", "ExternalName": "Two", "ExternalDescription": "Thing Two" }
, { "Name": "Baz", "ExternalName": "Two", "ExternalDescription": "Thing Two" }
]
So here is a sample using the JSON as the payload and the CSV as a lookup table. The first flow parses the JSON and does a flow lookup, passing the 'Id' as the payload. The second flow loads the CSV as an array and searches it by the Id passed in; this returns a map of the matching CSV record, from which you can then extract the 'Name' field.
It is possible to do this in one transformer, but for demo's sake this was easier. You can also reverse the lookup with some tinkering if you want the CSV as your payload and the JSON as your lookup.
<flow name="lookuptest" processingStrategy="synchronous">
<poll doc:name="Poll">
<logger level="INFO" doc:name="Logger" />
</poll>
<set-payload
value="{ "ExternalEntities": [ { "Id": 1, "Name": "One", "Description": "Thing One" } , { "Id": 2, "Name": "Two", "Description": "Thing Two" } ] }"
doc:name="Set Payload" mimeType="application/json"></set-payload>
<dw:transform-message doc:name="Transform Message">
<dw:input-payload doc:sample="string_2.dwl" />
<dw:set-payload><![CDATA[%dw 1.0
%input payload application/json
%output application/json
---
payload.ExternalEntities map ((value , index) -> {
Name: lookup("NameLookup", [value.Id as :string]).Name
})]]></dw:set-payload>
</dw:transform-message>
<object-to-string-transformer doc:name="Object to String"/>
<logger level="ERROR" message="#[payload]" doc:name="Logger"/>
</flow>
<flow name="NameLookup" processingStrategy="synchronous">
<set-variable value="#[payload[0]]" variableName="ExternalId"
doc:name="Variable" />
<set-payload
value="#[Thread.currentThread().getContextClassLoader().getResourceAsStream('lookuptables/namelookup.csv')]"
mimeType="application/csv" doc:name="Variable" />
<dw:transform-message doc:name="Transform Message">
<dw:input-payload doc:sample="string_2.dwl" />
<dw:set-payload><![CDATA[%dw 1.0
%input payload application/csv
%output application/java
---
(payload[?($.ExternalId == flowVars['ExternalId'])])[0]
]]></dw:set-payload>
</dw:transform-message>
</flow>
Here's a second solution that works, though I accepted @Ryan Carter's answer.
%dw 1.0
%output application/json
---
flowVars.myCsv map (
(row0, i) -> using (lookupElement = (flowVars.jsonLookup.ExternalEntities filter ((obj1) -> row0.ExternalId ~= obj1.Id))[0]) {
Name: row0.Name
, ExternalName: lookupElement.Name
, ExternalDescription: lookupElement.Description
} mapObject ({"$$": $, (StartsWithB:$[0] == "B") when $$ ~= "Name"})
)
I tried to extract JSON array data using a message enricher, but in the output I got some object data. How can I fetch it properly?
Following is my flow:
<flow name="readfileFlow1" doc:name="readfileFlow1">
<file:inbound-endpoint path="Test" moveToDirectory="Backup" responseTimeout="10000" doc:name="File"/>
<enricher source="#[(name in payload.data)]" target="#[flowVars.myMap]" doc:name="Message Enricher">
<json:json-to-object-transformer returnClass="java.util.HashMap" doc:name="JSON to Object"/>
</enricher>
<logger message="#[payload.toString()]" level="INFO" doc:name="Logger"/>
</flow>
When I print it in the logger, I get the following output:
INFO 2015-04-29 13:01:20,409 [[readfile].readfileFlow1.stage1.02] org.mule.api.processor.LoggerMessageProcessor: org.mule.transport.file.ReceiverFileInputStream#15bc5cc
How can I properly extract all the names?
I tried using For Each, and with For Each I can extract them successfully, but I want to use a message enricher. How can I do this using a message enricher and MVEL?
My JSON data is as follows:
{
"data":[
{
"id" : "1",
"name": "AAA"
},
{
"id" : "5",
"name": "DDD"
},
{
"id" : "6",
"name": "CCC"
},
]
}
I have done this. It was a silly mistake I made.
Following is my answer:
<logger message="#[flowVars.myMap]" level="INFO" doc:name="Logger"/>
This is how you can extract name and id from your JSON payload; you will see them in the logger:
<flow name="readfileFlow1" doc:name="readfileFlow1">
<file:inbound-endpoint path="Test" moveToDirectory="Backup" responseTimeout="10000" doc:name="File"/>
<enricher source="#[message.payload]" target="#[flowVars.myMap]" doc:name="Message Enricher">
<processor-chain doc:name="Processor Chain">
<json:json-to-object-transformer returnClass="java.util.HashMap" doc:name="JSON to Object"/>
<foreach collection="#[payload.data]">
<logger level="INFO" message="Name :- #[payload.name] and Id:- #[payload.id]"/>
</foreach>
</processor-chain>
</enricher>
<logger level="INFO" message="#[flowVars.myMap]"/>
</flow>
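If only the list of names is needed afterwards, the MVEL projection from the original question can be applied to the enriched variable, for example (an untested sketch):
<logger message="#[(name in flowVars.myMap.data)]" level="INFO" doc:name="Log names only"/>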
One more thing: your JSON payload is not valid. Try removing the extra , at the end, so your valid JSON file will be:
{
"data":[
{
"id" : "1",
"name": "AAA"
},
{
"id" : "5",
"name": "DDD"
},
{
"id" : "6",
"name": "CCC"
}
]
}