Programmatically provide NiFi InvokeHTTP different certificates

I have a requirement in NiFi where I have to cycle through different HTTPS REST endpoints, providing client certificates for some endpoints and username/password credentials for others.
I used the InvokeHTTP processor to send the requests, but although the URL property accepts expression language, the SSL Context Service property cannot be set with an expression.
Alternatively, I thought of using ExecuteScript to call those endpoints; however, as noted here in a StackOverflow post, I still don't know how to programmatically call an external service through a script.
Any help appreciated.

Just for fun, I created a Groovy script that calls HTTP endpoints.
You can certainly avoid using it, and I believe the InvokeHTTP processor covers almost all needs.
However, let's call the test REST service /post at https://httpbin.org.
The flow: GenerateFlowFile (generates the body) -> ExecuteGroovyScript (calls the service).
The body generated by GenerateFlowFile: {"id":123, "txt":"aaabbbccc"}
In ExecuteGroovyScript 1.5.0, declare the CTL.ssl1 property and link it to a StandardSSLContextService,
and now the script:
@Grab(group='acme.groovy', module='acmehttp', version='20180301', transitive=false)
import groovyx.acme.net.AcmeHTTP
import org.apache.nifi.ssl.SSLContextService.ClientAuth
def ff=session.get()
if(!ff)return
def http
ff.write{ffIn, ffOut->
    http = AcmeHTTP.post(
        url: "https://httpbin.org/post",     // base url
        query: [aaa:"hello", bbb:"world!"],  // query parameters
        // send the flowfile content (stream) as a body
        body: ffIn,
        headers:[
            // assign content-type from the flowfile `mime.type` attribute
            "content-type": ff.'mime.type'
        ],
        // you can declare `CTL.ssl1`, `CTL.ssl2`, ... processor properties and map them to SSLContextServices;
        // then, depending on some condition, create a different SSLContext.
        // in this case let's take the `CTL.ssl1` service to create the context
        ssl: CTL["ssl"+1].createSSLContext(ClientAuth.WANT),
        // the next commented line creates a trust-all ssl context:
        //ssl: AcmeHTTP.getNaiveSSLContext(),
        // the receiver transfers the url response stream into the flowfile stream
        receiver:{respStream, httpCtx-> ffOut << respStream }
    )
}
//set response headers as flowfile attributes with the 'http.header.' prefix
http.response.headers.each{ k,v-> ff['http.header.'+k]=v }
//status code and message
ff.'http.status.code' = http.response.code
ff.'http.status.message' = http.response.message
if( http.response.code < 400 ){
    //transfer to success if the response was ok
    REL_SUCCESS << ff
}else{
    //transfer to failure when the response code is 400+
    REL_FAILURE << ff
}
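To actually cycle certificates per endpoint (the original question), you can declare several such properties (CTL.ssl1, CTL.ssl2, ...) and pick one per flowfile. A minimal sketch, assuming hypothetical `ssl.id` and `target.url` attributes set upstream:

def sslId = ff.'ssl.id' ?: '1'  // hypothetical attribute choosing the SSLContextService ("1" or "2")
ff.write{ffIn, ffOut->
    http = AcmeHTTP.post(
        url: ff.'target.url',   // hypothetical per-flowfile endpoint attribute
        body: ffIn,
        ssl: CTL["ssl" + sslId].createSSLContext(ClientAuth.WANT),
        receiver:{respStream, httpCtx-> ffOut << respStream }
    )
}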

Related

Why aren't HTTP Headers passed from AWS API Gateway to Step functions

I have an API gateway set up with an integration to a step function; the integration is working well and my function is executed. However, I need to access the headers on the initial request to the API gateway (as they need to be passed on to an API call made by one of the steps in the step function). I've added the HTTP headers to the API Gateway Method Request, done the same in the HTTP Headers section of the Integration Request, and in the mapping template I have:
#set($inputbody = $input.json('$'))
{
    "method": "$context.httpMethod",
    "input": "$util.escapeJavaScript($inputbody)",
    "stateMachineArn": "MyStateMachineARN",
    "headers": {
        #foreach($param in $input.params().header.keySet())
        "$param": "$util.escapeJavaScript($input.params().header.get($param))"
        #if($foreach.hasNext),#end
        #end
    }
}
When I test this, I can see the headers after the request body has been transformed, before the step function executes:
Thu Dec 23 09:35:28 UTC 2021 : Endpoint request body after transformations: {
"method": "POST",
"input": "{\"surname\":\"TESTSURNAME\"}",
"stateMachineArn": "MyStateMachineARN",
"headers": {
"HeaderA": "ValueA"
, "HeaderB": "ValueB"
}
}
But in the step function I am struggling to see the headers; the input I can see at the start of the execution is only:
{
"surname::"TESTSURNAME"
}
I have inputPath set to $, and the same for the payload.
All the suggestions I've found online point to the mapping template, but I cannot get it to work. Any ideas what I'm doing wrong?
The API Gateway -> Step Functions integration uses StartExecution as the action for the integration. Checking the documentation for the StartExecution request syntax, it turns out that headers is unfortunately not one of the allowed fields. To pass the headers on, it looks like you would need to fold them into the input field.
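For example, the mapping template above could embed the headers inside the input string instead of sending them as a separate top-level field. A sketch along those lines (untested; escaping nested JSON inside VTL is fiddly, and $util.escapeJavaScript escapes single quotes in a way JSON does not accept):

#set($inputbody = $input.json('$'))
{
    "stateMachineArn": "MyStateMachineARN",
    "input": "{\"method\": \"$context.httpMethod\", \"body\": $util.escapeJavaScript($inputbody), \"headers\": {#foreach($param in $input.params().header.keySet())\"$param\": \"$util.escapeJavaScript($input.params().header.get($param))\"#if($foreach.hasNext),#end#end}}"
}

The state machine then receives the headers as part of its input JSON rather than as a separate field.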

Control cloudflare origin server using workers

I'm trying to use a Cloudflare Worker to dynamically set the origin based on the requesting IP (so we can serve a testing version of the website internally).
I have this:
addEventListener('fetch', event => {
    event.respondWith(handleRequest(event.request))
})

async function handleRequest(request) {
    if (request.headers.get("cf-connecting-ip") == '185.X.X.X') {
        console.log('internal request change origin');
    }
    const response = await fetch(request)
    console.log('Got response', response)
    return response
}
I'm not sure what to set; the request object doesn't seem to have any suitable properties to change.
Thanks
Normally, you should change the request's URL, like this:
// Parse the URL.
let url = new URL(request.url)
// Change the hostname.
url.hostname = "test-server.example.com"
// Construct a new request with the new URL
// and all other properties the same.
request = new Request(url, request)
Note that this will affect the Host header seen by the origin (it'll be test-server.example.com). Sometimes people want the Host header to remain the same. Cloudflare offers a non-standard extension to accomplish that:
// Tell Cloudflare to connect to `test-server.example.com`
// instead of the hostname specified in the URL.
request = new Request(request,
{cf: {resolveOverride: "test-server.example.com"}})
Note that for this to be allowed, test-server.example.com must be a hostname within your domain. However, you can of course configure that host to be a CNAME.
The resolveOverride feature is documented here: https://developers.cloudflare.com/workers/reference/apis/request/#the-cf-object
(The docs claim it is an "Enterprise only" feature, but this seems to be an error in the docs. Anyone can use this feature. I've filed a ticket to fix that...)
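Putting the pieces together with the original IP check, the worker might look like this (a sketch; the internal IP and test-server.example.com are placeholders):

addEventListener('fetch', event => {
    event.respondWith(handleRequest(event.request))
})

async function handleRequest(request) {
    // requests from the internal IP (placeholder) are sent to the testing origin
    if (request.headers.get('cf-connecting-ip') === '185.X.X.X') {
        const url = new URL(request.url)
        url.hostname = 'test-server.example.com' // placeholder testing origin
        request = new Request(url, request)
    }
    return fetch(request)
}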

Webhook call failed. Error: Failed to parse webhook JSON response: Expect message object but got: [Chinese letters]

I'm building my own WebhookClient for Dialogflow. My code is the following (using Azure Functions, similar to Firebase Functions):
module.exports = async function(context, req) {
    const agent = new WebhookClient({ request: context.req, response: context.res });
    function welcome(agent) {
        agent.add(`Welcome to my agent!!`);
    }
    let intentMap = new Map();
    intentMap.set("Look up person", welcome);
    agent.handleRequest(intentMap);
}
I tested the query and the response payload looks like this:
{
"fulfillmentText": "Welcome to my agent!!",
"outputContexts": []
}
And the headers in the response look like this:
Transfer-Encoding: chunked
Content-Type: application/json; charset=utf-8
Server: Microsoft-IIS/10.0
X-Powered-By: ASP.NET
Date: Tue, 11 Dec 2018 18:16:06 GMT
But when I test my bot in Dialogflow, it returns the following:
Webhook call failed. Error: Failed to parse webhook JSON response:
Expect message object but got:
"笀ഀ਀  ∀昀甀氀昀椀氀氀洀攀渀琀吀攀砀琀∀㨀 ∀圀攀氀挀漀洀攀 琀漀 洀礀 愀最攀渀琀℀℀∀Ⰰഀ਀  ∀漀甀琀瀀甀琀䌀漀渀琀攀砀琀猀∀㨀 嬀崀ഀ਀紀".
There are Chinese symbols!? Here's a video of me testing it out in Dialogflow: https://imgur.com/yzcj0Kw
I know this should be a comment (as it isn't really an answer), but it's fairly verbose and I didn't want it to get lost in the noise.
I have the same problem using WebAPI on a local machine (using ngrok to tunnel back to Kestrel). A friend of mine has working code (he's hosting in AWS rather than Azure), so I started examining the differences between our responses. I've noticed the following:
This occurs with Azure Functions and WebAPI (so it's not that)
The JSON payloads are identical (so it's not that)
Working payload isn't chunked
Working payload doesn't have a content type
As an experiment, I added this code to Startup.cs, in the Configure method:
app.Use(async (context, next) =>
{
    // buffer the response body so its length is known before sending
    var original = context.Response.Body;
    var memory = new MemoryStream();
    context.Response.Body = memory;
    await next();
    memory.Seek(0, SeekOrigin.Begin);
    if (!context.Response.Headers.ContentLength.HasValue)
    {
        context.Response.Headers.ContentLength = memory.Length;
        context.Response.ContentType = null;
    }
    await memory.CopyToAsync(original);
});
This code disables response chunking (the response is buffered so a Content-Length can be set), and it now causes a new and slightly more interesting error for me in the Google console:
*Webhook call failed. Error: Failed to parse webhook JSON response: com.google.gson.stream.MalformedJsonException: Unterminated object at line 1 column 94 path $.\u0000\\"\u0000f\u0000u\u0000l\u0000f\u0000i\u0000l\u0000l\u0000m\u0000e\u0000n\u0000t\u0000M\u0000e\u0000s\u0000s\u0000a\u0000g\u0000e\u0000s\u0000\\"\u0000.\
I thought this could be an encoding issue at first, so I stashed my JSON as a string and used the various Encoding classes to convert between them, to no avail.
I fired up Postman and called my endpoint (using the same payload as Google) and I can see the whole response payload correctly; it's almost as if Google's end is terminating the stream part-way through reading...
Hopefully, this additional information will help us figure out what's going on!
Update
After some more digging and various server/lambda configs, I spotted this post here: https://github.com/googleapis/google-cloud-dotnet/issues/2258
It turns out that Json.NET IS the culprit! I guess it's something to do with the formatters on the way out of the pipeline. To prove this, I added this hard-coded response to my POST controller and it worked! :)
return new ContentResult()
{
    Content = "{\"fulfillmentText\": null,\"fulfillmentMessages\": [],\"source\": null,\"payload\": {\"google\": {\"expectUserResponse\": false,\"userStorage\": null,\"richResponse\": {\"items\": [{\"simpleResponse\": {\"textToSpeech\": \"Why hello there\",\"ssml\": null,\"displayText\": \"Why hello there\"}}],\"suggestions\": null,\"linkOutSuggestion\": null}}}}",
    ContentType = "application/json",
    StatusCode = 200
};
Despite the HTTP header saying the charset is utf-8, that response is definitely using the UTF-16LE character set, and the receiving side is then treating it as UTF-16BE. Given you're running on Azure, it sounds like there is some configuration you need to make in Azure Functions to emit the output as UTF-8 instead of UTF-16 strings.
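For the JavaScript Azure Functions handler shown in the question, one way to take the framework's serialization (and its encoding choice) out of the picture is to stringify the body yourself and declare the charset explicitly. A minimal sketch, not a confirmed fix:

module.exports = async function (context, req) {
    // hypothetical hand-built reply; the real body would come from agent.handleRequest
    const payload = { fulfillmentText: "Welcome to my agent!!", outputContexts: [] };
    context.res = {
        status: 200,
        headers: { "Content-Type": "application/json; charset=utf-8" },
        body: JSON.stringify(payload) // hand the runtime a plain UTF-8 string, not an object
    };
};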

API Connect 5 - Error attempting to read the urlopen response data

I'm trying to create a REST API from a SOAP service using IBM API Connect 5. I have followed all the steps described in this guide (https://www.ibm.com/support/knowledgecenter/en/SSFS6T/com.ibm.apic.apionprem.doc/tutorial_apionprem_expose_SOAP.html).
So, after dragging the web service block from the palette, verifying the endpoint, and publishing the API, I tried to call the API from the browser. Unfortunately, the API returned the following message:
<errorResponse>
    <httpCode>500</httpCode>
    <httpMessage>Internal Server Error</httpMessage>
    <moreInformation>Error attempting to read the urlopen response data</moreInformation>
</errorResponse>
For testing purposes, I logged the request and replayed it in SoapUI; the service returned the response correctly.
What is the problem?
In my case, the problem was the backend charset (Content-Type: text/xml;charset=iso-8859-1).
For example, the backend returns text/xml in German (or French); API Connect cannot process characters such as ü. It needs Content-Type: text/xml;charset=UTF-8.
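In other words, only the charset declared by the backend response needed to change:

Content-Type: text/xml;charset=iso-8859-1   (fails with the urlopen error above)
Content-Type: text/xml;charset=UTF-8        (processed correctly)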
I had a similar issue; in my case it was the Accept header. If you have an Invoke and the Content-Type or the Accept does not match the request you send or the response you get back, APIC gets mad.
Please check that the formats you send (Content-Type) and receive (Accept) are the ones your API expects. In my case the error occurred because the API returns a plain string while my client code was configured to receive a JSON body.
// define a JSON-out / plain-text-in exchange
private HttpEntity<String> httpEntityWithBody(Object objToParse){
    HttpHeaders headers = new HttpHeaders();
    headers.set("Authorization", "Bearer " + "xxx token xxx");
    headers.set("Accept", MediaType.TEXT_PLAIN_VALUE);   // the API answers with plain text
    headers.setContentType(MediaType.APPLICATION_JSON);  // we send JSON
    Gson gson = new GsonBuilder().create();
    String json = gson.toJson(objToParse);
    HttpEntity<String> httpEntity = new HttpEntity<String>(json, headers);
    return httpEntity;
}

// calling the API through APIC...
ParameterizedTypeReference<String> responseType = new ParameterizedTypeReference<String>(){};
ResponseEntity<String> result = rest.exchange(builder.buildAndExpand(urlParams).toUri(),
        HttpMethod.PUT, httpEntityWithBody(myDTO), responseType);
int statusCode = result.getStatusCodeValue(); // getStatusCodeValue() returns an int
String message = result.getBody();

Using Node JS to proxy http and modify response

I'm trying to write a front end to an API service with Node.js.
I'd like a user to be able to point their browser at my Node server and make a request. The Node script would modify the input to the request, call the API service, then modify the output and pass it back to the user.
I like the solution here (with Express and node-http-proxy), as it passes the cookies and headers directly from the user through my site to the API server:
proxy request in node.js / express
I see how to modify the input to the request, but I can't figure out how to modify the response. Any suggestions?
transformer-proxy could be useful here. I'm the author of this plugin, and I'm answering here because I found this page when looking into the same question and wasn't satisfied with harmon, as I don't want to manipulate HTML.
Maybe someone else is looking for this and finds it useful.
Harmon is designed to plug into node-http-proxy: https://github.com/No9/harmon
It uses trumpet and so is stream-based, which works around any buffering problems.
It uses an element and attribute selector to enable manipulation of a response.
This can be used to modify the output response.
See here: https://github.com/nodejitsu/node-http-proxy/issues/382#issuecomment-14895039
http-proxy-interceptor is a middleware I wrote for this very purpose. It allows you to modify the HTTP response using one or more transform streams. There are tons of stream-based packages available (like trumpet, which harmon uses), and by using streams you can avoid buffering the entire response.
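Separately, here's an example that uses http-proxy together with http-proxy-response-rewrite to inject extra XML into matching responses: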
var httpProxy = require('http-proxy');
var modifyResponse = require('http-proxy-response-rewrite');

var proxy = httpProxy.createServer({
    target: 'target server IP here',
});
proxy.listen(8001);

proxy.on('error', function (err, req, res) {
    res.writeHead(500, {
        'Content-Type': 'text/plain'
    });
    res.end('Something went wrong. And we are reporting a custom error message.');
});
proxy.on('proxyRes', function (proxyRes, req, res) {
    modifyResponse(res, proxyRes.headers['content-encoding'], function (body) {
        if (body && (body.indexOf("<process-order-response>") != -1)) {
            // tag after which extra data is inserted into the response
            var beforeTag = "</receipt-text>";
            var beforeTagBody = body.substring(0, body.indexOf(beforeTag) + beforeTag.length);
            var requiredXml = " <ga-loyalty-rewards>\n" +
                "<previousBalance>0</previousBalance>\n" +
                "<availableBalance>0</availableBalance>\n" +
                "<accuruedAmount>0</accuruedAmount>\n" +
                "<redeemedAmount>0</redeemedAmount>\n" +
                "</ga-loyalty-rewards>";
            var afterTagBody = body.substring(body.indexOf(beforeTag) + beforeTag.length);
            // renamed from `res` to avoid shadowing the outer response object
            var parts = [beforeTagBody, requiredXml, afterTagBody];
            console.log(parts.join(""));
            return parts.join("");
        }
        return body;
    });
});