HapiJS not accepting form-data requests - hapi.js

Using HapiJS 19 on Linux.
I am in the process of upgrading an older version of a back-end (hapi-16) and in the older version sending form-data would work just fine, but for some reason I get:
{
"statusCode": 415,
"error": "Unsupported Media Type",
"message": "Unsupported Media Type"
}
It seems as if I need something added to my hapi server in order to accept form-data.
Here is my server file:
const hapi = require('#hapi/hapi');
// const HapiAuthCookie = require('hapi-auth-cookie');
const CORS = { cors: true };
const SERVER_SETTINGS = { host: 'localhost', port: 8040, routes: CORS };
const server = hapi.server(SERVER_SETTINGS);
const endpoints = require('./routes.js');
// connect to database
require('../db/dbconn.js');
module.exports = {
start: async() => {
// plugin registration
await server.register([]);
server.route(endpoints,
{ prefix: '/dart-api' }
);
// start server
try {
await server.start();
console.log(`Server running on: ${server.info.uri}`);
} catch (error) {
console.log(error);
}
}
};
// Graceful server stop
// Graceful server stop
process.on('SIGINT', () => {
  /* eslint-disable-next-line */
  console.warn('\n\n>> Stopping Hapi server ...');
  // FIX: server.stop() resolves with no value and rejects on failure.
  // The original inspected a non-existent `err` argument inside .then(),
  // so the "stopped" message could never print and stop failures were
  // left as unhandled rejections.
  server.stop({ timeout: 100 })
    .then(() => {
      console.log('Hapi server stopped');
    })
    .catch((err) => {
      console.log(err);
      process.exit(1);
    });
}); // server.stop {ends}

// Surface any unhandled promise rejection and crash loudly.
process.on('unhandledRejection', (err) => {
  console.log(err);
  throw err;
});
I'll appreciate some help.
Thanks in advance.
Update: I've done some testing with different clients with the same result. Here is the response when I use curl:
curl http://localhost:8040/users/avatars -X PUT -F 'files=@/home/ralvez/.downloads/ICONS/ricavatar.png' -vvv
* Trying 127.0.0.1:8040...
* Connected to localhost (127.0.0.1) port 8040 (#0)
> PUT /users/avatars HTTP/1.1
> Host: localhost:8040
> User-Agent: curl/7.70.0
> Accept: */*
> Content-Length: 17433
> Content-Type: multipart/form-data; boundary=------------------------0e59b1780748d1d6
>
* We are completely uploaded and fine
* Mark bundle as not supporting multiuse
< HTTP/1.1 415 Unsupported Media Type
< content-type: application/json; charset=utf-8
< vary: origin
< access-control-expose-headers: WWW-Authenticate,Server-Authorization
< cache-control: no-cache
< content-length: 86
< Date: Fri, 22 May 2020 17:27:01 GMT
< Connection: keep-alive
<
* Connection #0 to host localhost left intact
{"statusCode":415,"error":"Unsupported Media Type","message":"Unsupported Media Type"}
This makes no sense to me since my handler is like this:
{
method: 'PUT',
path: '/users/avatars',
handler: users.updateAvatar,
options: {
payload: {
// NOTE(review): under hapi v19 this payload config still answers 415
// for multipart bodies — multipart parsing must be explicitly
// re-enabled with `multipart: true` (see the corrected route later in
// this document).
output: 'stream',
parse: true,
allow: 'multipart/form-data'
}
}
},
there is no doubt in my mind that this is ready to accept form-data, yet it is rejecting with "statusCode": 415, "error": "Unsupported Media Type",
what gives?!

OK. After much searching and after realizing this HAD to be related to the version of HapiJS I'm using I found someone else had the same problem.
Here is the link:
Fix Hapi version 19.0.3 error 415 unsupported media type upload file with multipart/form-data
So I decided to change my route as follows:
{
method: 'PUT',
path: '/users/avatars',
handler: users.updateAvatar,
options: {
payload: {
output: 'stream',
parse: true,
allow: 'multipart/form-data',
// hapi v19 disabled multipart parsing by default; this flag
// re-enables it and resolves the 415 Unsupported Media Type response.
multipart: true
}
}
},
and it works.
A shorter version also works:
{
method: 'PUT',
path: '/users/avatars',
handler: users.updateAvatar,
options: {
payload: {
// Per the text above, `multipart: true` alone is sufficient — the
// remaining payload settings fall back to hapi's defaults.
multipart: true
}
}
},

Related

HTTPOnly Cookie not being set in browser localhost

Problem
I have a REST API that has a login endpoint. The login endpoint accepts a username and password, the server responds by sending a HTTPOnly Cookie containing some payload (like JWT).
The approach I always use had been working for a few years until the Set-Cookie header stopped working roughly last week. I have not touched the REST API's source prior to its non-functionality, as I was working on a Svelte-based front-end.
I suspect it has something to do with the Secure attribute being set to false as it is in localhost. However, according to Using HTTP cookies, having an insecure connection should be fine as long as it's localhost. I've been developing REST APIs in this manner for some time now and was surprised to see the cookie no longer being set.
Testing the API with Postman yields the expected result of having the cookie set.
Approaches Used
I tried to recreate the general flow of the real API and stripped it down to its core essentials.
package main
import (
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"os/signal"
"syscall"
"time"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/cors"
"github.com/golang-jwt/jwt/v4"
)
// Idle keep-alive connections are dropped after this long (fed to Fiber).
const idleTimeout = 5 * time.Second
// main wires up the Fiber app with permissive CORS, registers the two
// routes, serves in a background goroutine, and shuts down cleanly on
// SIGINT/SIGTERM.
func main() {
app := fiber.New(fiber.Config{
IdleTimeout: idleTimeout,
})
// NOTE(review): AllowOrigins "*" together with AllowCredentials true is
// typically rejected by browsers for credentialed (cookie) requests —
// they require an explicit origin. TODO confirm against the front-end
// origin (the captured response reflects http://localhost:3000).
app.Use(cors.New(cors.Config{
AllowOrigins: "*",
AllowHeaders: "Origin, Content-Type, Accept, Range",
AllowCredentials: true,
AllowMethods: "GET,POST,HEAD,DELETE,PUT",
ExposeHeaders: "X-Total-Count, Content-Range",
}))
app.Get("/", hello)
app.Post("/login", login)
// Serve in the background so the main goroutine can wait for signals.
go func() {
if err := app.Listen("0.0.0.0:8080"); err != nil {
log.Panic(err)
}
}()
// Block until an interrupt/termination signal arrives, then shut down.
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt, syscall.SIGTERM)
_ = <-c
fmt.Println("\n\nShutting down server...")
_ = app.Shutdown()
}
// hello is a trivial liveness endpoint for GET /.
func hello(c *fiber.Ctx) error {
return c.SendString("Hello, World!")
}
// login looks the submitted e-mail up on JSONPlaceholder and, when a
// matching user exists, issues an HS256-signed JWT in an HTTP-only
// "access" cookie. Responds 400 on bad input / unknown user and 500 on
// upstream or signing failures.
func login(c *fiber.Ctx) error {
	type LoginInput struct {
		Email string `json:"email"`
	}
	var input LoginInput
	if err := c.BodyParser(&input); err != nil {
		return c.Status(400).SendString(err.Error())
	}
	stringUrl := fmt.Sprintf("https://jsonplaceholder.typicode.com/users?email=%s", input.Email)
	resp, err := http.Get(stringUrl)
	if err != nil {
		return c.Status(500).SendString(err.Error())
	}
	// FIX: the response body was never closed, leaking the underlying
	// connection on every login attempt.
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return c.Status(500).SendString(err.Error())
	}
	if len(body) > 0 {
		fmt.Println(string(body))
	} else {
		return c.Status(400).JSON(fiber.Map{
			"message": "Yeah, we couldn't find that user",
		})
	}
	token := jwt.New(jwt.SigningMethodHS256)
	cookie := new(fiber.Cookie)
	claims := token.Claims.(jwt.MapClaims)
	claims["purpose"] = "Just a test really"
	signedToken, err := token.SignedString([]byte("NiceSecret"))
	if err != nil {
		// Internal Server Error if anything goes wrong in getting the signed token
		fmt.Println(err)
		return c.SendStatus(500)
	}
	// NOTE(review): per the Solution section of this document, the cookie
	// is only stored by the browser when the front-end sends the request
	// with credentials included — the server side here is fine.
	cookie.Name = "access"
	cookie.HTTPOnly = true
	cookie.Secure = false
	cookie.Domain = "localhost"
	cookie.SameSite = "Lax"
	cookie.Path = "/"
	cookie.Value = signedToken
	cookie.Expires = time.Now().Add(time.Hour * 24)
	c.Cookie(cookie)
	return c.Status(200).JSON(fiber.Map{
		"message": "You have logged in",
	})
}
What this does is basically look through JSON Placeholder's users and, if it finds one with a matching email, send the HTTPOnly cookie with some data attached to it.
Seeing as it might be a problem with the library I'm using, I decided to write a Node version with Express.
import axios from 'axios'
import express from 'express'
import cookieParser from 'cookie-parser'
import jwt from 'jsonwebtoken'
const app = express()
app.use(express.json())
app.use(cookieParser())
app.use(express.urlencoded({ extended: true }))
app.disable('x-powered-by')

// Liveness check.
app.get("/", (req, res) => {
  res.send("Hello there!")
})

// POST /login — first middleware resolves the e-mail against
// JSONPlaceholder; second issues a JWT in an HTTP-only cookie.
app.post("/login", async (req, res, next) => {
  try {
    const { email } = req.body
    const { data } = await axios.get(`https://jsonplaceholder.typicode.com/users?email=${email}`)
    if (data) {
      if (data.length > 0) {
        res.locals.user = data[0]
        next()
      } else {
        return res.status(404).json({
          message: "No results found"
        })
      }
    }
  } catch (error) {
    // FIX: the original only logged the error, leaving the request
    // hanging with no response; answer with 500 instead.
    console.error(error)
    return res.status(500).json({ message: "User lookup failed" })
  }
}, async (req, res) => {
  try {
    let { user } = res.locals
    const token = jwt.sign({
      user: user.name
    }, "mega ultra secret sauce 123")
    res
      .cookie(
        'access',
        token,
        {
          httpOnly: true,
          secure: false,
          // FIX: Express interprets maxAge in milliseconds; the original
          // value of 3600 produced "Max-Age=3" (~3 seconds), as visible
          // in the captured response headers. 3600000 ms = 1 hour.
          maxAge: 3600000
        }
      )
      .status(200)
      .json({
        message: "You have logged in, check your cookies"
      })
  } catch (error) {
    // FIX: respond rather than leaving the client waiting forever.
    console.error(error)
    return res.status(500).json({ message: "Login failed" })
  }
})

app.listen(8000, () => console.log(`Server is up at localhost:8000`))
Both of these do not work on the browsers I've tested them on.
Results
Go responds with this.
HTTP/1.1 200 OK
Date: Mon, 21 Feb 2022 05:17:36 GMT
Content-Type: application/json
Content-Length: 32
Vary: Origin
Access-Control-Allow-Origin: http://localhost:3000
Access-Control-Allow-Credentials: true
Access-Control-Expose-Headers: X-Total-Count,Content-Range
Set-Cookie: access=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJwdXJwb3NlIjoiSnVzdCBhIHRlc3QgcmVhbGx5In0.8YKepcvnMreP1gUoe_S3S7uYngsLFd9Rrd4Jto-6UPI; expires=Tue, 22 Feb 2022 05:17:36 GMT; domain=localhost; path=/; HttpOnly; SameSite=Lax
For the Node API, this is the response header.
HTTP/1.1 200 OK
Set-Cookie: access=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjoiTGVhbm5lIEdyYWhhbSIsImlhdCI6MTY0NTQyMDM4N30.z1NQcYm5XN-L6Bge_ECsMGFDCgxJi2eNy9sg8GCnhIU; Max-Age=3; Path=/; Expires=Mon, 21 Feb 2022 05:13:11 GMT; HttpOnly
Content-Type: application/json; charset=utf-8
Content-Length: 52
ETag: W/"34-TsGOkRa49turdlOQSt5gB2H3nxw"
Date: Mon, 21 Feb 2022 05:13:07 GMT
Connection: keep-alive
Keep-Alive: timeout=5
Client Source
I'm using this as a test form to send and receive data.
<script>
let email = "";
// Submit the login form as JSON to the local API.
// NOTE(review): this fetch() omits `credentials: "include"` — per the
// Solution section of this document, that omission is what makes the
// browser silently discard the Set-Cookie response header.
async function handleSubmit() {
try {
let response = await fetch(`http://localhost:8000/login`, {
method: "POST",
body: JSON.stringify({
email,
}),
headers: {
"Content-Type": "application/json",
},
});
if (response) {
console.info(response);
let result = await response.json();
if (result) {
console.info(result);
}
}
} catch (error) {
alert("Something went wrong. Check your console.");
return console.error(error);
}
}
</script>
<h1>Please Login</h1>
<svelte:head>
<title>Just a basic login form</title>
</svelte:head>
<form on:submit|preventDefault={handleSubmit}>
<label for="email">Email:</label>
<input
type="email"
name="email"
bind:value={email}
placeholder="enter your email"
/>
</form>
Additional Information
Postman: 9.8.3
Language Versions
Go: 1.17.6
Node.js: v16.13.1
Svelte: 3.44.0
Browsers Used
Mozilla Firefox: 97.0.1
Microsoft Edge: 98.0.1108.56
Chromium: 99.0.4781.0
I just had the same issue with axios, this was causing the Set-Cookie response header to be silently ignored. Which was annoying as usually if it rejects them it will show that little yellow triangle against that header and say why in the network inspector.
I solved this by adding a request interceptor to force it true for every request:
// Force cookies to be sent/accepted on every outgoing axios request by
// switching withCredentials on in a request interceptor.
axios.interceptors.request.use(
  (requestConfig) => {
    requestConfig.withCredentials = true
    return requestConfig
  },
  (interceptError) => Promise.reject(interceptError)
)
Solution
It turns out the problem is in the front-end, specifically JavaScript's fetch() method.
// Working request: `credentials: "include"` is required both to send
// cookies with the request and to let the browser store the Set-Cookie
// header from the response.
let response = await fetch(`http://localhost:8000/login`, {
method: "POST",
credentials: "include", //--> send/receive cookies
body: JSON.stringify({
email,
}),
headers: {
"Content-Type": "application/json",
},
});
You'll need credentials: include property in your RequestInit object, not just for making requests that require cookie authentication, but for also receiving said cookie.
Axios usually fills this part out automatically (based from experience), but if it doesn't, you'll also need to put withCredentials: true on the third config argument of your request to allow the browser to set the cookie.

next.js 404 fail to load resource /api/ on vercel

i'm building a next js website that i deploy on vercel.
I made a next.js api /api/contact which send a mail via nodemailer. It works fine when i try the code on my pc but when i upload on vercel (with github integration) i get a "404 failed to load resource" for /api/contact in the console and it doesn't work.
Is there any more configuration to do for next.js api to work on vercel ?
Here is the code for the api call :
fetch("/api/contact", {
method: "POST",
headers: {
Accept: "application/json, text/plain, */*",
"Content-Type": "application/json",
},
body: JSON.stringify(data),
}).then((res) => {
contact.js in api folder
So here are the answers that worked :
"failed to load resource" disappeared after several deployments but i still had a 404.
Problem was that transporter.sendMail needed to be async + i had issue with gmail, i ended using another mail provider (zoho). So for anyone faceing the same issues here is a working code (maybe not the best but it's working) :
// Next.js API route (POST /api/contact): relays the submitted message
// through Zoho SMTP using credentials from environment variables.
// Responds 200 on success, 500 on send failure, and always ends the
// response.
export default async (req, res) => {
  require('dotenv').config()
  const nodemailer = require('nodemailer');
  // Builds the transport and sends the message; awaited by the caller so
  // serverless platforms (e.g. Vercel) don't tear the function down
  // before the SMTP exchange finishes.
  async function mail() {
    console.log('enter async function');
    const transporter = nodemailer.createTransport({
      name: "smtp.zoho.com",
      port: 465,
      host: "smtp.zoho.com",
      auth: {
        user: process.env.mailsender,
        pass: process.env.mailpw,
      },
      secure: true,
    })
    // FIX: renamed from `mail` — it shadowed the enclosing function name.
    const info = await transporter.sendMail({
      from: process.env.mailsender,
      to: process.env.mailreceive,
      subject: `${req.body.message}`,
      text: `${req.body.message}`,
      html: `<div><p>${req.body.message}</p></div>`
    });
    return info;
  }
  try {
    console.log('sending mail');
    await mail();
    res.status(200);
    console.log('mail should be sent');
  } catch (error) {
    console.log(error);
    console.log('error sending mail');
    // FIX: a failed send is a server-side error, not a missing route —
    // report 500 rather than 404.
    res.status(500);
  } finally {
    res.end();
  };
}

React, Axios issue: Response for preflight does not have HTTP ok status ( 404 not found )

I've been trying using axios with basic auth but it always returns not found.
Here is my code
const ROOT_URL='http://localhost/b2b_ecom/b2b-api/index.php/api/';
// Thunk action: POST basic-auth credentials to the login endpoint and
// dispatch the response payload on success.
export function login () {
  return function(dispatch)
  {
    const api = axios.create({
      // FIX: `mode`, `credentials` and `redirect` are fetch() options and
      // are silently ignored by axios; `withCredentials` is the axios
      // equivalent of credentials: 'include'.
      withCredentials: true,
      auth: {
        // FIX: restored '@' — '#' is not valid in an e-mail local/domain
        // separator (transcription artifact).
        username: 'mouad@b2b.dz',
        password: '123456'
      },
      headers: {
        'Content-Type': 'application/json',
      }
    });
    // FIX: removed the stray trailing space from the URL path, and
    // dispatch a proper { type, payload } action — the original
    // shorthand `{LOGIN, payload}` produced a key literally named
    // "LOGIN" instead of an action type.
    api.post(`${ROOT_URL}site/login`).then(function(response)
    {
      dispatch({ type: LOGIN, payload: response.data });
      return true;
    }).catch(function(error)
    {
      return error;
    });
  } ;
}
And here it is the error I got:
OPTIONS http://localhost/b2b_ecom/b2b-api/index.php/api/site/login 404 (Not Found)
Failed to load http://localhost/b2b_ecom/b2b-api/index.php/api/site/login: Response for preflight does not have HTTP ok status.
Screenshot
I think browser is adding access control header to request.
Check the following things
1) When you hit from postman or other such api clients this api should be working properly. (even curl is fine)
2) Try adding Access-Control-Allow-Origin as * for this request on server and see if it works.

Hapi send request to current local server

I have a graphql running on my server. And I have an upload route like this:
server.route({
config: {
// Allow any origin and pass credentials (cookies) through CORS.
cors: {
origin: ['*'],
credentials: true
},
// Accept multipart uploads up to ~48.5 MB, delivered as a raw stream.
payload: {
output: 'stream',
parse: true,
maxBytes: 50869457,
allow: 'multipart/form-data'
},
},
method: ['POST', 'PUT'],
path: '/uploadAvatar',
handler: (request, reply) => {
const data = request.payload;
// presumably attaches the authenticated identity for this request —
// verify against options.safeGuard's implementation.
data.identity = options.safeGuard.authenticate(request);
// REQUEST TO THE SAME SERVER THIS IS RUNNING ON
// (the answer below recommends server.inject for internal routing)
}
});
I want to send a request to the same server as I am in if that makes sense.. How to do that?
btw I want to call localhost:3004/graphql if it's running on localhost:3004 but on production it's running on port 80.
You can use hapi's built in server.inject method for handling internal routing, the docs for inject are here

React-dropzone Upload to S3 from client returns 400 bad request

I am trying to implement this code example here to upload an image to an S3 bucket.
My server seems to return the pre-signed URL OK, but the PUT request that follows fails with a 400 error.
Here is the server-side pre-sign code:
// FIX: the presigned URL shown in the failing request uses Signature
// Version 2 (AWSAccessKeyId=...&Signature=...), but the bucket lives in
// eu-central-1 (see the x-amz-region response header), a region that only
// accepts Signature Version 4 — hence the 400 Bad Request. Pin the
// signature version and region when constructing the client.
var s3 = new aws.S3({ signatureVersion: 'v4', region: 'eu-central-1' });
// The client's PUT must use the same Key, Content-Type and ACL for the
// signature to validate.
var params = {
  Bucket: secrets.aws.bucket,
  Key: body.filename,
  ContentType: body.filetype,
  ACL: 'public-read'
};
s3.getSignedUrl('putObject', params, function(err, data) {
  if (err) {
    console.log(err);
    return err;
  } else {
    console.log('URL sent back to client ------------------------------------');
    // `data` is the presigned URL string.
    res.status(200).send(data);
  }
});
And here is the upload action client-side:
// Thunk action: request a presigned S3 URL for the dropped file, then PUT
// the file directly to S3.
export function uploadFile(data) {
  var file = data[0];
  return dispatch => {
    dispatch(dropFileAccepted(data));
    return makeUploadRequest('post', {filename: file.name, filetype: file.type}, '/signURLforS3')
      .then(function (result) {
        var signedUrl = result.data;
        // FIX: dropped the `x-amz-region` request header. Any x-amz-*
        // header that was not covered by the presigned signature makes S3
        // reject the upload; the region is already encoded in the signed
        // URL's host, so the header served no purpose. Only send headers
        // that were part of the signature (Content-Type and x-amz-acl).
        var options = {
          headers: {
            'Content-Type': file.type,
            'x-amz-acl': 'public-read'
          }
        };
        return axios.put(signedUrl, file, options);
      })
      .then(function (result) {
        console.log(result);
      })
      .catch(function (err) {
        console.log(err);
      });
  };
}
From the network request/response headers it looks like the content-type and CORS configuration on the bucket are correctly set, but I'm unsure whether there could be an issue with needing to set the region.
or do I need some additional settings on the bucket, like a bucket policy, or specify a Signature version ?
Request URL:https://XXXXXXXXXX.s3.amazonaws.com/brand.png?AWSAccessKeyId=AKXXXXXXXXXXXX&Content-Type=image%2Fpng&Expires=1460128516&Signature=%2BooCHlrwelBYC9fMYnu01PokgWM%3D&x-amz-acl=public-read
Request Method:PUT
Status Code:400 Bad Request
Remote Address:54.231.192.36:443
Response Headers
Access-Control-Allow-Methods:PUT, POST, GET, HEAD
Access-Control-Allow-Origin:*
Access-Control-Max-Age:3000
Connection:close
Content-Type:application/xml
Date:Fri, 08 Apr 2016 15:00:17 GMT
Server:AmazonS3
Transfer-Encoding:chunked
Vary:Origin, Access-Control-Request-Headers, Access-Control-Request-Method
x-amz-id-2:rXMCu6YD5mLrN3beBCs+kmXDGzhzrQHV2fTUTNooWXBQuPfLNOKDcArGQWRj+NLk+zo=
x-amz-region:eu-central-1
x-amz-request-id:FC181ED154
Request Headers
Accept:application/json, text/plain, */*
Accept-Encoding:gzip, deflate, sdch
Accept-Language:de-DE,de;q=0.8,en-US;q=0.6,en;q=0.4,fr-FR;q=0.2,fr;q=0.2
Connection:keep-alive
Content-Length:16819
Content-Type:image/png
Host:XXXXXXX.s3.amazonaws.com
Origin:http://localhost:3000
Referer:http://localhost:3000/admin/blog/create
User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.110 Safari/537.36
x-amz-acl:public-read
x-amz-region:eu-central-1
Query String Params
AWSAccessKeyId:AKXXXXXXXX
Content-Type:image/png
Expires:1460128516
Signature:+ooCHlrwelBYu01PokgWM=
x-amz-acl:public-read
Thanks in advance for any pointers. I've been pulling my hair out over this...