I'm trying to write some integration tests in Sails.js. I have a bootstrap.test.js file that lifts my server in a global before hook, as the docs suggest.
In my integration test, when I try to pass my Sails app to supertest, I get an error:
app is not defined
agent = request.agent(app.hooks.http.app);
^
bootstrap.test.js
var Sails = require('sails'),
Barrels = require('barrels'),
app;
before(function(done) {
console.log('Global before hook'); // Never called?
this.timeout(5000);
Sails.lift({
log: {
level: 'error'
},
models: {
connection: 'test',
migrate: 'drop'
}
}, function(err, sails) {
app = sails;
if (err) return done(err);
var barrels = new Barrels();
fixtures = barrels.data;
barrels.populate(function(err) {
done(err, sails);
});
});
});
// Global after hook
after(function (done) {
console.log(); // Skip a line before displaying Sails lowering logs
Sails.lower(done);
});
integration test
var chai = require('chai'),
expect = chai.expect,
request = require('supertest'),
agent = request.agent(app.hooks.http.app);
describe('Species CRUD test', function() {
it('should not allow an unauthenticated user create a species', function(done){
var species = {
scientificName: 'Latin name',
commonName: 'Common name',
taxon: 'Amphibian',
leadOffice: 'Vero Beach',
range: ['Florida', 'Georgia']
};
agent.post('species')
.send(species)
.end(function(err, species) {
expect(err).to.exist;
expect(species).to.not.exist;
done();
});
});
});
I have been trying to make the integration test work for a few days now.

This seems to be working fine in my environment. Maybe you can give it a try.
bootstrap.test.js
var Sails = require('sails');
var sails;
before(function(done)
{
Sails.lift({
log: {
level: 'error'
},
connections: {
testDB: {
adapter: 'sails-memory'
}
},
connection: 'testDB',
}, function(err, server)
{
sails = server;
if (err) return done(err);
done(err, sails);
});
});
after(function(done)
{
Sails.lower(done);
});
Test
var request = require('supertest');
it('should return all users', function(done){
request(sails.hooks.http.app)
.get('/user')
.expect(200)
.expect('Content-Type', /json/)
.end(function(err, res){
// check the response
done();
});
});
I place bootstrap.test.js on the root of my test folder and then use mocha to run the test.
mocha test/bootstrap.test.js test/**/*.test.js
Hope this helps.
It seems that since version 3.x of Mocha, the Node.js global variables capability was removed, so if you need it you should explicitly pass it to your environment like this:
mocha --globals global test/bootstrap.test.js test/**/*.test.js
or
in your mocha.opts file :
#test/mocha.opts
--globals global
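Putting the two pieces together, here is a minimal sketch of my own (assuming you are happy to share the lifted app through a global, as the --globals note above suggests) of a bootstrap file and a test file. Note that the agent is created inside a before hook rather than at module load time, because Mocha requires all test files before it runs any hooks, so app does not exist yet when the test file is loaded.

// test/bootstrap.test.js -- sketch: expose the lifted app on `global`
var Sails = require('sails');

before(function(done) {
  this.timeout(5000);
  Sails.lift({
    log: { level: 'error' },
    models: { connection: 'test', migrate: 'drop' }
  }, function(err, server) {
    if (err) return done(err);
    global.app = server; // visible to every test file as `app`
    done();
  });
});

after(function(done) {
  Sails.lower(done);
});

// test/integration/species.test.js -- sketch: build the agent lazily
var request = require('supertest');

describe('Species CRUD test', function() {
  var agent;

  before(function() {
    agent = request.agent(app.hooks.http.app);
  });

  it('should not allow an unauthenticated user to create a species', function(done) {
    agent.post('/species')
      .send({ scientificName: 'Latin name' })
      .expect(403, done); // assumes the policy answers 403; adjust to your app
  });
});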
Related
I am trying to follow this tutorial here: https://github.com/Azure-Samples/ms-identity-javascript-nodejs-tutorial/tree/main/2-Authorization/1-call-graph. It uses the microsoft-identity-express wrapper for msal-node, but I am having trouble with some Graph routes.
I have set up my Azure AD app permissions exactly as described in the tutorial.
I have successfully got the /profile route working, like this:
router.get('/profile',
msid.isAuthenticated(),
msid.getToken({
resource: appSettings.protectedResources.graphAPI
}),
async (req,res,next) => {
let output;
try {
const graphClient = graphManager.getAuthenticatedClient(req.session.protectedResources["graphAPI"].accessToken);
output = await graphClient
.api('/me')
.get();
} catch (error) {
console.log(error);
next(error);
}
res.send(output);
}
);
My appSettings look like this:
const appSettings = {
appCredentials: {
clientId: "zzz",
tenantId: "yyy",
clientSecret: "xxx"
},
authRoutes: {
redirect: "/redirect",
error: "/error", // the wrapper will redirect to this route in case of any error
unauthorized: "/unauthorized" // the wrapper will redirect to this route in case of unauthorized access attempt
},
protectedResources: {
graphAPI: {
endpoint: "https://graph.microsoft.com/v1.0/me",
scopes: ["user.read"]
},
armAPI: {
endpoint: "https://management.azure.com/tenants?api-version=2020-01-01",
scopes: ["https://management.azure.com/user_impersonation"]
},
joinedTeams: {
endpoint: "https://graph.microsoft.com/v1.0/me/joinedTeams",
scopes: ["user.read", "teamsettings.readwrite.all"]
},
myEvents: {
endpoint: "https://graph.microsoft.com/v1.0/me/events",
scopes: ["user.read", "calendars.readwrite"]
},
messages: {
endpoint: "https://graph.microsoft.com/v1.0/me/messages",
scopes: ["Mail.ReadWrite"]
}
}
};
And finally, my getAuthenticatedClient function from graphManager is:
const getAuthenticatedClient = (accessToken) => {
// Initialize Graph client
const client = graph.Client.init({
// Use the provided access token to authenticate requests
authProvider: (done) => {
done(null, accessToken);
}
});
return client;
};
graphManager and appSettings are set up exactly as the tutorial does it. I added more API endpoints to the protectedResources object as the tutorial says, and I followed the same logic as the /profile route for the extra Graph API routes. But I still get 401 errors whenever running this:
router.get('/me/events',
msid.isAuthenticated(),
msid.getToken({
resource: appSettings.protectedResources.myEvents
}),
async (req,res,next) => {
let output;
try {
const graphClient = graphManager.getAuthenticatedClient(req.session.protectedResources["myEvents"].accessToken);
output = await graphClient
.api('/me/events')
.get();
} catch (error) {
console.log(error);
next(error);
}
res.send(output);
}
);
I can't understand why /me calls to the Graph API work but /me/events does not. The correct permissions are being passed to msid.getToken() and getAuthenticatedClient(), so why do the API calls fail?
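One thing that can help narrow this down (a debugging suggestion of mine, not part of the original post) is to decode the access token that was actually acquired for myEvents and look at its scp claim before calling Graph; for /me/events it should contain calendars.readwrite in addition to user.read.

// Debugging sketch: decode the JWT payload of an access token to inspect the
// scopes ("scp" claim) and audience it was actually issued with.
function decodeTokenPayload(accessToken) {
  const payload = accessToken.split('.')[1]
    .replace(/-/g, '+')
    .replace(/_/g, '/'); // base64url -> base64
  return JSON.parse(Buffer.from(payload, 'base64').toString('utf8'));
}

// Inside the /me/events handler, before calling Graph:
// const claims = decodeTokenPayload(req.session.protectedResources["myEvents"].accessToken);
// console.log(claims.scp, claims.aud);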
I'm trying to run a test suite with Mocha; the goal is to start the server in a beforeEach hook and then
close it in an afterEach hook.
But for some reason, when the afterEach hook fires, I get the following error:
Error [ERR_SERVER_NOT_RUNNING]: Server is not running.
The test case passes, which means the server is up and running.
I export the server like this from my app.js file:
var server = app.listen(3000, function () {
var port = server.address().port;
console.log("Example app listening at port %s", port);
});
module.exports = server; // Export server in order to use it in test files
My test file:
describe("loading express", function () {
var server;
before(function (done) {
User.deleteMany(done);
});
beforeEach(function () {
server = require("../app");
});
afterEach(function (done) {
server.close(done);
});
describe("Create user account with valid email address", function () {
describe("Route: POST /signup", () => {
it("201 HAPPY PATH", (done) => {
chai
.request(server)
.post("/signup")
.send({
email: "test23222#test.test",
password: "12345678",
firstname: "testtest",
lastname: "testtest",
})
.end((err, res) => {
res.should.have.status(201);
done();
});
});
});
});
});
I believe I need to export a promise.
This is what I got so far:
var server = new Promise(function (resolve, reject) {
app.listen(3000, function () {
var port = server.address().port;
console.log("Example app listening at port %s", port);
resolve();
});
});
module.exports = server; // Export server in order to use it in test files
in test suite:
var server = require('./app.js')
server.then(function() {
....
})
The server is closed by chai-http every time a request is served.
From the chai-http docs:
If you want to keep the server open, perhaps if you’re making multiple requests, you must call .keepOpen() after .request(), and manually close the server down:
E.g.:
chai
.request(server)
.keepOpen() // <-- Here
.post("/signup")
.send({
email: "test23222#test.test",
password: "12345678",
firstname: "testtest",
lastname: "testtest",
})
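A minimal sketch of how the whole suite could be restructured around keepOpen() (assuming app.js exports the listening server exactly as shown in the question): one requester is kept open for all tests, and the server is closed once in a single after hook instead of in afterEach.

var chai = require("chai");
var chaiHttp = require("chai-http");
chai.use(chaiHttp);
chai.should();

var server = require("../app"); // the listening server exported from app.js

describe("loading express", function () {
  // One kept-open requester for the whole suite; chai-http will no longer
  // close the server after each request.
  var requester = chai.request(server).keepOpen();

  after(function (done) {
    // Shut the server down once, after all tests have run.
    server.close(done);
  });

  it("201 HAPPY PATH", function (done) {
    requester
      .post("/signup")
      .send({
        email: "test23222@test.test",
        password: "12345678",
        firstname: "testtest",
        lastname: "testtest",
      })
      .end(function (err, res) {
        res.should.have.status(201);
        done();
      });
  });
});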
I'm working on some tests using Detox for my React Native application. One of those tests is a flow where I need to check that the user's session is secured; if not, I send an SMS verification code.
Test: successfully mock the POST API call api/sessions/:sessionId, {code: 123456}
Problem: Mirage is not catching the call, so of course my saga returns an error for the fake code 123456, whereas I want Mirage.JS to return true so the flow can continue.
Here is the test file (file.spec.js):
import { Server } from "miragejs"
import { makeServer } from "./server";
let server;
beforeEach(() => {
server = makeServer({ environment: "development" });
})
afterEach(() => {
server.shutdown()
})
describe('SecureFlow', () => {
it("should do nav to a project and start Investment Flow", async () => {
server.get("https://random-api.eu/sessions/:sessionId", () => {
return new Response( 200, {}, { ok: true });
});
await basicNavigation(); //randomNavigation until the secure part (Screen)
await element(by.id('Accept-andLend')).tap();
await element(by.id('textInput-SMSCode')).typeText("123456");
})
})
server.js
import { Server, Model, Factory } from "miragejs";
export function makeServer({ environment = "development" } = {}) {
let server = new Server({
environment,
models: {
},
routes() {
this.post("https://random-api.eu/sessions/:sessionId", schema => {
return [{ok: true}];
});
}
});
return server;
}
I'm developing a Vue.js application which has only a frontend (no server) and sends a lot of requests to different APIs. The originally quite simple app has become more complex, and there are problems with some APIs because browsers do not accept the responses due to CORS. That is why I'm testing whether I can migrate the app to Nuxt.js.
My approach is as follows (inspired by this comment), but I expect that there is probably a better way to send the requests from the client through the server.
pages/test-page.vue
methods: {
async sendRequest(testData) {
const response = await axios.post('api', testData)
// Here can I use the response on the page.
}
}
nuxt.config.js
serverMiddleware: [
{ path: '/api', handler: '~/server-middleware/postRequestHandler.js' }
],
server-middleware/postRequestHandler.js
import axios from 'axios'
const configs = require('../store/config.js')
module.exports = function(req, res, next) {
let body = ''
req.on('data', (data) => {
body += data
})
req.on('end', async () => {
if (req.hasOwnProperty('originalUrl') && req.originalUrl === '/api') {
const parsedBody = JSON.parse(body)
// Send the request from the server.
const response = await axios.post(
configs.state().testUrl,
body
)
req.body = response
}
next()
})
}
middleware/test.js (see: API: The Context)
export default function(context) {
// Universal keys
const { store } = context
// Server-side
if (process.server) {
const { req } = context
store.body = req.body
}
}
pages/api.vue
<template>
{{ body }}
</template>
<script>
export default {
middleware: 'test',
computed: {
body() {
return this.$store.body
}
}
}
</script>
When the user performs an action on the "test" page that initiates the method "sendRequest()", the request "axios.post('api', testData)" results in a response that contains the HTML code of the "api" page. I can then extract the JSON "body" from the HTML.
I find this final step suboptimal, but I have no idea how I can send just the JSON and not the whole page. I suppose there must be a much better way to get the data to the client.
There are two possible solutions:
Proxy (see: https://nuxtjs.org/faq/http-proxy)
API (see: https://medium.com/@johnryancottam/running-nuxt-in-parallel-with-express-ffbd1feef83c)
Ad 1. Proxy
The configuration of the proxy can look like this:
nuxt.config.js
module.exports = {
...
modules: [
'@nuxtjs/axios',
'@nuxtjs/proxy'
],
proxy: {
'/proxy/packagist-search/': {
target: 'https://packagist.org',
pathRewrite: {
'^/proxy/packagist-search/': '/search.json?q='
},
changeOrigin: true
}
},
...
}
The request over proxy can look like this:
axios
.get('/proxy/packagist-search/' + this.search.phpLibrary.searchPhrase)
.then((response) => {
console.log(
'Could get the values packagist.org',
response.data
)
})
.catch((e) => {
console.log(
'Could not get the values from packagist.org',
e
)
})
Ad 2. API
Select Express as the project’s server-side framework when creating the new Nuxt.js app.
server/index.js
...
app.post('/api/confluence', confluence.send)
app.use(nuxt.render)
...
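For context, the elided parts of server/index.js in the classic Nuxt 2 + Express template usually look roughly like this; everything outside the two app.* lines above is an assumption of mine, not taken from the original answer.

// server/index.js -- sketch of the usual Express + Nuxt wiring
const express = require('express')
const { Nuxt, Builder } = require('nuxt')
const confluence = require('./confluence')
const config = require('../nuxt.config.js')

const app = express()
config.dev = process.env.NODE_ENV !== 'production'

async function start() {
  const nuxt = new Nuxt(config)

  // Build with webpack only in development; production uses the prebuilt output.
  if (config.dev) {
    await new Builder(nuxt).build()
  }

  // Custom API route handled by Express before Nuxt takes over rendering.
  app.post('/api/confluence', confluence.send)
  app.use(nuxt.render)

  app.listen(3000)
}

start()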
server/confluence.js (simplified)
const axios = require('axios')
const config = require('../nuxt.config.js')
exports.send = function(req, res) {
let body = ''
let page = {}
req.on('data', (data) => {
body += data
})
req.on('end', async () => {
const parsedBody = JSON.parse(body)
try {
page = await axios.get(
config.api.confluence.url.api + ...,
config.api.confluence.auth
)
} catch (e) {
console.log('ERROR: ', e)
}
res.json({
page
})
})
}
The request over API can look like this:
this.$axios
.post('api/confluence', postData)
.then((response) => {
console.log('Wiki response: ', response.data)
})
.catch((e) => {
console.log('Could not update the wiki page. ', e)
})
Now with Nuxt 3:
nuxtjs3 rc release
You have fetch or useFetch, so there is no need to import axios or other libs, which is great: automatic parsing of the body, automatic detection of the head.
fetching data
You have middleware and a server API in the same application; you can add headers to queries, hide a token for example, etc.
server layer
A quick example: here, in a Vue file, I call the server API:
const { status } = await $fetch.raw( '/api/newsletter', { method: "POST", body: this.form.email } )
.then( (response) => ({
status: response.status,
}) )
.catch( (error) => ({
status: error?.response?.status || 500,
}) );
It will call a method on my server. To set up the server, I created a folder named server in the root directory, then api inside it, and a file named newsletter.ts (I use TypeScript).
Then in this file:
export default defineEventHandler(async (event) => {
const {REST_API, MAILINGLIST_UNID, MAILINGLIST_TOKEN} = useRuntimeConfig();
const subscriber = await readBody(event);
console.log("url used for rest call" + REST_API);
console.log("token" + MAILINGLIST_TOKEN);
console.log("mailing list unid" + MAILINGLIST_UNID);
let recipientWebDTO = {
email: subscriber,
subscriptions: [{
"mailingListUnid": MAILINGLIST_UNID
}]
};
const {status} = await $fetch.raw(REST_API, {
method: "POST",
body: recipientWebDTO,
headers: {
Authorization: MAILINGLIST_TOKEN,
},
}).then((response) => ({
status: response.status,
}))
.catch((error) => ({
status: error?.response?.status || 500,
}));
event.res.statusCode = status;
return "";
})
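The handler above reads REST_API, MAILINGLIST_UNID and MAILINGLIST_TOKEN from useRuntimeConfig(). The original answer does not show where these are defined; a minimal sketch of the corresponding nuxt.config entry could look like this (server-only keys, typically fed from environment variables whose names here are my assumption):

// nuxt.config.ts -- sketch only; the key names match the handler above
export default defineNuxtConfig({
  runtimeConfig: {
    // Keys outside `public` are only available server-side (e.g. in server/api).
    REST_API: process.env.NUXT_REST_API,
    MAILINGLIST_UNID: process.env.NUXT_MAILINGLIST_UNID,
    MAILINGLIST_TOKEN: process.env.NUXT_MAILINGLIST_TOKEN,
  },
})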
What are the benefits?
REST_API, MAILINGLIST_UNID and MAILINGLIST_TOKEN are not exposed on the client, and even the file newsletter.ts is not visible in the browser's debugger.
You can add logging on the server side only, and you don't even expose the API URL, which avoids some attacks.
You don't have to create a new backend just to hide some critical tokens or data.
Then it is up to you to choose a middleware route or the server API. You don't have to import new libs: h3 is embedded via Nitro in Nuxt 3, and fetch comes with Vue 3.
For proxying you also have sendProxy offered by h3: sendProxy H3
When you build in dev, the server and the client are built at the same time (with nothing to implement or configure in the config file), and the same goes for a production build; just don't deploy your project as a static site (though I think you can deploy the front end statically and the server on Node, I'm not sure).
I have installed the Jasmine CLI globally using npm install -g jasmine.
I'm trying to test multiple HTTP requests at once using the test suite below. Multiple calls per request were sent (I can see the output of console.log()), but nothing was returned, so the test failed. Please guide me: is it possible to do this, and how?
index.js
var app = require('express')();
var request = require('request');
app.get('/', function(req, res) {
console.log('GET /');
res.status(200);
res.send('Hello World');
});
app.listen(3000);
spec/multipleRequestSpec.js
var request = require('request');
var async = require('async');
describe('express application', function() {
var baseUrl = 'http://localhost:3000';
var statusCode = [0, 0];
var b = ['', ''];
beforeEach(function(done) {
async.parallel([
function() {
request.get(baseUrl, function(err, res, body) {
statusCode[0] = res.statusCode;
b[0] = body;
})
}
,
function() {
request.post(baseUrl, function(err, res, body) {
statusCode[1] = res.statusCode;
b[1] = body;
})
}
], done());
});
it('should return 200', function() {
expect(statusCode[0]).toBe(200);
});
it('should return hello world', function() {
expect(b[0]).toEqual('Hello World');
});
it('should return error 404', function() {
expect(statusCode[1]).toBe(404);
});
});
Edited
When testing only one request, I place done() inside the request() callback and it works just fine, but I'm quite confused about where to place done() when using async.parallel().
spec/requestSpec.js
var request = require('request');
describe('expresss application', function() {
var baseUrl = 'http://localhost:3000';
var statusCode = 0;
beforeEach(function(done) {
request.get(baseUrl, function(err, res, body) {
statusCode = res.statusCode;
done();
});
});
it('should return 200', function() {
expect(statusCode).toBe(200);
});
});
In the describe block you initialize the variable body and use it in the it blocks. But in request.get and request.post the callback has a parameter that is also named body, and that parameter shadows your describe-level body variable.
Change beforeEach to:
beforeEach(function(done) {
async.parallel([
function(callback) {
request.get(baseUrl, function(err, res, reqBody) {
statusCode[0] = res.statusCode;
body[0] = reqBody;
callback();
})
}
,
function(callback) {
request.post(baseUrl, function(err, res, reqBody) {
statusCode[1] = res.statusCode;
body[1] = reqBody;
callback();
})
}
], done);
});
I think you should also check the err param in the request callbacks, because there may be errors that wrongly fail or pass your tests.
For API endpoint tests it is easier to use superagent or supertest instead of request.
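To illustrate that last point, here is a minimal sketch (mine, not part of the original answer) of the same checks written with supertest, pointing at the server from index.js that is already listening on port 3000; supertest's expect/end API takes care of the done bookkeeping:

var request = require('supertest');

describe('express application', function() {
  var baseUrl = 'http://localhost:3000'; // index.js must already be listening

  it('should return 200 and hello world', function(done) {
    request(baseUrl)
      .get('/')
      .expect(200)
      .expect('Hello World', done); // body must match exactly
  });

  it('should return error 404 for POST /', function(done) {
    request(baseUrl)
      .post('/')
      .expect(404, done);
  });
});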