Native support for ES6 in PhantomJS

Is there a way to make PhantomJS natively support ES6? I have a bunch of ES6 code that is converted to ES5 via Babel, and what I need is an accurate code coverage measurement against the ES6 source rather than the transpiled ES5. It's a requirement from the client, so I can't just tell him to stop asking for it...
As far as I know, NodeJS already has native support for ES6; is there a way to get the same from PhantomJS?

I've ended up using raw NodeJS (without PhantomJS) + Express + JSDom (https://github.com/tmpvar/jsdom); the POC looks like this:
"use strict"
const $module = require('module');
const path = require('path');
const babel = require("babel-core");
const Jasmine = require('jasmine');
const reporters = require('jasmine-reporters');
const express = require('express');
const jsdom = require("jsdom");
const app = express();
const vm = require('vm');
const fs = require("fs");
app.get('/', function (req, res) {
res.sendFile('index.html', { root: __dirname });
});
app.use('/bower_components', express.static('bower_components'));
const load = function (filename) {
return fs.readFileSync(`./bower_components/${filename}`, "utf-8");
};
const packages = [
fs.readFileSync('./bower_components/jquery/dist/jquery.js', "utf-8"),
fs.readFileSync('./bower_components/angular/angular.js', "utf-8"),
fs.readFileSync('./bower_components/angular-mocks/angular-mocks.js', "utf-8")
];
const sut = {
'./js/code.js': fs.readFileSync('./js/code.js', "utf-8")
};
const tests = {
'./tests/test.js': fs.readFileSync('./tests/test.js', "utf-8")
};
function navigate(FakeFileSystem, root, cwd, filename) {
// Normalize path according to root
let relative = path.relative(root, path.resolve(root, cwd, filename));
let parts = relative.split(path.sep);
let iterator = FakeFileSystem;
for (let part of parts) {
iterator = iterator[part] || (iterator[part] = { });
}
return iterator;
}
const server = app.listen(3333, function () {
const host = server.address().address;
const port = server.address().port;
const url = `http://${host === '::' ? 'localhost' : host}:${port}`;
console.log(`Server launched at ${ url }`);
console.log(`Running tests...`)
jsdom.env({
url: url,
src: packages,
done: function (err, window) {
let jasmine = new Jasmine();
let FakeFileSystem = {};
let descriptors = [];
jasmine.configureDefaultReporter({ showColors: true });
let env = jasmine.env;
for (let propertyName in env) {
if (env.hasOwnProperty(propertyName)) {
window[propertyName] = env[propertyName];
}
}
let context = vm.createContext(window);
let collections = [sut, tests];
for (let collection of collections) {
for (let filename in collection) {
let descriptor = navigate(FakeFileSystem, __dirname, '.', filename);
let source = collection[filename];
let transpiled = babel.transform(source, { "plugins": ["transform-es2015-modules-commonjs"] });
let code = $module.wrap(transpiled.code);
let _exports = {};
let _module = { exports: _exports };
descriptor.code = vm.runInContext(code, context);
descriptor.module = _module;
descriptor.exports = _exports;
descriptor.filename = filename;
descriptors.push(descriptor);
}
}
for (let descriptor of descriptors) {
let cwd = path.dirname(path.relative(__dirname, descriptor.filename));
descriptor.code.call(
undefined,
descriptor.exports,
// Closure is used to capture cwd
(function (cwd) {
return function (filename) { // Fake require function
return navigate(FakeFileSystem, __dirname, cwd, filename).exports;
}
})(cwd),
descriptor.module,
descriptor.filename
);
}
jasmine.execute();
server.close();
}
});
});
The beauty of this approach is that there is no separate Babel build step: frontend packages such as Angular are still loaded from bower, while all the config stuff comes from npm...
EDIT
I've stumbled upon the fact that NodeJS doesn't support all ES6 features yet, and a feature such as ES6 modules is a real pain: they aren't supported natively anywhere. So I've ended up doing a partial transpilation with Babel, with the expectation that as NodeJS provides richer and richer ES6 support I will turn off Babel features step by step and switch to the native support as it becomes available...
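To illustrate, here is a minimal sketch of what that partial transpilation looks like; the plugin name is the one already used in the POC above, and which transforms you keep depends on your Node version:
const babel = require('babel-core');

// Transform only what the Node runtime cannot handle natively yet;
// as Node gains support, entries can be removed one by one.
function transpilePartially(source) {
    return babel.transform(source, {
        plugins: [
            'transform-es2015-modules-commonjs' // ES6 modules: no native support, keep transpiling
            // other ES2015 transforms intentionally omitted and left to the runtime
        ]
    }).code;
}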

Related

How to dynamically access a remote component in vue js with module federation

I am trying to build a Vue.js 2 micro frontend with module federation. I don't want to use static remote imports via webpack.config.js like this:
module.exports = {
    plugins: [
        new ModuleFederationPlugin({
            name: 'host',
            remotes: {
                app1: 'app1@http://localhost:3001/remoteEntry.js',
            },
        }),
    ],
};
I am looking for a way to dynamically import Vue components into my host application. I tried this approach so far, but I only found examples that worked with Angular or React.
The goal is to have multiple remote frontends that can automatically register somewhere, maybe in some kind of store. The host application can then access this store and get all of the registered remote applications (name, url, components). The host application then loads the components and should be able to use them. I remote-import the component HelloDerp; the loading process is working fine, but I don't know how to render it in my host application. I read the Vue.js docs about dynamic and async imports, but I think that only works for local components.
What I've got so far in the host application:
<template>
    <div id="app">
        <HelloWorld />
        <HelloDerp />
    </div>
</template>
<script>
import HelloWorld from "./components/HelloWorld.vue";
const HelloDerp = null;
export default {
    name: "App",
    components: {
        HelloWorld,
        HelloDerp,
    },
    mounted() {
        var remoteUrlWithVersion = "http://localhost:9000/remoteEntry.js";
        const element = document.createElement("script");
        element.type = "text/javascript";
        element.async = true;
        element.src = remoteUrlWithVersion;
        element.onload = () => {
            console.log(`Dynamic Script Loaded: ${element.src}`);
            HelloDerp = loadComponent("core", "./HelloDerp");
        };
        document.head.appendChild(element);
        return null;
    },
};
async function loadComponent(scope, module) {
    // Initializes the shared scope. Fills it with known provided modules from this build and all remotes
    await __webpack_init_sharing__("default");
    const container = window[scope]; // or get the container somewhere else
    // Initialize the container, it may provide shared modules
    await container.init(__webpack_share_scopes__.default);
    const factory = await window[scope].get(module);
    const Module = factory();
    return Module;
}
</script>
Sorry, I almost forgot about this. Here's my solution.
Load Modules:
export default async function loadModules(
    host: string,
    ownModuleName: string,
    wantedNames: string[]
): Promise<RemoteComponent[]> {
    ...
        uiApplications.forEach((uiApplication) => {
            const remoteURL = `${uiApplication.protocol}://${uiApplication.host}:${uiApplication.port}/${uiApplication.moduleName}/${uiApplication.fileName}`;
            const { componentNames } = uiApplication;
            const { moduleName } = uiApplication;
            const element = document.createElement('script');
            element.type = 'text/javascript';
            element.async = true;
            element.src = remoteURL;
            element.onload = () => {
                componentNames?.forEach((componentName) => {
                    const component = loadModule(moduleName, `./${componentName}`);
                    component.then((result) => {
                        if (componentName.toLowerCase().endsWith('view')) {
                            // share views
                            components.push(new RemoteComponent(result.default, componentName));
                        } else {
                            // share business logic
                            components.push(new RemoteComponent(result, componentName));
                        }
                    });
                });
            };
            document.head.appendChild(element);
        });
    });
    ...
}

export default async function loadModule(scope: string, module: string): Promise<any> {
    await __webpack_init_sharing__('default');
    const container = window[scope]; // or get the container somewhere else
    await container.init(__webpack_share_scopes__.default);
    const factory = await window[scope].get(module);
    const Module = factory();
    return Module;
}
Add Modules to routes
router.addRoute({
    name: remoteComponent.componentName,
    path: `/${remoteComponent.componentName}`,
    component: remoteComponent.component,
});
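If the remote components should also be usable outside the router, one option is Vue 2 global registration. This is just a sketch on top of the helpers above; the loadModules arguments and the import path are illustrative, and RemoteComponent is assumed to carry the componentName/component fields used in the route example:
import Vue from 'vue';
import loadModules from './loadModules'; // the helper shown above; path is illustrative

loadModules('localhost', 'host', ['core']).then((remoteComponents) => {
    remoteComponents.forEach((remoteComponent) => {
        // After global registration the component can be used in any template,
        // either by its name or through <component :is="...">.
        Vue.component(remoteComponent.componentName, remoteComponent.component);
    });
});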

How to call Nunjucks configuration file into my main app.js

Within my Express 4.17.1 app, I have the following template setup (below). I have read about pulling routes out into their own module files, but how can I move my Nunjucks configuration code below into a separate file that gets called from app.js, and still use my template filters? Just trying to declutter.
Thanks!
App.js
const express = require('express');
const nunjucks = require('nunjucks');
const env = new nunjucks.Environment();
const path = require('path');

//...other stuff...

// VIEWS | NUNJUCKS
app.set('view engine', 'njk');
app.set('views', [path.join(__dirname, 'views'), path.join(__dirname, 'views/shared/')]);

// NUNJUCKS - CONFIGURATION & CUSTOM FILTERS
function setUpNunjucks(expressApp) {
    const env = nunjucks.configure([path.join(__dirname, 'views'), path.join(__dirname, 'views/shared/')], {
        autoescape: true, // default
        throwOnUndefined: true, // for dev testing only
        trimBlocks: true,
        lstripBlocks: true,
        noCache: false, // default
        express: expressApp // use the app passed in
    });
    // custom Nunjucks filter to pass back into the template
    env.addFilter('shorten', function (str, count) {
        return str.slice(0, count || 5);
    });
    // an object looper, then do stuff with it;
    // apply the following inside the template form
    env.addFilter('param', function (obj, mparam) {
        for (const [key, value] of Object.entries(obj)) {
            // example of whatever is needed...
            if (mparam == value.param) {
                return `${value.value}`;
            }
        }
    });
}
setUpNunjucks(app);

//...other stuff...

// LISTEN
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
    console.log(`Server started on port ${PORT}`);
});
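One way to do the split, as a sketch only: export the setup function from its own module and call it from app.js with the Express app. The file name nunjucks-config.js is hypothetical and is assumed to live next to app.js so the views paths still resolve.
// nunjucks-config.js (hypothetical file name)
const nunjucks = require('nunjucks');
const path = require('path');

// Configure Nunjucks against the given Express app and register the custom filters.
module.exports = function setUpNunjucks(expressApp) {
    const env = nunjucks.configure([path.join(__dirname, 'views'), path.join(__dirname, 'views/shared/')], {
        autoescape: true,
        throwOnUndefined: true, // for dev testing only
        trimBlocks: true,
        lstripBlocks: true,
        noCache: false,
        express: expressApp
    });
    env.addFilter('shorten', function (str, count) {
        return str.slice(0, count || 5);
    });
    return env;
};
and in app.js:
const setUpNunjucks = require('./nunjucks-config');
setUpNunjucks(app);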

Store SQL queries as strings in a Node server and return them as a response (Express)

I am trying to do something that may or may not be possible.
I have a SQL file called "travel.sql" that I am trying to turn into an API, so I thought the simplest thing to do is to save the queries as strings in an array and then send that array of strings as the response from a node server (express.js).
So, simply, here's the code so far, but it returns nothing in Postman and I don't know what's missing.
I checked all the packages and they are installed properly.
const express = require('express')
const fse = require( "fs-extra" );
const { join } = require( "path" );
const app = express()
const port = 3000

app.get('/sqlfile', (req, res) => {
    const loadSqlQueries = async folderName => {
        // determine the file path for the folder
        const filePath = join( process.cwd(), travel );
        // get a list of all the files in the folder
        const files = await fse.readdir( filePath );
        // only files that have the .sql extension
        const sqlFiles = files.filter( f => f.endsWith( ".sql" ) );
        // loop over the files and read in their contents
        const queries = {};
        for ( let i = 0; i < sqlFiles.length; i++ ) {
            const query = fse.readFileSync( join( filePath, sqlFiles[ i ] ), { encoding: "UTF-8" } );
            queries[ sqlFiles[ i ].replace( ".sql", "" ) ] = query;
            console.log(queries)
        }
        return queries;
        res.send(queries);
    };
})

app.listen(port, () => {
    console.log(`Example app listening at http://localhost:${port}`)
})
I'm not quite sure what you are trying to achieve, but in any case several parts of your code need to be improved:
As a first step I suggest adding a try/catch block to your code so you can see the errors you are facing.
You are creating a function expression "loadSqlQueries" which I think is not needed, and it never runs: you only define it but never call it.
As the function expression is not needed, the "return" is not needed either.
To be able to use "await", as in const files = await fse.readdir( filePath );, you need to be inside an "async" function.
In const filePath = join( process.cwd(), travel ); you are using "travel" as a variable; you need to use it as a string, like this: const filePath = join( process.cwd(), "travel" );
I've applied the changes mentioned above; kindly read the comments I added to your code to see them. Here is the final code:
const express = require('express')
const fse = require("fs-extra");
const { join } = require("path");
const app = express()
const port = 3000

app.get('/sqlfile',
    // add async to be able to use await
    async (req, res) => {
        // add try and catch block to your code to catch the errors
        try {
            // no need for the function expression which is never used
            // const loadSqlQueries = async folderName => {
            // determine the file path for the folder
            // use travel as a string not a variable
            const filePath = join(process.cwd(), "travel");
            // get a list of all the files in the folder
            const files = await fse.readdir(filePath);
            // only files that have the .sql extension
            const sqlFiles = files.filter(f => f.endsWith(".sql"));
            // loop over the files and read in their contents
            const queries = {};
            for (let i = 0; i < sqlFiles.length; i++) {
                const query = fse.readFileSync(join(filePath, sqlFiles[i]), { encoding: "UTF-8" });
                queries[sqlFiles[i].replace(".sql", "")] = query;
                console.log(queries)
            }
            // As the function expression is not used we will comment return
            // return queries;
            res.send(queries);
            // }
        } catch (error) {
            console.log(error);
        }
    })

app.listen(port, () => {
    console.log(`Example app listening at http://localhost:${port}`)
})

Invalid asm.js: Invalid member of stdlib

(node:7894) V8: /var/www/html/testeth/node_modules/solc/soljson.js:3 Invalid asm.js: Invalid member of stdlib
I am making a test deploy of a simple contract on ganache-cli, but it shows that warning. Please help me resolve this problem.
Below is the code of "index.sol":
pragma solidity ^0.4.17;

contract testalk {
    string public message;

    function testalk(string initialMsg) public {
        message = initialMsg;
    }

    function setMessage(string nwMsg) public {
        message = nwMsg;
    }
}
and I am testing it using mocha and the ganache-cli provider, as in the code below:
const assert = require('assert');
const ganache = require('ganache-cli');
const Web3 = require('web3');
const web3 = new Web3(ganache.provider());
const { interface, bytecode } = require('../compile');

require('events').EventEmitter.defaultMaxListeners = 15;

let accounts;
let testeth;

beforeEach(async () => {
    accounts = await web3.eth.getAccounts();
    testeth = await new web3.eth.Contract(JSON.parse(interface))
        .deploy({ data: bytecode, arguments: ['Hi Alok!'] })
        .send({ gas: '1000000', from: accounts['0'] });
});

describe("testalk", () => {
    it('deploy a contract', () => {
        assert.ok(testeth.options.address);
    });
    it('get the message', async () => {
        const message = await testeth.methods.message().call();
        assert.equal('Hi Alok!', message);
        //console.log(message);
    })
    it('get the message', async () => {
        await testeth.methods.setMessage("Bye Alok!").send({ from: accounts[0], gas: '1000000' });
        const message = await testeth.methods.message().call();
        console.log(message);
    });
});
I am using Ubuntu and Node.js.
I recommend selecting a newer version of the solc compiler (e.g. check Remix to see which version works with your code). Check that the version in the pragma statement of your Solidity code matches the solc version you have installed with node. Check the sample usages in the solc releases and copy the JS code. I used 0.7.4: https://libraries.io/npm/solc/0.7.4
After that, you need to adapt the compile script to return the ABI and bytecode to your tests. The names must match exactly. Here I'm returning the values in JSON format so I don't have to use JSON.parse(interface) in my test file. Note that the bytecode is only the HEX value, therefore I'm returning contract.evm.bytecode.object. Replace Lottery with the name of your contract (if you have multiple contracts, check the docs or try a for loop; see the sketch after the script below).
const path = require('path');
const fs = require('fs');
const solc = require('solc');

const lotteryPath = path.resolve(__dirname, 'contracts', 'Lottery.sol');
const source = fs.readFileSync(lotteryPath, 'utf8');

var input = {
    language: 'Solidity',
    sources: {
        'test.sol': {
            content: source
        }
    },
    settings: {
        outputSelection: {
            '*': {
                '*': ['*']
            }
        }
    }
};

var output = JSON.parse(solc.compile(JSON.stringify(input)));
var contract = output.contracts['test.sol'].Lottery;
var bytecode = contract.evm.bytecode.object;
var interface = contract.abi;

module.exports = { interface, bytecode };
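For the multi-contract case mentioned above, here is a rough sketch (not part of the original answer): it simply loops over the standard solc JSON output used in the script above and collects the ABI and bytecode of every contract under its name.
// Sketch: export { interface, bytecode } for every contract in the solc output.
var compiled = {};
for (var file in output.contracts) {
    for (var name in output.contracts[file]) {
        var c = output.contracts[file][name];
        // keep the same two fields the tests expect: ABI and deployable bytecode
        compiled[name] = {
            interface: c.abi,
            bytecode: c.evm.bytecode.object
        };
    }
}
module.exports = compiled;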
My test file didn't change much, but here it is:
const assert = require('assert');
const ganache = require('ganache-cli');
const Web3 = require('web3');
const web3 = new Web3(ganache.provider());
const { interface, bytecode } = require('../compile');

let lottery;
let accounts;

beforeEach(async () => {
    accounts = await web3.eth.getAccounts();
    lottery = await new web3.eth.Contract(interface)
        .deploy({ data: bytecode })
        .send({ from: accounts[0], gas: '6000000' });
});

describe('Lottery Contract', () => {
    it('deploys a contract', () => {
        assert.ok(lottery.options.address);
    });
});
This can happen for many reasons. If you are using the Remix IDE, asm.js is disabled by default in Chrome, which could be the issue. You could be using an old Solidity compiler in an incompatible way. There could also be issues with your code itself. Without the specifics of the code and the environment you are using (IDE, operating system, etc.), anyone can only guess.
Try to install solc version >= 0.4.26.
Also, make sure you've correctly installed truffle-hdwallet-provider since it's been renamed to @truffle/hdwallet-provider.
https://www.npmjs.com/package/@truffle/hdwallet-provider
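For reference, the corresponding install commands would look something like this (versions follow the suggestions above; adjust to whatever matches your pragma):
npm install solc@0.4.26
npm install @truffle/hdwallet-provider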
I solved the invalid member of stdlib and the "before each" hook for "deploys a contract" problems by deleting the node_modules folder in the current directory and then reinstalling the modules from the terminal:
npm install --save ganache-cli mocha web3@1.0.0-beta.37 solc@0.4.17
If you have Anaconda installed on your computer, you should deactivate it before installing these modules by using the
conda deactivate
command. I couldn't find a solution for this on the internet, and I hope it helps you solve your problem as well...

How do I install Spectron and start scripting with it?

I got a recent requirement where I need to do test automation of a backend Node.js application using Spectron. I would like to know what programming skills are required to approach this.
Find the Spectron documentation at https://electronjs.org/spectron
Installation
npm install --save-dev spectron
A sample test file looks like this:
const Application = require('spectron').Application
const assert = require('assert')
const electronPath = require('electron')
const path = require('path')

describe('Application launch', function () {
    this.timeout(10000)

    beforeEach(function () {
        this.app = new Application({
            path: electronPath,
            args: [path.join(__dirname, '..')]
        })
        return this.app.start()
    })

    afterEach(function () {
        if (this.app && this.app.isRunning()) {
            return this.app.stop()
        }
    })

    it('shows an initial window', function () {
        return this.app.client.getWindowCount().then(function (count) {
            assert.equal(count, 1)
        })
    })
})
Spectron can work with any test framework. I prefer using mocha.
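A minimal way to run the sample above with mocha (the spec path below is illustrative):
npm install --save-dev mocha
npx mocha test/spec.js
You can also put the mocha command in the "test" script of package.json and run it with npm test.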
Clone this project for more info https://github.com/electron/spectron