I'm trying to integrate an Express app with DocPad. What I want most is to share layouts between my static pages and my Express views.
In the process I followed the instructions from question 16332833, with:
var docpadInstanceConfiguration = {
  action: 'generate',
  env: 'static'
};
require('docpad').createInstance(docpadInstanceConfiguration, function (err, docpadInstance) {
  if (err) return console.log(err.stack);
  // ...
});
I'm getting the following error:
info: Generating...
notice: DocPad is currently running without any plugins installed. You probably want to install some: http://docpad.org/plugins
TypeError: path must be a string
at fs.exists (fs.js:166:11)
at Task.method (/Users/../node_modules/docpad/node_modules/safefs/out/lib/safefs.js:183:14)
at ambi (/Users/../node_modules/docpad/node_modules/ambi/out/lib/ambi.js:22:16)
at /Users/../node_modules/docpad/node_modules/taskgroup/out/lib/taskgroup.js:117:19
at b (domain.js:183:18)
at Domain.run (domain.js:123:23)
at Task.fire (/Users/../node_modules/docpad/node_modules/taskgroup/out/lib/taskgroup.js:116:21)
at process._tickCallback (node.js:415:13)
at Function.Module.runMain (module.js:499:11)
at startup (node.js:119:16)
at node.js:901:3
Without options, i.e. with var docpadInstanceConfiguration = {};, there are no errors:
info: Welcome to DocPad v6.52.1
info: Contribute: http://docpad.org/docs/contribute
info: Plugins: cleanurls, coffeescript, downloader, eco, less, livereload, marked, partials, related, stylus, text
info: Environment: development
Any idea how to share layouts between DocPad and Express views, and get CSRF protection, for example? (I'm using ectjs for my views; is it compatible with eco?)
Thanks
It turns out you need the load and ready actions prior to generate, like so:
var docpadInstanceConfiguration = {
  action: 'load ready generate',
  env: 'static'
};
require('docpad').createInstance(docpadInstanceConfiguration, function (err, docpadInstance) {
  if (err) return console.log(err.stack);
  // ...
});
However, looking at the API docs, passing actions this way doesn't seem to be the right approach at all; instead we should do:
var docpadInstanceConfiguration = {
  env: 'static'
};
require('docpad').createInstance(docpadInstanceConfiguration, function (err, docpadInstance) {
  if (err) return console.log(err.stack);

  var generateOpts = {};
  docpadInstance.action('generate', generateOpts, function (err, result) {
    if (err) return console.log(err.stack);
    // ...
  });
});
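As for actually exposing the generated site through Express, here is a minimal, hedged sketch (not from the original answer): once generation succeeds, the Express app can serve DocPad's generated files as static assets. The out directory is an assumption based on DocPad's default output path.

var express = require('express');
var path = require('path');

var app = express();

// Serve DocPad's generated static output next to your Express routes.
// DocPad writes to ./out by default; adjust if your configuration uses a different outPath.
app.use(express.static(path.join(__dirname, 'out')));

app.listen(3000);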
I created a landing page using Astro with Tailwind CSS, and it is currently hosted on Vercel. I wanted to try out the analytics service provided by Vercel. I have been able to use the Audience analytics service, but I cannot get the Web Vitals service to work. After enabling the service and redeploying my project, I am stuck on this screen (screenshot provided).
Please note that I did turn off the ad blocker, but that did not resolve the issue. I also added the following meta tag to resolve any CSP issue:
<meta http-equiv="Content-Security-Policy"
content="default-src 'self' vitals.vercel-insights.com"/>
But that has not solved the problem. That is why I want to know: does Vercel support analytics for Astro projects, and if it does, what am I doing wrong? Thank you.
Vercel’s Web Vitals analytics currently only has out-of-the-box support for Next, Nuxt, and Gatsby.
To track Web Vitals with a different framework like Astro, you need a bit of manual set up work as documented in Vercel’s Web Vitals API docs.
For example, in your base Astro layout you could include a script tag that imports their example code and runs it:
---
// src/layouts/BaseLayout.astro
---
<script>
  import { webVitals } from '../scripts/vitals';

  const analyticsId = import.meta.env.PUBLIC_VERCEL_ANALYTICS_ID;

  webVitals({
    path: window.location.pathname,
    analyticsId,
  });
</script>
Here’s Vercel’s example vitals.js snippet:
// src/scripts/vitals.js
import { getCLS, getFCP, getFID, getLCP, getTTFB } from 'web-vitals';

const vitalsUrl = 'https://vitals.vercel-analytics.com/v1/vitals';

function getConnectionSpeed() {
  return 'connection' in navigator &&
    navigator['connection'] &&
    'effectiveType' in navigator['connection']
    ? navigator['connection']['effectiveType']
    : '';
}

function sendToAnalytics(metric, options) {
  const body = {
    dsn: options.analyticsId, // qPgJqYH9LQX5o31Ormk8iWhCxZO
    id: metric.id, // v2-1653884975443-1839479248192
    page: options.path, // /blog/my-test
    href: location.href, // https://my-app.vercel.app/blog/my-test
    event_name: metric.name, // TTFB
    value: metric.value.toString(), // 60.20000000298023
    speed: getConnectionSpeed(), // 4g
  };
  if (options.debug) {
    console.log('[Analytics]', metric.name, JSON.stringify(body, null, 2));
  }
  const blob = new Blob([new URLSearchParams(body).toString()], {
    // This content type is necessary for `sendBeacon`
    type: 'application/x-www-form-urlencoded',
  });
  if (navigator.sendBeacon) {
    navigator.sendBeacon(vitalsUrl, blob);
  } else
    fetch(vitalsUrl, {
      body: blob,
      method: 'POST',
      credentials: 'omit',
      keepalive: true,
    });
}

export function webVitals(options) {
  try {
    getFID((metric) => sendToAnalytics(metric, options));
    getTTFB((metric) => sendToAnalytics(metric, options));
    getLCP((metric) => sendToAnalytics(metric, options));
    getCLS((metric) => sendToAnalytics(metric, options));
    getFCP((metric) => sendToAnalytics(metric, options));
  } catch (err) {
    console.error('[Analytics]', err);
  }
}
For a slightly more real-world implementation, check out the <TrackVitals> Astro component in the astro-badge repo.
Vercel Analytics does support frameworks other than Next, Nuxt, Gatsby, etc. The way to achieve it in Astro (1.6, 2.0, etc.) is to install the @vercel/analytics package and inject a simple <script> tag that imports it and calls its exported inject() function:
<script>
  import { inject } from '@vercel/analytics';

  // @ts-ignore: process.env.NODE_ENV is required by @vercel/analytics internally
  // so that it can determine the correct path for importing the analytics script
  globalThis.process = { env: { NODE_ENV: import.meta.env.MODE } };

  inject();
</script>
You can inject this code in your <head> section in any .astro template file.
Unfortunately, the package expects a non-ESM runtime environment and internally checks process.env.NODE_ENV to determine which script to load (a local relative path to the JS, or a fully qualified URL on a remote host). This is the reason MODE needs to be exposed as process.env.NODE_ENV. I tried to achieve this via Vite using define, but Astro seems to check for process somewhere else internally and fails.
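For reference, a hypothetical sketch of what such a define-based attempt could look like (this is not a confirmed working configuration; as noted above, the approach failed because something still reads process at runtime):

// astro.config.mjs
import { defineConfig } from 'astro/config';

export default defineConfig({
  vite: {
    define: {
      // Statically replace process.env.NODE_ENV at build time. As noted above,
      // the globalThis.process workaround in the <script> tag proved more reliable.
      'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV || 'production'),
    },
  },
});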
I want to write local Docusaurus plugins using TypeScript (it works fine using JS).
Is this possible? The docs here suggest putting them under ./src/plugins/name-of-plugin.
Reading the docs, there are examples with TS, but if I just replace js with ts I get various errors (e.g. "Cannot find module").
I'm using TypeScript for pages as well.
Adding the file extension worked for me.
For example:
plugins: ['./src/plugins/my-plugin.ts'],
my-plugin.ts
module.exports = async function myPlugin(context, options) {
  return {
    name: "my-plugin",
    async loadContent() {
      console.log("Hello World plugin wow!");
    },
    async contentLoaded({ content, actions }) {
      console.log(content);
    },
  };
};
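If you also want type checking inside the plugin itself, a hedged sketch (assuming the LoadContext and Plugin types exported by @docusaurus/types are available in your project) could look like this:

// my-plugin.ts — hypothetical typed variant of the plugin above
import type { LoadContext, Plugin } from "@docusaurus/types";

module.exports = async function myPlugin(
  context: LoadContext,
  options: unknown
): Promise<Plugin<void>> {
  return {
    name: "my-plugin",
    async loadContent() {
      console.log("Hello World plugin wow!");
    },
    async contentLoaded({ content, actions }) {
      console.log(content);
    },
  };
};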
removing the extension shows the same error :)
I am trying to follow the Strapi v4.0.0 guide at https://docs.strapi.io/developer-docs/latest/developer-resources/plugin-api-reference/server.html#entry-file for extending the users-permissions plugin to add a custom route/controller, but so far I have been unsuccessful. I added the custom files as stated in the docs, but there is no change in the UI.
I managed to get this to work for a normal API (highlighted in yellow in the screenshot), but was unable to do so for the users-permissions plugin.
In the previous version, 3.6.8, this functionality was available through the extensions folder.
Am I missing something from the new guide? I even tried copying the files from node_modules > @strapi > plugin-users-permissions and adding a new route and method to the existing controller file, but it still does not reflect the change in the section where we assign route permissions to roles. The users-permissions plugin still shows the original routes, with no change.
Thanks,
I ran into this thread while researching pretty much the same issue, and I wanted to share my solution.
First of all, I found this portion of the documentation more useful than the one you referenced: https://docs.strapi.io/developer-docs/latest/development/plugins-extension.html
My goal was to write a new route to validate JWT tokens, based on the comment made here: https://github.com/strapi/strapi/issues/3601#issuecomment-510810027 but updated for Strapi v4.
The solution turned out to be simple:
Create a new folder structure: ./src/extensions/users-permissions if it does not exist.
Create a new file ./src/extensions/users-permissions/strapi-server.js if it does not exist.
Add the following to the file:
module.exports = (plugin) => {
  plugin.controllers.<controller>['<new method>'] = async (ctx) => {
    // custom logic here
  };

  plugin.routes['content-api'].routes.push({
    method: '<method>',
    path: '/your/path',
    handler: '<controller>.<new method>',
    config: {
      policies: [],
      prefix: '',
    },
  });

  return plugin;
};
If you're unsure what controllers are available, you can always check the API documentation or console.log(plugin) or console.log(plugin.controllers).
After the admin server restarts, you should see your new route under the users-permissions section as you would expect, and you can assign rights to it as you see fit.
My full strapi-server.js file including the logic to validate JWT:
module.exports = (plugin) => {
  plugin.controllers.auth['tokenDecrypt'] = async (ctx) => {
    // get the token from the POST request
    const { token } = ctx.request.body;

    // check token requirement
    if (!token) {
      return ctx.badRequest('`token` param is missing');
    }

    try {
      // decrypt the jwt
      const obj = await strapi.plugin('users-permissions').service('jwt').verify(token);

      // send the decrypted object
      return obj;
    } catch (err) {
      // if the token is not valid, verify() will throw an error
      return ctx.badRequest(err.toString());
    }
  };

  plugin.routes['content-api'].routes.push({
    method: 'POST',
    path: '/token/validation',
    handler: 'auth.tokenDecrypt',
    config: {
      policies: [],
      prefix: '',
    },
  });

  return plugin;
};
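Once the route is registered and permissions are granted in the admin panel, calling it from a client could look like the hedged sketch below. The mount path is an assumption; with Strapi's default API prefix the route is typically exposed under /api.

// Hypothetical client-side call to the custom endpoint added above.
async function validateToken(myJwt) {
  const res = await fetch('http://localhost:1337/api/token/validation', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ token: myJwt }),
  });
  return res.json(); // the decrypted JWT payload, or a 400 error body
}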
When exporting routes you need to export the type, either content-api or admin. Look at the Strapi email plugin in node_modules for an example, change the folder and file structure in your routes folder to match that, and then you will be able to set permissions in the admin panel (see the sketch below).
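A hedged sketch of what that routes folder could export (the route names and paths are hypothetical; the shape loosely mirrors how the built-in email plugin splits its routes by type):

// ./server/routes/index.js (hypothetical plugin routes file)
module.exports = {
  'content-api': {
    type: 'content-api',
    routes: [
      {
        method: 'GET',
        path: '/my-route',
        handler: 'myController.find',
        config: { policies: [] },
      },
    ],
  },
  admin: {
    type: 'admin',
    routes: [],
  },
};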
If your Strapi server is using TypeScript, make sure you name your extension files accordingly: instead of strapi-server.js, you would need to name your file strapi-server.ts.
I have a NestJS application with TypeORM configured for MySQL. I want to have e2e (integration) tests, and for that reason I want to use an in-memory database in the tests, which I configured this way:
{
  type: 'sqlite',
  database: ':memory:',
  synchronize: true,
  dropSchema: true,
  entities: [`dist/**/*.entity{.ts,.js}`],
}
And the setup of the tests:
beforeEach(async () => {
  const moduleFixture: TestingModule = await Test.createTestingModule({ imports: [AppModule, UserModule] })
    .overrideProvider(TypeOrmConfigService)
    .useClass(MockTypeOrmConfigService)
    .compile();

  app = await moduleFixture.createNestApplication();
  await app.init();
});
When running the test I got:
AlreadyHasActiveConnectionError: Cannot create a new connection named "default", because connection with such name already exist and it now has an active connection session.
at new AlreadyHasActiveConnectionError (/Users/user/workspace/app/src/error/AlreadyHasActiveConnectionError.ts:8:9)
at ConnectionManager.Object.<anonymous>.ConnectionManager.create (/Users/user/workspace/app/src/connection/ConnectionManager.ts:57:23)
at Object.<anonymous> (/Users/user/workspace/app/src/index.ts:228:35)
at step (/Users/user/workspace/app/node_modules/tslib/tslib.js:136:27)
at Object.next (/Users/user/workspace/app/node_modules/tslib/tslib.js:117:57)
at /Users/user/workspace/app/node_modules/tslib/tslib.js:110:75
at new Promise (<anonymous>)
at Object.__awaiter (/Users/user/workspace/app/node_modules/tslib/tslib.js:106:16)
at Object.createConnection (/Users/user/workspace/app/node_modules/typeorm/index.js:186:20)
at rxjs_1.defer (/Users/user/workspace/app/node_modules/@nestjs/typeorm/dist/typeorm-core.module.js:151:29)
(node:19140) UnhandledPromiseRejectionWarning: AlreadyHasActiveConnectionError: Caught error after test environment was torn down
If I move the setup from beforeEach to a beforeAll block it's OK, but I'm afraid that when I create several spec files the error will come back. How should this be handled properly?
EDIT:
The problem was that each test sets up the application and so creates a new connection. The solution was to use keepConnectionAlive: true so that all tests reuse the same connection.
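For example, a minimal sketch merging that flag into the test configuration shown above (the other options are unchanged):

{
  type: 'sqlite',
  database: ':memory:',
  synchronize: true,
  dropSchema: true,
  keepConnectionAlive: true, // reuse the already-created "default" connection across test modules
  entities: [`dist/**/*.entity{.ts,.js}`],
}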
keepConnectionAlive: true is the way to go.
Using keepConnectionAlive: true produced the following error for me.
Jest did not exit one second after the test run has completed.
This usually means that there are asynchronous operations that weren't stopped in your tests. Consider running Jest with --detectOpenHandles to troubleshoot this issue.
Adding the below to each e2e test fixed my issue:
afterEach(async () => {
  await app.close();
});
Based on 0xCAP's answer, you can also do something like this:
// jest.setup.ts
jest.mock("/path/to/database/config/object", () => {
  const { databaseConfig, ...rest } = jest.requireActual("/path/to/database/config/object");

  return {
    ...rest,
    databaseConfig: {
      ...databaseConfig,
      keepConnectionAlive: true, // replace old config
    },
  };
});

// jest.config.js
module.exports = {
  // ...other options
  setupFilesAfterEnv: ["jest.setup.ts"],
};
I was using Mocha to test my Node.js app with a test database. In order to reset the DB before each test I had the following code, which worked perfectly:
process.env.NODE_ENV = 'test';

var knex = require('../db/knex');

describe("Add Item", function() {
  beforeEach(function(done) {
    knex.migrate.rollback()
      .then(function() {
        knex.migrate.latest()
          .then(function() {
            return knex.seed.run()
              .then(function() {
                done();
              });
          });
      });
  });
  ...
I've since switched from mocha to mocha-casperjs for my integration tests, and now the knex migrations won't run. I'm given this error message with the exact same beforeEach hook:
undefined is not an object (evaluating 'knex.migrate.rollback')
phantomjs://platform/new-item.js:12:17
value#phantomjs://platform/mocha-casperjs.js:114:20
callFnAsync#phantomjs://platform/mocha.js:4314:12
run#phantomjs://platform/mocha.js:4266:18
next#phantomjs://platform/mocha.js:4630:13
phantomjs://platform/mocha.js:4652:9
timeslice#phantomjs://platform/mocha.js:12620:27
I'm pretty sure that the migration functionality is not included in the webpack build. If you go to http://knexjs.org/, open up the debug console, and check out different clients (e.g. mysql.migrate), you'll see that there are no functions declared at all.
Actually, you can check this with Node too if you explicitly load the webpack build instead of the Node lib:
// load the webpack build instead of the node build...
let knex = require('knex/build/knex')({ client: 'pg' });
console.log(knex.migrate);
// outputs: {}
So... the question is: why are you trying to run your tests in the PhantomJS browser instead of Node.js?