publish two separate result directories from nextflow process - nextflow

I have a nextflow process that outputs a result folder and a log as below:
process test {
label "x"
input:
path(somefile)
output:
path "test_results"
path "test_log.log"
path "something_for_next_process", emit: f
shell:
'''
myshellcommand
'''
}
And a config file to publish the results like below.
process
{
withLabel : x
{
publishDir =
[
path: {"$params.outdir/"},
pattern: "*_results",
mode: 'copy',
saveAs: {filename -> "${filename.split('_')[0]}/all_results/${filename.split("_")[1]}"}
]
}
withLabel : x
{
publishDir =
[
path: {"$params.outdir/"},
pattern: "*.log",
mode: 'copy',
saveAs: {filename -> "${filename.split('_')[0]}/logs/${filename}"}
]
}
}
I tried multiple combinations; however, I can't get a label to publish its desired contents to two different folders. It always takes whichever one is last in the publish config (in this case the logs). I know I can just put the publishDir options on the process in the workflow and that also works, but I would like to do it through the config file. Is there a way I can get this to work?

You can specify the publishDir directive more than once in your nextflow.config by supplying a list of maps, for example:
process {
withLabel: 'x' {
publishDir = [
[
path: { "$params.outdir/" },
pattern: "*_results",
mode: 'copy',
saveAs: { fn -> "${fn.split('_')[0]}/all_results/${fn.split("_")[1]}" }
],
[
path: { "$params.outdir/" },
pattern: "*.log",
mode: 'copy',
saveAs: { fn -> "${fn.split('_')[0]}/logs/${fn}" }
]
]
}
}
If you don't need the withName or withLabel process selectors, the publishDir directive can just be specified multiple times inside of your process definition, for example:
process test {
publishDir(
path: { "$params.outdir/" },
pattern: "*_results",
mode: 'copy',
saveAs: { fn -> "${fn.split('_')[0]}/all_results/${fn.split("_")[1]}" }
)
publishDir(
path: { "$params.outdir/" },
pattern: "*.log",
mode: 'copy',
saveAs: { fn -> "${fn.split('_')[0]}/logs/${fn}" }
)
...
}

You can have multiple publishDir directives for the same process. No need to have the withLabel twice. If it was in the process block, it would be like the snippet below, for example.
process foo {
publishDir "$params.outdir/$sampleId/counts", pattern: "*_counts.txt"
publishDir "$params.outdir/$sampleId/outlooks", pattern: '*_outlook.txt'
publishDir "$params.outdir/$sampleId/", pattern: '*.fq'
input:
Check an example here.

Related

Rollup: Equivalent configuration of r.js config

I'm using requirejs optimizer and I have this configuration file which can be run via node r.js -o build.js
({
baseUrl: ".",
name: "src/app/SPA/AppContext",
out: "src/app/SPA/build/app.js",
paths: {
n: "empty:",
N: "empty:"
},
optimize: 'none'
});
Can someone provide the equivalent for rollup.config.mjs? Here's what I have now:
export default [
{
input: 'src/app/SPA/AppContext.js',
output: {
file: 'src/app/SPA/build/app.js',
name: 'src/app/SPA/AppContext',
format: 'amd',
paths: {
n: 'empty:',
N: 'empty:'
},
amd: {
id: 'src/app/SPA/AppContext'
}
}
}
];
Currently, the output file does not include the dependencies.

Installing Rabbitmq using helm3 from bitnami throws chart.metadata is required

I am trying to install rabbitmq:8.6.1 from bitnami chart repository using terraform:0.12.18.
My helm version is 3.4.2
While installing, I am getting the following error:
Error: validation: chart.metadata is required
My terraform file is as below
resource "kubernetes_secret" "rabbitmq_load_definition" {
metadata {
name = "rabbitmq-load-definition"
namespace = kubernetes_namespace.kylas_sales.metadata[0].name
}
type = "Opaque"
data = {
"load_definition.json" = jsonencode({
"users": [
{
name: "sales",
tags: "administrator",
password: var.rabbitmq_password
}
],
"vhosts": [
{
name: "/"
}
],
"permissions": [
{
user: "sales",
vhost: "/",
configure: ".*",
write: ".*",
read: ".*"
}
],
"exchanges": [
{
name: "ex.iam",
vhost: "/",
type: "topic",
durable: true,
auto_delete: false,
internal: false,
arguments: {}
}
]
})
}
}
resource "helm_release" "rabbitmq" {
chart = "rabbitmq"
name = "rabbitmq"
version = "8.6.1"
timeout = 600
repository = "https://charts.bitnami.com/bitnami"
namespace = "sales"
depends_on = [
kubernetes_secret.rabbitmq_load_definition
]
}
After looking at issue 509 on terraform-provider-helm:
If your module/subdirectory name is the same as your chart name (in my case the directory name was rabbitmq and my helm_release resource name was also rabbitmq), you get this error — I am still not able to identify why.
Solution: I changed my directory name from rabbitmq to rabbitmq-resource and this error is gone.

rollup watch include directory

I am trying with following rollup.config.js file
import typescript from "rollup-plugin-typescript2";
import pkg from "./package.json";
import copy from 'rollup-plugin-copy'
import clean from 'rollup-plugin-clean';
export default [
{
input: "src/index.ts",
external: Object.keys(pkg.peerDependencies || {}),
watch: {
skipWrite: false,
clearScreen: false,
include: 'src/**/*',
//exclude: 'node_modules/**',
// chokidar: {
// paths: 'src/**/*',
// usePolling: false
// }
},
plugins: [
clean(),
copy({
targets: [
{ src: 'src/*', dest: 'dist' }
]
}),
typescript({
typescript: require("typescript"),
include: [ "*.ts+(|x)", "**/*.ts+(|x)", "*.d.ts", "**/*.d.ts" ]
}),
],
output: [
{ file: pkg.main, format: "cjs" },
{ file: pkg.module, format: "esm" },
{
file: "example/src/reactComponentLib/index.js",
format: "es",
banner: "/* eslint-disable */"
}
]
}
];
I want to rebuild when anything in src changes. I have a couple of files which are not imported in the .js and .ts files, but I want them copied to the dist folder. copy is working fine, but the watch is not picking up changes in those other files. I tried a lot of variations on the chokidar options but no luck yet.
Anyone have any idea how to resolve this?
watch.include only works on files pertaining to the module graph so if they are not imported they won't be included (https://rollupjs.org/guide/en/#watchinclude).
You can solve this by creating a small plugin that calls this.addWatchFile on those external files when the build starts. Here is an example:
plugins: [
{
name: 'watch-external',
buildStart(){
this.addWatchFile(path.resolve(__dirname, 'foo.js'))
}
}
]
Combine it with some globbing utility such as fast-glob and just call this.addWatchFile for every file you want to copy:
import fg from 'fast-glob';
export default {
// ...
plugins: [
{
name: 'watch-external',
async buildStart(){
const files = await fg('src/**/*');
for(let file of files){
this.addWatchFile(file);
}
}
}
]
}

logging in hapijs server with good-http plugin issue

I'm using the good plugin for my app, and I have copied the config parameters straight from the sample in the page. console and file are working, but good-http is not working!
here is my configuration:
myHTTPReporter: [
{
module: 'good-squeeze',
name: 'Squeeze',
args: [{error: '*', log: 'error', request: 'error'}]
},
{
module: 'good-http',
args: [
'http://localhost/log-request/index.php?test=23424', // <- test file I created to see if data is being sent or not
{
wreck: {
headers: {'x-api-key': 12345}
}
}
]
}
]
the only argument that is actually working is the ops: * and none of the error: *, ... is working
Am I missing something in my config parameters?
Maybe you should change the threshold parameter of the plugin; it is set to 20 by default, so it only sends data after 20 events. If you want immediate results you have to change it to 0.
myHTTPReporter: [
{
module: 'good-squeeze',
name: 'Squeeze',
args: [{error: '*', log: 'error', request: 'error'}]
},
{
module: 'good-http',
args: [
'http://localhost/log-request/index.php?test=23424', // <- test file I created to see if data is being sent or not
{
threshold: 0,
wreck: {
headers: {'x-api-key': 12345}
}
}
]
}
]

How to perform dijit optimization with r.js?

How do we get around the "document is not defined" error when doing a r.js build via r.js -o against a dijit?
Specifically, I'm trying to build r-build.js:
define(["require", "exports", "dijit/layout/ContentPane"], function (require, exports, ContentPane) {
function simple() {
return ContentPane;
}
return simple;
});
Using r.js.cmd -o r-build.js and it reports:
ReferenceError: document is not defined
In module tree:
test/simple
dijit/layout/ContentPane
dijit/_Widget
dojo/query
dojo/selector/_loader
My r-build.js file looks like this:
({
appDir: "../",
baseUrl: "amd",
dir: "../../release",
optimize: "none",
modules: [
{
name: "test/simple",
exclude: ["jquery", "dojo"]
}
],
packages: [
{
name: 'cm',
location: 'http://localhost:93/CodeMirror'
},
{
name: 'jquery',
location: 'd:/code/jquery/src',
main: 'jquery'
},
{
name: 'jquery/ui',
location: 'http://localhost:93/jquery-ui/ui'
},
{
name: 'jquery/themes',
location: 'http://localhost:93/jquery-ui/themes'
},
{
name: 'sizzle',
location: 'http://localhost:93/jquery/external/sizzle/dist',
main: 'sizzle'
},
{
name: 'dojo',
location: 'd:/code/dojo'
},
{
name: 'dijit',
location: 'd:/code/dijit'
},
{
name: 'xstyle',
location: 'http://localhost:93/xstyle'
}
]
})
I'm fighting with the same issue. Building a r.js bundle with Dojo is a pain.
That one can be easy to fix... If you don't have problems with the supported browsers (anything post-IE9, for instance), override that file and change the lines that check for querySelectorAll support to true, and you will not need to do those checks. Like...
has.add("dom-qsa2.1", true);
has.add("dom-qsa3", true);
Hope it helps a bit...