Grunt loop for copy task, but copying only last file - loops

Can you help me with this problem?
I'm trying to create a function in Gruntfile.js that copies template files into my project, using a loop over a .json file to "automate" the copy job. The function appears to work: Grunt runs the copy task once per record in the .json file, and a grunt.log.write inside the loop prints each module name — but in practice only the last registered file is actually copied.
First my .json file:
{
"config": {
"modules": [
{"name": "footer", "number": 2},
{"name": "header", "number": 1}
]
}
}
Second my copy task with the loop variables:
// Copy target whose cwd is built from grunt options via <%= %> templates.
// NOTE: the templates are expanded when the copy task actually RUNS, not
// when grunt.option() is set — so every queued run of this target sees the
// option values present at execution time.
copy: {
core: {
files: [{
expand: true,
cwd: "core/<%=grunt.option('coreName')%>/<%=grunt.option('coreNumber')%>/",
src: "**",
dest: "../sass/2_deploy/core/"
}]
}
}
The intention was to get the files inside the versioned directories ("header/1/", "footer/2/") and transfer them into the deploy directory according to the code above.
Third, here is the function that reads the .json file and declares the variables and executes the task inside the loop:
// NOTE(review): grunt.task.run() only QUEUES 'copy:core'; queued tasks run
// after this function returns. By then the loop has finished, so every
// queued run sees the options from the LAST iteration — which is why only
// the final module's files get copied, even though the task runs twice.
function moveCoreFiles() {
var models = require('./variables.json');
var cores = models.config.modules;
// for...in over an array yields indices; `core` also leaks as an implicit global.
for (core in cores) {
grunt.option('coreName', cores[core].name);
grunt.option('coreNumber', cores[core].number);
// Queues the task for later; it does NOT execute here inside the loop.
grunt.task.run('copy:core');
// Logs immediately — this is why the names print even though the copies
// happen later with stale options.
grunt.log.write(grunt.option("coreName"));
}
}
// ... enter code here
grunt.registerTask('moveCore', moveCoreFiles);
At this point, when executing the task, Grunt returns this information:
$ grunt moveCore
Running "moveCore" task
footerheader
Running "copy:core" (copy) task
Copied 1 file
Running "copy:core" (copy) task
Copied 1 file
From the description of the task it seems that grunt has executed task one for each record twice, but in practice it only moved the last "header" file to the directory, my question would be if this type of action is really possible or if I should abandon the loop within the Gruntfile.
Thanks a lot for the help!
Regards!

I think you should build an array of files to copy, and then pass the array to the task, so grunt will run the task for all the folders to copy only once, instead of running the task multiple times.
First you define the function that will create the array of files to copy:
// Builds one {expand, cwd, src, dest} descriptor per module listed in
// variables.json, so a SINGLE copy:core run handles every module instead of
// re-running the task per module with changing options.
function getFiles() {
var models = require('./variables.json');
var cores = models.config.modules;
var files = [];
for (var core in cores) {
files.push({
expand: true,
cwd: "core/" + cores[core].name + "/" + cores[core].number + "/",
src: "**",
dest: "../sass/2_deploy/core/"
});
// Log each source directory as it is added, for visibility.
grunt.log.write("core/" + cores[core].name + "/" + cores[core].number + "/\r\n");
}
return files;
}
Then, define the copy task to use the array of files:
grunt.initConfig({
copy: {
core: {
files: getFiles()
}
}
});
Then just define the task:
grunt.registerTask('default', ['copy:core']);
The resulting Gruntfile.js will looks as follows:
// Complete Gruntfile: the files array for copy:core is computed ONCE, at
// config time, from the modules listed in variables.json — one descriptor
// per module, one task run for all of them.
module.exports = function(grunt) {
// Returns one {expand, cwd, src, dest} entry per module in variables.json.
function getFiles() {
var models = require('./variables.json');
var cores = models.config.modules;
var files = [];
for (var core in cores) {
files.push({
expand: true,
cwd: "core/" + cores[core].name + "/" + cores[core].number + "/",
src: "**",
dest: "../sass/2_deploy/core/"
});
// Log each source directory being registered.
grunt.log.write("core/" + cores[core].name + "/" + cores[core].number + "/\r\n");
}
return files;
}
grunt.initConfig({
copy: {
core: {
// Evaluated once, when initConfig runs.
files: getFiles()
}
}
});
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.registerTask('default', ['copy:core']);
};
Hope it helps!

You can dynamically configure a Tasks target (i.e. copy:core) via a separate Custom Task.
In the example Gruntfile.js below we dynamically configure a copy:core target based on each entry in the modules Array in variables.json, before running the task. This is handled by the configCopyCoreAndRun task.
Gruntfile.js
module.exports = function (grunt) {
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.initConfig({
copy: {
// <-- `core` target is intentionally not defined. It will be
// configured, set, and run by configCopyCoreAndRun task.
anotherTarget: {
src: './variables.json',
dest: '../sass/2_deploy/'
}
}
});
/**
* Custom Helper function dynamically configures the `core` target in
* `copy` task, based on data entered in an external `.json` file.
*/
grunt.registerTask('configCopyCoreAndRun', function() {
// grunt.file.readJSON re-reads the file each run (no require() cache).
var modules = grunt.file.readJSON('./variables.json').config.modules;
var core = {files: []};
modules.forEach(function(module) {
core.files.push({
expand: true,
cwd: 'core/' + module.name + '/' + module.number,
src: '**',
dest: '../sass/2_deploy/core/'
})
});
// Inject the freshly built target into config, then queue it to run
// after this task finishes.
grunt.config.set('copy.core', core);
grunt.task.run('copy:core');
});
// We call `configCopyAndRun` then `copy:anotherTask` to demonstrate
// that other copy target(s) can also exist and be used.
grunt.registerTask('default', ['configCopyCoreAndRun', 'copy:anotherTarget']);
};
Note:
To avoid the potential of files (with the same name) being overwritten when copying multiple assets to the dest directory (i.e. ../sass/2_deploy/core/), you may want to consider changing this line in the Gruntfile.js above:
dest: '../sass/2_deploy/core/'
to something like this instead:
dest: '../sass/2_deploy/core/' + module.name + '/' + module.number
This will ensure the named module folder and the numbered folder (of the source directories) gets replicated in the destination path.

Related

How to split dynamically by directories with Webpack/SplitChunks plugin?

I'm trying to split my React code (created with create-react-app) with the splitChunks plugin in the following way :
I have following components (JSX) structure :
services
serviceA
ComponentA1
ComponentA2
subFolder
ComponentA3
...
serviceB
ComponentB1
ComponentB2
...
serviceC
ComponentB1
ComponentB2
...
...
and I want to have following output (build) :
static/js
serviceA
serviceA.bundle.chunkhash.js
serviceB
serviceB.bundle.chunkhash.js
serviceC
serviceC.bundle.chunkhash.js
(other runtimes / mains are at the root of /static/js)
Another restriction is that components are loaded dynamically with
const Component = lazy(() => import(componentPath));
...
<Suspense fallback={..}>Component</Suspense>
"componentPath" is determined on the fly (when a user clicks on an icon then it opens a given service).
The reason for this is that I want to include each bundle into a separate Docker image running the backend. Then each Docker image is reachable thanks to Application routing :
static/js/serviceA/ ==> js served by Docker container running service A
static/js/serviceB/ ==> js served by Docker container running service B
static/js/serviceC/ ==> js served by Docker container running service C
So far, I've tried to:
set the output.chunkFilename to [name]/[name].[chunkhash].js
use the webpackChunkName with [name] and [request]:
[name] doesn't seem to work (I got literally "[name]" as part of my directory name).
[request] flattens the name of the directories:
serviceA-ComponentA1
serviceA-ComponentA2
serviceA-subFolder-ComponentA3
serviceB-componentB1
serviceB-componentB2
...
Then I tried to use the splitChunks plugin with following :
splitChunks: {
chunks: 'all',
name: function(module) {
let serviceName = module.rawRequest ? module.rawRequest : 'default';
serviceName = serviceName.replace('../', '').replace('./', '');
serviceName = serviceName.split('/')[0];
return serviceName;
},
cacheGroups: {
vendors: {
test: /[\\/]node_modules[\\/]/,
priority: -10
},
default: {
minChunks: 2,
priority: -20,
reuseExistingChunk: true
},
serviceA: {
test: /[\\/]serviceA[\\/]/,
priority: -10
},
serviceB: {
test: /[\\/]serviceB[\\/]/,
priority: -10
},
serviceC: {
test: /[\\/]serviceC[\\/]/,
priority: -10
},
}
},
This approach seems to be working, as all my services end up in their own directories. But I still get some additional directories named with numbers (probably bundle IDs) that I would have expected to be included in the default group instead.
So the question is : is my approach correct ?
I'm not sure if the following option would work for you. I had a similar problem, where I needed different folders to be outputed on different bundles.
In my case, I started with the glob solution, suggested here.
Then, knowing that I needed an array of inputs for each desired output, I came up with this:
const path = require('path');
const glob = require('glob');
const plugins = [...];
module.exports = {
entry: glob.sync('./src/**/*.js').reduce((acc, item) => {
const path = item.split('/');
path.pop();
const rootFolder = path[2] ? `${path[0]}/${path[2]}` : path[0];
if (acc[rootFolder]) {
acc[rootFolder].push(item);
} else {
acc[rootFolder] = [item];
}
return acc;
}, {}),
output: {
filename: '[name]/main.js',
path: path.resolve(__dirname, 'dist'),
},
module: { ... },
plugins,
};
This is a simplified version of my config and it could probably be improved, but it works fine for my needs. :)
More info on glob library: https://github.com/isaacs/node-glob

React multiple output files of bundle of single Input file

I am working in React and want to publish my code. I am creating a bundle using webpack, and I want the bundle to be divided into three parts — that is, my code should be split into three different files so that no single file gets too large. I went through the official webpack docs and other online resources but still have not found a solution.
Here is a complete configuration webpack.config.js that you can base yours on.
// NOTE(review): webpack 1/2-era config — CommonsChunkPlugin was removed in
// webpack 4 (replaced by optimization.splitChunks); this shape matches the
// old API only.
const webpack = require('webpack');
const CommonsChunkPlugin = webpack.optimize.CommonsChunkPlugin;
const WebpackConfig = {
// multiple component entry points
entry: {
AppPart1: './src/AppPart1',
AppPart2: './src/AppPart2',
AppPart3: './src/AppPart3'
},
// generate files
output: {
path: './assets',
// creates JS files like "AppPart1.js"
filename: '[name].js'
},
module: {
preLoaders: [
// add any pre-loaders here, OPTIONAL
],
loaders: [
// add any loaders here, like for ES6+ transpiling, React JSX etc
]
},
resolve: {
extensions: ['.jsx', '.js']
},
plugins: [
// this will factor out some common code into `bundle.js`
new CommonsChunkPlugin('bundle.js'),
]
};
module.exports = WebpackConfig;
At the end of webpack build, here is what you will have in the assets folder
AppPart1.js
AppPart2.js
AppPart3.js
bundle.js
bundle.js will contain some shared code and must be included on all your pages along with the appropriate part file for the page.
If you have the three separate files, you can put multiple entry points on your webpack config:
entry: {
"bundle1":"./src/app/somefile1.jsx",
"bundle2":"./src/app/somefile2.jsx,
"bundle3":"./src/app/somefile2.jsx"
}

webpack separate build files

I have a nested directory structure with jsx modules, like
app/js/header/index.jsx
app/js/task/runner.jsx
and so on
is it possible to have webpack transpile each one of them and output the result in the same directory as the jsx file?
Regards
If I understand you correctly, you want to put resulting module next to each source module. It seems that you can achieve this with a plugin:
var fs = require('fs');
// Minimal webpack plugin: during the 'emit' phase, write each matching
// module's compiled source to disk next to the original file.
function MyPlugin() {}
MyPlugin.prototype.apply = function(compiler) {
compiler.plugin('emit', function(compilation, callback) {
compilation.modules.forEach(m => {
if (/filename/.test(m.resource)) { // test for filename to exclude node_modules
// _source._value is presumably the post-loader (transpiled) text —
// it is a private webpack field; verify against the webpack version used.
fs.writeFileSync(m.resource + '.transpiled', m._source._value);
}
});
// 'emit' is asynchronous: the callback must be invoked to let the build continue.
callback();
});
};
and in the webpack config:
{
...
plugins: [ MyPlugin() ],
...
}
Is it what you are trying to do?

Google Analytics optimization with r.js

Hi everyone, I am trying to make r.js work but I get an error on Google Analytics like this:
Tracing dependencies for: main
Cannot optimize network URL, skipping: //www.google-analytics.com/analytics.js
this is my requirejs.config file
// Stub the Google Analytics command queue so code depending on it works
// before (or without) the real analytics.js loading.
window.GoogleAnalyticsObject = "__ga__";
window.__ga__ = function() {
for (var i=0; i<arguments.length; i++) {
var arg = arguments[i];
// Fire hitCallback immediately so callers waiting on it never stall
// when analytics.js is blocked or absent.
if (arg.constructor == Object && arg.hitCallback) {
arg.hitCallback();
}
}
};
window.__ga__.q = [["create", "UA-82626142-1", "auto"]];
window.__ga__.l = Date.now();
// RequireJS config fragment: load GA from a protocol-relative network URL.
paths: {
ga: "//www.google-analytics.com/analytics"
},
shim: {
// analytics.js is not an AMD module; expose the stub above as its export.
"ga": {
exports: "__ga__"
},
}
and this is my r.js config file
// r.js build config. NOTE(review): the optimizer cannot inline network
// URLs — map `ga` to 'empty:' (with the trailing colon) so it is skipped
// at build time, as the accepted answer below explains.
{
baseUrl: "../js",
name: "main",
out: 'app-built.js',
findNestedDependencies: true,
paths: {
ga: "//www.google-analytics.com/analytics",
},
include: 'requireLib',
mainConfigFile: "../js/main.js",
}
I have used the 'empty:' path for the r.js config but it doesn't work. Thanks in advance.
I have checked the RequireJS documentation again and again, and I found the right way to make CDN (URL) dependencies optimizable with r.js. You need to write this in the optimizer config file:
paths: {
....
ga:'empty'
}
We can write 'empty:' for any CDN link; alternatively, we can pass it on the command line when running the optimizer config file, like this (this is my own example from my app):
'node build/r.js -o build/build.single.js paths.ga=empty:'
Hope this will be useful for others who have the same problem.

grunt-msdeploy for deploying AngularJS app to multiple servers

I am using grunt-msdeploy for deploying AngularJS code to one server, and this is working perfectly fine. I would like to deploy the same code to multiple servers. How do I achieve this? Please help!
Gruntfile.js code for msdeploy:
var path = require('path');
//add the config file name
var configFile = path.resolve('./deployconfig.json');
var config = require(configFile);
// Single msdeploy target: syncs ./dist to the one server described in
// deployconfig.json over Web Deploy (wmsvc).
msdeploy: {
push: {
options: {
verb: 'sync',
allowUntrusted: 'true',
source: {
'contentPath': path.resolve('./dist')
},
dest: {
contentPath: config.contentPath,
wmsvc: config.serverAddress,
userName: config.userName,
password: config.password
}
}
}
}
grunt.loadNpmTasks('grunt-msdeploy');
//add task for deployment - copying the dist from local server to remote server
grunt.registerTask('deploy', ['msdeploy:push']);
deployconfig.json:
{
"contentPath": "c:/inetpub/wwwroot/dist",
"serverAddress": "ec2-xx-xx-xx-x.ap-northeast-1.compute.amazonaws.com",
"userName": "xxxxxxxxx",
"password": "xxxxxxxxx"
}
I have tried using multiple dest in the msdeploy with multiple servers information in the json file, but that didn't work. Is there a way to do this at all ?
I think you are configuring the task wrong, that's why it doesn't work in your case, it should be defined as taks for grunt, here's the pseudo-code:
grunt.initConfig({
msdeploy: {
options: {
// Task-specific options go here.
},
your_target: {
// Target-specific file lists and/or options go here.
},
},
});
More information and options in the official manual.
For multiple destinations just create several target descriptions,
grunt.initConfig({
msdeploy: {
options: {
// Task-specific options go here.
},
your_target_1: {
// Target-specific file lists and/or options go here.
},
your_target_2: {
// Target-specific file lists and/or options go here.
},
...
},
});
You may create options, generic for all of those targets, as well as specific options per each target.
When you'll run the task, simply do not specify which target you need to run, and it will execute them one by one:
// will be executed for all the targets from task `msdeploy` definition
grunt.registerTask('deploy', ['msdeploy']);
// or alternatively you may explicitly define the order of tasks:
grunt.registerTask('deploy', ['msdeploy:your_target_1', 'msdeploy:your_target_2']);
This is the working solution:
// Working multi-server setup: one msdeploy target per destination; the
// 'deploy' task runs them in sequence, pushing the same ./dist build to each.
msdeploy: {
target1: {
options: {
verb: 'sync',
allowUntrusted: 'true',
source: {
'contentPath': path.resolve('./dist')
},
dest: {
contentPath: "target1path",
wmsvc: "target1serverAddress",
userName:"target1userName",
password:"target1password"
}
}
},
// Identical options apart from the destination credentials/path.
target2: {
options: {
verb: 'sync',
allowUntrusted: 'true',
source: {
'contentPath': path.resolve('./dist')
},
dest: {
contentPath: "target2path",
wmsvc: "target2serverAddress",
userName:"target2userName",
password:"target2password"
}
}
}
}
grunt.registerTask('deploy', ['msdeploy:target1', 'msdeploy:target2']);
In case if any one wants to do it with the config file , add multiple entries to the json config file like this:
[
{
"contentPath": "c:/inetpub/wwwroot/dist",
"serverAddress": "ec2-xx-xxx-xx-xxx.ap-northeast-1.compute.amazonaws.com",
"userName": "xxxxxxxxxx",
"password": "xxxxxxxxx"
},
{
"contentPath": "c:/inetpub/wwwroot/dist",
"serverAddress": "ec2-xx-xxx-xx-xxx.ap-northeast-1.compute.amazonaws.com",
"userName": "xxxxxxxxxx",
"password": "xxxxxxxxx"
}
]
and the values can be referred as :
var path = require('path');
var configFile = path.resolve('./deployconfig.json');
var config = require(configFile);
contentPath: config[0].contentPath,
wmsvc: config[0].serverAddress,
userName: config[0].userName,
password: config[0].password

Resources