How can I deploy a Node.js app with local dependencies to GAE?
My app's package.json has local (relative-path) dependencies, so the deploy fails.
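For example, a dependency entry like this (the name and path are just for illustration):
"dependencies": {
  "shared-lib": "../shared-lib"
}
The relative path points outside the directory that gets uploaded, so npm install cannot resolve it on App Engine.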
Thanks!
No help came, so I did it myself. Here's the solution for everyone who has the same problem: use gulp to copy the local dependencies into the current directory.
const gulp = require('gulp');
const merge = require('merge-stream');
const runSequence = require('run-sequence');
const del = require('del');
const fs = require('fs');
const resolve = require('path').resolve;

// Build the glob list for a package directory, honouring its .npmignore if present.
let getPackageGlobs = (dir) => {
  let paths = [
    dir + '/**',
    '!' + dir + '/node_modules/**',
    '!' + dir + '/npm-debug.log',
    '!' + dir + '/build',
  ];
  try {
    let data = fs.readFileSync(dir + '/.npmignore', {
      encoding: 'utf-8',
    });
    paths = paths.concat(data.split("\n")
      .filter((e) => e.length > 0)
      .map((e) => dir + '/' + e)
      .filter(fs.existsSync)
      .map((e) => fs.lstatSync(e).isDirectory() ? '!' + e + '/**' : '!' + e));
  } catch (err) {
    // No .npmignore; fall back to the default globs.
  }
  return paths;
};

// Remove any previous build output.
gulp.task('build.clean', () => {
  return del(__dirname + '/build');
});

// Copy the app itself into ./build.
gulp.task('build.copy', () => {
  return gulp.src(getPackageGlobs(__dirname))
    .pipe(gulp.dest('build'));
});

// Copy each local dependency into ./build/local_modules and rewrite its
// path in build/package.json accordingly.
gulp.task('build.normalize', () => {
  let packageJson = require('./build/package.json');
  let tasks = [];
  for (let name in packageJson.dependencies) {
    for (let s of ['../', '~/', './', '/']) {
      if (packageJson.dependencies[name].startsWith(s)) {
        tasks.push(gulp
          .src(getPackageGlobs(resolve(packageJson.dependencies[name])))
          .pipe(gulp.dest('./build/local_modules/' + name)));
        packageJson.dependencies[name] = './local_modules/' + name;
        break;
      }
    }
  }
  return new Promise((resolve, reject) => {
    fs.writeFile('./build/package.json',
      JSON.stringify(packageJson), (err) => {
        if (err) {
          reject(err);
        } else {
          resolve(merge(tasks));
        }
      });
  });
});

gulp.task('build', (done) => {
  runSequence('build.clean',
    'build.copy',
    'build.normalize',
    done);
});
And run with gulp build && gcloud app deploy build/app.yaml
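For example, if package.json originally had a local dependency such as "shared-lib": "../shared-lib" (name made up for illustration), build.normalize copies it into build/local_modules/shared-lib and the generated build/package.json ends up with:
"dependencies": {
  "shared-lib": "./local_modules/shared-lib"
}
which is a path inside the uploaded directory, so the deploy succeeds.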
Thanks to #Hung Hoang for the question and their answer. Their answer solved my issue as well. I was motivated to write a solution that achieves the same result (and a few improvements) without using Gulp.
The following code assumes that you have a local package located at ../local, and that the current working directory contains the Node.js application and the package.json to be deployed to App Engine.
The overall idea is to copy the contents of ../local into the Node.js app directory before deploying. This way, the local package code is included in the code uploaded during the deploy, so the deploy no longer fails. Additionally, for this to work correctly, the package.json dependency entry for the ../local package also needs to be updated.
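Concretely, the sed step in the pre-deploy recipe below rewrites the dependency entry (assuming the package is simply named local in package.json) from
"local": "file:../local"
to
"local": "file:./tmp-local"
so that the dependency path stays inside the directory that gets uploaded.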
There are three steps to the process (defined in Makefile format).
deploy: _predeploy _deploy _postdeploy
The pre-deploy step prepares files for deployment. See inline comments for details.
_predeploy:
	# copy the local package into the current directory
	cp -r ../local tmp-local
	# preserve the original package.json and package-lock.json
	cp package.json package.json.original
	cp package-lock.json package-lock.json.original
	# rewrite the local dependency value in package.json (../local -> ./tmp-local)
	# (sed -i '' is the macOS/BSD form; GNU sed uses just sed -i)
	sed -i '' 's/file:\.\.\/local/file:.\/tmp-local/g' package.json
	# update package-lock.json correspondingly
	npm i --package-lock-only
The deploy step does the actual deploy using the gcloud command.
_deploy:
	gcloud app deploy
Finally the post-deploy step cleans up files created during the pre-deploy step, and restores the original state of package.json (such that ../local can be used as usual for local development).
_postdeploy:
	# undo package.json and package-lock.json changes
	mv package.json.original package.json
	mv package-lock.json.original package-lock.json
	# remove the copied package
	rm -rf tmp-local
To run, execute make deploy.
How is webpack-cli used in a project?
From what I understand, as soon as I enter npm run start in my bash terminal, webpack starts running against the webpack config file, where I have written rules to convert JSX to JS using Babel and SCSS/LESS to CSS (correct me if I'm wrong).
But where does webpack-cli come into play in all this?
The webpack-dev-server package is responsible for serving the build over an HTTP server that it creates. It also rebuilds when you make changes to the source code (when using the hot-reload option).
The webpack-cli package, on the other hand, is responsible for building and bundling the source files, so webpack-dev-server has to run webpack-cli.
So you've got to have both packages installed.
You can see kind of how it does that in here:
https://github.com/webpack/webpack-dev-server/blob/master/bin/webpack-dev-server.js
/** @type {CliOption} */
const cli = {
  name: 'webpack-cli',
  package: 'webpack-cli',
  binName: 'webpack-cli',
  installed: isInstalled('webpack-cli'),
  url: 'https://github.com/webpack/webpack-cli',
  preprocess() {
    process.argv.splice(2, 0, 'serve');
  },
};

// ...

const runCli = (cli) => {
  if (cli.preprocess) {
    cli.preprocess();
  }
  const path = require('path');
  const pkgPath = require.resolve(`${cli.package}/package.json`);
  // eslint-disable-next-line import/no-dynamic-require
  const pkg = require(pkgPath);
  // eslint-disable-next-line import/no-dynamic-require
  require(path.resolve(path.dirname(pkgPath), pkg.bin[cli.binName]));
};

// ...

runCommand(packageManager, installOptions.concat(cli.package))
  .then(() => {
    runCli(cli);
  })
  .catch((error) => {
    console.error(error);
    process.exitCode = 1;
  });
In webpack v5 that order kind of got reversed: you use webpack serve, which is a webpack-cli call, to initiate the serve, and that in turn calls the webpack-dev-server package.
I'm not a webpack expert by any means, but I think this will help you understand it better.
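As a rough sketch (not taken from the question, just a typical webpack 5 setup), the packages usually appear together like this in package.json:
{
  "scripts": {
    "start": "webpack serve --mode development",
    "build": "webpack --mode production"
  },
  "devDependencies": {
    "webpack": "^5.0.0",
    "webpack-cli": "^4.0.0",
    "webpack-dev-server": "^4.0.0"
  }
}
Here npm run start runs webpack serve, i.e. webpack-cli, which then starts webpack-dev-server, while npm run build runs webpack-cli alone.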
So, I want to make a PWA out of one of my CRA projects. The app has a different wallpaper every time it loads, which it gets through an API. But since PWAs are supposed to have offline support, I wanted to have a fallback wallpaper that is cached. Or, even better, the last result from the API is cached and returned while the user is offline. Something like a StaleWhileRevalidate strategy. But I can't figure out how to achieve this without ejecting from create-react-app. Any ideas?
Thanks
Ok, I figured it out myself. In case anyone else needs this, you can create a file named add_to_precache.js in your project's root directory and add the following code to it:
let fs = require('fs');

let build_dir = "./build";
let precache_re = /precache-manifest\.[a-zA-Z0-9]*\.js/;
let urls_to_add = [
  /*
    Paths to the files you want to add, relative to the build directory,
    along with any homepage value you have. Eg: "/app/background.jpg"
  */
];

// Generate a random 24-character revision string for each added entry.
function generate_revision() {
  var chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789".split('');
  var revision = '';
  for (let i = 0; i < 24; i++) {
    revision += chars[Math.floor(Math.random() * chars.length)];
  }
  return revision;
}

fs.readdir(build_dir, (err, files) => {
  for (let file of files) {
    if (precache_re.test(file)) {
      let cont = fs.readFileSync(build_dir + '/' + file).toString();
      // Strip the last 4 characters ("\n]);") so new entries can be appended to the array.
      cont = cont.slice(0, cont.length - 4);
      urls_to_add.forEach((url) => {
        cont += `,\n  {\n    "url": "${url}",\n    "revision": "${generate_revision()}"\n  }`;
      });
      cont += `\n]);`;
      fs.writeFileSync(build_dir + '/' + file, cont);
      break;
    }
  }
});
And then modify your package.json from
  //....
  "scripts": {
    //....
    "build": "react-scripts build"
    //...
  }
  //...
}
to
  //....
  "scripts": {
    //....
    "build": "react-scripts build && node add_to_precache"
    //...
  }
  //...
}
And you are done. The script runs automatically after every build and adds your files to the precache manifest.
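For reference, the end of the patched precache-manifest.<hash>.js should then look roughly like this (the file name, hashes and URLs are illustrative; the last entry is one added by the script):
self.__precacheManifest = (self.__precacheManifest || []).concat([
  {
    "revision": "5f3c1b2a9d8e7f6a0b1c2d3e",
    "url": "/static/js/main.chunk.js"
  },
  {
    "url": "/app/background.jpg",
    "revision": "aB3dE9fGh1Jk2LmN4pQr5StU"
  }
]);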
I use create-react-app in multiple packages in a monorepo. There is a fair amount of duplicated code and files in the "public" folder of each app, since they all have the same icons, descriptions, fonts etc.
Is there a way to move some or all of the files in the "public" folder to their own package, run them through a tool like handlebars.js and finally bundle them with create-react-app, without ejecting?
I couldn't find any open-source tool for this, so I wrote my own scripts:
prepare_cra_files.sh
#!/usr/bin/env bash

for app in apps/*/ ; do
  # Flush the files if they already exist
  if [ -d "$app"public ]; then
    rm -r "$app"public
  fi
  # Copy over the template files
  cp -r template "$app/public"
done

node templatify.js
templatify.js
const Handlebars = require("handlebars");
const fs = require("fs-extra");
const path = require("path");

const APPS_PATH = path.join(__dirname, "..", "apps");
const INDEX_HTML_TEMPLATE_PATH = path.join(__dirname, "template", "index.handlebars");

(async () => {
  const dirs = await fs.readdir(APPS_PATH);
  const indexHtmlTemplate = Handlebars.compile(await fs.readFile(INDEX_HTML_TEMPLATE_PATH, "utf-8"));
  dirs.forEach(async appName => {
    const indexHtmlContextPath = path.join(APPS_PATH, appName, "/handlebars/index.json");
    if (!fs.existsSync(indexHtmlContextPath)) {
      throw new Error(`Please provide the index.html context for the ${appName} app`);
    }
    const indexHtmlContext = JSON.parse(await fs.readFile(indexHtmlContextPath, "utf-8"));
    const indexHtml = indexHtmlTemplate(indexHtmlContext);
    await fs.writeFile(path.join(APPS_PATH, appName, "public", "index.html"), indexHtml);
    await fs.remove(path.join(APPS_PATH, appName, "public", "index.handlebars"));
  });
})();
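For completeness, each app then only needs a small context file, e.g. a hypothetical apps/my-app/handlebars/index.json whose values get substituted into placeholders such as {{title}} in template/index.handlebars:
{
  "title": "My App",
  "description": "One of the apps in the monorepo"
}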
The template's default structure has everything in one place, like this:
/
  bin/
  obj/
  ClientApp/
  myproject.csproj
  Startup.cs
  etc.
My structure has many libraries and angular apps - i.e. a monorepo - so it must be more organized:
/
  libs
    client
    client2
  server
    bin/
    obj/
    myproject.csproj
    Startup.cs
  lib1
  lib2
I edited various references to reflect this structure, most importantly Startup.cs:
services.AddSpaStaticFiles(configuration => {
    configuration.RootPath = "../../../../client/dist";
});

// and

app.UseSpa(spa => {
    spa.Options.SourcePath = "../../../../client";
});
But when running I get: InvalidOperationException: Failed to start 'npm'.
When I run the server and client separately, they work... so the problem is with how the "spa services" are configured. I tried both ../../../../client (from the bin directory) and ../client (from the server project's base directory).
How do I reconfigure the project structure? (Is there a working sample repo somewhere?)
You can't serve your app from the dist folder when you are in development mode, because the dist folder is only used in production mode.
The default settings look like this:
services.AddSpaStaticFiles(configuration =>
{
    configuration.RootPath = "ClientApp/dist";
});

app.UseSpa(spa =>
{
    spa.Options.SourcePath = "ClientApp";
    if (env.IsDevelopment())
    {
        spa.UseAngularCliServer(npmScript: "start");
    }
});
So in order to run your app you must create the folder at the same level as the default Startup.cs (where ClientApp would normally be); in your case that is libs, so the path becomes libs/client.
Then modify the settings:
services.AddSpaStaticFiles(configuration =>
{
    configuration.RootPath = "libs/client/dist";
});

app.UseSpa(spa =>
{
    spa.Options.SourcePath = "libs/client";
    if (env.IsDevelopment())
    {
        spa.UseAngularCliServer(npmScript: "start");
    }
});
Please note that you can only run one Angular app with this setting; if you want to run another Angular app you need to manage it yourself manually.
I got it working like this:
1. Changed to the monorepo structure as shown in my question above.
2. Edited Server.csproj (paths are relative to the server app's directory):
<!--<SpaRoot>ClientApp\</SpaRoot>-->
<SpaRoot>../client/</SpaRoot>
3. Edited Startup.cs (paths are relative to the workspace's directory):
services.AddSpaStaticFiles(configuration => {
    //configuration.RootPath = "ClientApp/dist";
    configuration.RootPath = "./libs/client/dist";
});
and
app.UseSpa(spa => {
    //spa.Options.SourcePath = "ClientApp";
    spa.Options.SourcePath = "./libs/client";
    // etc.
});
Is there a good way to execute the gcloud commands to deploy an app to GAE and see the stderr/stdout echoed back to the console? I've tried gulp-exec, but it seems to batch up the output, dumping it only upon completion. It also won't play nicely when trying to preview locally.
In the end I didn't want to pull in another npm package. Loosely inspired by a portion of gulp-run, I came up with the following. It assumes that 'clean' and 'build' tasks exist, and it also overrides some constants per environment for a gulp-replace task that is part of my 'build'. The key is spawning a subshell and piping its output to the current process's output:
// gulp deploy [-a dev|staging|prod]
gulp.task('deploy', function() {
  var commands = {
    remote: 'gcloud preview app deploy app.yaml -q --set-default --project ',
    local: 'gcloud preview app run app.yaml'
  };
  var environments = {
    dev: {
      app: 'myapp-dev',
    },
    staging: {
      app: 'myapp-staging',
      MY_ENDPOINT: 'https://staging.example.com'
    },
    prod: {
      app: 'myapp',
      MY_ENDPOINT: 'https://example.com'
    }
  };
  var command = commands.local;
  var env = environments[argv.a];
  if (env) {
    command = commands.remote + env.app;
    constantsMap.MY_ENDPOINT = env.MY_ENDPOINT;
  }
  // Now that our constants are configured, kick off the build, then deploy.
  runSequence('clean', 'build', function() {
    var title = util.format('$ %s\n', $.util.colors.blue(command));
    process.stdout.write(title);
    // Run the command in its own subshell and pipe the output to our own.
    var subshell = childProcess.spawn('sh', ['-c', command]);
    subshell.stdout.pipe(process.stdout);
    subshell.stderr.pipe(process.stderr);
  });
});
This relies on the npm packages run-sequence, yargs and gulp-load-plugins, plus Node's built-in util and child_process modules.
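A slightly simpler variant (my own sketch, not part of the original answer) is to let the child process write straight to the parent's streams with stdio: 'inherit' instead of piping manually:
var childProcess = require('child_process');

// Run a shell command; its stdout/stderr go directly to this process's console.
function run(command, done) {
  var subshell = childProcess.spawn('sh', ['-c', command], { stdio: 'inherit' });
  subshell.on('exit', function (code) {
    done(code === 0 ? null : new Error(command + ' exited with code ' + code));
  });
}
Either way, gcloud's progress output shows up live instead of being buffered the way gulp-exec buffers it.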
If you want to execute commands, the following code snippet will help you. I have wrapped it inside a promise since you are using gulp:
var cp = require('child_process');

function executeCommand(command, option) {
  return new Promise(function (resolve, reject) {
    var args = [option.something, option.something];
    var ls = cp.spawn(command, args);
    var output = "";

    ls.on('error', function (err) {
      reject(err);
    });

    ls.stdout.on('data', function (data) {
      output += String(data);
      console.log(output);
    });

    ls.on('exit', function (code) {
      if (code === 0) {
        resolve({
          "output": output
        });
      } else {
        reject(Error(output));
      }
    });
  });
}
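A hypothetical way to wire this into a gulp task (the option fields in the snippet above are placeholders, so pass whatever arguments your command needs):
gulp.task('deploy', function () {
  return executeCommand('gcloud', { /* your args here */ })
    .then(function (result) {
      console.log(result.output);
    });
});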
I am using gulp-gae and it seems to work well.
Supported commands are appcfg.py and dev_appserver.py (in the current version). It can also be configured to override some values from the given app.yaml.