Why can't I view source maps with Angular and Browserify?

I am trying to step into the Browserify world, using this tutorial as a starter. The source maps work fine, but as soon as I require angular they disappear. Here is my browserify task:
gulp.task('browserify', function(){
  return browserify({
    entries: ['./src/javascript/app.coffee'],
    extensions: ['.coffee', '.hbs']
  })
  .bundle({debug: true})
  .on('error', handleErrors)
  .pipe(source('app.js'))
  .pipe(gulp.dest('./build/'));
});
I am passing in the debug: true flag. Is it even possible to make source maps with angular?

I believe you have the debug flag in the wrong location; try moving it into the call to browserify():
gulp.task('browserify', function(){
  return browserify({
    entries: ['./src/javascript/app.coffee'],
    extensions: ['.coffee', '.hbs'],
    debug: true // moved to here
  })
  .bundle()
  .on('error', handleErrors)
  .pipe(source('app.js'))
  .pipe(gulp.dest('./build/'));
});
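If you also want the map written to a separate file rather than inlined at the bottom of app.js, a minimal sketch (assuming the exorcist module is installed and the ./build/app.js.map path) is to pipe the raw bundle through exorcist before vinyl-source-stream:
var exorcist = require('exorcist'); // assumption: npm install exorcist --save-dev

gulp.task('browserify', function(){
  return browserify({
    entries: ['./src/javascript/app.coffee'],
    extensions: ['.coffee', '.hbs'],
    debug: true
  })
  .bundle()
  .on('error', handleErrors)
  .pipe(exorcist('./build/app.js.map')) // strips the inline map and writes it to a separate file
  .pipe(source('app.js'))
  .pipe(gulp.dest('./build/'));
});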

Related

r.js cannot resolve dependencies mentioned in shim

I've recently joined a project built with Backbone.js (using Marionette for view rendering) and Node.js. It also uses RequireJS to load the Backbone files. I'd like to add at this stage that I've never worked with Backbone.js or RequireJS before, hence my struggle with the issue I describe below.
Here is some code that will help explain the issue I run into (all of it was written by previous devs).
Folder Structure:
/public
  /js
    /collection (contains all Backbone.js collection files)
    /lib
      /bower_components
        /backbone
        /marionette
        /etc
    /models (contains all Backbone.js model files)
    /views (contains all Backbone.js view files)
    /main.js
    /main.build.js
    /app.js
    /controller.js
    /router.js
Code from the files that I think relate to the issue:
main.js
requirejs.config({
  paths: {
    'async': 'lib/bower_components/requirejs-plugins/src/async',
    'jquery': 'lib/bower_components/jquery/dist/jquery.min',
    'underscore': 'lib/bower_components/underscore/underscore-min',
    'lodash': 'lib/bower_components/lodash/dist/lodash.min',
    'backbone': 'lib/bower_components/backbone/backbone',
    'marionette': 'lib/bower_components/marionette/lib/backbone.marionette.min',
    'markercluster': 'lib/markercluster',
    'jquerymobile': 'lib/jquery.mobile-1.4.0.min',
    'hogan': 'lib/template-2.0.0.min',
    'templates': '/templates',
    'real': 'lib/mainjs',
    'touch': 'lib/jquery.touchSwipe.min',
    'mouse': 'lib/jquery.mousewheel',
    'moment': 'lib/moment-2.5.1.min',
    'humanize': 'lib/bower_components/humanize-plus/public/dist/humanize.min',
    'validator': 'lib/bower_components/validator-js/validator.min',
    'real': 'lib/mainfile'
  },
  shim: {
    backbone: {
      deps: ["underscore"]
    },
    marionette: {
      deps: ["backbone"]
    },
    templates: {
      deps: ["hogan", "jquery"]
    },
    real: {
      deps: ["jquery", "jquerymobile", "touch", "mouse"]
    },
    markercluster: {
      exports: "MarkerClusterer"
    },
    humanize: {
      exports: "humanize"
    }
  },
  waitSeconds: 0
});
define('gmaps', ['async!http://maps.google.com/maps/api/js?v=3&key=AIzaSyBiV8f88yLWJ_IMSdP1fVNO1-gt3eLVSgg&sensor=true&callback=gMapsCallback'], function(){
  // define('gmaps', ['http://maps.google.com/maps/api/js?v=3&sensor=false'], function(){
  return window.google.maps;
});
require(['app', 'templates', 'real'], function(app) {
  app.start({
    version: "0.9.9"
  });
});
main.build.js
({
  baseUrl: ".",
  name: "main",
  wrapShim: true,
  out: "main-built.js"
})
app.js
define(['underscore', 'controller', 'router', 'models/Cache', 'views/RootView'], function(_, Controller, Router, Cache, RootView) {
  var Application = Marionette.Application.extend({
    propertyListPageSize: 3,
    initialize: function() {
      _.templateSettings = { interpolate : /\{\{(.+?)\}\}/g };
    },
    onStart: function(options){
      new RootView();
      this.controller = new Controller();
      this.router = new Router({controller: this.controller});
      this.cache = new Cache();
      this.context = {};
      //this.evHistory = [];//#todo remove once BB/marionette navigation is in place
      if(Backbone.history) Backbone.history.start({ pushState: false });
      if(Backbone.history.fragment === "") this.navigate('home');
    },
    navigate: function(fragment, trigger, replace){
      this.router.navigate(fragment, {trigger:trigger, replace:replace});
    },
    back: function() {
      window.history.back();
    }
  });
  app = new Application();
  return app;
});
rootView.js
define(['marionette', 'views/HomeView', 'views/HeaderView', 'views/FooterView', 'views/MenuView', 'views/VideoView', 'views/LocationSearchView', 'views/LoginView', 'views/FindView', 'views/ServicesView', 'views/ValueView', 'views/PropertyListView', 'views/SideBySideView', 'views/ConfirmRegistrationView', 'views/ForgotPasswordView', 'views/CreateAccountView', 'views/UserHomeView', 'views/MyBrokerView', 'views/GiveFeedbackView', 'views/SeeFeedbackView', 'views/ViewingScheduleView', 'views/MyViewingsSummaryView', 'views/MyAccountView', 'views/ViewingConfirmView', 'views/ValueAddressPropertyListView'],
function(Marionette, HomeView, HeaderView, FooterView, MenuView, VideoView, LocationView, LoginView, FindView, ServicesView, ValueView, PropertyListView, SideBySideView, ConfirmRegistrationView, ForgotPasswordView, CreateAccountView, UserHomeView, MyBrokerView, GiveFeedbackView, SeeFeedbackView, ViewingScheduleView, MyViewingsSummaryView, MyAccountView, ViewingConfirmView, ValueAddressPropertyListView) {
var RootView = Marionette.LayoutView.extend({
...some view code
});
Use case I'm trying to solve:
When I access the site in the browser, I notice in the debugger that it loads all the JS files right at the start. During this load the site is blank and the user has to wait a while before they can use it.
What I've been able to work out is that when the app is started in main.js, app.js creates an instance of rootView.js, which has all the views listed as dependencies. This triggers a download request for every other view, each of which resolves its own dependencies and downloads the relevant models and collections. Hence all files are downloaded when the user accesses the site.
Solution I've been trying:
Since requirejs is being used, I'm trying to use r.js to optimize and combine all the js files to reduce the number of downloads.
Issue I'm running into:
When I run r.js, I get the following error:
Tracing dependencies for: main
Error: ENOENT: no such file or directory, open '/var/node_projects/rm/rm.src.server/src/public/js/underscore.js'
In module tree:
main
app
Error: Error: ENOENT: no such file or directory, open '/var/node_projects/rm/rm.src.server/src/public/js/underscore.js'
In module tree:
main
app
at Error (native)
If I add underscore.js directly at the path specified in the error, I then get the same error for marionette.js. What I think is happening is that app.js is not picking up the shimmed dependencies, and hence r.js tries to find the files directly at the path in the error.
Things I've tried:
- I've added wrapShim: true to the main.build.js file, but that did not help.
Honestly, I've been sitting on this for a couple of days and I'm not sure what to try next, hence this post.
Any help/direction would be appreciated.
You need to include the same shim configuration in your build file, as wrapShim is not sufficient.
If shim config is used in the app during runtime, duplicate the config here. Necessary if shim config is used, so that the shim's dependencies are included in the build. Using "mainConfigFile" is a better way to pass this information though, so that it is only listed in one place. However, if mainConfigFile is not an option, the shim config can be inlined in the build config.
https://github.com/jrburke/r.js/blob/master/build/example.build.js
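In practice, that means the build config can point r.js at the runtime config instead of repeating it. A minimal sketch of main.build.js using mainConfigFile (assuming main.js sits alongside the build file, as in the folder structure above):
({
  baseUrl: ".",
  mainConfigFile: "main.js", // reuse the paths and shim config so r.js can trace underscore, marionette, etc.
  name: "main",
  wrapShim: true,
  out: "main-built.js"
})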

Starting protractor through gulp

I'm trying to run protractor as a gulp task but I can't get it to work.
I've tried the gulp-protractor plugin like this:
gulp.task('protractor-server', function (done) {
  var called = false;
  nodemon({
    script: 'test/e2e/server/server.js',
    stdout: true,
    ignore: ['app/**', 'node_modules'],
    watch: 'test/e2e/**/*.js'
  })
  .on('start', function () {
    if (!called) {
      done();
    }
    called = true;
  });
});
gulp.task('run-protractor', ['protractor-server'], function (done) {
  return gulp.src(['test/e2e/**/*.js'])
    .pipe(protractor({
      configFile: __dirname + '/protractor.conf.js'
    }))
    .on('error', function (error) {
      console.log('gulp error: ', error);
      throw error;
    });
});
However, firstly, why do I need to use gulp.src(['test/e2e/**/*.js']) and then pipe to protractor? Is it not possible to run protractor by itself, since I have specified the spec files in the protractor.conf file? (FYI, I did try that but it didn't work.)
Secondly, when I run it as in the above snippet, I keep getting errors like WARNING - pattern C:\[absolutepath]\test\e2e\[subfolder]\[filename].js did not match any files for every file that exists in the e2e folder and its sub-folders. What could be the reason for this?
This is my protractor conf file:
exports.config = {
  specs: [
    'test/e2e/[subfolder]/*.page.js',
    'test/e2e/[subfolder]/*.spec.js'
  ],
  baseUrl: 'http://localhost:3000'
};
If I start the server separately and then run protractor from the command prompt, it works fine. I was thinking of using child_process.spawn to start protractor as a child process, but I haven't gotten that to work either. Any suggestions on how to start protractor from a gulp task?
There is no need to use gulp.src(['test/e2e/**/*.js']); provide all the configuration in the protractor.conf.js file itself.
You can use a task like:
gulp.task('run-protractor', ['run-server', 'run-webdriver'], function (done) {
  // run protractor against protractor.conf.js here
});
Create run-server and run-webdriver gulp tasks, test them separately, and once they are working, use them in the run-protractor task, as in the sketch below.
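One way those helper tasks might look (a sketch only; it assumes gulp-nodemon, as the question's nodemon() call suggests, reuses the server path from the question, uses gulp-protractor's webdriver_update helper, and passes an empty gulp.src() so the specs come solely from protractor.conf.js):
var protractorLib = require('gulp-protractor');
var nodemon = require('gulp-nodemon');

// start the test server and signal gulp once nodemon has booted
gulp.task('run-server', function (done) {
  var called = false;
  nodemon({ script: 'test/e2e/server/server.js', watch: 'test/e2e/**/*.js' })
    .on('start', function () {
      if (!called) { called = true; done(); }
    });
});

// make sure the selenium driver binaries are up to date
gulp.task('run-webdriver', protractorLib.webdriver_update);

gulp.task('run-protractor', ['run-server', 'run-webdriver'], function () {
  // no spec globs here; protractor.conf.js defines the specs
  return gulp.src([])
    .pipe(protractorLib.protractor({ configFile: __dirname + '/protractor.conf.js' }));
});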

Gulp-protractor keep saying "Spec patterns did not match any files"

I'm trying to run my end to end tests written with protractor and jasmine. It works perfectly when I call protractor protractor.config.js directly.
However, when I use gulp-protractor, I keep getting the "Spec patterns did not match any files" error and the tests do not run.
This is my protractor runner gulp task:
gulp.task('protractor-run', function (done) {
  return gulp.src(["./e2e-tests/**/*-spec.js"])
    .pipe(protractor({
      configFile: "./config/protractor-config.js",
      args: ['--baseUrl', 'http://127.0.0.1:8000']
    }))
    .on('error', function(e) { throw e })
});
and this is the error:
WARNING - pattern C:\path\to\app\e2e-tests\login\login-spec.js did not match any files.
[launcher] Process exited with error code 1
C:\path\to\app\node_modules\protractor\node_modules\q\q.js:126
throw e;
^
Error: Spec patterns did not match any files.
What am I missing?
I managed to get it working by providing an empty readable stream; you then specify your spec files in the config file instead.
var protractor = require('gulp-protractor').protractor;
gulp.task('protractor', ['webdriverUpdate'], function(){
  return gulp.src([])
    .pipe(protractor({
      configFile: __dirname + '/protractor.conf.js'
    }));
});
Also, don't forget the webdriverUpdate task:
var webdriverUpdate = require('gulp-protractor').webdriver_update;
gulp.task('webdriverUpdate', webdriverUpdate);
And in the config file, this:
seleniumServerJar: './node_modules/protractor/selenium/selenium-server-standalone-2.47.1.jar',
With this I stopped getting the error.
Update
Issue #2551 has been closed and fixed since 2.5.0.
I resolved this in a gulpfile that launches protractor tests by putting a file path into the parameter of gulp.src(['file_path_goes_here']). The task I was trying to run had no file path between the brackets, and was throwing the error.
gulp.task('works', 'Run some tests', function() {
  gulp.src(['path/to/test.spec.js'])
    .pipe(protractor({
      configFile: __dirname + '/../test/protractor.conf.js',
      args: ['--baseUrl', 'http://localhost:9099']
    }))
});
gulp.task('error', 'Run feature tests locally', function() {
  gulp.src([''])
    .pipe(protractor({
      configFile: __dirname + '/../test/protractor_local.conf.js',
      args: ['--baseUrl', 'http://localhost:9099']
    }))
});

Browserify recipe for AngularJS + source maps + minification

I am looking for a working recipe that can minify my AngularJS code and still provide a source map. Currently I have this gulp task but minification won't work:
gulp.task('browserify', function(cb) {
  var bundler = browserify({
    entries: [paths.browserEntry],
    globals: false,
    debug: !settings.PRODUCTION
  })
  bundler
    .bundle()
    .on('error', cb)
    .on('log', util.log)
    .pipe(gulpif(!settings.PRODUCTION, mold.transformSourcesRelativeTo(paths.js)))
    .pipe(source(paths.js))
    .pipe(buffer()) // because the next steps do not support streams
    .pipe(concat('bundle.js'))
    .pipe(gulpif(settings.server.minify.js, rename({suffix: '.min'})))
    .pipe(gulpif(settings.server.minify.js, uglify()))
    .pipe(gulp.dest(paths.js))
    .on('end', function() {
      cb()
    })
})
Any clues?
You may see an example here. The example outputs a minified bundle.min.js plus a bundle.map. The crucial points that make the example work:
Installed debowerify
Installed minifyify
package.json - added transform property
"browserify": {
"transform": [
"debowerify"
]
}
Gruntfile.js - using preBundleCB to make minifyify work
preBundleCB: function (b) {
  b.plugin(minifyify, {
    output: './dist/bundle.map',
    map: 'bundle.map'
  });
}
Hope the example is useful to you.
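Since the question uses gulp rather than Grunt, roughly the same idea can be expressed by registering minifyify as a plugin on the browserify bundler before calling bundle(). This is only a sketch under assumptions (minifyify installed, the ./dist output paths below, and reuse of paths.browserEntry and util.log from the question):
var browserify = require('browserify');
var source = require('vinyl-source-stream');

gulp.task('browserify-min', function () {
  var bundler = browserify({
    entries: [paths.browserEntry],
    debug: true // minifyify needs browserify's source map information
  });
  // minify and emit an external map in one pass
  bundler.plugin('minifyify', {
    map: 'bundle.map',          // URL written into the sourceMappingURL comment
    output: './dist/bundle.map' // where the map file lands on disk
  });
  return bundler.bundle()
    .on('error', util.log)
    .pipe(source('bundle.min.js'))
    .pipe(gulp.dest('./dist'));
});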

How can I automate both E2E and unit tests with Yeoman & AngularJS?

I'm using Yeoman and generator-angular to manage AngularJS apps, but I'm having trouble with automated testing.
Running grunt test will run unit tests once. I can get E2E tests to run after unit tests by altering the karma config block in Gruntfile.js, adding e2e:
karma: {
  // ...
  e2e: {
    configFile: 'karma-e2e.conf.js',
    singleRun: true
  }
},
Great: now when I type grunt test all tests are run. But they're only run one time, and there's a big overhead (starting compass, running the server, launching the Chrome processes, etc.). Instead, the server and Chrome processes should remain running and, when I save a test, tests should be re-run.
I can achieve this by modifying both karma.conf.js and karma-e2e.conf.js, setting singleRun = false, then running karma start in one terminal pane and karma start karma-e2e.conf.js in another. Provided none of the ports in the karma configs conflict (which they do by default), this works. But now I'm bypassing Grunt and just doing my own thing (which seems a little silly, as Grunt is supposed to make things easier).
Anyway, after a few more changes (fixes?) — not detailed for brevity — this works but doesn't cut it: I now have to run two different commands and keep an eye on two different terminal panes. Surely there's a better way.
How can I run a single command to watch my test files and re-run tests appropriately?
Bonus question: why on Earth is this functionality not provided as is? Is it just a question of the developer(s) of generator-angular not having enough time to implement this stuff? I ask because I'm only just getting into Angular/Yeoman/Karma (as you probably noticed), and feel that automated testing of both E2E and unit tests are crucial to workflow.
As I mentioned in a comment to your question - PhantomJS saves a lot of hassle. That aside, I believe you can handle everything from within your Gruntfile and just continue to run grunt test to start the whole thing.
grunt-karma allows full customization of your karma options with some handy add-ons.
From the docs:
....
You can override any of the config file's settings directly:
karma: {
  unit: {
    configFile: 'karma.conf.js',
    runnerPort: 9999,
    singleRun: true,
    browsers: ['PhantomJS']
  }
}
Sharing Configs
If you have multiple targets, it may be helpful to share common configuration settings between them. Grunt-karma supports this by using the options property:
karma: {
  options: {
    configFile: 'karma.conf.js',
    runnerPort: 9999,
    browsers: ['Chrome', 'Firefox']
  },
  continuous: {
    singleRun: true,
    browsers: ['PhantomJS']
  },
  dev: {
    reporters: 'dots'
  }
}
Additionally you may want to snoop around in Yeoman's generator-angular Gruntfile code to see what else may be available or at least mockable.
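To get back to the single-command goal, one possible arrangement (a sketch only, not the generator-angular default; it assumes grunt-karma's background option, that both karma configs have autoWatch enabled with singleRun disabled, that their ports don't clash, and a hypothetical test:watch task name) is to run the unit watcher in the background and the e2e watcher in the foreground:
karma: {
  unit: {
    configFile: 'karma.conf.js',
    background: true, // spawn the unit watcher as a child process and keep going
    singleRun: false
  },
  e2e: {
    configFile: 'karma-e2e.conf.js',
    singleRun: false  // stays in the foreground and keeps watching
  }
},
// ...
grunt.registerTask('test:watch', ['karma:unit', 'karma:e2e']);
With that in place, grunt test:watch keeps both suites watching from a single terminal.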
You can try this to run only the e2e tests:
grunt karma:e2e
Within the karma.conf.js file (around line 38), find autoWatch = false; and change it to true.
Now if you run grunt karma:unit, you will find that it leaves the test server running, and any change to the project files immediately runs the tests again.
//
// test/midway/appSpec.js
//
describe("Midway: Testing Modules", function() {
  describe("App Module:", function() {
    var module;
    before(function() {
      module = angular.module("App");
    });
    it("should be registered", function() {
      expect(module).not.to.equal(null);
    });
    describe("Dependencies:", function() {
      var deps;
      var hasModule = function(m) {
        return deps.indexOf(m) >= 0;
      };
      before(function() {
        deps = module.value('appName').requires;
      });
      // you can also test the module's dependencies
      it("should have App.Controllers as a dependency", function() {
        expect(hasModule('App.Controllers')).to.equal(true);
      });
      it("should have App.Directives as a dependency", function() {
        expect(hasModule('App.Directives')).to.equal(true);
      });
      it("should have App.Filters as a dependency", function() {
        expect(hasModule('App.Filters')).to.equal(true);
      });
      it("should have App.Routes as a dependency", function() {
        expect(hasModule('App.Routes')).to.equal(true);
      });
      it("should have App.Services as a dependency", function() {
        expect(hasModule('App.Services')).to.equal(true);
      });
    });
  });
});
