Remove unused tasks and update instructions

This commit is contained in:
Tim Schaub
2017-07-03 17:12:12 -06:00
parent fb820f4b81
commit 02b2cdbd27
10 changed files with 95 additions and 515 deletions

1
.gitignore vendored
View File

@@ -1,4 +1,3 @@
/build/
/node_modules/
/dist/
/coverage/

View File

@@ -17,7 +17,7 @@
"pretest": "npm run lint",
"test": "npm run karma -- --single-run",
"debug-server": "node tasks/serve-lib.js",
"karma": "node tasks/test-all.js start test/karma.config.js"
"karma": "node tasks/test.js start test/karma.config.js"
},
"main": "dist/ol.js",
"repository": {

View File

@@ -1,40 +0,0 @@
var fs = require('fs-extra');
// Matches a goog.provide('...') call and captures the provided name.
var PROVIDE_RE = /goog\.provide\('(.*)'\);/;
// File paths start at argv[2] (argv[0] is node, argv[1] is this script, which
// the previous version needlessly read and scanned as an input file).
var files = process.argv.slice(2);
// The number of files still pending; output is emitted when it reaches zero.
var remaining = files.length;
/**
 * Object used as a set of found goog.provide's.
 * @type {Object.<string, boolean>}
 */
var requires = {};
files.forEach(function(file) {
  fs.readFile(file, function(err, data) {
    if (!err) {
      data.toString().split('\n').forEach(function(line) {
        var match = line.match(PROVIDE_RE);
        if (match) {
          requires[match[1]] = true;
        }
      });
    }
    // Decrement on the error path too; the previous version returned early on
    // error, so a single unreadable file meant the output was never written.
    if (--remaining === 0) {
      Object.keys(requires).sort().forEach(function(key) {
        process.stdout.write('goog.require(\'' + key + '\');\n');
      });
    }
  });
});

View File

@@ -1,102 +0,0 @@
const Server = require('karma').Server;
const closure = require('closure-util');
const path = require('path');
const processCliArgs = require('karma/lib/cli').process;
/**
 * Replace previously inserted dependency entries in the Karma `files` array
 * with the manager's current dependency list, keeping them at the same
 * position. Mutates `files` in place (Karma holds a reference to it).
 * @param {Object} manager The dependency file manager.
 * @param {Array.<Object>} files Karma file config entries.
 * @param {Object.<string, boolean>} previousLookup Patterns inserted by a
 *     previous call (optional).
 * @return {Object.<string, boolean>} Lookup of the patterns inserted now.
 */
function insertDependencies(manager, files, previousLookup) {
  const seenBefore = previousLookup || {};

  // Drop the entries we inserted last time, remembering where the first one
  // was so the fresh dependencies go back into the same slot.
  let insertAt = NaN;
  const retained = [];
  files.forEach((entry, index) => {
    if (seenBefore[entry.pattern]) {
      if (isNaN(insertAt)) {
        insertAt = index;
      }
    } else {
      retained.push(entry);
    }
  });
  if (isNaN(insertAt)) {
    insertAt = 0;
  }

  // Build Karma file entries for every managed dependency, recording each
  // path so the next call can recognize (and replace) them.
  const lookup = {};
  const dependencies = manager.getDependencies().map(script => {
    lookup[script.path] = true;
    return {
      pattern: script.path,
      included: true,
      served: true,
      watched: false
    };
  });

  retained.splice(insertAt, 0, ...dependencies);
  files.length = 0;
  files.push(...retained);
  return lookup;
}
/**
 * Start Karma. This prepends the Karma `files` config with all library files
 * sorted in dependency order.
 * @param {Object} config Karma options.
 * @param {Manager} manager The dependency file manager.
 * @param {function(Error)} callback Called with any error.
 */
function serve(config, manager, callback) {
  const onExit = code => {
    if (!code) {
      callback(null);
      return;
    }
    const error = new Error(`Karma exited with ${code}`);
    error.code = code;
    callback(error);
  };

  const server = new Server(config, onExit);
  const files = server.get('config.files');
  let lookup = insertDependencies(manager, files);

  // stop goog base.js from trying to load deps.js
  files.unshift({
    pattern: path.resolve(__dirname, '../test/no-deps.js'),
    included: true,
    served: true,
    watched: false
  });

  // Re-insert dependencies whenever the manager sees a file change, then
  // tell Karma to pick up the modified file list.
  manager.on('update', () => {
    lookup = insertDependencies(manager, files, lookup);
    server.refreshFiles();
  });

  server.start();
}
/**
 * Build a closure-util manager over the library sources and start Karma
 * once the dependency graph is ready.
 * @param {Object} config Karma options.
 * @param {function(Error)} callback Called with any error.
 */
function main(config, callback) {
  // Watch all library sources; the manager resolves dependency order.
  const libPatterns = [
    'src/**/*.js',
    'build/ol.ext/*.js'
  ];
  const manager = new closure.Manager({lib: libPatterns});
  manager.on('error', callback);
  manager.on('ready', () => serve(config, manager, callback));
}
// When run directly, parse Karma CLI arguments and exit with a non-zero
// status if anything goes wrong.
if (require.main === module) {
  const config = processCliArgs();
  main(config, err => {
    if (err) {
      // Flush the message before exiting so it is not lost.
      process.stderr.write(err.message, () => process.exit(1));
    } else {
      process.exit(0);
    }
  });
}

View File

@@ -1,210 +0,0 @@
/**
* This task instruments our source code with istanbul, runs the test suite
* on the instrumented source and collects the coverage data. It then creates
* test coverage reports.
*
* TODO This can be improved in style. We should possibly rewrite it and use
* async.waterfall.
*/
var fs = require('fs-extra');
var istanbul = require('istanbul');
var path = require('path');
var glob = require('glob');
var runTestsuite = require('./test').runTests;
// setup some paths
var dir = path.join(__dirname, '../src');
var backupDir = path.join(__dirname, '../src-backup');
var instrumentedDir = path.join(__dirname, '../src-instrumented');
var coverageDir = path.join(__dirname, '../coverage');
// Guard the mkdir: mkdirSync throws if the directory already exists, so a
// leftover coverage/ dir from a previous run would crash the task. This
// matches the existsSync guards used in setupBackupAndInstrumentationDir.
if (!fs.existsSync(coverageDir)) {
  fs.mkdirSync(coverageDir);
}
// The main players in the coverage generation via istanbul
var instrumenter = new istanbul.Instrumenter();
var reporter = new istanbul.Reporter(false, coverageDir);
var collector = new istanbul.Collector();
// General options used for the resource shuffling / directory copying
var copyOpts = {
  // Overwrite existing file or directory
  clobber: true,
  // Preserve the mtime and atime when copying files
  preserveTimestamps: true
};
/**
 * A small utility method printing out log messages.
 * @param {string} msg The message.
 */
var log = function(msg) {
  process.stdout.write(msg.concat('\n'));
};
/**
 * Creates folders for backup and instrumentation and copies the contents of the
 * current src folder into them.
 */
var setupBackupAndInstrumentationDir = function() {
  // Create a directory if it is missing, logging what we do.
  var ensureDir = function(target, label) {
    if (fs.existsSync(target)) {
      return;
    }
    log('• create directory for ' + label + ': ' + target);
    fs.mkdirSync(target);
  };
  ensureDir(backupDir, 'backup of src');
  ensureDir(instrumentedDir, 'instrumented src');
  log('• copy src files to backup folder');
  fs.copySync(dir, backupDir, copyOpts);
  log('• copy src files to instrumentation folder');
  fs.copySync(dir, instrumentedDir, copyOpts);
};
/**
 * Reverts the changes done in setupBackupAndInstrumentationDir, copies the
 * backup over the src directory and removes the instrumentation and backup
 * directory.
 */
var revertBackupAndInstrumentationDir = function() {
  log('• copy original src back to src folder');
  fs.copySync(backupDir, dir, copyOpts);
  // Remove both scratch directories, logging each step.
  [
    ['• delete backup directory', backupDir],
    ['• delete instrumentation directory', instrumentedDir]
  ].forEach(function(step) {
    log(step[0]);
    fs.removeSync(step[1]);
  });
};
/**
 * Callback for when runTestsuite() has finished. Gathers the per-suite
 * coverage JSON files, restores the src directory, and writes the reports.
 */
var collectAndWriteCoverageData = function() {
  log('• collect data from coverage *.json files');
  var candidates = ['coverage.json', 'coverage-rendering.json'].map(
    function(name) {
      return path.join(__dirname, '..', 'coverage', name);
    }
  );
  candidates.forEach(function(coverageFile) {
    if (!fs.existsSync(coverageFile)) {
      return;
    }
    log(' • collect data from ' + path.basename(coverageFile));
    collector.add(JSON.parse(fs.readFileSync(coverageFile, 'utf8')));
  });
  reporter.addAll(['lcovonly', 'html']);
  revertBackupAndInstrumentationDir();
  log('• write report from collected data');
  reporter.write(collector, true, function() {
    process.exit(0);
  });
};
/**
 * Runs the rendering test by spawning a call to `make test-rendering`. The
 * `make`-call sets up certain things so that the rendering tests can actually
 * run, which is why we call it this way.
 *
 * @param {Function} callback The callback to invoke once `make` has exited.
 *     Will receive the exit code.
 */
var runRenderingTestsuite = function(callback) {
  var spawn = require('child_process').spawn;
  var make = spawn('make', ['test-rendering'], {stdio: 'inherit'});
  make.on('exit', function(code) {
    callback(code);
  });
};
/**
 * Derive output file name from input file name, by replacing the *last*
 * occurrence of `/src/` by `/src-instrumented/`
 *
 * @param {String} file The input filename; must contain `/src/`.
 * @return {String} file The output filename.
 */
var outputFilenameByFilename = function(file) {
  var search = '/src/';
  var replace = '/src-instrumented/';
  // lastIndexOf finds the true last occurrence even when occurrences overlap
  // (e.g. '/src/src/'), which the previous global-regex scan missed; it also
  // avoids the stateful RegExp lastIndex machinery entirely.
  var idx = file.lastIndexOf(search);
  if (idx === -1) {
    // Fail loudly with a clear message instead of the opaque TypeError the
    // previous implementation threw when no match was found.
    throw new Error('Expected path to contain ' + search + ': ' + file);
  }
  return file.substr(0, idx) + replace + file.substr(idx + search.length);
};
/**
 * Will instrument all JavaScript files that are passed as second parameter.
 * This is the callback to the glob call.
 * @param {Error} err Any error.
 * @param {Array.<string>} files List of file paths.
 */
var foundAllJavaScriptSourceFiles = function(err, files) {
  if (err) {
    process.stderr.write(err.message + '\n');
    process.exit(1);
  }
  log('• instrumenting every src file');
  var total = 0;
  files.forEach(function(srcFile) {
    total++;
    var source = fs.readFileSync(srcFile, 'utf-8');
    var target = outputFilenameByFilename(srcFile);
    fs.writeFileSync(target, instrumenter.instrumentSync(source, srcFile));
    // Periodic progress output so long runs don't look stalled.
    if (total % 10 === 0) {
      log(' • instrumented ' + total + ' files');
    }
  });
  log(' • done. ' + total + ' files instrumented');
  log('• copy instrumented src back to src folder');
  fs.copySync(instrumentedDir, dir, copyOpts);
  log('• run test suites on instrumented code');
  log(' • run rendering test suite');
  runRenderingTestsuite(function(renderingCode) {
    if (renderingCode !== 0) {
      process.stderr.write('Trouble running the rendering testsuite\n');
      process.exit(1);
    }
    log(' • run standard test suite');
    runTestsuite({coverage: true, reporter: 'dot'}, function(code) {
      if (code !== 0) {
        process.stderr.write('Trouble running the standard testsuite\n');
        process.exit(1);
      }
      collectAndWriteCoverageData();
    });
  });
};
/**
 * Our main method, first it sets up certain directory, and then it starts the
 * coverage process by gathering all JavaScript files and then instrumenting
 * them.
 */
var main = function() {
  setupBackupAndInstrumentationDir();
  var pattern = dir + '/**/*.js';
  glob(pattern, {}, foundAllJavaScriptSourceFiles);
};
// Run directly or be required as a module.
if (require.main === module) {
  main();
}
module.exports = main;

View File

@@ -1,50 +0,0 @@
/**
* This task starts a dev server that provides a script loader for OpenLayers
* and Closure Library and runs rendering tests in SlimerJS.
*/
var path = require('path');
var spawn = require('child_process').spawn;
var slimerjs = require('slimerjs');
var serve = require('./serve');
var listen = require('./test').listen;
/**
 * Create the debug server and run tests.
 */
serve.createServer(function(err, server) {
  if (err) {
    process.stderr.write(err.message + '\n');
    process.exit(1);
  }
  // Try ports 3001-3005, then launch SlimerJS against the served test page.
  listen(3001, 3005, server, function(listenErr) {
    if (listenErr) {
      process.stderr.write('Server failed to start: ' + listenErr.message + '\n');
      process.exit(1);
    }
    var address = server.address();
    var url = 'http://' + address.address + ':' + address.port;
    var profile = path.join(__dirname, '../build/slimerjs-profile');
    var args = [
      '-profile',
      profile,
      path.join(__dirname, '../test_rendering/test.js'),
      url + '/test_rendering/index.html'
    ];
    var child = spawn(slimerjs.path, args, {stdio: 'pipe'});
    // Forward SlimerJS output and mirror its exit code.
    child.stdout.on('data', function(chunk) {
      process.stdout.write(chunk);
    });
    child.on('exit', function(code) {
      process.exit(code);
    });
  });
});

View File

@@ -1,95 +1,102 @@
/**
* This task starts a dev server that provides a script loader for OpenLayers
* and Closure Library and runs tests in PhantomJS.
*/
const Server = require('karma').Server;
const closure = require('closure-util');
const path = require('path');
const processCliArgs = require('karma/lib/cli').process;
var path = require('path');
var spawn = require('child_process').spawn;
var phantomjs = require('phantomjs-prebuilt');
var serve = require('./serve');
/**
* Try listening for incoming connections on a range of ports.
* @param {number} min Minimum port to try.
* @param {number} max Maximum port to try.
* @param {http.Server} server The server.
* @param {function(Error)} callback Callback called with any error.
*/
function listen(min, max, server, callback) {
function _listen(port) {
server.once('error', function(err) {
if (err.code === 'EADDRINUSE') {
++port;
if (port < max) {
_listen(port);
} else {
callback(new Error('Could not find an open port'));
}
} else {
callback(err);
function insertDependencies(manager, files, previousLookup) {
previousLookup = previousLookup || {};
let firstIndex = NaN;
const original = files.filter((obj, index) => {
if (previousLookup[obj.pattern]) {
if (isNaN(firstIndex)) {
firstIndex = index;
}
});
server.listen(port, '127.0.0.1');
}
server.once('listening', function() {
callback(null);
return false;
} else {
return true;
}
});
_listen(min);
if (isNaN(firstIndex)) {
firstIndex = 0;
}
const lookup = {};
const dependencies = manager.getDependencies().map(script => {
lookup[script.path] = true;
return {
pattern: script.path,
included: true,
served: true,
watched: false
};
});
original.splice.apply(original, [firstIndex, 0].concat(dependencies));
files.length = 0;
files.push.apply(files, original);
return lookup;
}
function runTests(conf, callback) {
var coverage = 'coverage' in conf ? conf.coverage : false;
var reporter = 'reporter' in conf ? conf.reporter : 'spec';
/**
* Create the debug server and run tests.
*/
serve.createServer(function(err, server) {
if (err) {
process.stderr.write(err.message + '\n');
process.exit(1);
/**
* Start Karma. This prepends the Karma `files` config with all library files
* sorted in dependency order.
* @param {Object} config Karma options.
* @param {Manager} manager The dependency file manager.
* @param {function(Error)} callback Called with any error.
*/
function serve(config, manager, callback) {
function exit(code) {
let error = null;
if (code) {
error = new Error(`Karma exited with ${code}`);
error.code = code;
}
callback(error);
}
const server = new Server(config, exit);
listen(3001, 3005, server, function(err) {
if (err) {
process.stderr.write('Server failed to start: ' + err.message + '\n');
process.exit(1);
}
var address = server.address();
var url = 'http://' + address.address + ':' + address.port;
var args = [
require.resolve('mocha-phantomjs-core'),
url + '/test/index.html',
reporter
];
var config = {
ignoreResourceErrors: true,
useColors: true
};
const files = server.get('config.files');
if (coverage) {
config.hooks = path.join(__dirname, '../test/phantom_hooks.js');
}
let lookup = insertDependencies(manager, files);
args.push(JSON.stringify(config));
// stop goog base.js from trying to load deps.js
files.unshift({
pattern: path.resolve(__dirname, '../test/no-deps.js'),
included: true,
served: true,
watched: false
});
var child = spawn(phantomjs.path, args, {stdio: 'inherit'});
child.on('exit', function(code) {
callback(code);
});
});
manager.on('update', () => {
lookup = insertDependencies(manager, files, lookup);
server.refreshFiles();
});
server.start();
}
function main(config, callback) {
const manager = new closure.Manager({
lib: [
'src/**/*.js',
'build/ol.ext/*.js'
]
});
manager.on('error', callback);
manager.on('ready', () => {
serve(config, manager, callback);
});
}
if (require.main === module) {
runTests({coverage: false, reporter: 'spec'}, function(code) {
process.exit(code);
const config = processCliArgs();
main(config, (err, manager) => {
if (err) {
process.stderr.write(err.message, () => process.exit(1));
return;
} else {
process.exit(0);
}
});
}
module.exports = {
runTests: runTests,
listen: listen
};

View File

@@ -14,24 +14,17 @@ Install the test dependencies (from the root of the repository):
npm install
Run the tests once with PhantomJS:
Run the tests once:
make test
npm test
(Note that for `npm` users, this can also be run as `npm test`.)
To run the tests continuously:
Run the tests in a browser:
npm run karma
make serve
(Again for `npm` users, this is `npm start`.)
Now visit http://localhost:3000/test/ in your browser. The tests will re-run
any time one of the source or spec files changes.
Tip for TDD'ers: to make PhantomJS run the test suite continuously each time
a spec file is changed you can use nosier (http://pypi.python.org/pypi/nosier)
and do `nosier -p test -p src "make test"`.
After this, the test server is listening on http://localhost:9876/, and you can
attach any number of browsers for testing (during development, tests will run
in Chrome by default).
# Rendering tests

View File

@@ -4,7 +4,7 @@ var path = require('path');
* The config below is not enough to run Karma. In addition, we need to add
* all library files in dependency order. This could be done with a plugin if
* Karma supported async plugins (there may other alternatives as well). But
* for now we start Karma with the `tasks/test-all.js` script. This script
* for now we start Karma with the `tasks/test.js` script. This script
* sorts dependencies and adds files to the Karma config below.
*/

View File

@@ -1,17 +0,0 @@
/* eslint-disable no-console */
module.exports = {
afterEnd: function(runner) {
var fs = require('fs');
var coverage = runner.page.evaluate(function() {
return window.__coverage__;
});
if (coverage) {
console.log('Writing coverage to coverage/coverage.json');
fs.write('coverage/coverage.json', JSON.stringify(coverage), 'w');
} else {
console.log('No coverage data generated');
}
}
};