Merge branch 'master' of https://github.com/freedmand/sapper into freedmand-master

Rich Harris committed 2018-01-14 13:48:07 -05:00
4 changed files with 65 additions and 40 deletions

View File

@@ -1,11 +1,29 @@
 #!/usr/bin/env node
+const build = require('../lib/build.js');
 const cmd = process.argv[2];
+const start = Date.now();
 
 if (cmd === 'build') {
 	process.env.NODE_ENV = 'production';
-	require('../lib/build.js')();
+	build()
+		.then(() => {
+			const elapsed = Date.now() - start;
+			console.error(`built in ${elapsed}ms`); // TODO beautify this, e.g. 'built in 4.7 seconds'
+		})
+		.catch(err => {
+			console.error(err ? err.details || err.stack || err.message || err : 'Unknown error');
+		});
 } else if (cmd === 'extract') {
 	process.env.NODE_ENV = 'production';
-	require('../lib/utils/extract.js')();
+	const start = Date.now();
+	build()
+		.then(() => require('../lib/utils/extract.js')())
+		.then(() => {
+			const elapsed = Date.now() - start;
+			console.error(`extracted in ${elapsed}ms`); // TODO beautify this, e.g. 'built in 4.7 seconds'
+		})
+		.catch(err => {
+			console.error(err ? err.details || err.stack || err.message || err : 'Unknown error');
+		});
 }
 
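Note: `build()` now returns a promise instead of exiting the process itself, which is what lets the CLI time the run and chain extraction after a build. The same promise makes the build awaitable from other scripts; a minimal sketch of that, assuming this commit's `lib/build.js` export (the `ci` wrapper below is hypothetical, not part of this commit):

const build = require('../lib/build.js');

async function ci() {
	try {
		await build(); // resolves once client and server bundles are written
		console.log('build succeeded');
	} catch (err) {
		console.error(err.details || err.stack || err.message || err);
		process.exitCode = 1;
	}
}

ci();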

View File

@@ -1,3 +1,5 @@
+process.env.NODE_ENV = 'production';
+
 const fs = require('fs');
 const path = require('path');
 const mkdirp = require('mkdirp');
@@ -14,29 +16,32 @@ module.exports = () => {
 	// create main.js and server-routes.js
 	create_app();
 
-	function handleErrors(err, stats) {
-		if (err) {
-			console.error(err ? err.details || err.stack || err.message || err : 'Unknown error');
-			process.exit(1);
-		}
+	return new Promise((fulfil, reject) => {
+		function handleErrors(err, stats) {
+			if (err) {
+				reject(err);
+				process.exit(1);
+			}
 
-		if (stats.hasErrors()) {
-			console.log(stats.toString({ colors: true }));
-			process.exit(1);
-		}
-	}
+			if (stats.hasErrors()) {
+				console.error(stats.toString({ colors: true }));
+				reject(new Error(`Encountered errors while building app`));
+			}
+		}
 
-	client.run((err, clientStats) => {
-		handleErrors(err, clientStats);
-		const clientInfo = clientStats.toJson();
-		fs.writeFileSync(path.join(dest, 'stats.client.json'), JSON.stringify(clientInfo, null, '  '));
+		client.run((err, clientStats) => {
+			handleErrors(err, clientStats);
+			const clientInfo = clientStats.toJson();
+			fs.writeFileSync(path.join(dest, 'stats.client.json'), JSON.stringify(clientInfo, null, '  '));
 
-		server.run((err, serverStats) => {
-			handleErrors(err, serverStats);
-			const serverInfo = serverStats.toJson();
-			fs.writeFileSync(path.join(dest, 'stats.server.json'), JSON.stringify(serverInfo, null, '  '));
+			server.run((err, serverStats) => {
+				handleErrors(err, serverStats);
+				const serverInfo = serverStats.toJson();
+				fs.writeFileSync(path.join(dest, 'stats.server.json'), JSON.stringify(serverInfo, null, '  '));
 
-			generate_asset_cache(clientInfo, serverInfo);
-		});
-	});
-};
+				generate_asset_cache(clientInfo, serverInfo);
+				fulfil();
+			});
+		});
+	});
+};
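The key change here is wrapping webpack's callback-style `compiler.run` in a promise so the CLI's `.then`/`.catch` chain works. A standalone sketch of the same pattern, assuming a plain webpack config (the `runCompiler` helper is illustrative, not from this commit):

const webpack = require('webpack');

// Promisify one compiler run: reject on fatal errors or compilation
// errors, otherwise resolve with the stats in JSON form.
function runCompiler(config) {
	return new Promise((fulfil, reject) => {
		webpack(config).run((err, stats) => {
			if (err) return reject(err); // fatal webpack error
			if (stats.hasErrors()) return reject(new Error(stats.toString({ colors: true })));
			fulfil(stats.toJson());
		});
	});
}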

View File

@@ -1,8 +1,9 @@
 const fs = require('fs-extra');
 const app = require('express')();
 const compression = require('compression');
+const mkdirp = require('mkdirp');
 const sapper = require('../index.js');
-const static = require('serve-static');
+const serve = require('serve-static');
 const Spider = require('node-spider');
 const path = require('path');
@@ -106,6 +107,12 @@ function getChunkFiles() {
  */
 module.exports = function(includeUrls = null, excludeUrls = null,
 	apiPrefix = '/api', extractionDir = OUTPUT_DIR) {
+
+	// Clean the output directory and copy assets in.
+	fs.removeSync(extractionDir);
+	mkdirp.sync(extractionDir);
+	fs.copySync('assets', extractionDir);
+
 	// Set up the server.
 	// this allows us to do e.g. `fetch('/api/blog')` on the server
@@ -117,14 +124,10 @@ module.exports = function(includeUrls = null, excludeUrls = null,
 	app.use(compression({ threshold: 0 }));
-	app.use(static('assets'));
+	app.use(serve('assets'));
 	app.use(sapper());
 
-	// Clean the output directory and copy assets in.
-	fs.removeSync(extractionDir);
-	fs.copySync('assets', extractionDir);
-
 	// If exclude URLs are set, normalize them.
 	if (excludeUrls == null) excludeUrls = [];
 	excludeUrls = excludeUrls.map((url) => getFullUrl(url));
@@ -133,9 +136,11 @@ module.exports = function(includeUrls = null, excludeUrls = null,
 	// scraper. The program automatically exits after all the static pages have
 	// been scraped from the server that are accessible from the root page (`/`).
 	const extractedFiles = []; // keep track of extracted files.
-	const server = app.listen(PORT, () => {
-		console.log(`listening on port ${PORT} and beginning extraction`);
+	return new Promise((resolve, reject) => {
+		const server = app.listen(PORT, () => {
+			console.log(`listening on port ${PORT} and beginning extraction`);
+
 		const spider = new Spider({
 			concurrent: 5,
 			delay: 0,
@@ -228,4 +233,4 @@ module.exports = function(includeUrls = null, excludeUrls = null,
 			}
 		});
 	});
-}
+};
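Two details worth noting in this file: the `serve-static` import is renamed from `static` to `serve` because `static` is a reserved word in strict mode, and `app.listen` is wrapped in a promise so extraction can be awaited by the CLI. A minimal sketch of the listen-as-promise pattern (the `listen` helper is illustrative, not from this commit):

// Resolve with the server once express is actually accepting connections.
function listen(app, port) {
	return new Promise((resolve, reject) => {
		const server = app.listen(port, () => resolve(server));
		server.on('error', reject);
	});
}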

View File

@@ -409,13 +409,10 @@ function run(env) {
 function exec(cmd) {
 	return new Promise((fulfil, reject) => {
 		require('child_process').exec(cmd, (err, stdout, stderr) => {
-			if (err) {
-				process.stdout.write(stdout);
-				process.stderr.write(stderr);
-				return reject(err);
-			}
-
+			process.stdout.write(stdout);
+			process.stderr.write(stderr);
+			if (err) return reject(err);
 			fulfil();
 		});
 	});
 }
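Moving the `stdout`/`stderr` writes ahead of the error check means the child process output is echoed whether the command succeeds or fails, instead of only on failure. Call sites are unchanged, e.g. (illustrative):

exec('npm run build')
	.then(() => console.log('done'))
	.catch(err => console.error(`command failed: ${err.message}`));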