Compare commits

...

60 Commits

Author SHA1 Message Date
Richard Harris
969169ae20 -> v0.24.3 2019-02-01 05:40:58 -05:00
Rich Harris
79fa15da3d Merge pull request #525 from nolanlawson/nolan/sw-index-html
add service-worker-index.html
2019-02-01 05:36:08 -05:00
Rich Harris
ddc08d94cc Merge pull request #535 from nolanlawson/nolan/no-sourcemap-in-sw
Do not include sourcemap files in Service Worker shell
2019-02-01 05:34:20 -05:00
Richard Harris
e2193a6080 -> v0.24.2 2018-12-31 11:46:08 -05:00
Richard Harris
f66c7dcb0d -> v0.24.2 2018-12-31 11:45:20 -05:00
Rich Harris
06f1a0e6c0 Merge pull request #541 from sveltejs/rollup-1.0
support Rollup 1.0
2018-12-31 11:44:22 -05:00
Conduitry
7726325b4b support Rollup 1.0 2018-12-31 11:14:49 -05:00
Nolan Lawson
f97400caaa Do not include sourcemap files in Service Worker shell
Fixes #534
2018-12-16 11:48:54 -08:00
Nolan Lawson
03af9b1a16 add service-worker-index.html
fixes #422
2018-12-12 18:30:16 +00:00
Rich Harris
02cef046aa -> v0.24.1 2018-12-09 10:30:34 -05:00
Rich Harris
c2aeac34b6 Merge pull request #530 from artemjackson/patch-1
Fixed rel="preload" `as` attribute for styles
2018-12-09 10:28:39 -05:00
Rich Harris
abd2f7fd39 Merge pull request #529 from artemjackson/master
Added css build info for webpack apps
2018-12-09 10:28:19 -05:00
Artyom Stepanishchev
e7cf9bf1b6 Merge branch 'master' of github.com:sveltejs/sapper 2018-12-09 18:01:31 +03:00
Artyom Stepanishchev
1fdd0e3ad2 Merge branch 'master' of github.com:sveltejs/sapper into patch-1 2018-12-09 18:00:55 +03:00
Rich Harris
af0a7e04f9 Merge pull request #531 from artemjackson/bugfix/tests
Fixed tests for node 6
2018-12-09 09:57:12 -05:00
Artyom Stepanishchev
ed19a19fed Fixed tests for node 6 2018-12-09 17:28:59 +03:00
Artyom Stepanishchev
8ebfcc9a54 Fixed rel="preload" as attribute for styles 2018-12-07 23:59:51 +03:00
Artyom Stepanishchev
af2a792508 Added css build info for webpack apps 2018-12-07 23:28:28 +03:00
Conduitry
14e809af6e update npm-run-all to remove malicious flatmap-stream@0.1.1 2018-12-06 07:02:04 -05:00
Rich Harris
03c5f5b446 -> v0.24.0 2018-10-27 12:40:27 -04:00
Rich Harris
6655b1b49d Merge pull request #503 from sveltejs/gh-490
redirect to external URLs
2018-10-27 12:36:10 -04:00
Rich Harris
eebf076f23 Merge pull request #501 from sveltejs/gh-497
consistent query parameter handling between client and server
2018-10-27 12:35:58 -04:00
Rich Harris
198be28f4b Merge pull request #504 from sveltejs/gh-480
change scroll[X|Y] to page[X|Y]Offset
2018-10-27 12:35:47 -04:00
Rich Harris
4f720446b2 change scroll[X|Y] to page[X|Y]Offset - closes #480 2018-10-27 12:26:53 -04:00
Rich Harris
e69cb3639a redirect to external URLs - closes #490 2018-10-27 12:14:28 -04:00
Rich Harris
1b1a86764f Merge pull request #502 from sveltejs/gh-495
strip leading slash from basepath
2018-10-27 11:50:23 -04:00
Rich Harris
f50f83c4a4 strip leading slash from basepath - fixes #495 2018-10-27 11:41:02 -04:00
Rich Harris
eadefd996b consistent query parameter handling between client and server - fixes #497 2018-10-27 11:36:18 -04:00
Rich Harris
ab52aabd1d Merge pull request #500 from sveltejs/dont-buffer-logs
don't buffer stdout/stderr
2018-10-27 11:18:50 -04:00
Rich Harris
c5a80543b3 reduce flakiness of unrelated test 2018-10-27 11:12:10 -04:00
Rich Harris
cfd95ac024 dont buffer stdout/stderr 2018-10-27 11:02:34 -04:00
Rich Harris
73ff95c677 Merge pull request #498 from mrkishi/build-dir
Add missing posixify to `build_dir`
2018-10-27 10:17:58 -04:00
mrkishi
382fe6b7b9 Add missing posixify 2018-10-26 11:03:29 -03:00
Rich Harris
3b714c0de3 -> v0.23.5 2018-10-24 21:40:04 -04:00
Rich Harris
28186227a9 add tests 2018-10-24 21:20:27 -04:00
Rich Harris
2ac0f2bf3d Merge branch 'search-params-decoding' of https://github.com/mrkishi/sapper into mrkishi-search-params-decoding 2018-10-24 21:10:35 -04:00
Rich Harris
4991f3b359 support non-native promise implementations 2018-10-24 21:05:25 -04:00
Rich Harris
65128118c7 Merge branch '487-async-route-errors' of https://github.com/nikku/sapper into nikku-487-async-route-errors 2018-10-24 20:58:52 -04:00
Rich Harris
3eced6fa4d Merge pull request #492 from sveltejs/lazy-css
fix lazy css bug, add tests
2018-10-24 20:58:08 -04:00
mrkishi
c4aee66c32 Fix search params decoding 2018-10-24 21:19:03 -03:00
Rich Harris
410c52df41 fix lazy css bug, add tests 2018-10-24 18:48:38 -04:00
Rich Harris
ffd56e2a20 -> v0.23.4 2018-10-24 15:51:44 -04:00
Rich Harris
1e5a87cf71 Merge pull request #491 from sveltejs/empty-href-export
ignore empty anchors when exporting
2018-10-24 15:51:28 -04:00
Rich Harris
281e183c61 ignore empty anchors when exporting 2018-10-24 15:38:45 -04:00
Nico Rehwaldt
3fe7b55955 async -> Promise.reject 2018-10-20 22:46:42 +02:00
Nico Rehwaldt
464924ed67 handle async route errors
Related to #487
2018-10-20 22:40:21 +02:00
Rich Harris
e5d7d8ab2b -> v0.23.3 2018-10-16 16:16:11 -04:00
Rich Harris
d3e560325d Merge pull request #477 from sveltejs/clear-errors
clear errors on successful render
2018-10-16 16:15:28 -04:00
Rich Harris
64e5065aa5 clear errors on successful render 2018-10-16 15:59:57 -04:00
Rich Harris
cb45bb0fbe -> v0.23.2 2018-10-16 08:58:02 -04:00
Rich Harris
f39455014a update deps 2018-10-16 08:44:22 -04:00
Rich Harris
4fe8df3696 Merge pull request #471 from sveltejs/missing-css
include css depended upon by entry point, even if also depended on by a lazily-loaded component
2018-10-16 08:38:20 -04:00
Rich Harris
4fdc7055c1 -> v0.23.1 2018-10-15 22:10:18 -04:00
Rich Harris
cca417a85a simplify, preserve monomorphism 2018-10-15 22:07:23 -04:00
Rich Harris
635c13a175 Merge branch 'tags-from-other-pages' of https://github.com/DayBr3ak/sapper into DayBr3ak-tags-from-other-pages 2018-10-15 21:43:39 -04:00
Rich Harris
2e3aef8b21 simplify 2018-10-15 21:36:40 -04:00
Rich Harris
44736754ad fix file extension 2018-10-15 21:27:52 -04:00
Benjamin GROENEVELD
a399d87d9b handle tag click from another page 2018-10-13 21:59:47 +02:00
Benjamin GROENEVELD
a68c62ce91 Fix hash link reliability (fix #434) 2018-10-13 16:45:14 +02:00
Rich Harris
1b9b559d82 include css depended upon by entry point, even if also depended on by a lazily-loaded component 2018-10-11 23:18:51 -04:00
64 changed files with 1738 additions and 979 deletions

View File

@@ -1,5 +1,51 @@
# sapper changelog
## 0.24.3
* Add service-worker-index.html shell file for offline support ([#422](https://github.com/sveltejs/sapper/issues/422))
* Don't cache .map files ([#534](https://github.com/sveltejs/sapper/issues/534))
## 0.24.2
* Support Rollup 1.0 ([#541](https://github.com/sveltejs/sapper/pull/541))
## 0.24.1
* Include CSS chunks in webpack build info to avoid duplication ([#529](https://github.com/sveltejs/sapper/pull/529))
* Fix preload `as` for styles ([#530](https://github.com/sveltejs/sapper/pull/530))
## 0.24.0
* Handle external URLs in `this.redirect` ([#490](https://github.com/sveltejs/sapper/issues/490))
* Strip leading `/` from basepath ([#495](https://github.com/sveltejs/sapper/issues/495))
* Treat duplicate query string parameters as arrays ([#497](https://github.com/sveltejs/sapper/issues/497))
* Don't buffer `stdout` and `stderr` ([#305](https://github.com/sveltejs/sapper/issues/305))
* Posixify `build_dir` ([#498](https://github.com/sveltejs/sapper/pull/498))
* Use `page[XY]Offset` instead of `scroll[XY]` ([#480](https://github.com/sveltejs/sapper/issues/480))
## 0.23.5
* Include lazily-imported CSS in main CSS chunk ([#492](https://github.com/sveltejs/sapper/pull/492))
* Make search param decoding spec-compliant ([#493](https://github.com/sveltejs/sapper/pull/493))
* Handle async route errors ([#488](https://github.com/sveltejs/sapper/pull/488))
## 0.23.4
* Ignore empty anchors when exporting ([#491](https://github.com/sveltejs/sapper/pull/491))
## 0.23.3
* Clear `error` and `status` on successful render ([#477](https://github.com/sveltejs/sapper/pull/477))
## 0.23.2
* Fix entry point CSS ([#471](https://github.com/sveltejs/sapper/pull/471))
## 0.23.1
* Scroll to deeplink that matches current URL ([#472](https://github.com/sveltejs/sapper/pull/472))
* Scroll to deeplink on another page ([#341](https://github.com/sveltejs/sapper/issues/341))
## 0.23.0
* Overhaul internal APIs ([#468](https://github.com/sveltejs/sapper/pull/468))
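The 0.24.0 note on duplicate query string parameters corresponds to the runtime/app.ts hunk further down. A minimal TypeScript sketch of that behaviour, for orientation only (parse_query is an illustrative name, not an actual Sapper export):

type Query = Record<string, string | string[]>;

function parse_query(search: string): Query {
	const query: Query = Object.create(null);
	if (search.length <= 1) return query;

	search.slice(1).split('&').forEach(param => {
		// '?message' (no value) yields '', '?p=one&p=two' collects ['one', 'two']
		const [, key, value = ''] = /([^=]*)(?:=(.*))?/.exec(decodeURIComponent(param))!;
		const decoded = value.replace(/\+/g, ' ');

		const existing = query[key];
		if (Array.isArray(existing)) existing.push(decoded);
		else if (typeof existing === 'string') query[key] = [existing, decoded];
		else query[key] = decoded;
	});

	return query;
}

parse_query('?p=one&p=two');  // { p: ['one', 'two'] }
parse_query('?message');      // { message: '' }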

package-lock.json (generated, 1156 changed lines)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "sapper",
"version": "0.23.0",
"version": "0.24.3",
"description": "Military-grade apps, engineered by Svelte",
"bin": {
"sapper": "./sapper"
@@ -17,53 +17,53 @@
"test": "test"
},
"dependencies": {
"html-minifier": "^3.5.16",
"html-minifier": "^3.5.20",
"shimport": "0.0.11",
"source-map-support": "^0.5.6",
"sourcemap-codec": "^1.4.1",
"source-map-support": "^0.5.9",
"sourcemap-codec": "^1.4.3",
"string-hash": "^1.1.3",
"tslib": "^1.9.1"
"tslib": "^1.9.3"
},
"devDependencies": {
"@types/mkdirp": "^0.5.2",
"@types/mocha": "^5.2.5",
"@types/node": "^10.7.1",
"@types/node": "^10.12.0",
"@types/puppeteer": "^1.9.0",
"@types/rimraf": "^2.0.2",
"agadoo": "^1.0.1",
"cheap-watch": "^0.3.0",
"cheap-watch": "^1.0.0",
"cookie": "^0.3.1",
"devalue": "^1.0.4",
"eslint": "^4.13.1",
"eslint-plugin-import": "^2.12.0",
"kleur": "^2.0.1",
"eslint": "^5.7.0",
"eslint-plugin-import": "^2.14.0",
"kleur": "^2.0.2",
"mkdirp": "^0.5.1",
"mocha": "^5.2.0",
"node-fetch": "^2.1.1",
"npm-run-all": "^4.1.3",
"polka": "^0.4.0",
"node-fetch": "^2.2.0",
"npm-run-all": "^4.1.5",
"polka": "^0.5.1",
"port-authority": "^1.0.5",
"pretty-bytes": "^5.0.0",
"pretty-bytes": "^5.1.0",
"puppeteer": "^1.9.0",
"require-relative": "^0.8.7",
"rimraf": "^2.6.2",
"rollup": "^0.65.0",
"rollup-plugin-commonjs": "^9.1.3",
"rollup-plugin-json": "^3.0.0",
"rollup": "^0.66.6",
"rollup-plugin-commonjs": "^9.2.0",
"rollup-plugin-json": "^3.1.0",
"rollup-plugin-node-resolve": "^3.4.0",
"rollup-plugin-replace": "^2.0.0",
"rollup-plugin-replace": "^2.1.0",
"rollup-plugin-string": "^2.0.2",
"rollup-plugin-svelte": "^4.3.2",
"rollup-plugin-typescript": "^0.8.1",
"rollup-plugin-typescript": "^1.0.0",
"sade": "^1.4.1",
"sander": "^0.6.0",
"sirv": "^0.2.2",
"svelte": "^2.6.3",
"svelte-loader": "^2.9.0",
"svelte": "^2.13.5",
"svelte-loader": "^2.11.0",
"ts-node": "^7.0.1",
"typescript": "^2.8.3",
"webpack": "^4.8.3",
"webpack-format-messages": "^2.0.1"
"typescript": "^3.1.3",
"webpack": "^4.20.2",
"webpack-format-messages": "^2.0.3"
},
"scripts": {
"test": "mocha --opts mocha.opts",

View File

@@ -116,10 +116,15 @@ export async function build({
let serviceworker_stats;
if (serviceworker) {
const client_files = client_result.chunks
.filter(chunk => !chunk.file.endsWith('.map')) // SW does not need to cache sourcemap files
.map(chunk => `client/${chunk.file}`);
create_serviceworker_manifest({
manifest_data,
output,
client_files: client_result.chunks.map(chunk => `client/${chunk.file}`),
client_files,
static_files
});
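A quick illustration of what the filter above keeps out of the service worker manifest (chunk file names are invented for the example):

const chunks = [
	{ file: 'main.abc123.js' },
	{ file: 'main.abc123.js.map' }
];

const client_files = chunks
	.filter(chunk => !chunk.file.endsWith('.map'))
	.map(chunk => `client/${chunk.file}`);
// -> ['client/main.abc123.js'] — the sourcemap never reaches the service worker cache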

View File

@@ -214,12 +214,9 @@ class Watcher extends EventEmitter {
// TODO watch the configs themselves?
const compilers: Compilers = await create_compilers(this.bundler, cwd, src, dest, false);
let log = '';
const emitFatal = () => {
this.emit('fatal', <FatalEvent>{
message: `Server crashed`,
log
message: `Server crashed`
});
this.crashed = true;
@@ -236,7 +233,6 @@ class Watcher extends EventEmitter {
handle_result: (result: CompileResult) => {
deferred.promise.then(() => {
const restart = () => {
log = '';
this.crashed = false;
ports.wait(this.port)
@@ -260,8 +256,7 @@ class Watcher extends EventEmitter {
if (this.crashed) return;
this.emit('fatal', <FatalEvent>{
message: `Server is not listening on port ${this.port}`,
log
message: `Server is not listening on port ${this.port}`
});
});
};
@@ -292,12 +287,10 @@ class Watcher extends EventEmitter {
});
this.proc.stdout.on('data', chunk => {
log += chunk;
this.emit('stdout', chunk);
});
this.proc.stderr.on('data', chunk => {
log += chunk;
this.emit('stderr', chunk);
});

View File

@@ -38,6 +38,8 @@ async function _export({
oninfo = noop,
onfile = noop
}: Opts = {}) {
basepath = basepath.replace(/^\//, '')
cwd = path.resolve(cwd);
static_files = path.resolve(cwd, static_files);
build_dir = path.resolve(cwd, build_dir);
@@ -92,7 +94,9 @@ async function _export({
const is_html = type === 'text/html';
if (is_html) {
file = file === '' ? 'index.html' : `${file}/index.html`;
if (pathname !== '/service-worker-index.html') {
file = file === '' ? 'index.html' : `${file}/index.html`;
}
body = minify_html(body);
}
@@ -111,7 +115,10 @@ async function _export({
});
async function handle(url: URL) {
const pathname = (url.pathname.replace(root.pathname, '') || '/');
let pathname = url.pathname;
if (pathname !== '/service-worker-index.html') {
pathname = pathname.replace(root.pathname, '') || '/'
}
if (seen.has(pathname)) return;
seen.add(pathname);
@@ -136,7 +143,7 @@ async function _export({
const range = ~~(r.status / 100);
if (range === 2) {
if (type === 'text/html') {
if (type === 'text/html' && pathname !== '/service-worker-index.html') {
const urls: URL[] = [];
const cleaned = clean_html(body);
@@ -179,6 +186,7 @@ async function _export({
return ports.wait(port)
.then(() => handle(root))
.then(() => handle(resolve(root.href, 'service-worker-index.html')))
.then(() => proc.kill())
.catch(err => {
proc.kill();
@@ -187,6 +195,6 @@ async function _export({
}
function get_href(attrs: string) {
const match = /href\s*=\s*(?:"(.+?)"|'(.+?)'|([^\s>]+))/.exec(attrs);
const match = /href\s*=\s*(?:"(.*?)"|'(.+?)'|([^\s>]+))/.exec(attrs);
return match[1] || match[2] || match[3];
}
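The change from "(.+?)" to "(.*?)" in get_href above is what lets an empty href="" match the quoted branch instead of falling through to the bare-value branch; a quick comparison of the two patterns (illustration only):

const old_re = /href\s*=\s*(?:"(.+?)"|'(.+?)'|([^\s>]+))/;
const new_re = /href\s*=\s*(?:"(.*?)"|'(.+?)'|([^\s>]+))/;

old_re.exec('href=""');  // groups: [undefined, undefined, '""'] — the bare-value branch captures the raw quotes
new_re.exec('href=""');  // groups: ['', undefined, undefined]   — the quoted branch captures the empty string, so empty anchors can be skipped (#491)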

View File

@@ -64,6 +64,14 @@ prog.command('dev')
let first = true;
watcher.on('stdout', data => {
process.stdout.write(data);
});
watcher.on('stderr', data => {
process.stderr.write(data);
});
watcher.on('ready', async (event: ReadyEvent) => {
if (first) {
console.log(colors.bold.cyan(`> Listening on http://localhost:${event.port}`));
@@ -73,16 +81,6 @@ prog.command('dev')
}
first = false;
}
// TODO clear screen?
event.process.stdout.on('data', data => {
process.stdout.write(data);
});
event.process.stderr.on('data', data => {
process.stderr.write(data);
});
});
watcher.on('invalid', (event: InvalidEvent) => {

View File

@@ -142,12 +142,14 @@ export default class RollupCompiler {
const bundle = await rollup.rollup({
input,
inlineDynamicImports: true,
external: (id: string) => {
return (id[0] !== '.' && !path.isAbsolute(id)) || id.slice(-5, id.length) === '.json';
}
});
const { code } = await bundle.generate({ format: 'cjs' });
const resp = await bundle.generate({ format: 'cjs' });
const { code } = resp.output ? resp.output[0] : resp;
// temporarily override require
const defaultLoader = require.extensions['.js'];

View File

@@ -55,14 +55,25 @@ export default class WebpackResult implements CompileResult {
}
to_json(manifest_data: ManifestData, dirs: Dirs): BuildInfo {
const extract_css = (assets: string[] | string) => {
assets = Array.isArray(assets) ? assets : [assets];
return assets.find(asset => /\.css$/.test(asset));
};
return {
bundler: 'webpack',
shimport: null, // webpack has its own loader
assets: this.assets,
css: {
// TODO
main: null,
chunks: {}
main: extract_css(this.assets.main),
chunks: Object
.keys(this.assets)
.filter(chunkName => chunkName !== 'main')
.reduce((chunks: { [key: string]: string }, chukName) => {
const assets = this.assets[chukName];
chunks[chukName] = extract_css(assets);
return chunks;
}, {})
}
};
}
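With extract_css in place, the webpack branch of the build info now carries real CSS file names instead of nulls. A hypothetical to_json() result for a two-chunk build (asset names invented):

const example_build_info = {
	bundler: 'webpack' as const,
	shimport: null,                          // webpack has its own loader
	assets: {
		main: ['main.abc123.js', 'main.abc123.css'],
		about: ['about.def456.js', 'about.def456.css']
	},
	css: {
		main: 'main.abc123.css',             // extract_css(this.assets.main)
		chunks: {
			about: 'about.def456.css'        // every non-main chunk, keyed by chunk name
		}
	}
};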

View File

@@ -3,7 +3,7 @@ import * as path from 'path';
import hash from 'string-hash';
import * as codec from 'sourcemap-codec';
import { PageComponent, Dirs } from '../../interfaces';
import { CompileResult } from './interfaces';
import { CompileResult, Chunk } from './interfaces';
import { posixify } from '../../utils'
const inline_sourcemap_header = 'data:application/json;charset=utf-8;base64,';
@@ -46,6 +46,65 @@ type SourceMap = {
mappings: string;
};
function get_css_from_modules(modules: string[], css_map: Map<string, string>, dirs: Dirs) {
const parts: string[] = [];
const mappings: number[][][] = [];
const combined_map: SourceMap = {
version: 3,
file: null,
sources: [],
sourcesContent: [],
names: [],
mappings: null
};
modules.forEach(module => {
if (!/\.css$/.test(module)) return;
const css = css_map.get(module);
const { code, map } = extract_sourcemap(css, module);
parts.push(code);
if (map) {
const lines = codec.decode(map.mappings);
if (combined_map.sources.length > 0 || combined_map.names.length > 0) {
lines.forEach(line => {
line.forEach(segment => {
// adjust source index
segment[1] += combined_map.sources.length;
// adjust name index
if (segment[4]) segment[4] += combined_map.names.length;
});
});
}
combined_map.sources.push(...map.sources);
combined_map.sourcesContent.push(...map.sourcesContent);
combined_map.names.push(...map.names);
mappings.push(...lines);
}
});
if (parts.length > 0) {
combined_map.mappings = codec.encode(mappings);
combined_map.sources = combined_map.sources.map(source => path.relative(`${dirs.dest}/client`, source));
return {
code: parts.join('\n'),
map: combined_map
};
}
return null;
}
export default function extract_css(client_result: CompileResult, components: PageComponent[], dirs: Dirs) {
const result: {
main: string | null;
@@ -57,151 +116,94 @@ export default function extract_css(client_result: CompileResult, components: Pa
if (!client_result.css_files) return; // Rollup-only for now
const unaccounted_for = new Set();
let asset_dir = `${dirs.dest}/client`;
if (process.env.SAPPER_LEGACY_BUILD) asset_dir += '/legacy';
const css_map = new Map();
client_result.css_files.forEach(css => {
unaccounted_for.add(css.id);
css_map.set(css.id, css.code);
const unclaimed = new Set(client_result.css_files.map(x => x.id));
const lookup = new Map();
client_result.chunks.forEach(chunk => {
lookup.set(chunk.file, chunk);
});
const chunk_map = new Map();
client_result.chunks.forEach(chunk => {
chunk_map.set(chunk.file, chunk);
const css_map = new Map();
client_result.css_files.forEach(css_module => {
css_map.set(css_module.id, css_module.code);
});
const chunks_with_css = new Set();
// figure out which chunks belong to which components...
const component_owners = new Map();
// concatenate and emit CSS
client_result.chunks.forEach(chunk => {
chunk.modules.forEach(module => {
const component = posixify(path.relative(dirs.routes, module));
component_owners.set(component, chunk);
});
const css_modules = chunk.modules.filter(m => css_map.has(m));
if (!css_modules.length) return;
const css = get_css_from_modules(css_modules, css_map, dirs);
const { code, map } = css;
const output_file_name = chunk.file.replace(/\.js$/, '.css');
map.file = output_file_name;
map.sources = map.sources.map(source => path.relative(`${asset_dir}`, source));
fs.writeFileSync(`${asset_dir}/${output_file_name}`, `${code}\n/* sourceMappingURL=./${output_file_name}.map */`);
fs.writeFileSync(`${asset_dir}/${output_file_name}.map`, JSON.stringify(map, null, ' '));
chunks_with_css.add(chunk);
});
const chunks_depended_upon_by_component = new Map();
const entry = path.resolve(dirs.src, 'client.js');
const entry_chunk = client_result.chunks.find(chunk => chunk.modules.indexOf(entry) !== -1);
// ...so we can figure out which chunks don't belong
const entry_chunk_dependencies: Set<Chunk> = new Set([entry_chunk]);
const entry_css_modules: string[] = [];
// recursively find the chunks this component depends on
entry_chunk_dependencies.forEach(chunk => {
chunk.imports.forEach(file => {
entry_chunk_dependencies.add(lookup.get(file));
});
if (chunks_with_css.has(chunk)) {
chunk.modules.forEach(file => {
unclaimed.delete(file);
if (css_map.has(file)) {
entry_css_modules.push(file);
}
});
}
});
// figure out which (css-having) chunks each component depends on
components.forEach(component => {
const chunk = component_owners.get(component.file);
const resolved = path.resolve(dirs.routes, component.file);
const chunk: Chunk = client_result.chunks.find(chunk => chunk.modules.indexOf(resolved) !== -1);
if (!chunk) {
// this should never happen!
throw new Error(`Could not find chunk that owns ${component.file}`);
}
const chunks = new Set([chunk]);
chunks.forEach(chunk => {
chunk.imports.forEach((file: string) => {
const chunk = chunk_map.get(file);
if (chunk) chunks.add(chunk);
const chunk_dependencies: Set<Chunk> = new Set([chunk]);
const css_dependencies: string[] = [];
// recursively find the chunks this component depends on
chunk_dependencies.forEach(chunk => {
chunk.imports.forEach(file => {
chunk_dependencies.add(lookup.get(file));
});
});
chunks.forEach(chunk => {
chunk.modules.forEach((module: string) => {
unaccounted_for.delete(module);
});
});
if (chunks_with_css.has(chunk)) {
css_dependencies.push(chunk.file.replace(/\.js$/, '.css'));
chunks_depended_upon_by_component.set(
component,
chunks
);
});
function get_css_from_modules(modules: string[]) {
const parts: string[] = [];
const mappings: number[][][] = [];
const combined_map: SourceMap = {
version: 3,
file: null,
sources: [],
sourcesContent: [],
names: [],
mappings: null
};
modules.forEach(module => {
if (!/\.css$/.test(module)) return;
const css = css_map.get(module);
const { code, map } = extract_sourcemap(css, module);
parts.push(code);
if (map) {
const lines = codec.decode(map.mappings);
if (combined_map.sources.length > 0 || combined_map.names.length > 0) {
lines.forEach(line => {
line.forEach(segment => {
// adjust source index
segment[1] += combined_map.sources.length;
// adjust name index
if (segment[4]) segment[4] += combined_map.names.length;
});
});
}
combined_map.sources.push(...map.sources);
combined_map.sourcesContent.push(...map.sourcesContent);
combined_map.names.push(...map.names);
mappings.push(...lines);
chunk.modules.forEach(file => {
unclaimed.delete(file);
});
}
});
if (parts.length > 0) {
combined_map.mappings = codec.encode(mappings);
combined_map.sources = combined_map.sources.map(source => path.relative(`${dirs.dest}/client`, source));
return {
code: parts.join('\n'),
map: combined_map
};
}
return null;
}
let asset_dir = `${dirs.dest}/client`;
if (process.env.SAPPER_LEGACY_BUILD) asset_dir += '/legacy';
const replacements = new Map();
chunks_depended_upon_by_component.forEach((chunks, component) => {
const chunks_with_css = Array.from(chunks).filter(chunk => {
const css = get_css_from_modules(chunk.modules);
if (css) {
const { code, map } = css;
const output_file_name = chunk.file.replace(/\.js$/, '.css');
map.file = output_file_name;
map.sources = map.sources.map(source => path.relative(`${asset_dir}`, source));
fs.writeFileSync(`${asset_dir}/${output_file_name}`, `${code}\n/* sourceMappingURL=./${output_file_name}.map */`);
fs.writeFileSync(`${asset_dir}/${output_file_name}.map`, JSON.stringify(map, null, ' '));
return true;
}
});
const files = chunks_with_css.map(chunk => chunk.file.replace(/\.js$/, '.css'));
replacements.set(
component.file,
files
);
result.chunks[component.file] = files;
result.chunks[component.file] = css_dependencies;
});
fs.readdirSync(asset_dir).forEach(file => {
@@ -210,13 +212,17 @@ export default function extract_css(client_result: CompileResult, components: Pa
const source = fs.readFileSync(`${asset_dir}/${file}`, 'utf-8');
const replaced = source.replace(/["']__SAPPER_CSS_PLACEHOLDER:(.+?)__["']/g, (m, route) => {
return JSON.stringify(replacements.get(route));
return JSON.stringify(result.chunks[route]);
});
fs.writeFileSync(`${asset_dir}/${file}`, replaced);
});
const leftover = get_css_from_modules(Array.from(unaccounted_for));
unclaimed.forEach(file => {
entry_css_modules.push(file);
});
const leftover = get_css_from_modules(entry_css_modules, css_map, dirs);
if (leftover) {
const { code, map } = leftover;

View File

@@ -45,17 +45,15 @@ export function create_serviceworker_manifest({ manifest_data, output, client_fi
client_files: string[];
static_files: string;
}) {
let files: string[];
let files: string[] = ['/service-worker-index.html'];
if (fs.existsSync(static_files)) {
files = walk(static_files);
files = files.concat(walk(static_files));
} else {
// TODO remove in a future version
if (fs.existsSync('assets')) {
throw new Error(`As of Sapper 0.21, the assets/ directory should become static/`);
}
files = [];
}
let code = `
@@ -226,8 +224,8 @@ function generate_server(
error
};`.replace(/^\t\t/gm, '').trim();
const build_dir = path.relative(cwd, dest);
const src_dir = path.relative(cwd, src);
const build_dir = posixify(path.relative(cwd, dest));
const src_dir = posixify(path.relative(cwd, src));
return `// This file is generated by Sapper — do not edit it!\n` + template
.replace('__BUILD__DIR__', JSON.stringify(build_dir))
@@ -242,4 +240,4 @@ function get_file(path_to_routes: string, component: PageComponent) {
}
return posixify(`${path_to_routes}/${component.file}`);
}
}

View File

@@ -69,7 +69,6 @@ export type ErrorEvent = {
export type FatalEvent = {
message: string;
log?: string;
};
export type InvalidEvent = {

View File

@@ -87,11 +87,14 @@ export function select_route(url: URL): Target {
const match = page.pattern.exec(path);
if (match) {
const query: Record<string, string | true> = {};
const query: Record<string, string | string[]> = Object.create(null);
if (url.search.length > 0) {
url.search.slice(1).split('&').forEach(searchParam => {
const [, key, value] = /([^=]+)(?:=(.*))?/.exec(searchParam);
query[key] = decodeURIComponent((value || '').replace(/\+/g, ' '));
let [, key, value] = /([^=]*)(?:=(.*))?/.exec(decodeURIComponent(searchParam));
value = (value || '').replace(/\+/g, ' ');
if (typeof query[key] === 'string') query[key] = [<string>query[key]];
if (typeof query[key] === 'object') query[key].push(value);
else query[key] = value;
});
}
return { url, path, page, match, query };
@@ -101,12 +104,13 @@ export function select_route(url: URL): Target {
export function scroll_state() {
return {
x: scrollX,
y: scrollY
x: pageXOffset,
y: pageYOffset
};
}
export function navigate(target: Target, id: number, noscroll = false): Promise<any> {
export function navigate(target: Target, id: number, noscroll?: boolean, hash?: string): Promise<any> {
let scroll: ScrollPosition;
if (id) {
// popstate or initial navigation
cid = id;
@@ -137,13 +141,12 @@ export function navigate(target: Target, id: number, noscroll = false): Promise<
if (redirect) {
return goto(redirect.location, { replaceState: true });
}
render(data, nullable_depth, scroll_history[id], token);
render(data, nullable_depth, scroll_history[id], noscroll, hash, token);
if (document.activeElement) document.activeElement.blur();
});
}
function render(data: any, nullable_depth: number, scroll: ScrollPosition, token: {}) {
function render(data: any, nullable_depth: number, scroll: ScrollPosition, noscroll: boolean, hash: string, token: {}) {
if (current_token !== token) return;
if (root_component) {
@@ -182,8 +185,20 @@ function render(data: any, nullable_depth: number, scroll: ScrollPosition, token
});
}
if (scroll) {
scrollTo(scroll.x, scroll.y);
if (!noscroll) {
if (hash) {
// scroll is an element id (from a hash), we need to compute y.
const deep_linked = document.querySelector(hash);
if (deep_linked) {
scroll = {
x: 0,
y: deep_linked.getBoundingClientRect().top
};
}
}
scroll_history[cid] = scroll;
if (scroll) scrollTo(scroll.x, scroll.y);
}
Object.assign(root_props, data);
@@ -298,7 +313,7 @@ export function prepare_page(target: Target): Promise<{
};
}
const props = { path, query };
const props = { path, query, error: null, status: null };
const data = {
path,
preloading: false,

View File

@@ -13,7 +13,7 @@ import {
set_cid
} from '../app';
import prefetch from '../prefetch/index';
import { Store } from '../types';
import { Store, ScrollPosition } from '../types';
export default function start(opts: {
target: Node,
@@ -36,16 +36,11 @@ export default function start(opts: {
return Promise.resolve().then(() => {
const { hash, href } = location;
const deep_linked = hash && document.getElementById(hash.slice(1));
scroll_history[uid] = deep_linked ?
{ x: 0, y: deep_linked.getBoundingClientRect().top } :
scroll_state();
history.replaceState({ id: uid }, '', href);
if (!initial_data.error) {
const target = select_route(new URL(location.href));
if (target) return navigate(target, uid);
if (target) return navigate(target, uid, false, hash);
}
});
}
@@ -84,7 +79,7 @@ function handle_click(event: MouseEvent) {
const href = String(svg ? (<SVGAElement>a).href.baseVal : a.href);
if (href === location.href) {
event.preventDefault();
if (!location.hash) event.preventDefault();
return;
}
@@ -104,7 +99,7 @@ function handle_click(event: MouseEvent) {
const target = select_route(url);
if (target) {
const noscroll = a.hasAttribute('sapper-noscroll');
navigate(target, null, noscroll);
navigate(target, null, noscroll, url.hash);
event.preventDefault();
history.pushState({ id: cid }, '', url.href);
}

View File

@@ -55,7 +55,7 @@ export type Target = {
path: string;
page: Page;
match: RegExpExecArray;
query: Record<string, string | true>;
query: Record<string, string | string[]>;
};
export type Redirect = {

View File

@@ -3,7 +3,7 @@ import * as path from 'path';
import cookie from 'cookie';
import devalue from 'devalue';
import fetch from 'node-fetch';
import { URL } from 'url';
import { URL, resolve } from 'url';
import { build_dir, dev, src_dir, IGNORE } from '../placeholders';
import { Manifest, Page, Props, Req, Res, Store } from './types';
@@ -34,6 +34,7 @@ export function get_page_handler(
}
async function handle_page(page: Page, req: Req, res: Res, status = 200, error: Error | string = null) {
const isSWIndexHtml = req.path === '/service-worker-index.html';
const build_info: {
bundler: 'rollup' | 'webpack',
shimport: string | null,
@@ -47,7 +48,7 @@ export function get_page_handler(
// preload main.js and current route
// TODO detect other stuff we can preload? images, CSS, fonts?
let preloaded_chunks = Array.isArray(build_info.assets.main) ? build_info.assets.main : [build_info.assets.main];
if (!error) {
if (!error && !isSWIndexHtml) {
page.parts.forEach(part => {
if (!part) return;
@@ -67,7 +68,10 @@ export function get_page_handler(
} else {
const link = preloaded_chunks
.filter(file => file && !file.match(/\.map$/))
.map(file => `<${req.baseUrl}/client/${file}>;rel="preload";as="script"`)
.map((file) => {
const as = /\.css$/.test(file) ? 'style' : 'script';
return `<${req.baseUrl}/client/${file}>;rel="preload";as="${as}"`;
})
.join(', ');
res.setHeader('Link', link);
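With the hunk above, preloaded stylesheets are announced with as="style" rather than as="script"; for a hypothetical main.abc123.js / main.abc123.css pair the header becomes:

Link: </client/main.abc123.js>;rel="preload";as="script", </client/main.abc123.css>;rel="preload";as="style"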
@@ -142,17 +146,22 @@ export function get_page_handler(
match = error ? null : page.pattern.exec(req.path);
preloaded = await Promise.all([root_preloaded].concat(page.parts.map(part => {
if (!part) return null;
let toPreload = [root_preloaded];
if (!isSWIndexHtml) {
toPreload = toPreload.concat(page.parts.map(part => {
if (!part) return null;
return part.component.preload
? part.component.preload.call(preload_context, {
path: req.path,
query: req.query,
params: part.params ? part.params(match) : {}
})
: {};
})));
return part.component.preload
? part.component.preload.call(preload_context, {
path: req.path,
query: req.query,
params: part.params ? part.params(match) : {}
})
: {};
}))
}
preloaded = await Promise.all(toPreload);
} catch (err) {
preload_error = { statusCode: 500, message: err };
preloaded = []; // appease TypeScript
@@ -160,7 +169,7 @@ export function get_page_handler(
try {
if (redirect) {
const location = `${req.baseUrl}/${redirect.location}`;
const location = resolve(req.baseUrl || '/', redirect.location);
res.statusCode = redirect.statusCode;
res.setHeader('Location', location);
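Resolving against req.baseUrl with url.resolve (rather than concatenating strings) is what lets this.redirect target external URLs (#490). The relevant behaviour of Node's resolve, for reference:

import { resolve } from 'url';

resolve('/', 'redirected');           // '/redirected'
resolve('/', 'https://example.com');  // 'https://example.com/' — absolute URLs pass through untouched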
@@ -201,23 +210,29 @@ export function get_page_handler(
});
let level = data.child;
for (let i = 0; i < page.parts.length; i += 1) {
const part = page.parts[i];
if (!part) continue;
if (isSWIndexHtml) {
level.props = Object.assign({}, props, {
params: {}
})
} else {
for (let i = 0; i < page.parts.length; i += 1) {
const part = page.parts[i];
if (!part) continue;
const get_params = part.params || (() => ({}));
const get_params = part.params || (() => ({}));
Object.assign(level, {
component: part.component,
props: Object.assign({}, props, {
params: get_params(match)
}, preloaded[i + 1])
});
Object.assign(level, {
component: part.component,
props: Object.assign({}, props, {
params: get_params(match)
}, preloaded[i + 1])
});
level.props.child = <Props["child"]>{
segment: segments[i + 1]
};
level = level.props.child;
level.props.child = <Props["child"]>{
segment: segments[i + 1]
};
level = level.props.child;
}
}
const { html, head, css } = manifest.root.render(data, {
@@ -300,6 +315,12 @@ export function get_page_handler(
return function find_route(req: Req, res: Res, next: () => void) {
if (req[IGNORE]) return next();
if (req.path === '/service-worker-index.html') {
const homePage = pages.find(page => page.pattern.test('/'));
handle_page(homePage, req, res);
return;
}
if (!server_routes.some(route => route.pattern.test(req.path))) {
for (const page of pages) {
if (page.pattern.test(req.path)) {
@@ -335,4 +356,4 @@ function escape_html(html: string) {
};
return html.replace(/["'&<>]/g, c => `&${chars[c]};`);
}
}

View File

@@ -2,7 +2,7 @@ import { IGNORE } from '../placeholders';
import { Req, Res, ServerRoute } from './types';
export function get_server_route_handler(routes: ServerRoute[]) {
function handle_route(route: ServerRoute, req: Req, res: Res, next: () => void) {
async function handle_route(route: ServerRoute, req: Req, res: Res, next: () => void) {
req.params = route.params(route.pattern.exec(req.path));
const method = req.method.toLowerCase();
@@ -53,7 +53,7 @@ export function get_server_route_handler(routes: ServerRoute[]) {
};
try {
handle_method(req, res, handle_next);
await handle_method(req, res, handle_next);
} catch (err) {
handle_next(err);
}
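Making handle_route async and awaiting handle_method is what sends a rejected promise from a server route (see the async-throw.json fixture further down) into the catch block instead of leaving an unhandled rejection. A stripped-down sketch of the pattern, with placeholder names:

async function call_handler(
	handler: (req: unknown, res: unknown) => unknown,
	req: unknown,
	res: unknown,
	next: (err?: Error) => void
) {
	try {
		await handler(req, res);   // without the await, a returned rejected promise escapes the try/catch
	} catch (err) {
		next(err as Error);
	}
}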

View File

@@ -8,6 +8,10 @@ declare const prefetchRoutes: () => Promise<void>;
declare const prefetch: (href: string) => Promise<void>;
declare const goto: (href: string) => Promise<void>;
type StartOpts = {
requestInterceptor?: (interceptedRequst: puppeteer.Request) => any
};
export class AppRunner {
cwd: string;
entry: string;
@@ -24,7 +28,7 @@ export class AppRunner {
this.messages = [];
}
async start() {
async start({ requestInterceptor }: StartOpts = {}) {
this.port = await ports.find(3000);
this.proc = fork(this.entry, [], {
@@ -50,6 +54,11 @@ export class AppRunner {
}
});
if (requestInterceptor) {
await this.page.setRequestInterception(true);
this.page.on('request', requestInterceptor);
}
return {
page: this.page,
base: `http://localhost:${this.port}`,
@@ -58,7 +67,8 @@ export class AppRunner {
start: () => this.page.evaluate(() => start()),
prefetchRoutes: () => this.page.evaluate(() => prefetchRoutes()),
prefetch: (href: string) => this.page.evaluate((href: string) => prefetch(href), href),
goto: (href: string) => this.page.evaluate((href: string) => goto(href), href)
goto: (href: string) => this.page.evaluate((href: string) => goto(href), href),
title: () => this.page.$eval('h1', node => node.textContent)
};
}

View File

@@ -1,9 +1 @@
<h1>message: "{message}"</h1>
<script>
export default {
preload({ query }) {
return query;
}
};
</script>
<h1>{JSON.stringify(query)}</h1>

View File

@@ -3,5 +3,6 @@
<a href="a">a</a>
<a href="ambiguous/ok.json">ok</a>
<a href="echo-query?message">ok</a>
<a href="echo-query?p=one&p=two">ok</a>
<div class='hydrate-test'></div>

View File

@@ -204,7 +204,7 @@ describe('basics', function() {
assert.equal(
await title(),
'message: ""'
'{"message":""}'
);
});
@@ -217,7 +217,29 @@ describe('basics', function() {
assert.equal(
await title(),
'message: ""'
'{"message":""}'
);
});
it('accepts duplicated query string parameter on server', async () => {
await page.goto(`${base}/echo-query?p=one&p=two`);
assert.equal(
await title(),
'{"p":["one","two"]}'
);
});
it('accepts duplicated query string parameter on client', async () => {
await page.goto(base);
await start();
await prefetchRoutes();
await page.click('a[href="echo-query?p=one&p=two"]')
assert.equal(
await title(),
'{"p":["one","two"]}'
);
});

View File

@@ -0,0 +1,64 @@
import resolve from 'rollup-plugin-node-resolve';
import replace from 'rollup-plugin-replace';
import svelte from 'rollup-plugin-svelte';
const mode = process.env.NODE_ENV;
const dev = mode === 'development';
const config = require('../../../config/rollup.js');
export default {
client: {
input: config.client.input(),
output: config.client.output(),
plugins: [
replace({
'process.browser': true,
'process.env.NODE_ENV': JSON.stringify(mode)
}),
svelte({
dev,
hydratable: true,
emitCss: true
}),
resolve()
],
// temporary, pending Rollup 1.0
experimentalCodeSplitting: true
},
server: {
input: config.server.input(),
output: config.server.output(),
plugins: [
replace({
'process.browser': false,
'process.env.NODE_ENV': JSON.stringify(mode)
}),
svelte({
generate: 'ssr',
dev
}),
resolve({
preferBuiltins: true
})
],
external: ['sirv', 'polka'],
// temporary, pending Rollup 1.0
experimentalCodeSplitting: true
},
serviceworker: {
input: config.serviceworker.input(),
output: config.serviceworker.output(),
plugins: [
resolve(),
replace({
'process.browser': true,
'process.env.NODE_ENV': JSON.stringify(mode)
})
]
}
};

View File

@@ -0,0 +1,9 @@
import * as sapper from '../__sapper__/client.js';
window.start = () => sapper.start({
target: document.querySelector('#sapper')
});
window.prefetchRoutes = () => sapper.prefetchRoutes();
window.prefetch = href => sapper.prefetch(href);
window.goto = href => sapper.goto(href);

View File

@@ -0,0 +1,7 @@
<h1>Title</h1>
<style>
h1 {
color: green;
}
</style>

View File

@@ -0,0 +1,3 @@
<h1>{status}</h1>
<p>{error.message}</p>

View File

@@ -0,0 +1,11 @@
<svelte:component this={Title}/>
<script>
export default {
oncreate() {
import('./_components/Title.html').then(({ default: Title }) => {
this.set({ Title });
});
}
};
</script>

View File

@@ -0,0 +1,7 @@
<h1>Foo</h1>
<style>
h1 {
color: blue;
}
</style>

View File

@@ -0,0 +1,10 @@
<h1>Great success!</h1>
<a href="foo">foo</a>
<a href="bar">bar</a>
<style>
h1 {
color: red;
}
</style>

View File

@@ -0,0 +1,8 @@
import polka from 'polka';
import * as sapper from '../__sapper__/server.js';
const { PORT } = process.env;
polka()
.use(sapper.middleware())
.listen(PORT);

View File

@@ -0,0 +1,82 @@
import { timestamp, files, shell, routes } from '../__sapper__/service-worker.js';
const ASSETS = `cache${timestamp}`;
// `shell` is an array of all the files generated by webpack,
// `files` is an array of everything in the `static` directory
const to_cache = shell.concat(ASSETS);
const cached = new Set(to_cache);
self.addEventListener('install', event => {
event.waitUntil(
caches
.open(ASSETS)
.then(cache => cache.addAll(to_cache))
.then(() => {
self.skipWaiting();
})
);
});
self.addEventListener('activate', event => {
event.waitUntil(
caches.keys().then(async keys => {
// delete old caches
for (const key of keys) {
if (key !== ASSETS) await caches.delete(key);
}
self.clients.claim();
})
);
});
self.addEventListener('fetch', event => {
if (event.request.method !== 'GET') return;
const url = new URL(event.request.url);
// don't try to handle e.g. data: URIs
if (!url.protocol.startsWith('http')) return;
// ignore dev server requests
if (url.hostname === self.location.hostname && url.port !== self.location.port) return;
// always serve assets and webpack-generated files from cache
if (url.host === self.location.host && cached.has(url.pathname)) {
event.respondWith(caches.match(event.request));
return;
}
// for pages, you might want to serve a shell `index.html` file,
// which Sapper has generated for you. It's not right for every
// app, but if it's right for yours then uncomment this section
/*
if (url.origin === self.origin && routes.find(route => route.pattern.test(url.pathname))) {
event.respondWith(caches.match('/index.html'));
return;
}
*/
if (event.request.cache === 'only-if-cached') return;
// for everything else, try the network first, falling back to
// cache if the user is offline. (If the pages never change, you
// might prefer a cache-first approach to a network-first one.)
event.respondWith(
caches
.open(`offline${timestamp}`)
.then(async cache => {
try {
const response = await fetch(event.request);
cache.put(event.request, response.clone());
return response;
} catch(err) {
const response = await cache.match(event.request);
if (response) return response;
throw err;
}
})
);
});

View File

@@ -0,0 +1,14 @@
<!doctype html>
<html lang="en">
<head>
<meta charset='utf-8'>
%sapper.base%
%sapper.styles%
%sapper.head%
</head>
<body>
<div id='sapper'>%sapper.html%</div>
%sapper.scripts%
</body>
</html>

test/apps/css/test.ts (new file, 78 lines)
View File

@@ -0,0 +1,78 @@
import * as assert from 'assert';
import * as puppeteer from 'puppeteer';
import { build } from '../../../api';
import { AppRunner } from '../AppRunner';
import { wait } from '../../utils';
describe('css', function() {
this.timeout(10000);
let runner: AppRunner;
let page: puppeteer.Page;
let base: string;
// helpers
let start: () => Promise<void>;
let prefetchRoutes: () => Promise<void>;
let prefetch: (href: string) => Promise<void>;
let goto: (href: string) => Promise<void>;
let title: () => Promise<string>;
// hooks
before(async () => {
await build({ cwd: __dirname });
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
({ base, page, start, prefetchRoutes, prefetch, goto, title } = await runner.start());
});
after(() => runner.end());
it('includes critical CSS with server render', async () => {
await page.goto(base);
assert.equal(
await page.evaluate(() => {
const h1 = document.querySelector('h1');
return getComputedStyle(h1).color;
}),
'rgb(255, 0, 0)'
);
});
it('loads CSS when navigating client-side', async () => {
await page.goto(base);
await start();
await prefetchRoutes();
await page.click(`[href="foo"]`);
await wait(50);
assert.equal(
await page.evaluate(() => {
const h1 = document.querySelector('h1');
return getComputedStyle(h1).color;
}),
'rgb(0, 0, 255)'
);
});
it('loads CSS for a lazily-rendered component', async () => {
await page.goto(base);
await start();
await prefetchRoutes();
await page.click(`[href="bar"]`);
await wait(50);
assert.equal(
await page.evaluate(() => {
const h1 = document.querySelector('h1');
return getComputedStyle(h1).color;
}),
'rgb(0, 128, 0)'
);
});
});

View File

@@ -1,11 +1,10 @@
<h1>{slug} ({message})</h1>
<h1>{slug} {JSON.stringify(query)}</h1>
<script>
export default {
preload({ params, query }) {
preload({ params }) {
return {
slug: params.slug,
message: query.message
slug: params.slug
};
}
};

View File

@@ -1,3 +1,3 @@
<h1>Great success!</h1>
<a href="echo/page/encöded?message=hëllö+wörld">link</a>
<a href="echo/page/encöded?message=hëllö+wörld&föo=bar&=baz">link</a>

View File

@@ -35,11 +35,11 @@ describe('encoding', function() {
});
it('encodes req.params and req.query for server-rendered pages', async () => {
await page.goto(`${base}/echo/page/encöded?message=hëllö+wörld`);
await page.goto(`${base}/echo/page/encöded?message=hëllö+wörld&föo=bar&=baz`);
assert.equal(
await page.$eval('h1', node => node.textContent),
'encöded (hëllö wörld)'
'encöded {"message":"hëllö wörld","föo":"bar","":"baz"}'
);
});
@@ -48,12 +48,12 @@ describe('encoding', function() {
await start();
await prefetchRoutes();
await page.click('a[href="echo/page/encöded?message=hëllö+wörld"]');
await page.click('a[href="echo/page/encöded?message=hëllö+wörld&föo=bar&=baz"]');
await wait(50);
assert.equal(
await page.$eval('h1', node => node.textContent),
'encöded (hëllö wörld)'
'encöded {"message":"hëllö wörld","föo":"bar","":"baz"}'
);
});

View File

@@ -0,0 +1,3 @@
export function get(req, res) {
return Promise.reject(new Error('oops'));
}

View File

@@ -0,0 +1,7 @@
<script>
export default {
preload() {
this.error(420, 'Enhance your calm');
}
};
</script>

View File

@@ -0,0 +1,3 @@
<h1>{error ? error.message : 'No error here'}</h1>
<a href="enhance-your-calm">Enhance your calm</a>

View File

@@ -14,13 +14,14 @@ describe('errors', function() {
// helpers
let start: () => Promise<void>;
let prefetchRoutes: () => Promise<void>;
let title: () => Promise<string>;
// hooks
before(async () => {
await build({ cwd: __dirname });
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
({ base, page, start, prefetchRoutes } = await runner.start());
({ base, page, start, prefetchRoutes, title } = await runner.start());
});
after(() => runner.end());
@@ -110,4 +111,27 @@ describe('errors', function() {
'oops'
);
});
it('does not serve error page for async non-page error', async () => {
await page.goto(`${base}/async-throw.json`);
assert.equal(
await page.evaluate(() => document.body.textContent),
'oops'
);
});
it('clears props.error on successful render', async () => {
await page.goto(`${base}/no-error`);
await start();
await prefetchRoutes();
await page.click('[href="enhance-your-calm"]');
await wait(50);
assert.equal(await title(), '420');
await page.goBack();
await wait(50);
assert.equal(await title(), 'No error here');
});
});

View File

@@ -1,3 +1,4 @@
<h1>Great success!</h1>
<a href="blog">blog</a>
<a href="blog">blog</a>
<a href="">empty anchor</a>

View File

@@ -30,6 +30,7 @@ describe('export', function() {
'blog/index.html',
'global.css',
'index.html',
'service-worker-index.html',
'service-worker.js'
]);
});

View File

@@ -1,4 +1,5 @@
<h1>root</h1>
<a href="redirect-from">redirect from</a>
<a href="redirect-to-root">redirect to root</a>
<a href="redirect-to-root">redirect to root</a>
<a href="redirect-to-external">redirect to external</a>

View File

@@ -0,0 +1,9 @@
<h1>unredirected</h1>
<script>
export default {
preload() {
this.redirect(301, 'https://example.com');
}
};
</script>

View File

@@ -14,13 +14,26 @@ describe('redirects', function() {
// helpers
let start: () => Promise<void>;
let prefetchRoutes: () => Promise<void>;
let title: () => Promise<string>;
// hooks
before(async () => {
await build({ cwd: __dirname });
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
({ base, page, start, prefetchRoutes } = await runner.start());
({ base, page, start, prefetchRoutes, title } = await runner.start({
requestInterceptor: (interceptedRequest) => {
if (/example\.com/.test(interceptedRequest.url())) {
interceptedRequest.respond({
status: 200,
contentType: 'text/html',
body: `<h1>external</h1>`
});
} else {
interceptedRequest.continue();
}
}
}));
});
after(() => runner.end());
@@ -34,7 +47,7 @@ describe('redirects', function() {
);
assert.equal(
await page.$eval('h1', node => node.textContent),
await title(),
'redirected'
);
});
@@ -53,7 +66,7 @@ describe('redirects', function() {
);
assert.equal(
await page.$eval('h1', node => node.textContent),
await title(),
'redirected'
);
});
@@ -67,7 +80,7 @@ describe('redirects', function() {
);
assert.equal(
await page.$eval('h1', node => node.textContent),
await title(),
'root'
);
});
@@ -86,8 +99,41 @@ describe('redirects', function() {
);
assert.equal(
await page.$eval('h1', node => node.textContent),
await title(),
'root'
);
});
it('redirects to external URL on server', async () => {
await page.goto(`${base}/redirect-to-external`);
assert.equal(
page.url(),
`https://example.com/`
);
assert.equal(
await title(),
'external'
);
});
it('redirects to external URL in client', async () => {
await page.goto(base);
await start();
await prefetchRoutes();
await page.click('[href="redirect-to-external"]');
await wait(50);
assert.equal(
page.url(),
`https://example.com/`
);
assert.equal(
await title(),
'external'
);
});
});

View File

@@ -1 +1,2 @@
<div style="height: 9999px"></div>
<div style="height: 9999px"></div>
<p id="bar">element</p>

View File

@@ -1,6 +1,24 @@
<a href="tall-page#foo">scroll to foo</a>
<div style="height: 9999px"></div>
<div id="foo">
<a href="another-tall-page">link</a>
<a href="another-tall-page" sapper-noscroll>link</a>
</div>
{#if barLink}
<a href="another-tall-page#bar">link</a>
{/if}
</div>
<script>
export default {
data() {
return {
barLink: false
};
},
oncreate() {
this.set({ barLink: true })
}
}
</script>

View File

@@ -33,6 +33,24 @@ describe('scroll', function() {
assert.ok(scrollY > 0, scrollY);
});
it('scrolls to any deeplink if it was already active', async () => {
await page.goto(`${base}/tall-page#foo`);
await start();
let scrollY = await page.evaluate(() => window.scrollY);
assert.ok(scrollY > 0, scrollY);
scrollY = await page.evaluate(() => {
window.scrollTo(0, 0)
return window.scrollY
});
assert.ok(scrollY === 0, scrollY);
await page.click('[href="tall-page#foo"]');
scrollY = await page.evaluate(() => window.scrollY);
assert.ok(scrollY > 0, scrollY);
});
it('resets scroll when a link is clicked', async () => {
await page.goto(`${base}/tall-page#foo`);
await start();
@@ -59,4 +77,15 @@ describe('scroll', function() {
assert.ok(scrollY > 0);
});
it('scrolls into a deeplink on a new page', async () => {
await page.goto(`${base}/tall-page#foo`);
await start();
await prefetchRoutes();
await page.click('[href="another-tall-page#bar"]');
await wait(50);
const scrollY = await page.evaluate(() => window.scrollY);
assert.ok(scrollY > 0);
});
});

View File

@@ -17,7 +17,7 @@ describe('with-basepath', function() {
await api.export({
cwd: __dirname,
basepath: 'custom-basepath'
basepath: '/custom-basepath'
});
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
@@ -56,6 +56,7 @@ describe('with-basepath', function() {
assert.deepEqual(non_client_assets, [
'custom-basepath/global.css',
'custom-basepath/index.html',
'custom-basepath/service-worker-index.html',
'custom-basepath/service-worker.js'
]);
});

View File

@@ -0,0 +1,9 @@
import * as sapper from '../__sapper__/client.js';
window.start = () => sapper.start({
target: document.querySelector('#sapper')
});
window.prefetchRoutes = () => sapper.prefetchRoutes();
window.prefetch = href => sapper.prefetch(href);
window.goto = href => sapper.goto(href);

View File

@@ -0,0 +1,3 @@
<h1>{status}</h1>
<p>{error.message}</p>

View File

@@ -0,0 +1,3 @@
<h1>Great success!</h1>
<p>Woot!</p>

View File

@@ -0,0 +1,8 @@
import polka from 'polka';
import * as sapper from '../__sapper__/server.js';
const { PORT } = process.env;
polka()
.use(sapper.middleware())
.listen(PORT);

View File

@@ -0,0 +1,82 @@
import { timestamp, files, shell, routes } from '../__sapper__/service-worker.js';
const ASSETS = `cache${timestamp}`;
// `shell` is an array of all the files generated by webpack,
// `files` is an array of everything in the `static` directory
const to_cache = shell.concat(ASSETS);
const cached = new Set(to_cache);
self.addEventListener('install', event => {
event.waitUntil(
caches
.open(ASSETS)
.then(cache => cache.addAll(to_cache))
.then(() => {
self.skipWaiting();
})
);
});
self.addEventListener('activate', event => {
event.waitUntil(
caches.keys().then(async keys => {
// delete old caches
for (const key of keys) {
if (key !== ASSETS) await caches.delete(key);
}
self.clients.claim();
})
);
});
self.addEventListener('fetch', event => {
if (event.request.method !== 'GET') return;
const url = new URL(event.request.url);
// don't try to handle e.g. data: URIs
if (!url.protocol.startsWith('http')) return;
// ignore dev server requests
if (url.hostname === self.location.hostname && url.port !== self.location.port) return;
// always serve assets and webpack-generated files from cache
if (url.host === self.location.host && cached.has(url.pathname)) {
event.respondWith(caches.match(event.request));
return;
}
// for pages, you might want to serve a shell `index.html` file,
// which Sapper has generated for you. It's not right for every
// app, but if it's right for yours then uncomment this section
/*
if (url.origin === self.origin && routes.find(route => route.pattern.test(url.pathname))) {
event.respondWith(caches.match('/index.html'));
return;
}
*/
if (event.request.cache === 'only-if-cached') return;
// for everything else, try the network first, falling back to
// cache if the user is offline. (If the pages never change, you
// might prefer a cache-first approach to a network-first one.)
event.respondWith(
caches
.open(`offline${timestamp}`)
.then(async cache => {
try {
const response = await fetch(event.request);
cache.put(event.request, response.clone());
return response;
} catch(err) {
const response = await cache.match(event.request);
if (response) return response;
throw err;
}
})
);
});

View File

@@ -0,0 +1,14 @@
<!doctype html>
<html lang="en">
<head>
<meta charset='utf-8'>
%sapper.base%
%sapper.styles%
%sapper.head%
</head>
<body>
<div id='sapper'>%sapper.html%</div>
%sapper.scripts%
</body>
</html>

View File

@@ -0,0 +1,43 @@
import * as puppeteer from 'puppeteer';
import { build } from '../../../api';
import * as assert from "assert";
import { AppRunner } from '../AppRunner';
import * as fs from "fs";
import * as path from "path";
describe('with-sourcemaps-webpack', function() {
this.timeout(10000);
let runner: AppRunner;
let page: puppeteer.Page;
let base: string;
// helpers
let start: () => Promise<void>;
let prefetchRoutes: () => Promise<void>;
let prefetch: (href: string) => Promise<void>;
let goto: (href: string) => Promise<void>;
// hooks
before(async () => {
await build({ cwd: __dirname, bundler: 'webpack' });
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
({ base, page, start, prefetchRoutes, prefetch, goto } = await runner.start());
});
it('does not put sourcemap files in service worker shell', async () => {
const serviceWorker = await import(`${__dirname}/__sapper__/service-worker.js`);
const shell: string[] = serviceWorker.shell;
assert.equal(shell.filter(_ => _.endsWith('.map')).length, 0,
'sourcemap files are not cached in SW');
const clientShellDir = path.resolve(`${__dirname}/__sapper__/build`, path.dirname(shell[0]));
const sourcemapFiles = fs.readdirSync(clientShellDir).filter(_ => _.endsWith('.map'));
assert.ok(sourcemapFiles.length > 0, 'sourcemap files exist');
});
after(() => runner.end());
});

View File

@@ -0,0 +1,73 @@
const webpack = require('webpack');
const config = require('../../../config/webpack.js');
const mode = process.env.NODE_ENV;
const dev = mode === 'development';
module.exports = {
client: {
entry: config.client.entry(),
output: config.client.output(),
resolve: {
extensions: ['.js', '.json', '.html'],
mainFields: ['svelte', 'module', 'browser', 'main']
},
module: {
rules: [
{
test: /\.html$/,
use: {
loader: 'svelte-loader',
options: {
dev,
hydratable: true,
hotReload: true
}
}
}
]
},
mode,
plugins: [
dev && new webpack.HotModuleReplacementPlugin(),
new webpack.DefinePlugin({
'process.browser': true,
'process.env.NODE_ENV': JSON.stringify(mode)
}),
].filter(Boolean),
devtool: dev ? 'inline-source-map' : 'source-map'
},
server: {
entry: config.server.entry(),
output: config.server.output(),
target: 'node',
resolve: {
extensions: ['.js', '.json', '.html'],
mainFields: ['svelte', 'module', 'browser', 'main']
},
module: {
rules: [
{
test: /\.html$/,
use: {
loader: 'svelte-loader',
options: {
css: false,
generate: 'ssr',
dev
}
}
}
]
},
mode: process.env.NODE_ENV
},
serviceworker: {
entry: config.serviceworker.entry(),
output: config.serviceworker.output(),
mode: process.env.NODE_ENV,
devtool: 'sourcemap'
}
};

View File

@@ -0,0 +1,64 @@
import resolve from 'rollup-plugin-node-resolve';
import replace from 'rollup-plugin-replace';
import svelte from 'rollup-plugin-svelte';
const mode = process.env.NODE_ENV;
const dev = mode === 'development';
const config = require('../../../config/rollup.js');
export default {
client: {
input: config.client.input(),
output: Object.assign({}, config.client.output(), { sourcemap: true }),
plugins: [
replace({
'process.browser': true,
'process.env.NODE_ENV': JSON.stringify(mode)
}),
svelte({
dev,
hydratable: true,
emitCss: true
}),
resolve()
],
// temporary, pending Rollup 1.0
experimentalCodeSplitting: true
},
server: {
input: config.server.input(),
output: config.server.output(),
plugins: [
replace({
'process.browser': false,
'process.env.NODE_ENV': JSON.stringify(mode)
}),
svelte({
generate: 'ssr',
dev
}),
resolve({
preferBuiltins: true
})
],
external: ['sirv', 'polka'],
// temporary, pending Rollup 1.0
experimentalCodeSplitting: true
},
serviceworker: {
input: config.serviceworker.input(),
output: config.serviceworker.output(),
plugins: [
resolve(),
replace({
'process.browser': true,
'process.env.NODE_ENV': JSON.stringify(mode)
})
]
}
};

View File

@@ -0,0 +1,9 @@
import * as sapper from '../__sapper__/client.js';
window.start = () => sapper.start({
target: document.querySelector('#sapper')
});
window.prefetchRoutes = () => sapper.prefetchRoutes();
window.prefetch = href => sapper.prefetch(href);
window.goto = href => sapper.goto(href);

View File

@@ -0,0 +1,3 @@
<h1>{status}</h1>
<p>{error.message}</p>

View File

@@ -0,0 +1,3 @@
<h1>Great success!</h1>
<p>Woot!</p>

View File

@@ -0,0 +1,8 @@
import polka from 'polka';
import * as sapper from '../__sapper__/server.js';
const { PORT } = process.env;
polka()
.use(sapper.middleware())
.listen(PORT);

View File

@@ -0,0 +1,82 @@
import { timestamp, files, shell, routes } from '../__sapper__/service-worker.js';
const ASSETS = `cache${timestamp}`;
// `shell` is an array of all the files generated by webpack,
// `files` is an array of everything in the `static` directory
const to_cache = shell.concat(ASSETS);
const cached = new Set(to_cache);
self.addEventListener('install', event => {
event.waitUntil(
caches
.open(ASSETS)
.then(cache => cache.addAll(to_cache))
.then(() => {
self.skipWaiting();
})
);
});
self.addEventListener('activate', event => {
event.waitUntil(
caches.keys().then(async keys => {
// delete old caches
for (const key of keys) {
if (key !== ASSETS) await caches.delete(key);
}
self.clients.claim();
})
);
});
self.addEventListener('fetch', event => {
if (event.request.method !== 'GET') return;
const url = new URL(event.request.url);
// don't try to handle e.g. data: URIs
if (!url.protocol.startsWith('http')) return;
// ignore dev server requests
if (url.hostname === self.location.hostname && url.port !== self.location.port) return;
// always serve assets and webpack-generated files from cache
if (url.host === self.location.host && cached.has(url.pathname)) {
event.respondWith(caches.match(event.request));
return;
}
// for pages, you might want to serve a shell `index.html` file,
// which Sapper has generated for you. It's not right for every
// app, but if it's right for yours then uncomment this section
/*
if (url.origin === self.origin && routes.find(route => route.pattern.test(url.pathname))) {
event.respondWith(caches.match('/index.html'));
return;
}
*/
if (event.request.cache === 'only-if-cached') return;
// for everything else, try the network first, falling back to
// cache if the user is offline. (If the pages never change, you
// might prefer a cache-first approach to a network-first one.)
event.respondWith(
caches
.open(`offline${timestamp}`)
.then(async cache => {
try {
const response = await fetch(event.request);
cache.put(event.request, response.clone());
return response;
} catch(err) {
const response = await cache.match(event.request);
if (response) return response;
throw err;
}
})
);
});

View File

@@ -0,0 +1,14 @@
<!doctype html>
<html lang="en">
<head>
<meta charset='utf-8'>
%sapper.base%
%sapper.styles%
%sapper.head%
</head>
<body>
<div id='sapper'>%sapper.html%</div>
%sapper.scripts%
</body>
</html>

View File

@@ -0,0 +1,43 @@
import * as puppeteer from 'puppeteer';
import { build } from '../../../api';
import * as assert from "assert";
import { AppRunner } from '../AppRunner';
import * as fs from 'fs';
import * as path from "path";
describe('with-sourcemaps', function() {
this.timeout(10000);
let runner: AppRunner;
let page: puppeteer.Page;
let base: string;
// helpers
let start: () => Promise<void>;
let prefetchRoutes: () => Promise<void>;
let prefetch: (href: string) => Promise<void>;
let goto: (href: string) => Promise<void>;
// hooks
before(async () => {
await build({ cwd: __dirname });
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
({ base, page, start, prefetchRoutes, prefetch, goto } = await runner.start());
});
it('does not put sourcemap files in service worker shell', async () => {
const serviceWorker = await import(`${__dirname}/__sapper__/service-worker.js`);
const shell: string[] = serviceWorker.shell;
assert.equal(shell.filter(_ => _.endsWith('.map')).length, 0,
'sourcemap files are not cached in SW');
const clientShellDir = path.resolve(`${__dirname}/__sapper__/build`, path.dirname(shell[0]));
const sourcemapFiles = fs.readdirSync(clientShellDir).filter(_ => _.endsWith('.map'));
assert.ok(sourcemapFiles.length > 0, 'sourcemap files exist');
});
after(() => runner.end());
});