Compare commits


21 Commits

Author SHA1 Message Date
Rich Harris
3b714c0de3 -> v0.23.5 2018-10-24 21:40:04 -04:00
Rich Harris
28186227a9 add tests 2018-10-24 21:20:27 -04:00
Rich Harris
2ac0f2bf3d Merge branch 'search-params-decoding' of https://github.com/mrkishi/sapper into mrkishi-search-params-decoding 2018-10-24 21:10:35 -04:00
Rich Harris
4991f3b359 support non-native promise implementations 2018-10-24 21:05:25 -04:00
Rich Harris
65128118c7 Merge branch '487-async-route-errors' of https://github.com/nikku/sapper into nikku-487-async-route-errors 2018-10-24 20:58:52 -04:00
Rich Harris
3eced6fa4d Merge pull request #492 from sveltejs/lazy-css
fix lazy css bug, add tests
2018-10-24 20:58:08 -04:00
mrkishi
c4aee66c32 Fix search params decoding 2018-10-24 21:19:03 -03:00
Rich Harris
410c52df41 fix lazy css bug, add tests 2018-10-24 18:48:38 -04:00
Rich Harris
ffd56e2a20 -> v0.23.4 2018-10-24 15:51:44 -04:00
Rich Harris
1e5a87cf71 Merge pull request #491 from sveltejs/empty-href-export
ignore empty anchors when exporting
2018-10-24 15:51:28 -04:00
Rich Harris
281e183c61 ignore empty anchors when exporting 2018-10-24 15:38:45 -04:00
Nico Rehwaldt
3fe7b55955 async -> Promise.reject 2018-10-20 22:46:42 +02:00
Nico Rehwaldt
464924ed67 handle async route errors
Related to #487
2018-10-20 22:40:21 +02:00
Rich Harris
e5d7d8ab2b -> v0.23.3 2018-10-16 16:16:11 -04:00
Rich Harris
d3e560325d Merge pull request #477 from sveltejs/clear-errors
clear errors on successful render
2018-10-16 16:15:28 -04:00
Rich Harris
64e5065aa5 clear errors on successful render 2018-10-16 15:59:57 -04:00
Rich Harris
cb45bb0fbe -> v0.23.2 2018-10-16 08:58:02 -04:00
Rich Harris
f39455014a update deps 2018-10-16 08:44:22 -04:00
Rich Harris
4fe8df3696 Merge pull request #471 from sveltejs/missing-css
include css depended upon by entry point, even if also depended on by a lazily-loaded component
2018-10-16 08:38:20 -04:00
Rich Harris
44736754ad fix file extension 2018-10-15 21:27:52 -04:00
Rich Harris
1b9b559d82 include css depended upon by entry point, even if also depended on by a lazily-loaded component 2018-10-11 23:18:51 -04:00
27 changed files with 954 additions and 790 deletions

View File

@@ -1,5 +1,23 @@
# sapper changelog
## 0.23.5
* Include lazily-imported CSS in main CSS chunk ([#492](https://github.com/sveltejs/sapper/pull/492))
* Make search param decoding spec-compliant ([#493](https://github.com/sveltejs/sapper/pull/493))
* Handle async route errors ([#488](https://github.com/sveltejs/sapper/pull/488))
## 0.23.4
* Ignore empty anchors when exporting ([#491](https://github.com/sveltejs/sapper/pull/491))
## 0.23.3
* Clear `error` and `status` on successful render ([#477](https://github.com/sveltejs/sapper/pull/477))
## 0.23.2
* Fix entry point CSS ([#471](https://github.com/sveltejs/sapper/pull/471))
## 0.23.1
* Scroll to deeplink that matches current URL ([#472](https://github.com/sveltejs/sapper/pull/472))

package-lock.json (generated, 1059 lines changed)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "sapper",
"version": "0.23.1",
"version": "0.23.5",
"description": "Military-grade apps, engineered by Svelte",
"bin": {
"sapper": "./sapper"
@@ -17,53 +17,53 @@
"test": "test"
},
"dependencies": {
"html-minifier": "^3.5.16",
"html-minifier": "^3.5.20",
"shimport": "0.0.11",
"source-map-support": "^0.5.6",
"sourcemap-codec": "^1.4.1",
"source-map-support": "^0.5.9",
"sourcemap-codec": "^1.4.3",
"string-hash": "^1.1.3",
"tslib": "^1.9.1"
"tslib": "^1.9.3"
},
"devDependencies": {
"@types/mkdirp": "^0.5.2",
"@types/mocha": "^5.2.5",
"@types/node": "^10.7.1",
"@types/node": "^10.12.0",
"@types/puppeteer": "^1.9.0",
"@types/rimraf": "^2.0.2",
"agadoo": "^1.0.1",
"cheap-watch": "^0.3.0",
"cheap-watch": "^1.0.0",
"cookie": "^0.3.1",
"devalue": "^1.0.4",
"eslint": "^4.13.1",
"eslint-plugin-import": "^2.12.0",
"kleur": "^2.0.1",
"eslint": "^5.7.0",
"eslint-plugin-import": "^2.14.0",
"kleur": "^2.0.2",
"mkdirp": "^0.5.1",
"mocha": "^5.2.0",
"node-fetch": "^2.1.1",
"node-fetch": "^2.2.0",
"npm-run-all": "^4.1.3",
"polka": "^0.4.0",
"polka": "^0.5.1",
"port-authority": "^1.0.5",
"pretty-bytes": "^5.0.0",
"pretty-bytes": "^5.1.0",
"puppeteer": "^1.9.0",
"require-relative": "^0.8.7",
"rimraf": "^2.6.2",
"rollup": "^0.65.0",
"rollup-plugin-commonjs": "^9.1.3",
"rollup-plugin-json": "^3.0.0",
"rollup": "^0.66.6",
"rollup-plugin-commonjs": "^9.2.0",
"rollup-plugin-json": "^3.1.0",
"rollup-plugin-node-resolve": "^3.4.0",
"rollup-plugin-replace": "^2.0.0",
"rollup-plugin-replace": "^2.1.0",
"rollup-plugin-string": "^2.0.2",
"rollup-plugin-svelte": "^4.3.2",
"rollup-plugin-typescript": "^0.8.1",
"rollup-plugin-typescript": "^1.0.0",
"sade": "^1.4.1",
"sander": "^0.6.0",
"sirv": "^0.2.2",
"svelte": "^2.6.3",
"svelte-loader": "^2.9.0",
"svelte": "^2.13.5",
"svelte-loader": "^2.11.0",
"ts-node": "^7.0.1",
"typescript": "^2.8.3",
"webpack": "^4.8.3",
"webpack-format-messages": "^2.0.1"
"typescript": "^3.1.3",
"webpack": "^4.20.2",
"webpack-format-messages": "^2.0.3"
},
"scripts": {
"test": "mocha --opts mocha.opts",

View File

@@ -187,6 +187,6 @@ async function _export({
}
function get_href(attrs: string) {
const match = /href\s*=\s*(?:"(.+?)"|'(.+?)'|([^\s>]+))/.exec(attrs);
const match = /href\s*=\s*(?:"(.*?)"|'(.+?)'|([^\s>]+))/.exec(attrs);
return match[1] || match[2] || match[3];
}
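
For context on the `(.+?)` → `(.*?)` change above: with the old pattern an empty `href=""` appears to have fallen through to the unquoted branch and come back as the literal string `""`, while the new pattern captures an empty string, so the function yields `undefined` and the exporter skips the anchor. A standalone sketch, not part of the diff:

function get_href(attrs) {
  // ".*?" lets an empty double-quoted href match with an empty capture,
  // so `match[1] || match[2] || match[3]` is undefined and export ignores it
  const match = /href\s*=\s*(?:"(.*?)"|'(.+?)'|([^\s>]+))/.exec(attrs);
  return match[1] || match[2] || match[3];
}

get_href('href="blog"'); // 'blog'
get_href('href=""');     // undefined — empty anchors are skipped when exporting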

View File

@@ -3,7 +3,7 @@ import * as path from 'path';
import hash from 'string-hash';
import * as codec from 'sourcemap-codec';
import { PageComponent, Dirs } from '../../interfaces';
import { CompileResult } from './interfaces';
import { CompileResult, Chunk } from './interfaces';
import { posixify } from '../../utils'
const inline_sourcemap_header = 'data:application/json;charset=utf-8;base64,';
@@ -46,6 +46,65 @@ type SourceMap = {
mappings: string;
};
function get_css_from_modules(modules: string[], css_map: Map<string, string>, dirs: Dirs) {
const parts: string[] = [];
const mappings: number[][][] = [];
const combined_map: SourceMap = {
version: 3,
file: null,
sources: [],
sourcesContent: [],
names: [],
mappings: null
};
modules.forEach(module => {
if (!/\.css$/.test(module)) return;
const css = css_map.get(module);
const { code, map } = extract_sourcemap(css, module);
parts.push(code);
if (map) {
const lines = codec.decode(map.mappings);
if (combined_map.sources.length > 0 || combined_map.names.length > 0) {
lines.forEach(line => {
line.forEach(segment => {
// adjust source index
segment[1] += combined_map.sources.length;
// adjust name index
if (segment[4]) segment[4] += combined_map.names.length;
});
});
}
combined_map.sources.push(...map.sources);
combined_map.sourcesContent.push(...map.sourcesContent);
combined_map.names.push(...map.names);
mappings.push(...lines);
}
});
if (parts.length > 0) {
combined_map.mappings = codec.encode(mappings);
combined_map.sources = combined_map.sources.map(source => path.relative(`${dirs.dest}/client`, source));
return {
code: parts.join('\n'),
map: combined_map
};
}
return null;
}
export default function extract_css(client_result: CompileResult, components: PageComponent[], dirs: Dirs) {
const result: {
main: string | null;
@@ -57,151 +116,94 @@ export default function extract_css(client_result: CompileResult, components: Pa
if (!client_result.css_files) return; // Rollup-only for now
const unaccounted_for = new Set();
let asset_dir = `${dirs.dest}/client`;
if (process.env.SAPPER_LEGACY_BUILD) asset_dir += '/legacy';
const css_map = new Map();
client_result.css_files.forEach(css => {
unaccounted_for.add(css.id);
css_map.set(css.id, css.code);
const unclaimed = new Set(client_result.css_files.map(x => x.id));
const lookup = new Map();
client_result.chunks.forEach(chunk => {
lookup.set(chunk.file, chunk);
});
const chunk_map = new Map();
client_result.chunks.forEach(chunk => {
chunk_map.set(chunk.file, chunk);
const css_map = new Map();
client_result.css_files.forEach(css_module => {
css_map.set(css_module.id, css_module.code);
});
const chunks_with_css = new Set();
// figure out which chunks belong to which components...
const component_owners = new Map();
// concatenate and emit CSS
client_result.chunks.forEach(chunk => {
chunk.modules.forEach(module => {
const component = posixify(path.relative(dirs.routes, module));
component_owners.set(component, chunk);
});
const css_modules = chunk.modules.filter(m => css_map.has(m));
if (!css_modules.length) return;
const css = get_css_from_modules(css_modules, css_map, dirs);
const { code, map } = css;
const output_file_name = chunk.file.replace(/\.js$/, '.css');
map.file = output_file_name;
map.sources = map.sources.map(source => path.relative(`${asset_dir}`, source));
fs.writeFileSync(`${asset_dir}/${output_file_name}`, `${code}\n/* sourceMappingURL=./${output_file_name}.map */`);
fs.writeFileSync(`${asset_dir}/${output_file_name}.map`, JSON.stringify(map, null, ' '));
chunks_with_css.add(chunk);
});
const chunks_depended_upon_by_component = new Map();
const entry = path.resolve(dirs.src, 'client.js');
const entry_chunk = client_result.chunks.find(chunk => chunk.modules.indexOf(entry) !== -1);
// ...so we can figure out which chunks don't belong
const entry_chunk_dependencies: Set<Chunk> = new Set([entry_chunk]);
const entry_css_modules: string[] = [];
// recursively find the chunks this component depends on
entry_chunk_dependencies.forEach(chunk => {
chunk.imports.forEach(file => {
entry_chunk_dependencies.add(lookup.get(file));
});
if (chunks_with_css.has(chunk)) {
chunk.modules.forEach(file => {
unclaimed.delete(file);
if (css_map.has(file)) {
entry_css_modules.push(file);
}
});
}
});
// figure out which (css-having) chunks each component depends on
components.forEach(component => {
const chunk = component_owners.get(component.file);
const resolved = path.resolve(dirs.routes, component.file);
const chunk: Chunk = client_result.chunks.find(chunk => chunk.modules.indexOf(resolved) !== -1);
if (!chunk) {
// this should never happen!
throw new Error(`Could not find chunk that owns ${component.file}`);
}
const chunks = new Set([chunk]);
chunks.forEach(chunk => {
chunk.imports.forEach((file: string) => {
const chunk = chunk_map.get(file);
if (chunk) chunks.add(chunk);
const chunk_dependencies: Set<Chunk> = new Set([chunk]);
const css_dependencies: string[] = [];
// recursively find the chunks this component depends on
chunk_dependencies.forEach(chunk => {
chunk.imports.forEach(file => {
chunk_dependencies.add(lookup.get(file));
});
});
chunks.forEach(chunk => {
chunk.modules.forEach((module: string) => {
unaccounted_for.delete(module);
});
});
if (chunks_with_css.has(chunk)) {
css_dependencies.push(chunk.file.replace(/\.js$/, '.css'));
chunks_depended_upon_by_component.set(
component,
chunks
);
});
function get_css_from_modules(modules: string[]) {
const parts: string[] = [];
const mappings: number[][][] = [];
const combined_map: SourceMap = {
version: 3,
file: null,
sources: [],
sourcesContent: [],
names: [],
mappings: null
};
modules.forEach(module => {
if (!/\.css$/.test(module)) return;
const css = css_map.get(module);
const { code, map } = extract_sourcemap(css, module);
parts.push(code);
if (map) {
const lines = codec.decode(map.mappings);
if (combined_map.sources.length > 0 || combined_map.names.length > 0) {
lines.forEach(line => {
line.forEach(segment => {
// adjust source index
segment[1] += combined_map.sources.length;
// adjust name index
if (segment[4]) segment[4] += combined_map.names.length;
});
});
}
combined_map.sources.push(...map.sources);
combined_map.sourcesContent.push(...map.sourcesContent);
combined_map.names.push(...map.names);
mappings.push(...lines);
chunk.modules.forEach(file => {
unclaimed.delete(file);
});
}
});
if (parts.length > 0) {
combined_map.mappings = codec.encode(mappings);
combined_map.sources = combined_map.sources.map(source => path.relative(`${dirs.dest}/client`, source));
return {
code: parts.join('\n'),
map: combined_map
};
}
return null;
}
let asset_dir = `${dirs.dest}/client`;
if (process.env.SAPPER_LEGACY_BUILD) asset_dir += '/legacy';
const replacements = new Map();
chunks_depended_upon_by_component.forEach((chunks, component) => {
const chunks_with_css = Array.from(chunks).filter(chunk => {
const css = get_css_from_modules(chunk.modules);
if (css) {
const { code, map } = css;
const output_file_name = chunk.file.replace(/\.js$/, '.css');
map.file = output_file_name;
map.sources = map.sources.map(source => path.relative(`${asset_dir}`, source));
fs.writeFileSync(`${asset_dir}/${output_file_name}`, `${code}\n/* sourceMappingURL=./${output_file_name}.map */`);
fs.writeFileSync(`${asset_dir}/${output_file_name}.map`, JSON.stringify(map, null, ' '));
return true;
}
});
const files = chunks_with_css.map(chunk => chunk.file.replace(/\.js$/, '.css'));
replacements.set(
component.file,
files
);
result.chunks[component.file] = files;
result.chunks[component.file] = css_dependencies;
});
fs.readdirSync(asset_dir).forEach(file => {
@@ -210,13 +212,17 @@ export default function extract_css(client_result: CompileResult, components: Pa
const source = fs.readFileSync(`${asset_dir}/${file}`, 'utf-8');
const replaced = source.replace(/["']__SAPPER_CSS_PLACEHOLDER:(.+?)__["']/g, (m, route) => {
return JSON.stringify(replacements.get(route));
return JSON.stringify(result.chunks[route]);
});
fs.writeFileSync(`${asset_dir}/${file}`, replaced);
});
const leftover = get_css_from_modules(Array.from(unaccounted_for));
unclaimed.forEach(file => {
entry_css_modules.push(file);
});
const leftover = get_css_from_modules(entry_css_modules, css_map, dirs);
if (leftover) {
const { code, map } = leftover;
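
One detail of the rewritten traversal above that is easy to miss: `chunk_dependencies` (and `entry_chunk_dependencies`) is a Set that is extended while it is being iterated. `Set.prototype.forEach` visits entries added during iteration, and re-adding an existing chunk is a no-op, so this walks the import graph transitively without an explicit queue and terminates even if chunks import each other. A minimal standalone sketch with hypothetical chunk data:

// hypothetical chunks keyed by file name, mirroring the `lookup` map above
const lookup = new Map([
  ['entry.js', { file: 'entry.js', imports: ['a.js'] }],
  ['a.js',     { file: 'a.js',     imports: ['b.js'] }],
  ['b.js',     { file: 'b.js',     imports: [] }]
]);

const deps = new Set([lookup.get('entry.js')]);
deps.forEach(chunk => {
  // entries added here are still visited by this same forEach
  chunk.imports.forEach(file => deps.add(lookup.get(file)));
});

console.log([...deps].map(c => c.file)); // ['entry.js', 'a.js', 'b.js']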

View File

@@ -90,8 +90,8 @@ export function select_route(url: URL): Target {
const query: Record<string, string | true> = {};
if (url.search.length > 0) {
url.search.slice(1).split('&').forEach(searchParam => {
const [, key, value] = /([^=]+)(?:=(.*))?/.exec(searchParam);
query[key] = decodeURIComponent((value || '').replace(/\+/g, ' '));
const [, key, value] = /([^=]*)(?:=(.*))?/.exec(searchParam);
query[decodeURIComponent(key)] = decodeURIComponent((value || '').replace(/\+/g, ' '));
});
}
return { url, path, page, match, query };
@@ -310,7 +310,7 @@ export function prepare_page(target: Target): Promise<{
};
}
const props = { path, query };
const props = { path, query, error: null, status: null };
const data = {
path,
preloading: false,
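
The effect of the two query-parsing lines changed above — the regex now tolerates a parameter with no key, and the key is decoded as well as the value — can be seen in a standalone sketch; the `parse_query` helper name is made up for illustration:

function parse_query(search) {
  const query = {};
  if (search.length > 0) {
    search.slice(1).split('&').forEach(searchParam => {
      const [, key, value] = /([^=]*)(?:=(.*))?/.exec(searchParam);
      query[decodeURIComponent(key)] = decodeURIComponent((value || '').replace(/\+/g, ' '));
    });
  }
  return query;
}

parse_query('?message=h%C3%ABll%C3%B6+w%C3%B6rld&f%C3%B6o=bar&=baz');
// => { message: 'hëllö wörld', föo: 'bar', '': 'baz' } — matches the encoding test below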

View File

@@ -2,7 +2,7 @@ import { IGNORE } from '../placeholders';
import { Req, Res, ServerRoute } from './types';
export function get_server_route_handler(routes: ServerRoute[]) {
function handle_route(route: ServerRoute, req: Req, res: Res, next: () => void) {
async function handle_route(route: ServerRoute, req: Req, res: Res, next: () => void) {
req.params = route.params(route.pattern.exec(req.path));
const method = req.method.toLowerCase();
@@ -53,7 +53,7 @@ export function get_server_route_handler(routes: ServerRoute[]) {
};
try {
handle_method(req, res, handle_next);
await handle_method(req, res, handle_next);
} catch (err) {
handle_next(err);
}
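
The route-handler change above exists because a rejected promise from an `async` server route never hit the `catch` while `handle_method` was called without `await`. A simplified standalone sketch (the names stand in for the real middleware internals):

async function handle_route(handle_method, handle_next) {
  try {
    // without the `await`, a rejection escapes this try/catch entirely
    // and the error never reaches handle_next
    await handle_method();
  } catch (err) {
    handle_next(err);
  }
}

// e.g. the new async-throw.json.js route added further down:
const get = () => Promise.reject(new Error('oops'));

handle_route(get, err => console.log('forwarded:', err.message)); // forwarded: oops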

View File

@@ -58,7 +58,8 @@ export class AppRunner {
start: () => this.page.evaluate(() => start()),
prefetchRoutes: () => this.page.evaluate(() => prefetchRoutes()),
prefetch: (href: string) => this.page.evaluate((href: string) => prefetch(href), href),
goto: (href: string) => this.page.evaluate((href: string) => goto(href), href)
goto: (href: string) => this.page.evaluate((href: string) => goto(href), href),
title: () => this.page.$eval('h1', node => node.textContent)
};
}

View File

@@ -0,0 +1,64 @@
import resolve from 'rollup-plugin-node-resolve';
import replace from 'rollup-plugin-replace';
import svelte from 'rollup-plugin-svelte';
const mode = process.env.NODE_ENV;
const dev = mode === 'development';
const config = require('../../../config/rollup.js');
export default {
client: {
input: config.client.input(),
output: config.client.output(),
plugins: [
replace({
'process.browser': true,
'process.env.NODE_ENV': JSON.stringify(mode)
}),
svelte({
dev,
hydratable: true,
emitCss: true
}),
resolve()
],
// temporary, pending Rollup 1.0
experimentalCodeSplitting: true
},
server: {
input: config.server.input(),
output: config.server.output(),
plugins: [
replace({
'process.browser': false,
'process.env.NODE_ENV': JSON.stringify(mode)
}),
svelte({
generate: 'ssr',
dev
}),
resolve({
preferBuiltins: true
})
],
external: ['sirv', 'polka'],
// temporary, pending Rollup 1.0
experimentalCodeSplitting: true
},
serviceworker: {
input: config.serviceworker.input(),
output: config.serviceworker.output(),
plugins: [
resolve(),
replace({
'process.browser': true,
'process.env.NODE_ENV': JSON.stringify(mode)
})
]
}
};

View File

@@ -0,0 +1,9 @@
import * as sapper from '../__sapper__/client.js';
window.start = () => sapper.start({
target: document.querySelector('#sapper')
});
window.prefetchRoutes = () => sapper.prefetchRoutes();
window.prefetch = href => sapper.prefetch(href);
window.goto = href => sapper.goto(href);

View File

@@ -0,0 +1,7 @@
<h1>Title</h1>
<style>
h1 {
color: green;
}
</style>

View File

@@ -0,0 +1,3 @@
<h1>{status}</h1>
<p>{error.message}</p>

View File

@@ -0,0 +1,11 @@
<svelte:component this={Title}/>
<script>
export default {
oncreate() {
import('./_components/Title.html').then(({ default: Title }) => {
this.set({ Title });
});
}
};
</script>

View File

@@ -0,0 +1,7 @@
<h1>Foo</h1>
<style>
h1 {
color: blue;
}
</style>

View File

@@ -0,0 +1,10 @@
<h1>Great success!</h1>
<a href="foo">foo</a>
<a href="bar">bar</a>
<style>
h1 {
color: red;
}
</style>

View File

@@ -0,0 +1,8 @@
import polka from 'polka';
import * as sapper from '../__sapper__/server.js';
const { PORT } = process.env;
polka()
.use(sapper.middleware())
.listen(PORT);

View File

@@ -0,0 +1,82 @@
import { timestamp, files, shell, routes } from '../__sapper__/service-worker.js';
const ASSETS = `cache${timestamp}`;
// `shell` is an array of all the files generated by webpack,
// `files` is an array of everything in the `static` directory
const to_cache = shell.concat(files);
const cached = new Set(to_cache);
self.addEventListener('install', event => {
event.waitUntil(
caches
.open(ASSETS)
.then(cache => cache.addAll(to_cache))
.then(() => {
self.skipWaiting();
})
);
});
self.addEventListener('activate', event => {
event.waitUntil(
caches.keys().then(async keys => {
// delete old caches
for (const key of keys) {
if (key !== ASSETS) await caches.delete(key);
}
self.clients.claim();
})
);
});
self.addEventListener('fetch', event => {
if (event.request.method !== 'GET') return;
const url = new URL(event.request.url);
// don't try to handle e.g. data: URIs
if (!url.protocol.startsWith('http')) return;
// ignore dev server requests
if (url.hostname === self.location.hostname && url.port !== self.location.port) return;
// always serve assets and webpack-generated files from cache
if (url.host === self.location.host && cached.has(url.pathname)) {
event.respondWith(caches.match(event.request));
return;
}
// for pages, you might want to serve a shell `index.html` file,
// which Sapper has generated for you. It's not right for every
// app, but if it's right for yours then uncomment this section
/*
if (url.origin === self.origin && routes.find(route => route.pattern.test(url.pathname))) {
event.respondWith(caches.match('/index.html'));
return;
}
*/
if (event.request.cache === 'only-if-cached') return;
// for everything else, try the network first, falling back to
// cache if the user is offline. (If the pages never change, you
// might prefer a cache-first approach to a network-first one.)
event.respondWith(
caches
.open(`offline${timestamp}`)
.then(async cache => {
try {
const response = await fetch(event.request);
cache.put(event.request, response.clone());
return response;
} catch(err) {
const response = await cache.match(event.request);
if (response) return response;
throw err;
}
})
);
});

View File

@@ -0,0 +1,14 @@
<!doctype html>
<html lang="en">
<head>
<meta charset='utf-8'>
%sapper.base%
%sapper.styles%
%sapper.head%
</head>
<body>
<div id='sapper'>%sapper.html%</div>
%sapper.scripts%
</body>
</html>

test/apps/css/test.ts (new file, 78 lines)
View File

@@ -0,0 +1,78 @@
import * as assert from 'assert';
import * as puppeteer from 'puppeteer';
import { build } from '../../../api';
import { AppRunner } from '../AppRunner';
import { wait } from '../../utils';
describe('css', function() {
this.timeout(10000);
let runner: AppRunner;
let page: puppeteer.Page;
let base: string;
// helpers
let start: () => Promise<void>;
let prefetchRoutes: () => Promise<void>;
let prefetch: (href: string) => Promise<void>;
let goto: (href: string) => Promise<void>;
let title: () => Promise<string>;
// hooks
before(async () => {
await build({ cwd: __dirname });
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
({ base, page, start, prefetchRoutes, prefetch, goto, title } = await runner.start());
});
after(() => runner.end());
it('includes critical CSS with server render', async () => {
await page.goto(base);
assert.equal(
await page.evaluate(() => {
const h1 = document.querySelector('h1');
return getComputedStyle(h1).color;
}),
'rgb(255, 0, 0)'
);
});
it('loads CSS when navigating client-side', async () => {
await page.goto(base);
await start();
await prefetchRoutes();
await page.click(`[href="foo"]`);
await wait(50);
assert.equal(
await page.evaluate(() => {
const h1 = document.querySelector('h1');
return getComputedStyle(h1).color;
}),
'rgb(0, 0, 255)'
);
});
it('loads CSS for a lazily-rendered component', async () => {
await page.goto(base);
await start();
await prefetchRoutes();
await page.click(`[href="bar"]`);
await wait(50);
assert.equal(
await page.evaluate(() => {
const h1 = document.querySelector('h1');
return getComputedStyle(h1).color;
}),
'rgb(0, 128, 0)'
);
});
});

View File

@@ -1,11 +1,10 @@
<h1>{slug} ({message})</h1>
<h1>{slug} {JSON.stringify(query)}</h1>
<script>
export default {
preload({ params, query }) {
preload({ params }) {
return {
slug: params.slug,
message: query.message
slug: params.slug
};
}
};

View File

@@ -1,3 +1,3 @@
<h1>Great success!</h1>
<a href="echo/page/encöded?message=hëllö+wörld">link</a>
<a href="echo/page/encöded?message=hëllö+wörld&föo=bar&=baz">link</a>

View File

@@ -35,11 +35,11 @@ describe('encoding', function() {
});
it('encodes req.params and req.query for server-rendered pages', async () => {
await page.goto(`${base}/echo/page/encöded?message=hëllö+wörld`);
await page.goto(`${base}/echo/page/encöded?message=hëllö+wörld&föo=bar&=baz`);
assert.equal(
await page.$eval('h1', node => node.textContent),
'encöded (hëllö wörld)'
'encöded {"message":"hëllö wörld","föo":"bar","":"baz"}'
);
});
@@ -48,12 +48,12 @@ describe('encoding', function() {
await start();
await prefetchRoutes();
await page.click('a[href="echo/page/encöded?message=hëllö+wörld"]');
await page.click('a[href="echo/page/encöded?message=hëllö+wörld&föo=bar&=baz"]');
await wait(50);
assert.equal(
await page.$eval('h1', node => node.textContent),
'encöded (hëllö wörld)'
'encöded {"message":"hëllö wörld","föo":"bar","":"baz"}'
);
});

View File

@@ -0,0 +1,3 @@
export function get(req, res) {
return Promise.reject(new Error('oops'));
}

View File

@@ -0,0 +1,7 @@
<script>
export default {
preload() {
this.error(420, 'Enhance your calm');
}
};
</script>

View File

@@ -0,0 +1,3 @@
<h1>{error ? error.message : 'No error here'}</h1>
<a href="enhance-your-calm">Enhance your calm</a>

View File

@@ -14,13 +14,14 @@ describe('errors', function() {
// helpers
let start: () => Promise<void>;
let prefetchRoutes: () => Promise<void>;
let title: () => Promise<string>;
// hooks
before(async () => {
await build({ cwd: __dirname });
runner = new AppRunner(__dirname, '__sapper__/build/server/server.js');
({ base, page, start, prefetchRoutes } = await runner.start());
({ base, page, start, prefetchRoutes, title } = await runner.start());
});
after(() => runner.end());
@@ -110,4 +111,25 @@ describe('errors', function() {
'oops'
);
});
it('does not serve error page for async non-page error', async () => {
await page.goto(`${base}/async-throw.json`);
assert.equal(
await page.evaluate(() => document.body.textContent),
'oops'
);
});
it('clears props.error on successful render', async () => {
await page.goto(`${base}/no-error`);
await start();
await prefetchRoutes();
await page.click('[href="enhance-your-calm"]');
assert.equal(await title(), '420');
await page.goBack();
assert.equal(await title(), 'No error here');
});
});

View File

@@ -1,3 +1,4 @@
<h1>Great success!</h1>
<a href="blog">blog</a>
<a href="blog">blog</a>
<a href="">empty anchor</a>