feature/services-pages (#7)
Reviewed-on: #7 Co-authored-by: mifi <badmf@mifi.dev> Co-committed-by: mifi <badmf@mifi.dev>
This commit was merged in pull request #7.
This commit is contained in:
12
scripts/410-paths.mjs
Normal file
12
scripts/410-paths.mjs
Normal file
@@ -0,0 +1,12 @@
|
||||
/**
 * URL paths that respond with HTTP 410 Gone.
 *
 * Shared by copy-410-paths.mjs (which materializes the 410 pages) and
 * generate-sitemap.mjs (which excludes them from the sitemap).
 * Keep this list in sync with the nginx.conf location blocks.
 */
export const PATHS = [
  '2024/02/18/hello-world',
  'pt',
  'feed',
  'category/uncategorized/feed',
  'category/uncategorized',
  'comments/feed',
];
|
||||
47
scripts/beasties.mjs
Normal file
47
scripts/beasties.mjs
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env node
/**
 * Post-build: inline critical CSS in the prerendered HTML under dist/
 * (SvelteKit adapter-static output). Runs after vite build; Beasties
 * reads/writes relative to dist/.
 *
 * Beasties with preload:'default' adds preload tags; same options as legacy Critters.
 *
 * Fix: adapter-static emits nested pages as <path>/index.html (the sitemap
 * script walks the same tree), so a flat readdir of dist/ missed them and
 * nested pages shipped without inlined critical CSS. We now walk recursively.
 */

import Beasties from 'beasties';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';

// Resolve project root relative to this script so it works from any CWD.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.join(__dirname, '..');
const DIST = path.join(ROOT, 'dist');

/**
 * Recursively collect every .html file under dir.
 * @param {string} dir absolute directory to walk
 * @returns {string[]} absolute paths of all .html files found
 */
function findHtmlFiles(dir) {
  const found = [];
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      found.push(...findHtmlFiles(full));
    } else if (entry.name.endsWith('.html')) {
      found.push(full);
    }
  }
  return found;
}

async function main() {
  if (!fs.existsSync(DIST)) {
    console.error('dist/ not found. Run vite build first.');
    process.exit(1);
  }

  const beasties = new Beasties({
    path: DIST,
    preload: 'default',
    noscriptFallback: true,
    pruneSource: false,
    logLevel: 'warn',
  });

  // Sequential on purpose: the run is I/O bound and Beasties caches CSS internally.
  for (const filePath of findHtmlFiles(DIST)) {
    const html = fs.readFileSync(filePath, 'utf8');
    const inlined = await beasties.process(html);
    fs.writeFileSync(filePath, inlined, 'utf8');
    console.log(`✓ Critical CSS inlined → ${path.relative(ROOT, filePath)}`);
  }

  console.log('Critical CSS step complete.');
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
||||
@@ -9,19 +9,12 @@ import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
import { PATHS } from './410-paths.mjs';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const DIST = path.join(__dirname, '..', 'dist');
|
||||
const SOURCE = path.join(DIST, '410.html');
|
||||
|
||||
const PATHS = [
|
||||
'2024/02/18/hello-world',
|
||||
'pt',
|
||||
'feed',
|
||||
'category/uncategorized/feed',
|
||||
'category/uncategorized',
|
||||
'comments/feed',
|
||||
];
|
||||
|
||||
function main() {
|
||||
if (!fs.existsSync(SOURCE)) {
|
||||
console.error('dist/410.html not found. Run build first.');
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
#!/usr/bin/env node
/**
 * Post-build: inline critical CSS in dist/*.html (SvelteKit adapter-static output).
 * Runs after vite build; Critters reads/writes relative to dist/.
 *
 * Critters with preload:'swap' adds onload but does not set rel="preload" as="style",
 * so the link stays render-blocking. We fix that in postProcessSwapLinks().
 * NOTE(review): postProcessSwapLinks is currently commented out — the options
 * below use preload:'default', not 'swap', which makes the post-processing
 * unnecessary. Kept for reference in case 'swap' is ever restored.
 */

import Critters from 'critters';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';

// Resolve project root relative to this script so it works from any CWD.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.join(__dirname, '..');
const DIST = path.join(ROOT, 'dist');

/**
 * Critters leaves rel="stylesheet" on swap links; change to rel="preload" as="style"
 * so the full CSS loads async and only applies on load (non-blocking).
 */
// function postProcessSwapLinks(html) {
//   return html.replace(/<link\s+([^>]*)>/gi, (full, attrs) => {
//     if (
//       !/rel="stylesheet"/i.test(attrs) ||
//       !/onload="this\.rel='stylesheet'"/i.test(attrs)
//     ) {
//       return full;
//     }
//     const fixed = attrs
//       .replace(/\brel="stylesheet"\s*/i, 'rel="preload" as="style" ')
//       .replace(
//         /\bonload="this\.rel='stylesheet'"/i,
//         'onload="this.onload=null;this.rel=\'stylesheet\'"',
//       );
//     return `<link ${fixed}>`;
//   });
// }

// Inline critical CSS in place for every top-level dist/*.html file.
async function main() {
  if (!fs.existsSync(DIST)) {
    console.error('dist/ not found. Run vite build first.');
    process.exit(1);
  }

  const critters = new Critters({
    path: DIST,
    preload: 'default',
    noscriptFallback: true,
    pruneSource: false,
    logLevel: 'warn',
  });

  // NOTE(review): only scans the top level of dist/ — nested path/index.html
  // files (if any) are not processed. Confirm against the adapter output.
  const files = fs.readdirSync(DIST).filter((f) => f.endsWith('.html'));
  for (const file of files) {
    const filePath = path.join(DIST, file);
    let html = fs.readFileSync(filePath, 'utf8');
    html = await critters.process(html);
    // html = postProcessSwapLinks(html);
    fs.writeFileSync(filePath, html, 'utf8');
    console.log('✓ Critical CSS inlined → dist/' + file);
  }

  console.log('Critical CSS step complete.');
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
|
||||
99
scripts/generate-sitemap.mjs
Normal file
99
scripts/generate-sitemap.mjs
Normal file
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Post-build: generate sitemap.xml from prerendered pages in dist/.
|
||||
* Scans for index.html (root and under each path), excludes 410 paths.
|
||||
* Run after vite build and beasties, before copy-410-paths so 410 dirs don't exist yet.
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
import { PATHS as PATHS_410 } from './410-paths.mjs';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const DIST = path.join(__dirname, '..', 'dist');
|
||||
|
||||
/** Canonical site origin. Must match src/lib/seo.ts SEO_DEFAULTS.baseUrl */
const BASE_URL = 'https://mifi.ventures';

// Top-level error pages that must never appear in the sitemap.
const EXCLUDE_FILES = new Set(['404.html', '410.html']);
// URL paths served as 410 Gone (see 410-paths.mjs); also excluded from the sitemap.
const excludeSet = new Set(PATHS_410);
||||
/**
 * adapter-static emits path.html or path/index.html. Walk dist and collect
 * every .html that represents a page (exclude 404/410 and 410-gone paths).
 *
 * Fix: previously a directory that contained index.html was never descended
 * into, so pages nested beneath it (e.g. blog/post/index.html under
 * blog/index.html) were dropped from the sitemap. We now always recurse and
 * let the file branch register each directory's own index.html.
 *
 * @param {string} dir absolute directory to scan
 * @param {string} [basePath] URL path prefix accumulated during recursion
 * @returns {{ path: string, indexPath: string }[]} pages; path '' is the site root
 */
function findPages(dir, basePath = '') {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  const pages = [];

  for (const e of entries) {
    const rel = basePath ? `${basePath}/${e.name}` : e.name;

    if (e.isDirectory()) {
      // Always recurse; a nested index.html is picked up by the file branch below.
      pages.push(...findPages(path.join(dir, e.name), rel));
    } else if (e.name.endsWith('.html') && !EXCLUDE_FILES.has(e.name)) {
      // index.html maps to its directory's path; foo.html maps to .../foo
      const urlPath = e.name === 'index.html'
        ? basePath
        : (basePath ? `${basePath}/${e.name.slice(0, -5)}` : e.name.slice(0, -5));
      if (!excludeSet.has(urlPath)) {
        pages.push({ path: urlPath, indexPath: path.join(dir, e.name) });
      }
    }
  }

  return pages;
}
|
||||
|
||||
/**
 * Escape the five XML special characters for text placed inside <loc> elements.
 * '&' is replaced first so it does not re-escape the entities produced by the
 * later replacements. (The previous version's replacement strings had been
 * entity-decoded into an identity no-op chain.)
 * @param {string} s raw URL or text
 * @returns {string} XML-safe text
 */
function escapeXml(s) {
  return s
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&apos;');
}
|
||||
|
||||
/**
 * Build dist/sitemap.xml from the prerendered pages found under dist/.
 * Exits non-zero if the build output is missing. Run after vite build and
 * beasties, before copy-410-paths so 410 dirs don't exist yet.
 */
function main() {
  const rootIndex = path.join(DIST, 'index.html');
  if (!fs.existsSync(rootIndex)) {
    console.error('dist/index.html not found. Run build first.');
    process.exit(1);
  }

  // Site root ('') sorts first, then remaining paths alphabetically.
  const byPath = (a, b) => {
    if (a.path === '') return -1;
    if (b.path === '') return 1;
    return a.path.localeCompare(b.path);
  };
  const pages = findPages(DIST).sort(byPath);

  // One <url> element per page; lastmod is the emitted file's modification date.
  const urlElements = pages.map(({ path: pagePath, indexPath }) => {
    const loc = pagePath ? `${BASE_URL}/${pagePath}` : BASE_URL;
    const lastmod = fs.statSync(indexPath).mtime.toISOString().slice(0, 10);
    return ` <url>\n <loc>${escapeXml(loc)}</loc>\n <lastmod>${lastmod}</lastmod>\n </url>`;
  });

  const xml = `<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
${urlElements.join('\n')}
</urlset>
`;

  fs.writeFileSync(path.join(DIST, 'sitemap.xml'), xml, 'utf8');
  console.log('✓ sitemap.xml generated with', pages.length, 'URLs.');
}

main();
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Post-build: minify all JS in dist/assets/js/ (static scripts copied from static/assets/js/).
|
||||
* Runs after vite build (and optionally after critters). Uses esbuild for minification.
|
||||
* Runs after vite build (and optionally after beasties). Uses esbuild for minification.
|
||||
*/
|
||||
|
||||
import * as esbuild from 'esbuild';
|
||||
|
||||
27
scripts/run-e2e-in-docker.sh
Executable file
27
scripts/run-e2e-in-docker.sh
Executable file
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env bash
# Run Playwright e2e tests in the same Docker image as CI (and as snapshot generation).
# Use when running locally on macOS/Windows so tests mirror CI; in CI or devcontainer use pnpm test:e2e directly.
set -e

# Resolve the repo root from this script's location, then normalize to an absolute path.
SCRIPT_DIR="${BASH_SOURCE%/*}"
PROJECT_ROOT="${SCRIPT_DIR}/.."
cd "$PROJECT_ROOT"
PROJECT_ROOT="$(pwd)"

# Pinned Playwright image; override with PLAYWRIGHT_IMAGE=... to try another version.
PLAYWRIGHT_IMAGE="${PLAYWRIGHT_IMAGE:-mcr.microsoft.com/playwright:v1.58.0-noble}"
echo "Running e2e tests in Docker image: $PLAYWRIGHT_IMAGE (same as CI)"
echo "Project root: $PROJECT_ROOT"
echo ""

# Mount the repo at /app and run install/build/test inside the container.
# NOTE(review): CI=1 presumably switches Playwright to its CI behavior — confirm
# against playwright.config. The `sleep 2` is a crude wait for `serve` to bind.
docker run --rm \
  -v "$PROJECT_ROOT:/app" -w /app \
  -e CI=1 \
  "$PLAYWRIGHT_IMAGE" \
  bash -c '
  corepack enable && corepack prepare pnpm@10.28.2 --activate
  pnpm install --no-frozen-lockfile || pnpm install
  pnpm run build
  npx serve dist -p 4173 &
  sleep 2
  pnpm exec playwright test
  '
|
||||
19
scripts/run-e2e.sh
Executable file
19
scripts/run-e2e.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash
# Run Playwright e2e tests. Mirrors CI when possible.
# - In CI: run playwright test (pipeline already built and started serve).
# - Local with Docker: run tests in same Playwright image as CI (run-e2e-in-docker.sh).
# - Local without Docker (e.g. devcontainer): build and run playwright test (webServer in config).
set -e

# In CI the pipeline has already built the site and started the server; just test.
if [ -n "$CI" ]; then
  pnpm exec playwright test
  exit 0
fi

# Prefer the Dockerized run so local results match CI exactly.
if command -v docker >/dev/null 2>&1; then
  exec bash "$(dirname "$0")/run-e2e-in-docker.sh"
fi

# No Docker: run in current environment (e.g. devcontainer; same image as CI)
pnpm run build
pnpm exec playwright test
|
||||
Reference in New Issue
Block a user