Reorganise source files into src and web

Use a more traditional source code layout with the code located in the
src/ dir and static web content in the web/ dir, while the remaining
directories hold data not related to code or content.
This commit is contained in:
Hornwitser 2025-02-02 10:37:42 +01:00
parent 82323c9158
commit c8527f17f7
20 changed files with 35 additions and 36 deletions

106
src/cli.ts Normal file

@@ -0,0 +1,106 @@
import * as fs from "node:fs";
import * as posix from "node:path/posix";
import { prettify, htmlDocument } from "antihtml";
import { pages } from "./pages.js";
import type { Page } from "./types.js";
import { resolveRefs } from "./utils/resolve-refs.js";
import { createServer } from "./utils/http-server.js";
function pageToHtml(page: Page) {
if (!page.ref.startsWith("/")) {
throw new Error(`ref "${page.ref}" for "${page.title}" is not absolute.`);
}
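// Resolve the page's absolute refs relative to its own directory, then pretty-print and serialise it as an HTML document.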
return htmlDocument(
prettify(
resolveRefs(
page.content,
posix.dirname(page.ref),
)
)
)
}
function assembleResources() {
const webDir = "web";
const resources = new Map<string, string | Page>();
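// Collect every static file under web/, keyed by its ref (the path with the web/ prefix stripped).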
for (const entry of fs.readdirSync(webDir, { recursive: true, withFileTypes: true })) {
if (!entry.isFile())
continue;
const parentPath = entry.parentPath.replace(/\\/g, "/");
const ref = `${parentPath.slice(webDir.length)}/${entry.name}`;
resources.set(ref, `${parentPath}/${entry.name}`);
}
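// Overlay the generated pages, refusing any ref already claimed by a static file or another page.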
for (const page of pages) {
if (resources.has(page.ref)) {
const existing = resources.get(page.ref)!;
const other = typeof existing === "string" ? "a static resource" : `"${existing.title}"`;
throw new Error(`ref "${page.ref}" is taken up by both "${page.title}" and ${other}`)
}
resources.set(page.ref, page);
}
return resources;
}
function build() {
const outDir = "build/web";
const dirsCreated = new Set<string>()
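// Write each resource under build/web, copying static files verbatim and rendering pages to HTML.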
for (const [ref, resource] of assembleResources()) {
const refDir = `${outDir}${posix.dirname(ref)}`;
if (!dirsCreated.has(refDir)) {
fs.mkdirSync(refDir, { recursive: true });
dirsCreated.add(refDir);
}
console.log(`writing ${outDir}${ref}`);
let content;
if (typeof resource === "string") {
content = fs.readFileSync(resource);
} else {
content = pageToHtml(resource);
}
fs.writeFileSync(`${outDir}${ref}`, content);
}
}
function serve() {
const resources = assembleResources();
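// Serve the assembled resources, reading static files from disk and rendering pages on every request.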
const server = createServer(
(ref) => {
const resource = resources.get(ref);
if (resource === undefined)
return undefined;
if (typeof resource === "string")
return fs.readFileSync(resource);
return Buffer.from(pageToHtml(resource));
}
);
server.listen(8080);
console.log("Listening on http://localhost:8080");
}
function printUsage() {
console.log("Usage: cli.js <cmd>");
console.log(" build - Copy resources and generated pages to build directory.");
console.log(" serve - Host website on localhost:8080 for development purposes.");
}
function main(runtime: string, script: string, args: string[]) {
if (!args.length) {
printUsage();
process.exitCode = 1;
return;
}
const [command, ...commandArgs] = args;
if (command === "build") {
build();
} else if (command === "serve") {
serve();
} else {
console.log(`Error: Unknown sub-command ${command}`);
printUsage();
process.exitCode = 1;
}
}
const [runtime, script, ...args] = process.argv;
main(runtime, script, args)

27
src/components/BasePage.tsx Normal file

@@ -0,0 +1,27 @@
import type { Node } from "antihtml";
interface BaseProps {
title: string;
children: Node | Node[],
}
export default function BasePage(props: BaseProps) {
return <html lang="en">
<head>
<meta charset="utf-8" />
<title>{props.title}</title>
<link rel="stylesheet" href="/style.css" />
</head>
<body>
<header class="header">
<nav>
<a href="/index.html">Home</a>
<a href="/updates.html">Updates</a>
<a href="/words.html">Words</a>
<a href="/projects.html">Projects</a>
<a href="/links.html">Links</a>
</nav>
</header>
{props.children}
</body>
</html>
}

31
src/content/ideas.txt Normal file

@@ -0,0 +1,31 @@
- Words
- Making a website with HTML
- What you want to have
- A way to write components that can be reused in pages with an HTML-like syntax.
- You need this to maintain headers, footers, widgets and consistent site-wide content without going insane.
- You want it to be HTML-like so that you don't have to learn another language to write and inspect your HTML.
- A way to easily render those components into HTML files that you can open with your browser to test.
- A programming language you can use to write arbitrary code that combines your HTML components with data.
- A system that watches for changes during development and shows them in the browser immediately.
- How I implemented this with Node.js and TypeScript.
- Mention React+JSX, Vue.js, and Svelte.
- Consoles becoming shittier with each generation
- Why do I have to sign in to Xbox Live to play a CD?
- One time a friend of mine put on a CD, started the Audio Player app and it played the opening riff before stopping with the message "Audio Player needs an update".
- Why does it take longer and longer from when you insert a new game until you can play?
- Test showing difference from startup until playing on various generations of consoles (NES will probably stomp on this)
- How to fix sleep with 24.5 hour days.
- Leaving Microsoft Windows for Linux
- Incessant Copilot integration into stuff like Edge that can't be turned off.
- Please stop using framesets. They are horrible UX, mkay?
- Projects
- Buddhabrot renderer.
- Wooden Drawing Board.
- Blender to CSS export script.
- Art
- Flying Hornwitser papercraft.
- Safiria papercraft
- Prototype Soren Plush.
- Vlooi 3d head
- Download Archive: A zip file containing every page so that you can browse the site locally and keep it.
- Problem: module scripts require CORS sent by a server to work.

34
src/content/index.tsx Normal file

@@ -0,0 +1,34 @@
import BasePage from "../components/BasePage.js";
import { projects } from "./projects.js"
import { updates } from "./updates.js"
const title = "Hornwitser's Site";
export const index = {
title,
ref: "/index.html",
content: <BasePage title={title}>
<main>
<div class="hero" />
<div class="author">
<div style="width: 4em; height: 4em; background-color: grey" />
<hgroup>
<h1>Hi, I'm Hornwitser!</h1>
<p>
Grown up, autistic, he/him, aro, gray ace
</p>
</hgroup>
</div>
<p>
I'm a red dragon that mostly dabbles in hobby programming and the occasional artwork.
</p>
<h2>Latest <a href="/updates.html">Updates</a></h2>
<ul>
{ updates.map(update => <li><a href={update.ref}>{update.title}</a></li>)}
</ul>
<h2>Projects</h2>
<ul>
{ projects.map(project => <li><a href={project.ref}>{project.title}</a></li>)}
</ul>
</main>
</BasePage>,
};

351
src/content/links.jsonc Normal file

@@ -0,0 +1,351 @@
{
"links": [
{
"title": "Like start-ups, most international communities fail why?",
"url": "https://aeon.co/essays/like-start-ups-most-intentional-communities-fail-why",
"tags": ["article", "international communities"],
"description": "A look into the reasons for why the majority of international communities fall apart.",
"read": "2024-06-17",
"author": "Alexa Clay",
},
{
"title": "Why Utopian Communities Fail",
"url": "https://areomagazine.com/2018/03/08/why-utopian-communities-fail/",
"altUrls": ["https://ewanmorrison.substack.com/p/why-utopian-communities-fail"],
"tags": ["article", "international communities"],
"description": "An depth look at the many ways the promises of utopian communities turn into suffering for its members.",
"read": "2024-06-17",
"author": "Ewan Morrison",
},
{
"title": "How Much Money Can Furry Art ACTUALLY Make?",
"url": "https://www.youtube.com/watch?v=HECfwGA4RPw",
"tags": ["video", "furry", "art", "monetisation"],
"description": "An experiment on trying to earn money selling furry art starting out with no artistic skills.",
"read": "2024-10-01",
"author": "Syonide",
},
{
"title": "Type",
"url": "https://exple.tive.org/blarg/2024/11/12/type/",
"description": "A scathing critique of The Elements of Typographic Style",
"tags": ["review", "typography"],
"read": "2025-01-05",
},
{
"title": "A Webring List",
"tags": ["webrings", "collections"],
"description": "A comprehensive collection of Webrings collected by Ray Thomas",
"url": "https://brisray.com/web/webring-list.htm",
"author": "Ray Thomas",
},
{
"title": "Webring History",
"tags": ["webrings", "history"],
"description": "A brief history of Webrings",
"url": "https://brisray.com/web/webring-history.htm",
"author": "Ray Thomas",
},
{
"title": "The Only Way to Beat Algorithms is to Retrain Your Audience",
"url": "https://kiriska.com/blog/2019/the-only-way-to-beat-algorithms-is-to-retrain-your-audience/",
"tags": ["articles", "distributed web"],
"read": "2025-01-06",
"author": "Kiri",
},
{
"title": "Power shifts in a multiple system",
"url": "https://sarahkreece.com/2013/12/23/power-shifts-in-a-multiple-system/",
"tags": ["articles", "multiplicity"],
"read": "2025-01-06",
"author": "Sarah K Reece",
},
{
"title": "Whats Wrong With … Exceptions",
"url": "https://blog.pdark.de/2007/06/29/whats-wrong-with-exceptions/",
"tags": ["articles", "programming", "exceptions"],
"read": "2025-01-06",
"author": "Philmann Dark",
},
{
"title": "Never Rewrite From Scratch",
"url": "https://blog.pdark.de/2024/04/24/never-rewrite-from-scratch/",
"tags": ["articles", "programming", "refactoring"],
"read": "2025-01-06",
"author": "Philmann Dark",
},
{
"title": "A Dragon's Tale",
"url": "https://www.philmann-dark.de/dragon.html",
"tags": ["short story", "dragons"],
"read": "2025-01-05",
"author": "Philmann Dark",
},
{
"title": "How I Learned to Stop Worrying and Love GC",
"url": "https://world-playground-deceit.net/blog/2024/11/how-i-learned-to-stop-worrying-and-love-gc.html",
"tags": ["articles", "programming", "garbage collection"],
"read": "2025-01-06",
},
{
"title": "Memory Management Reference",
"url": "https://www.memorymanagement.org/",
"tags": ["links", "programming", "memory management"],
},
{
"title": "In-group and out-group",
"url": "https://en.wikipedia.org/wiki/In-group_and_out-group",
"description": "Psychological phenomena describing people's strong bias towards their group and against people outside it.",
"tags": ["articles", "psychology", "moderation"],
"read": "2025-01-09",
},
{
"title": "User Expertise Stagnates at Low Levels",
"url": "https://www.nngroup.com/articles/stagnating-expertise/",
"tags": ["design", "ux"],
"read": "2025-01-10",
"author": "Jakob Nielsen",
},
{
"title": "The New Internet",
"url": "https://tailscale.com/blog/new-internet",
"tags": ["articles", "tailscale", "connectivity"],
"read": "2025-01-10",
"author": "Avery Pennarun",
},
{
"title": "The Free Web",
"url": "https://thehistoryoftheweb.com/the-free-web/",
"tags": ["articles", "internet", "culture"],
"read": "2025-01-10",
"author": "Jay Hoffmann",
},
{
"title": "Bad Shape",
"url": "https://www.wrecka.ge/bad-shape/",
"tags": ["articles", "big social media", "moderation"],
"read": "2025-01-10",
"author": "Erin Kissane",
},
{
"title": "An Abridged History of Safari Showstoppers",
"url": "https://webventures.rejh.nl/blog/2024/history-of-safari-show-stoppers/",
"tags": ["articles", "webdev", "safari"],
},
{
"title": "The Really Dark Truth About Bots",
"url": "https://www.youtube.com/watch?v=GZ5XN_mJE8Y",
"tags": ["video", "social media bots", "political discourse", "disinformation"],
"description": "A dive into the troll farms and systematic disinformation campaigns targeting political discourse.",
"read": "2025-01-12",
},
{
"title": "From Gongkai to Open Source",
"url": "https://www.bunniestudios.com/blog/2014/from-gongkai-to-open-source/",
"tags": ["article", "hardware", "chinese culture", "reverse engineering"],
"description": "Writeup on the reverse engineering efforts that went into making Fernvalue, an open source development platform for the Mediatek MT6260",
"read": "2025-01-12",
},
{
"title": "On \"Safe\" C++",
"url": "https://izzys.casa/2024/11/on-safe-cxx/",
"tags": ["article", "programming", "c++", "programming community"],
"description": "Unhinged rant on the C++ committee and community at large.",
"read": "2025-01-19",
"author": "Izzy Muerte",
},
{
"title": "The seven programming ur-languages",
"url": "https://madhadron.com/programming/seven_ur_languages.html",
"tags": ["article", "programming", "learning programming", "language design"],
"description": "A description of 7 root languages in programming that inspired most of the structure in all programming languages",
"read": "2025-01-20",
"author": "madhadron",
},
{
"title": "Our conventions are not inevitable",
"url": "https://madhadron.com/programming/conventions.html",
"tags": ["article", "programming", "conventions"],
"description": "Thoughts on programming conventions being largely inertia rather than fundamental.",
"read": "2025-01-20",
"author": "madhadron",
},
{
"title": "Proof of Thought",
"url": "https://erik.wiffin.com/posts/proof-of-thought/",
"tags": ["article", "organisations", "reporting"],
"description": "An interesting take on reports not meant to be read by anyone still being useful.",
"read": "2025-01-20",
"author": "Erik Wiffin",
},
{
"title": "On short URLs",
"url": "https://qntm.org/urls",
"tags": ["article", "websites", "short urls"],
"description": "qntm's reasoning for why use short URLs.",
"read": "2025-01-20",
"author": "qntm",
},
{
"title": "Short URLs: why and how",
"url": "https://sive.rs/su",
"tags": ["article", "websites", "short urls"],
"description": "Derek Sivers' reasoning for why use short URLs.",
"read": "2025-01-20",
"author": "Derke Sivers",
},
{
"title": "The Tyranny of the Marginal User",
"url": "https://nothinghuman.substack.com/p/the-tyranny-of-the-marginal-user",
"tags": ["article", "online services", "enshittification", "online dating"],
"description": "How focusing on the marginal user caused OKCupid to go downhill",
"read": "2025-01-21",
"author": "Ivan Vendrov",
},
{
"title": "The Website Obesity Crisis",
"url": "https://idlewords.com/talks/website_obesity.htm",
"tags": ["article", "webdev", "bloat"],
"description": "Observations on bloat of websites and the future of the web.",
"read": "2025-01-23",
"author": "Maciej Cegłowski",
},
{
"title": "The Small Website Discoverability Crisis",
"url": "https://www.marginalia.nu/log/19-website-discoverability-crisis/",
"tags": ["article", "small web", "discoverability", "bookmarks", "links"],
"description": "Victor argues a solution to the small web discoverability problem lies in sharing bookmarks.",
"read": "2025-01-24",
"author": "Viktor Lofgren",
},
{
"title": "Fan is A Tool-Using Animal",
"url": "https://idlewords.com/talks/fan_is_a_tool_using_animal.htm",
"tags": ["article", "fandom", "collaboration"],
"description": "Talk about spontaneous collaboration and culture in the fandom.",
"read": "2025-01-24",
"author": "Maciej Cegłowski",
},
{
"title": "Every site needs a Links Page / Why linking matters",
"url": "https://thoughts.melonking.net/thoughts/every-site-needs-a-links-page-why-linking-matters",
"tags": ["article", "small web", "links"],
"description": "Why websites should link to other websites.",
"read": "2025-01-24",
"author": "Melon",
},
{
"title": "Plurality as Portrayed in Cyberpunk 2077 and Xenoblade Chronicles 2",
"url": "https://xeiaso.net/blog/plurality-cyberpunk-xenoblade-2021-02-14/",
"tags": ["article", "plurality", "media portrayal"],
"description": "Stigmatisation and empowerment in the portrayal of plurals in Cyberpunk 2077 and Xenoblade Chronicles 2",
"read": "2025-01-25",
"author": "Xe Iaso",
},
{
"title": "Plurality-Driven Development",
"url": "https://xeiaso.net/blog/plurality-driven-development-2019-08-04/",
"tags": ["article", "plurality", "software development", "social stigma"],
"description": "The benefits of tulpas in software development and a bit on the social stigma around tulpas.",
"read": "2025-01-26",
"author": "Xe Iaso",
},
{
"title": "What It's Like to Be Me",
"url": "https://xeiaso.net/blog/what-its-like-to-be-me-2018-06-14/",
"tags": ["article", "plurality", "tulpas", "social stigma"],
"description": "A description of what it's like to have and the benefits of tulpas, along with some social stigmas.",
"read": "2025-01-26",
"author": "Xe Iaso",
},
{
"title": "Tulpanomicon",
"url": "https://tulpanomicon.guide/",
"tags": ["resource", "tulpas", "compendium"],
"description": "Collection of resources about tulpas.",
"author": "Xe Iaso",
},
{
"title": "One Acceptable Truth or a Million Fantasies",
"url": "https://economistwritingeveryday.com/2020/12/28/one-truth-or-a-million-fantasies/",
"tags": ["article", "group behaviour", "us politics"],
"related": "https://www.smbc-comics.com/comic/propaganda-2",
"description": "How sacrifices for group membership drives cohesion and applies to US politics.",
"read": "2025-01-26",
"author": "Michael D. Makowsky",
},
{
"title": "",
"url": "",
"tags": ["article", "", ""],
"description": "",
"read": "",
"author": "",
},
],
"to_read": [
"https://bitbashing.io/gc-for-systems-programmers.html",
],
"authors": {
"Ray Thomas": {
"urls": ["https://brisray.com/"],
},
"Kiri": {
"urls": ["https://kiriska.com/"],
},
"Sarah K Reece": {
"urls": ["https://sarahkreece.com/"],
},
"Philmann Dark": {
"urls": ["https://blog.pdark.de/about/"],
},
"Jakob Nielsen": {
"urls": ["https://www.nngroup.com/articles/author/jakob-nielsen/"],
},
"Jay Hoffmann": {
"urls": ["https://x.com/jay_hoffmann"],
},
"Erin Kissane": {
"urls": ["https://www.wrecka.ge/author/erin/"],
},
"Izzy Muerte": {
"urls": ["https://izzy.casa/"],
},
"madhadron": {
"urls": ["https://madhadron.com/"],
},
"Erik Wiffin": {
"urls": ["https://erik.wiffin.com/"],
},
"qntm": {
"urls": ["https://qntm.org/"],
},
"Derke Sivers": {
"urls": ["https://sive.rs/"],
},
"Ivan Vendrov": {
"urls": ["https://substack.com/@ivanvendrov"],
},
"Maciej Cegłowski": {
"urls": ["https://idlewords.com/"],
},
"Viktor Lofgren": {
"urls": ["https://www.marginalia.nu/"],
},
"Melon": {
"urls": ["https://melonland.net/", "https://melonking.net/"],
},
"Syonide": {
"urls": ["https://www.youtube.com/@Syonide"],
},
"Xe Iaso": {
"urls": ["https://xeiaso.net/"],
},
"Alexa Clay": {
"urls": ["https://aeon.co/users/alexa-clay"],
},
"": {
"urls": [""],
},
}
}

42
src/content/links.tsx Normal file

@@ -0,0 +1,42 @@
import { readFileSync } from "node:fs";
import BasePage from "../components/BasePage.js";
import type { Page } from "../types.js";
interface LinkData {
title: string,
url: string,
tags: string[],
description?: string,
read?: string,
author?: string,
}
interface Data {
links: LinkData[];
to_read: string[],
authors: Record<string, {
urls: string[],
}>;
}
function Link(props: { link: LinkData }) {
const link = props.link;
return <>
<a href={link.url}>{link.title}</a>
{" "}
{link.tags.join(", ")}
</>
}
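// links.jsonc uses trailing commas, which JSON.parse rejects, so evaluate it as a JavaScript object literal instead.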
const data: Data = eval(`(${readFileSync("src/content/links.jsonc", "utf8")})`);
data.links.pop(); // Remove template at the end
const title = "Links!";
export const links: Page = {
title,
ref: "/links.html",
content: <BasePage title={title}>
<main>
<h1>{title}</h1>
<ul>
{ data.links.map(link => <li><Link link={link} /></li>)}
</ul>
</main>
</BasePage>
}

29
src/content/projects.tsx Normal file

@@ -0,0 +1,29 @@
import BasePage from "../components/BasePage.js";
import type { Page } from "../types.js";
import mySite from "./projects/my-site.js";
export const projects: Page[] = [
mySite,
].map(page => ({
title: page.title,
ref: page.ref,
content: <BasePage title={page.title}>
<h1>{page.title}</h1>
{page.content}
</BasePage>
}));
const title = "Hornwitser's Projects";
export const projectsIndex: Page = {
title,
ref: "/projects.html",
content: <BasePage title={title}>
<main>
<h1>{title}</h1>
<ul>
{ projects.map(project => <li><a href={project.ref}>{project.title}</a></li>)}
</ul>
</main>
</BasePage>
}

50
src/content/projects/my-site.tsx Normal file

@@ -0,0 +1,50 @@
import type { ProjectMeta } from "../../types.js";
const project: ProjectMeta = {
status: "draft",
title: "My Website",
ref: "/projects/my-site.html",
startedAt: "2025-01-20",
};
const content = <>
<p>
A decade ago I tried making my own website. I got a domain, wrote some code to generate my site, put some content on it and had big aspirations for what I would do with my very own place on the internet. Unfortunately that project lost steam more or less as soon as it started and was left abandoned gathering dust ever since.
</p>
<p>
This is my second attempt at making a website for myself. Inspired in large part by surfing the small web and discovering personal pages like <a href="https://melvian.xyz/">Melvian's beautiful site</a>, the insanity that is <a href="https://melonland.net/">MelonLand</a>, and whole communities centred around small personal pages and projects like <a href="https://nekoweb.org/">Nekoweb</a> and <a href="https://tildeverse.org/">tildeverse.org</a>.
</p>
<h2>Architecture</h2>
<p>
I've decided to use my website as a place to experiment with a hybrid architecture that sits in between a static site generator and a fully dynamic site. Instead of having custom code generate the whole response on every request, my code generates the HTML and then writes it to a folder just like a static site generator would, but in response to requests from web clients.
</p>
<p>
This has some interesting advantages:
</p>
<ul>
<li>
HTML content is only generated when it's changed, rather than being redone on every request.
</li>
<li>
There's a high barrier against implementing features that would make every request return a different response, as that doesn't play nice with the system. This helps to make the site highly cacheable.
</li>
<li>
Caching works without me having to implement any special logic for it. The webserver detects when files change and takes care of correctly replying to clients with cached responses.
</li>
<li>
A complete archive of the site is always available as a bunch of HTML files in the root of the webserver's directory. This makes creating snapshots of the site trivial, just archive the directory!
</li>
</ul>
<p>
Of course, this would only work for a site whose content changes only rarely. Which is precisely what I expect the vast majority of the content on my site to be: content that, once made, is rarely revised.
</p>
<h2>Frameworks on a diet</h2>
<p>
I thought about options for languages and frameworks to build my site on and eventually settled on using TypeScript with JSX running on Node.js to generate static pages. I wanted my site to be as close to working with plain HTML as practical, while still being easy to compose and reuse different parts together.
</p>
<p>
I opted to use JSX because I don't want to gouge my eyes out writing HTML with pure JavaScript notation (something I incidentally have tried to do before). To represent the HTML as data I wrote <a href="https://github.com/Hornwitser/antihtml">antihtml</a>, a tiny library that only does one thing: Treat HTML as a data structure that can be serialised.
</p>
<p>
What that means is that I write my content mostly as if it was plain HTML, but get to combine the parts that make up my site using JavaScript logic.
</p>
</>
export default { ...project, content };

32
src/content/updates.tsx Normal file

@@ -0,0 +1,32 @@
import BasePage from "../components/BasePage.js";
import type { Page } from "../types.js";
export const updates: Page[] = [
{
published: "2025-xx-xx",
title: "Website Launch",
ref: "/updates/site-launch.html",
}
].map(page => ({
title: page.title,
ref: page.ref,
content: <BasePage title={page.title}>
<h1>{page.title}</h1>
<p>Published: {page.published}</p>
<p>Placeholder content</p>
</BasePage>
}));
const title = "Website Updates";
export const updatesIndex: Page = {
title,
ref: "/updates.html",
content: <BasePage title={title}>
<main>
<h1>{title}</h1>
<ul>
{ updates.map(update => <li><a href={update.ref}>{update.title}</a></li>)}
</ul>
</main>
</BasePage>
}

29
src/content/words.tsx Normal file

@@ -0,0 +1,29 @@
import BasePage from "../components/BasePage.js";
import type { Page } from "../types.js";
import uselessDashboard from "./words/useless-dashboard.js";
export const words: Page[] = [
uselessDashboard,
].map(page => ({
title: page.title,
ref: page.ref,
content: <BasePage title={page.title}>
<h1>{page.title}</h1>
{page.content}
</BasePage>
}));
const title = "Ramblings and other wordy content";
export const wordsIndex: Page = {
title,
ref: "/words.html",
content: <BasePage title={title}>
<main>
<h1>{title}</h1>
<ul>
{ words.map(word => <li><a href={word.ref}>{word.title}</a></li>)}
</ul>
</main>
</BasePage>
}

33
src/content/words/useless-dashboard.tsx Normal file

@@ -0,0 +1,33 @@
import type { WordsMeta } from "../../types.js";
const words: WordsMeta = {
status: "draft",
title: "Useless Dashboards",
ref: "/words/useless-dashboard.html",
};
const content = <>
<p>
I have had the misfortune of trying to work with Google Workspace's Admin panel, and it was not a pleasant experience.
</p>
<p>
Key points
- Slow and sluggish to use, seemingly every interaction is behind extra clicks on panels that take seconds to load.
- No way to get a list of overrides for permissions. You need to click through every section of every sub-category of every app just to see if a group has different configs!
- No way to list the groups an external account is a member of; you have to wade through every group to check, or write code to do it.
</p>
<p>
A few notable bugs:
</p>
<ul>
<li>
If I'm logged into multiple Google accounts and not using my primary account, and I click on a link that takes me somewhere else, it often tries to open the panel as my primary user. Which of course doesn't have access to that part and redirects me to some unhelpful error page instead. It isn't just the admin panel that has this problem, seemingly every Google product does this, and it's so bad you're better off using a separate browser profile for each Google account just so that links between Google apps will work.
</li>
<li>
I needed to be a member of a specific group to be able to create shared drives, as that group was the only one configured to not deny creating them. So I added myself to the group and I still couldn't create shared drives. The option was greyed out, and no amount of refreshing or logging out and in fixed it. But of course when I toggled the permission off and on again it magically worked immediately. Makes me wonder what other access control can get stuck with the wrong value.
</li>
<li>
The API endpoint for checking if a member is part of a group gives you access denied if you try to use it on an external account that's part of the group. But other related endpoints work fine on external accounts.
</li>
</ul>
</>;
export default { ...words, content };

17
src/pages.tsx Normal file

@@ -0,0 +1,17 @@
import type { Page } from "./types.js";
import { index } from "./content/index.js";
import { updates, updatesIndex } from "./content/updates.js";
import { words, wordsIndex } from "./content/words.js";
import { projects, projectsIndex } from "./content/projects.js";
import { links } from "./content/links.js";
export const pages: Page[] = [
index,
updatesIndex,
...updates,
wordsIndex,
...words,
projectsIndex,
...projects,
links,
];

29
src/types.ts Normal file

@@ -0,0 +1,29 @@
import type { Node, Element } from "antihtml";
export interface Page {
title: string,
ref: string,
content: Element,
}
export interface ProjectMeta {
status: 'draft' | 'published',
title: string,
ref: string,
startedAt: string,
endedAt?: string,
};
export interface Project extends ProjectMeta {
content: Node,
};
export interface WordsMeta {
status: "draft" | "published",
title: string,
ref: string,
}
export interface Words extends WordsMeta {
content: Node,
};

97
src/utils/http-server.test.ts Normal file

@@ -0,0 +1,97 @@
import * as assert from "node:assert/strict";
import { after, before, suite, test } from "node:test";
import * as http from "node:http";
import { createServer } from "./http-server.js";
import { once } from "node:events";
import type { AddressInfo } from "node:net";
suite("function createServer", () => {
let server: ReturnType<typeof createServer>;
let baseUrl: URL;
before(async () => {
server = createServer(
ref => {
if (ref === "/test.html") return Buffer.from("<p>Test!</p>");
if (ref === "/style.css") return Buffer.from("p { font-weight: bold; }");
if (ref === "/script.js") return Buffer.from("alert('Hello world!');");
return undefined;
}
);
server.listen(0, "localhost");
await once(server, "listening");
baseUrl = new URL(`http://localhost:${(server.address() as AddressInfo).port}`)
});
after(() => {
server.close();
})
async function makeRequest(method: string, ref: string) {
const url = new URL(ref, baseUrl);
const request = http.request(url, {
method,
timeout: 1000,
});
request.end();
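// Wait for the response headers, then buffer the entire body so assertions can inspect it.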
const response: http.IncomingMessage & { body: Buffer } = (await once(request, "response"))[0];
response.body = Buffer.concat(await response.toArray());
return { request, response };
}
async function getTest(ref: string, statusCode: number, content: Buffer, mediaType: string) {
const { response } = await makeRequest("GET", ref);
assert.equal(response.statusCode, statusCode);
assert.equal(response.headers["content-type"], mediaType);
assert.equal(Number.parseInt(response.headers["content-length"]!), content.length);
assert.deepEqual(response.body, content);
}
async function headTest(ref: string, statusCode: number, content: Buffer, mediaType: string) {
const { response } = await makeRequest("HEAD", ref);
assert.equal(response.statusCode, statusCode);
assert.equal(response.headers["content-type"], mediaType);
assert.equal(Number.parseInt(response.headers["content-length"]!), content.length);
assert.deepEqual(response.body, Buffer.alloc(0));
}
test("GET /test.html", async () => {
await getTest("/test.html", 200, Buffer.from("<p>Test!</p>"), "text/html")
});
test("GET /style.css", async () => {
await getTest("/style.css", 200, Buffer.from("p { font-weight: bold; }"), "text/css")
});
test("GET /script.js", async () => {
await getTest("/script.js", 200, Buffer.from("alert('Hello world!');"), "text/javascript")
});
test("GET /does-not-exist", async () => {
await getTest("/does-not-exist", 404, Buffer.from("404 Not Found"), "text/plain")
});
test("HEAD /test.html", async () => {
await headTest("/test.html", 200, Buffer.from("<p>Test!</p>"), "text/html")
});
test("HEAD /style.css", async () => {
await headTest("/style.css", 200, Buffer.from("p { font-weight: bold; }"), "text/css")
});
test("HEAD /script.js", async () => {
await headTest("/script.js", 200, Buffer.from("alert('Hello world!');"), "text/javascript")
});
test("HEAD /does-not-exist", async () => {
await headTest("/does-not-exist", 404, Buffer.from("404 Not Found"), "text/plain")
});
test("POST /test.html", async () => {
const { response } = await makeRequest("POST", "/test.html");
const content = Buffer.from("400 Bad Request");
assert.equal(response.statusCode, 400);
assert.equal(response.headers["content-type"], "text/plain");
assert.equal(Number.parseInt(response.headers["content-length"]!), content.length);
assert.deepEqual(response.body, content);
});
});

82
src/utils/http-server.ts Normal file

@@ -0,0 +1,82 @@
import * as http from "node:http";
import * as posix from "node:path/posix";
function writeResponse(
response: http.ServerResponse,
statusCode: number,
statusMessage: string,
mimeType: string | undefined,
content: Buffer,
suppressContent = false,
) {
response.statusCode = statusCode;
response.statusMessage = statusMessage;
if (mimeType !== undefined) {
response.setHeader("Content-Type", mimeType);
}
response.setHeader("Content-Length", content.length);
if (!suppressContent) {
response.end(content);
} else {
response.end();
}
};
function writeNotFound(response: http.ServerResponse, suppressContent = false) {
writeResponse(
response,
404, "Not Found",
"text/plain",
Buffer.from("404 Not Found"),
suppressContent,
);
}
function writeBadRequest(response: http.ServerResponse, suppressContent = false) {
writeResponse(
response,
400, "Bad Request",
"text/plain",
Buffer.from("400 Bad Request"),
suppressContent,
);
}
const extToMimeType = new Map([
[".js", "text/javascript"],
[".html", "text/html"],
[".css", "text/css"],
]);
export function createServer(
get: (ref: string) => Buffer | undefined
) {
const server = http.createServer(
{
joinDuplicateHeaders: true,
// @ts-expect-error missing in type declaration
rejectNonStandardBodyWrites: true,
},
(request, response) => {
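// request.url is only an origin-relative path, so prepend a dummy origin to parse it with URL.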
const url = new URL(`http://localhost${request.url}`);
if (request.method === "GET" || request.method === "HEAD") {
const content = get(url.pathname);
const isHead = request.method === "HEAD";
if (!content) {
writeNotFound(response, isHead);
return;
}
writeResponse(
response,
200, "OK",
extToMimeType.get(posix.extname(url.pathname)),
content,
isHead,
)
return;
}
writeBadRequest(response);
},
);
return server;
}

57
src/utils/resolve-refs.test.tsx Normal file

@@ -0,0 +1,57 @@
import * as assert from "node:assert/strict";
import { suite, test } from "node:test";
import { resolveRefs } from "./resolve-refs.js";
import type { Element } from "antihtml";
suite("function resolveRefs", () => {
test("root to root relative href", () => {
const el = resolveRefs(<a href="/page.html">Link</a>, "/");
assert.equal((el as Element).attributes.get("href"), "page.html");
});
test("root to subdir relative href", () => {
const el = resolveRefs(<a href="/dir/page.html">Link</a>, "/");
assert.equal((el as Element).attributes.get("href"), "dir/page.html");
});
test("subdir to root relative href", () => {
const el = resolveRefs(<a href="/page.html">Link</a>, "/subdir");
assert.equal((el as Element).attributes.get("href"), "../page.html");
});
test("subdir to subdir relative href", () => {
const el = resolveRefs(<a href="/alt/page.html">Link</a>, "/subdir");
assert.equal((el as Element).attributes.get("href"), "../alt/page.html");
});
test("nested element", () => {
const el = resolveRefs(<div>Content with <a href="/page.html">Link</a></div>, "/");
assert.equal((el.childNodes[1] as Element).attributes.get("href"), "page.html");
});
test("returns element if no changes", () => {
const el = <div>Content with <em>emphasis</em></div>;
const resEl = resolveRefs(el, "/");
assert.equal(el, resEl);
});
test("returns new element if changed", () => {
const el = <div>Content with <a href="/page.html">Link</a></div>;
const resEl = resolveRefs(el, "/");
assert.notEqual(el, resEl);
});
test("does not modify input", () => {
const elFn = () => <div>Content with <a href="/page.html">Link</a></div>;
const el = elFn();
resolveRefs(el, "/");
assert.deepEqual(el, elFn());
});
test("ignores absolute URIs", () => {
const el = <div>Content with <a href="https://example.org/page.html">Link</a></div>;
const resEl = resolveRefs(el, "/");
assert.equal(el, resEl);
});
});

54
src/utils/resolve-refs.ts Normal file

@@ -0,0 +1,54 @@
import * as posix from "node:path/posix";
import { Node, Element } from "antihtml";
function shallowCopyElement(element: Element) {
const copy = new Element(element.name);
copy.attributes = new Map(element.attributes);
copy.childNodes = element.childNodes;
return copy;
}
/**
Resolves absolute href attributes on a and link elements in the Node tree into relative references from the given directory.
@param node Node tree to transform
@param dir Absolute path to directory to resolve references from.
@returns new node tree with href attributes transformed, or the original node if no transformations took place.
*/
export function resolveRefs(node: Node, dir: string) {
if (!(node instanceof Element)) {
return node;
}
let resolvedNode = node;
const name = node.name;
if (
(name === "link" || name === "a")
&& node.attributes.has("href")
) {
const original = node.attributes.get("href")!
if (/^[a-z][a-z+.-]*:/i.test(original)) {
// Ignore refs that start with a URI scheme.
/* node:coverage ignore next 3 */
} else if (!original.startsWith("/")) {
console.log(`Warning: found relative href to ${original}`);
} else {
const ref = posix.relative(dir, original);
resolvedNode = shallowCopyElement(node);
resolvedNode.attributes.set("href", ref);
}
}
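// Recurse into the children, tracking whether any of them were replaced.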
const resolvedChildren: Node[] = [];
let modifiedChildren = false;
for (const child of resolvedNode.childNodes) {
const resolvedChild = resolveRefs(child, dir);
if (child !== resolvedChild) {
modifiedChildren = true;
}
resolvedChildren.push(resolvedChild);
}
if (modifiedChildren) {
resolvedNode = shallowCopyElement(resolvedNode);
resolvedNode.childNodes = resolvedChildren;
}
return resolvedNode;
}