diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000..b5045cc --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +21 \ No newline at end of file diff --git a/src/inject.js b/src/inject.js index b6202f1..563399d 100644 --- a/src/inject.js +++ b/src/inject.js @@ -1,136 +1,6 @@ -import fs from "node:fs"; -import url from "node:url"; -import { parse } from "es-module-lexer"; -import { Parser } from "htmlparser2"; -import { resolve, parseFromString } from '@import-maps/resolve' - -async function getImports(script) { - const [imports] = await parse(script); - return imports.map((imp) => imp.n); -} - -class ImportCollector { - constructor({ imports, baseUrl, importMap, noFetch }) { - this.imports = imports; - this.baseUrl = baseUrl; - this.dependencies = new Set(); - this.noFetch = noFetch - this.importMap = importMap - } - async visit(specifier, parent) { - let resolvedImport = null - if (parent.protocol === "file:") { - const resolved = resolve(specifier, this.importMap, parent); - resolvedImport = resolved.resolvedImport - if (specifier.startsWith('/')) { - resolvedImport = new URL('.' + specifier, this.baseUrl) - } - } else if (parent.protocol.startsWith("http")) { - resolvedImport = new URL(specifier, parent.origin) - } - if (!resolvedImport) { - throw 'could not resolve import: ' + specifier - }; - if (this.dependencies.has(resolvedImport.href)) return; - this.dependencies.add(resolvedImport.href); - if (resolvedImport.protocol === "file:") { - try { - const contents = await fs.promises.readFile(resolvedImport, "utf8"); - const deps = await getImports(contents); - await Promise.all( - deps.map((dep) => this.visit(dep, resolvedImport)) - ); - } catch (e) { - console.warn('WARNING: could not read file: ' + resolvedImport.href + ' - skipping') - } - } else if (resolvedImport.protocol.startsWith("http")) { - if (!this.noFetch) { - const contents = await fetch(resolvedImport).then(res => res.text()) - const deps = await getImports(contents); - await Promise.all( - deps.map((dep) => this.visit(dep, resolvedImport)) - ); - } - } - } - async collect() { - const parent = new URL('./index.js', this.baseUrl) - await Promise.all(this.imports.map((entry) => this.visit(entry, parent))); - return [...this.dependencies].map((dep) => dep.replace(this.baseUrl.href, "/")); - } -} - -async function parseHtml(contents) { - const scripts = []; - let inScript = false; - let inImportMap = false; - let importMapString = "" - const parser = new Parser({ - onopentag(name, attributes) { - if (name === "script" && attributes.type === "module") { - inScript = true; - scripts.unshift(""); - } - if (name === "script" && attributes.type === "importmap") { - inImportMap = true; - } - }, - ontext(text) { - if (inScript) { - scripts[0] += text; - } - if (inImportMap) { - importMapString += text; - } - }, - onclosetag(tagname) { - if (tagname === "script") { - inScript = false; - inImportMap = false; - } - }, - }); - parser.write(contents); - parser.end(); - const imports = new Set() - for (let script of scripts) { - const deps = await getImports(script); - deps.forEach((d) => imports.add(d)); - } - return { imports: [...imports], importMapString: importMapString || '{}' }; -} - -export async function getDependencies(contents, baseUrl, { noFetch } = {}) { - if (!baseUrl) { - throw new Error('baseUrl is required') - } - if (!baseUrl.href.endsWith('/')) { - baseUrl.href += '/' - } - const { importMapString, imports } = await parseHtml(contents); - const importMap = parseFromString(importMapString, baseUrl); - const collector = 
new ImportCollector({ imports, baseUrl, importMap, noFetch }); - return collector.collect(); -} - -export function injectPreloads(contents, dependencies) { - let preloads = ""; - for (const dep of dependencies) { - preloads += `<link rel="modulepreload" href="${dep}">\n`; - } - return contents.replace("</head>", `${preloads}</head>`); -} - -export async function link(htmlContentsOrUrl, { baseUrl: providedBaseUrl, noFetch } = {}) { - let html = htmlContentsOrUrl; - let baseUrl = providedBaseUrl; - if (htmlContentsOrUrl instanceof URL) { - html = await fs.promises.readFile(htmlContentsOrUrl, "utf8"); - baseUrl = new URL('./', htmlContentsOrUrl) - } - const dependencies = await getDependencies(html, baseUrl, { noFetch }); - return injectPreloads(html, dependencies); -} +import fs from 'node:fs' +import url from 'node:url' +import link from './link.js' export async function inject(htmlPath, { out, root, noFetch } = {}) { const htmlUrl = url.pathToFileURL(htmlPath) diff --git a/src/link.js b/src/link.js new file mode 100644 index 0000000..6d21bb0 --- /dev/null +++ b/src/link.js @@ -0,0 +1,145 @@ +import fs from "node:fs"; +import url from "node:url"; +import { parse } from "es-module-lexer"; +import { Parser } from "htmlparser2"; +import { resolve, parseFromString } from '@import-maps/resolve' + +async function getImports(script) { + const [imports] = await parse(script); + return imports.map((imp) => imp.n); +} + +class ImportCollector { + constructor({ imports, baseUrl, importMap, noFetch }) { + this.imports = imports; + this.baseUrl = baseUrl; + this.dependencies = new Set(); + this.noFetch = noFetch + this.importMap = importMap + } + async visit(specifier, parent) { + let resolvedImport = null + if (parent.protocol === "file:") { + const resolved = resolve(specifier, this.importMap, parent); + resolvedImport = resolved.resolvedImport + if (specifier.startsWith('/')) { + resolvedImport = new URL('.' 
+ specifier, this.baseUrl) + } + } else if (parent.protocol.startsWith("http")) { + resolvedImport = new URL(specifier, parent.origin) + } + if (!resolvedImport) { + console.warn(`WARNING: could not resolve import specifier: ${specifier} from parent: ${parent} - skipping`) + return + }; + if (this.dependencies.has(resolvedImport.href)) return; + this.dependencies.add(resolvedImport.href); + if (resolvedImport.protocol === "file:") { + let contents = null + try { + contents = await fs.promises.readFile(resolvedImport, "utf8"); + } catch (e) { + console.warn('WARNING: could not read file: ' + resolvedImport.href + ' - skipping') + // console.error(e) + } + if (contents) { + const deps = await getImports(contents); + await Promise.all( + deps.map((dep) => this.visit(dep, resolvedImport)) + ); + } + } else if (resolvedImport.protocol.startsWith("http")) { + if (!this.noFetch) { + const contents = await fetch(resolvedImport).then(res => res.text()) + const deps = await getImports(contents); + await Promise.all( + deps.map((dep) => this.visit(dep, resolvedImport)) + ); + } + } + } + async collect() { + const parent = new URL('./index.js', this.baseUrl) + await Promise.all(this.imports.map((entry) => this.visit(entry, parent))); + return [...this.dependencies].map((dep) => dep.replace(this.baseUrl.href, "/")); + } +} + +async function parseHtml(contents) { + const scripts = []; + let inScript = false; + let inImportMap = false; + let importMapString = "" + const parser = new Parser({ + onopentag(name, attributes) { + if (name === "script" && attributes.type === "module") { + inScript = true; + scripts.unshift(""); + } + if (name === "script" && attributes.type === "importmap") { + inImportMap = true; + } + }, + ontext(text) { + if (inScript) { + scripts[0] += text; + } + if (inImportMap) { + importMapString += text; + } + }, + onclosetag(tagname) { + if (tagname === "script") { + inScript = false; + inImportMap = false; + } + }, + }); + parser.write(contents); + parser.end(); + const imports = new Set() + for (let script of scripts) { + const deps = await getImports(script); + deps.forEach((d) => imports.add(d)); + } + return { imports: [...imports], importMapString: importMapString || '{}' }; +} + +export async function getDependencies(contents, baseUrl, { noFetch } = {}) { + if (!baseUrl) { + throw new Error('baseUrl is required') + } + if (!baseUrl.href.endsWith('/')) { + baseUrl.href += '/' + } + const { importMapString, imports } = await parseHtml(contents); + const importMap = parseFromString(importMapString, baseUrl); + const collector = new ImportCollector({ imports, baseUrl, importMap, noFetch }); + return collector.collect(); +} + +export function injectPreloads(contents, dependencies) { + let preloads = ""; + for (const dep of dependencies) { + preloads += `<link rel="modulepreload" href="${dep}">\n`; + } + if (contents.includes("</head>")) { + return contents.replace("</head>", `${preloads}</head>`); + } else if (contents.includes("<body>")) { + return contents.replace("<body>", `<body>\n${preloads}`); + } else { + console.warn('WARNING: could not find </head> or <body> in HTML - skipping.') + return contents; + } +} + +export default async function link(htmlContentsOrUrl, { baseUrl: providedBaseUrl, noFetch } = {}) { + let html = htmlContentsOrUrl; + let baseUrl = providedBaseUrl; + if (htmlContentsOrUrl instanceof URL) { + html = await fs.promises.readFile(htmlContentsOrUrl, "utf8"); + baseUrl = new URL('./', htmlContentsOrUrl) + } + const dependencies = await getDependencies(html, baseUrl, { noFetch }); + return injectPreloads(html, dependencies); +} diff --git
a/tests/fixtures/absolute-dep.js b/tests/fixtures/link/absolute-dep.js similarity index 100% rename from tests/fixtures/absolute-dep.js rename to tests/fixtures/link/absolute-dep.js diff --git a/tests/fixtures/index.html b/tests/fixtures/link/index.html similarity index 100% rename from tests/fixtures/index.html rename to tests/fixtures/link/index.html diff --git a/tests/fixtures/js/dep.js b/tests/fixtures/link/js/dep.js similarity index 100% rename from tests/fixtures/js/dep.js rename to tests/fixtures/link/js/dep.js diff --git a/tests/fixtures/js/secondary-dep.js b/tests/fixtures/link/js/secondary-dep.js similarity index 100% rename from tests/fixtures/js/secondary-dep.js rename to tests/fixtures/link/js/secondary-dep.js diff --git a/tests/fixtures/link/no-head.html b/tests/fixtures/link/no-head.html new file mode 100644 index 0000000..d764bc7 --- /dev/null +++ b/tests/fixtures/link/no-head.html @@ -0,0 +1,11 @@ + + + + +

Hello, world!

+ + + + \ No newline at end of file diff --git a/tests/inject.test.js b/tests/inject.test.js deleted file mode 100644 index 9cafbf4..0000000 --- a/tests/inject.test.js +++ /dev/null @@ -1,7 +0,0 @@ -import { link } from '../src/inject.js'; -import test from 'node:test'; - -test('inject', async (t) => { - const htmlUrl = new URL('./fixtures/index.html', import.meta.url) - console.log(await link(htmlUrl, { noFetch: true })) -}); diff --git a/tests/link.test.js b/tests/link.test.js new file mode 100644 index 0000000..ee8bcab --- /dev/null +++ b/tests/link.test.js @@ -0,0 +1,19 @@ +import link from '../src/link.js'; +import { describe, it } from 'node:test'; +import fs from 'node:fs'; +import assert from 'node:assert'; + +describe('link', () => { + it('injects dependencies into HTML', async () => { + const htmlUrl = new URL('./fixtures/link/index.html', import.meta.url) + const result = await link(htmlUrl, { noFetch: true }) + const expected = await fs.promises.readFile(new URL('./snapshots/link/index.snapshot.html', import.meta.url), 'utf8') + assert.strictEqual(result, expected) + }) + it('injects dependencies to top of <body> if there is no <head> tag', async () => { + const htmlUrl = new URL('./fixtures/link/no-head.html', import.meta.url) + const result = await link(htmlUrl, { noFetch: true }) + const expected = await fs.promises.readFile(new URL('./snapshots/link/no-head.snapshot.html', import.meta.url), 'utf8') + assert.strictEqual(result, expected) + }); +}) diff --git a/tests/snapshots/link/index.snapshot.html b/tests/snapshots/link/index.snapshot.html new file mode 100644 index 0000000..6169a9f --- /dev/null +++ b/tests/snapshots/link/index.snapshot.html @@ -0,0 +1,26 @@ + + + + + My HTML File + + + + + + + + +

Hello, world!

+ + + + \ No newline at end of file diff --git a/tests/snapshots/link/no-head.snapshot.html b/tests/snapshots/link/no-head.snapshot.html new file mode 100644 index 0000000..81e5156 --- /dev/null +++ b/tests/snapshots/link/no-head.snapshot.html @@ -0,0 +1,13 @@ + + + + + + +

Hello, world!

+ + + + \ No newline at end of file
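
Note: a minimal usage sketch of the new `link` entry point, mirroring tests/link.test.js; the fixture path and the console output here are illustrative, not part of the diff.

import link from './src/link.js'

// Resolve the page's static module graph (via its import map, if any) and return
// the HTML with <link rel="modulepreload"> tags injected for each dependency found.
const htmlUrl = new URL('./tests/fixtures/link/index.html', import.meta.url)
const withPreloads = await link(htmlUrl, { noFetch: true }) // noFetch skips following http(s) imports
console.log(withPreloads)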