making things relocatable (#122)

Max Howell 2022-09-08 16:40:35 -04:00 committed by GitHub
parent 9d7e506e15
commit 0f8b83870a
21 changed files with 528 additions and 75 deletions


@@ -22,6 +22,8 @@ jobs:
defaults:
run:
working-directory: pantry
outputs:
pkgs: ${{ steps.sorted.outputs.pkgs }} ${{ steps.sorted.outputs.pre-install }}
steps:
- name: co pantry
uses: actions/checkout@v3
@@ -66,17 +68,17 @@ jobs:
- uses: teaxyz/setup@v0
env:
TEA_SECRET: ${{ secrets.TEA_SECRET }}
with:
prefix: /opt
if: ${{ matrix.os == 'macos-11' }}
- name: sort topologically
run: scripts/sort.ts ${{ inputs.projects }}
id: sorted
- name: install deps
run: ./scripts/install.ts ${{ steps.sorted.outputs.pre-install }}
- run: scripts/install.ts ${{ steps.sorted.outputs.pre-install }}
- name: build
run: ./scripts/build.ts ${{ steps.sorted.outputs.pkgs }}
- run: scripts/build.ts ${{ steps.sorted.outputs.pkgs }}
id: build
env:
GITHUB_TOKEN: ${{ github.token }}
@@ -106,9 +108,61 @@ jobs:
path: ${{ steps.bottle.outputs.filenames }}
if-no-files-found: error
verify-relocatable:
needs: [build]
runs-on: ${{ matrix.os }}
defaults:
run:
working-directory: pantry
strategy:
matrix:
os:
- macos-11
- ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
path: pantry
- name: co cli
uses: actions/checkout@v3
with:
path: cli
repository: teaxyz/cli
token: ${{ secrets.TEMP_JACOBS_GITHUB_PAT }}
- name: HACKS
run: |
mkdir -p ~/opt/tea.xyz/var
ln -s $GITHUB_WORKSPACE/pantry ~/opt/tea.xyz/var/pantry
mkdir ../.git
cp README.md ..
- uses: teaxyz/setup@v0
id: tea
env:
TEA_SECRET: ${{ secrets.TEA_SECRET }}
- name: download bottles
uses: actions/download-artifact@v3
with:
name: ${{ matrix.os }}
path: ${{ steps.tea.outputs.prefix }}
- run: find ${{ steps.tea.outputs.prefix }}/tea.xyz/var/www
- run: scripts/deps.ts -i ${{ needs.build.outputs.pkgs }}
id: deps
- run: scripts/install.ts ${{ steps.deps.outputs.pkgs }}
- run:
echo ${{ inputs.projects }} | xargs -tn1
scripts/test.ts
notify:
if: always()
needs: [build]
needs: [verify-relocatable]
runs-on: ubuntu-latest
steps:
- uses: rtCamp/action-slack-notify@v2
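
For context on the `pkgs` output wired up above: the Deno scripts hand values back to the workflow by printing GitHub Actions workflow commands to stdout (see the `::set-output` lines in `ls.ts` and `bottle.ts` below). A minimal standalone sketch of the mechanism; the package names are hypothetical:

```ts
// Sketch only: how a step hands values back to the workflow. GitHub Actions
// parses this stdout line and exposes it as `steps.<id>.outputs.pkgs`, which
// downstream jobs read via `needs.build.outputs.pkgs`.
const pkgs = ["zlib.net@1.2.12", "python.org@3.10.6"]  // hypothetical values
console.log(`::set-output name=pkgs::${pkgs.join(" ")}`)
```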

.github/workflows/ci-scripts.yml (new file, 18 lines)

@@ -0,0 +1,18 @@
on:
pull_request:
paths: scripts/**.ts
jobs:
typecheck:
runs-on: ubuntu-latest
env:
TEA_SECRET: ${{ secrets.TEA_SECRET }}
steps:
- uses: actions/checkout@v3
# disabled until live because private repos are tedious
# - uses: teaxyz/setup@v0
# with:
# target: typecheck
# env:
# GITHUB_TOKEN: ${{ github.token }}


@@ -56,3 +56,11 @@ knowledge. Please keep it tidy.
```sh
scripts/ls.ts | xargs scripts/sort.ts | xargs scripts/build.ts
```
## Typecheck
```sh
for x in scripts/*.ts src/app.ts; do
deno check --import-map=$SRCROOT/import-map.json $x
done
```


@@ -5,6 +5,12 @@ distributable:
versions:
github: python/cpython/tags
provides:
- bin/python
- bin/python3
- bin/pip
- bin/pip3
dependencies:
# recommended but none are actually required
zlib.net: 1
@@ -28,6 +34,7 @@ build:
freedesktop.org/pkg-config: ^0.29
#TODO use the diff
#TODO $libdir/_sysconfigdata__darwin_darwin.py contains non-relocatable paths
script: |
sed -i.bak -e 's|system_lib_dirs = .*|system_lib_dirs = os.getenv("LIBRARY_PATH").split(":")|' ./setup.py
sed -i.bak -e 's|system_include_dirs = .*|system_include_dirs = os.getenv("CPATH").split(":")|' ./setup.py
@@ -36,10 +43,28 @@ build:
make --jobs {{ hw.concurrency }}
make install
cd {{ prefix }}/bin
ln -sf python{{ version.marketing }} python
# provide unversioned symlinks
cd {{prefix}}/bin
for x in python pip idle pydoc; do
ln -sf ${x}{{ version.marketing }} $x
done
ln -sf python{{ version.marketing }}-config python-config
ln -sf pip{{ version.marketing }} pip
# strangely pip3 is a copy of pip3.10
if test ! -L pip{{version.major}}; then
rm pip{{version.major}}
ln -s pip{{version.marketing}} pip{{version.major}}
fi
# make relocatable
cd {{prefix}}
for binfile in $shebangs $confdir/python-config.py; do
sed -i.bak -e 's|#!{{ prefix }}/bin/|#!/usr/bin/env |g' $binfile
rm $binfile.bak
done
sed -i.bak -e 's|{{ prefix }}|\\$(shell tea --prefix)|g' $confdir/Makefile
rm $confdir/Makefile.bak
env:
ARGS:
@@ -58,6 +83,21 @@ build:
#- --with-lto
# on macOS we want an option that is instead --enable-framework
- --enable-shared
libdir:
lib/python{{version.marketing}}
darwin:
confdir:
$libdir/config-{{version.marketing}}-darwin
linux:
ARCH: ${{hw.arch}}
confdir:
$libdir/config-{{version.marketing}}-${ARCH/-/_}-linux-gnu
shebangs:
- bin/2to3-{{version.marketing}}
- bin/idle{{version.marketing}}
- bin/pip{{version.marketing}}
- bin/pydoc{{version.marketing}}
- bin/python{{version.marketing}}-config
#FIXME get rid of these v*
OPENSSL_INCLUDES: /opt/openssl.org/v*/include
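
The `# make relocatable` step above rewrites shebangs to `/usr/bin/env` and replaces the hard-coded prefix in the generated Makefile with `$(shell tea --prefix)`. As an illustration only (not part of this commit), a Deno sketch that scans an installed keg for any leftover hard-coded prefix; the keg path and the `TEA_PREFIX` fallback are assumptions:

```ts
// Illustrative relocatability check: flag files that still embed the build prefix.
import { walk } from "https://deno.land/std@0.155.0/fs/walk.ts"

const prefix = Deno.env.get("TEA_PREFIX") ?? "/opt"          // assumed default
const keg = `${prefix}/python.org/v3.10.6`                   // hypothetical keg

for await (const entry of walk(keg, { includeDirs: false })) {
  const bytes = await Deno.readFile(entry.path)
  const text = new TextDecoder("utf-8", { fatal: false }).decode(bytes)
  if (text.includes(prefix)) console.log(`still references ${prefix}: ${entry.path}`)
}
```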


@@ -23,4 +23,4 @@ test:
OUT=$(echo "$INPUT" | ./a.out | ./a.out -d)
test "$OUT" = "$INPUT"
env:
INPUT: tea.xyz
INPUT: Hello, World!

scripts/bottle-all.ts (new executable file, 28 lines)

@@ -0,0 +1,28 @@
#!/usr/bin/env -S tea -E
/*---
args:
- deno
- run
- --allow-net
- --allow-run
- --allow-read
- --allow-write={{ tea.prefix }}
- --allow-env=GITHUB_TOKEN
- --import-map={{ srcroot }}/import-map.json
---*/
import { bottle } from "./bottle.ts"
import { ls } from "./ls.ts"
import useCellar from "hooks/useCellar.ts"
const cellar = useCellar()
for await (const {path} of ls()) {
const pkg = (await cellar.resolve(path)).pkg
try {
await bottle({ path, pkg })
} catch (error) {
console.verbose({ 'bottling-failure': pkg, error })
}
}


@@ -7,8 +7,8 @@ args:
- --allow-net
- --allow-run
- --allow-env
- --allow-read=/opt/
- --allow-write=/opt/
- --allow-read
- --allow-write
- --import-map={{ srcroot }}/import-map.json
--- */
@@ -21,43 +21,48 @@ import useFlags from "hooks/useFlags.ts"
import { crypto } from "deno/crypto/mod.ts"
import { encodeToString } from "encodeToString"
useFlags()
const cellar = useCellar()
const filesListName = 'files.txt'
const bottles: Path[] = []
const fileLists: Path[] = []
for (const pkg of Deno.args.map(parsePackageRequirement)) {
console.log({ bottling: { pkg } })
const installation = await cellar.resolve(pkg)
const path = await bottle(installation)
const checksum = await sha256(path)
//-------------------------------------------------------------------------- main
if (!path) throw new Error("wtf: bottle already exists")
if (!checksum) throw new Error("failed to compute checksum")
if (import.meta.main) {
useFlags()
console.log({ bottled: { path } })
const bottles: Path[] = []
const fileLists: Path[] = []
for (const pkg of Deno.args.map(parsePackageRequirement)) {
console.log({ bottling: { pkg } })
bottles.push(path)
bottles.push(checksum)
fileLists.push(installation.path.join(filesListName))
const installation = await cellar.resolve(pkg)
const path = await bottle(installation)
const checksum = await sha256(path)
if (!path) throw new Error("wtf: bottle already exists")
if (!checksum) throw new Error("failed to compute checksum")
console.log({ bottled: { path } })
bottles.push(path)
bottles.push(checksum)
fileLists.push(installation.path.join(filesListName))
}
if (bottles.length === 0) throw new Error("no input provided")
const encode = (() => { const e = new TextEncoder(); return e.encode.bind(e) })()
const bottleList = bottles.map(x => x.string).join(" ")
await Deno.stdout.write(encode(`::set-output name=bottles::${bottleList}\n`))
const paths = [...bottles, ...fileLists].map(x => x.string).join('%0A')
await Deno.stdout.write(encode(`::set-output name=filenames::${paths}\n`))
}
if (bottles.length === 0) throw new Error("no input provided")
const encode = (() => { const e = new TextEncoder(); return e.encode.bind(e) })()
const bottleList = bottles.map(x => x.string).join(" ")
await Deno.stdout.write(encode(`::set-output name=bottles::${bottleList}\n`))
const paths = [...bottles, ...fileLists].map(x => x.string).join('%0A')
await Deno.stdout.write(encode(`::set-output name=filenames::${paths}\n`))
//------------------------------------------------------------------------- funcs
async function bottle({ path: kegdir, pkg }: Installation): Promise<Path> {
export async function bottle({ path: kegdir, pkg }: Installation): Promise<Path> {
const files = await walk(kegdir, path => {
/// HACK: `go` requires including the `src` dir
@@ -126,4 +131,4 @@ async function sha256(file: Path): Promise<Path> {
await Deno.writeFile(checksum_file.string, checksum_contents)
return checksum_file
}
}
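
The reshuffle above is the usual Deno pattern for making a CLI script importable: the worker is exported and the command-line behaviour is guarded by `import.meta.main`, so `bottle-all.ts` can `import { bottle }` without triggering the loop. A stripped-down sketch of the pattern (names and logic simplified, not the real implementation):

```ts
// Pattern sketch: exported worker plus guarded CLI entry point.
export async function bottle(path: string): Promise<string> {
  // placeholder for the real tarball/checksum logic
  return `${path}.tar.gz`
}

if (import.meta.main) {
  for (const arg of Deno.args) {
    console.log(await bottle(arg))
  }
}
```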


@@ -6,7 +6,7 @@ args:
- run
- --allow-net
- --allow-run
- --allow-read=/opt,/Library/Developer/CommandLineTools
- --allow-read
- --allow-write=/opt
- --allow-env
- --import-map={{ srcroot }}/import-map.json
@@ -29,6 +29,7 @@ const pantry = usePantry()
const cellar = useCellar()
const dry = Deno.args.map(parsePackageRequirement)
const gha = !!Deno.env.get("GITHUB_ACTIONS")
const rv: Package[] = []
for (const pkgrq of dry) {
@@ -37,7 +38,20 @@ for (const pkgrq of dry) {
if (!version) throw "no-version-found"
const pkg = { project: pkgrq.project, version }
console.log({ building: pkgrq.project })
const installation = await cellar.isInstalled(pkg)
if (installation) {
console.log({ cleaning: installation.path })
for await (const [path, {name}] of installation.path.ls()) {
if (name == 'src') continue
path.rm({ recursive: true })
}
}
if (gha) {
console.log("::group::", `${pkg.project}@${pkg.version}`)
} else {
console.log({ building: pkgrq.project })
}
// Get the source
const prebuild = async () => {
@@ -66,6 +80,10 @@ for (const pkgrq of dry) {
await link({ path, pkg })
rv.push(pkg)
if (gha) {
console.log("::endgroup::")
}
}
const built_pkgs = rv.map(({ project, version }) => `${project}@${version}`).join(" ")

scripts/clean-all.sh (new executable file, 20 lines)

@@ -0,0 +1,20 @@
#!/usr/bin/env -S tea -E
_="
---
args: /bin/sh
---
"
ROOTS=$(ls /opt/tea.xyz/var/pantry/projects)
for x in $ROOTS
do
if [ "X$x" = "X" ]
then
continue
fi
rm -rf /opt/"$x"
done
rm /opt/tea.xyz/var/www/*.tar.?z


@@ -35,8 +35,8 @@ const wet = await hydrate(dry, get_deps)
const gas = wet.pkgs.compactMap(({project}) => {
if (Deno.args.includes('-i')) {
return project
} else {
return explicit.has(project) || project
} else if (!explicit.has(project)){
return project
}
})
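
The corrected branch above means: with `-i` (which the workflow passes to `scripts/deps.ts`) every hydrated project is kept; without it, only the implicit dependencies survive. The same selection logic with plain array methods, as a standalone sketch with made-up inputs:

```ts
// Standalone sketch of the selection logic (inputs are hypothetical).
const explicit = new Set(["python.org"])                     // what the user asked for
const hydrated = ["python.org", "zlib.net", "openssl.org"]   // after dependency hydration
const includeAll = Deno.args.includes("-i")

const gas = hydrated.flatMap(project =>
  includeAll || !explicit.has(project) ? [project] : [])

console.log(gas.join(" "))   // without -i: "zlib.net openssl.org"
```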

scripts/fetch-src.ts (new executable file, 45 lines)

@@ -0,0 +1,45 @@
#!/usr/bin/env -S tea -E
/*---
args:
- deno
- run
- --allow-net
- --allow-run
- --allow-read
- --allow-write={{ tea.prefix }}
- --allow-env
- --import-map={{ srcroot }}/import-map.json
---*/
import usePantry from "hooks/usePantry.ts"
import useCache from "hooks/useCache.ts"
import useCellar from "hooks/useCellar.ts"
import useSourceUnarchiver from "hooks/useSourceUnarchiver.ts"
import { parsePackageRequirement, semver } from "types"
import { Command } from "cliffy/command/mod.ts"
import { print } from "utils"
const { args } = await new Command()
.name("tea-fetch-src")
.arguments("<pkgspec:string>")
.parse(Deno.args)
const pantry = usePantry()
const req = parsePackageRequirement(args[0])
const versions = await pantry.getVersions(req)
const version = semver.maxSatisfying(versions, req.constraint)
if (!version) throw "no-version-found"
const pkg = { project: req.project, version }; console.debug(pkg)
const dstdir = useCellar().mkpath(pkg).join("src")
const { url, stripComponents } = await pantry.getDistributable(pkg)
const { download } = useCache()
const zip = await download({ pkg, url, type: 'src' })
await useSourceUnarchiver().unarchive({
dstdir,
zipfile: zip,
stripComponents
})
await print(`${dstdir}\n`)

scripts/fixup-checksums.ts (new executable file, 42 lines)

@@ -0,0 +1,42 @@
#!/usr/bin/env -S tea -E
/*---
args:
- deno
- run
- --allow-net
- --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,S3_BUCKET
- --import-map={{ srcroot }}/import-map.json
---*/
import { S3 } from "s3";
import { crypto } from "deno/crypto/mod.ts";
import { readerFromStreamReader, readAll } from "deno/streams/conversion.ts";
import { encodeToString } from "encodeToString";
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
});
const bucket = s3.getBucket(Deno.env.get("S3_BUCKET")!);
for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
if (!pkg.key?.endsWith('.tar.gz')) { continue }
console.log({ checking: pkg.key });
if (!await bucket.headObject(`${pkg.key}.sha256sum`)) {
console.log({ missingChecksum: pkg.key })
const reader = (await bucket.getObject(pkg.key))!.body.getReader()
const contents = await readAll(readerFromStreamReader(reader))
const basename = pkg.key.split("/").pop()
const sha256sum = encodeToString(new Uint8Array(await crypto.subtle.digest("SHA-256", contents)))
const body = new TextEncoder().encode(`${sha256sum} ${basename}`)
await bucket.putObject(`${pkg.key}.sha256sum`, body);
console.log({ uploaded: `${pkg.key}.sha256sum` });
}
}
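
For reference, the `<sha256sum> <basename>` payload uploaded as `<key>.sha256sum` above needs nothing beyond the web crypto API; a minimal local sketch (the file path is a placeholder):

```ts
// Sketch: compute the same "<sha256> <basename>" line for a local file.
const path = "./example.tar.gz"                              // placeholder path
const data = await Deno.readFile(path)
const digest = new Uint8Array(await crypto.subtle.digest("SHA-256", data))
const hex = Array.from(digest).map(b => b.toString(16).padStart(2, "0")).join("")
console.log(`${hex} ${path.split("/").pop()}`)
```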


@@ -23,7 +23,7 @@ import useFlags from "hooks/useFlags.ts"
useFlags()
const pkgs = Deno.args.map(project => {
const match = project.match(/projects\/(.*)\/package.yml/)
const match = project.match(/projects\/(.+)\/package.yml/)
return match ? match[1] : project
}).map(parsePackageRequirement)

scripts/inventory.ts (new executable file, 78 lines)

@@ -0,0 +1,78 @@
#!/usr/bin/env -S tea -E
/*---
args:
- deno
- run
- --allow-net
- --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,S3_BUCKET
- --import-map={{ srcroot }}/import-map.json
---*/
import { S3 } from "s3";
import { stringify as yaml } from "deno/encoding/yaml.ts"
import { stringify as csv } from "deno/encoding/csv.ts"
import { Inventory } from "../src/hooks/useInventory.ts";
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
});
const bucket = s3.getBucket(Deno.env.get("S3_BUCKET")!);
const inventory: Inventory = {}
const flat = []
for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
if (!pkg.key?.endsWith('.tar.gz')) { continue }
const matches = pkg.key.match(new RegExp("^(.*)/(.*)/(.*)/v([0-9]+\.[0-9]+\.[0-9]+)\.tar\.gz$"))
if (!matches) { continue }
const [_, project, platform, arch, version] = matches
if (!inventory[project]) inventory[project] = {}
if (!inventory[project][platform]) inventory[project][platform] = {}
inventory[project][platform][arch] = [...(inventory[project]?.[platform]?.[arch] ?? []), version]
flat.push({ project, platform, arch, version })
}
/// For ultimate user-friendliness, we store this data 4 ways:
/// YAML, JSON, CSV, flat text
const te = new TextEncoder()
// YAML: type Inventory
const yml = te.encode(yaml(inventory))
bucket.putObject("versions.yml", yml)
// JSON: type Inventory
const json = te.encode(JSON.stringify(inventory))
bucket.putObject("versions.json", json)
// CSV: project,platform,arch,version
const csvData = te.encode(await csv(flat, ["project", "platform", "arch", "version"]))
bucket.putObject("versions.csv", csvData)
// TXT: per project/platform/arch, newline-delimited
for(const [project, platforms] of Object.entries(inventory)) {
for (const [platform, archs] of Object.entries(platforms)) {
for (const [arch, versions] of Object.entries(archs)) {
const txt = te.encode(versions.join("\n"))
bucket.putObject(`${project}/${platform}/${arch}/versions.txt`, txt)
}
}
}
//end
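
A consumer-side sketch of the `versions.json` layout generated above; the nesting mirrors the loop (project, then platform, then arch, then a version list) and the example data is made up:

```ts
// Sketch: reading the generated inventory and picking the newest version.
type Inventory = Record<string, Record<string, Record<string, string[]>>>

const inventory: Inventory = {
  "zlib.net": { darwin: { aarch64: ["1.2.11", "1.2.12"] } },  // hypothetical entry
}

const versions = inventory["zlib.net"]["darwin"]["aarch64"]
const newest = [...versions].sort((a, b) =>
  a.localeCompare(b, undefined, { numeric: true })).at(-1)   // simple numeric sort, not full semver
console.log(newest)  // "1.2.12"
```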


@@ -7,24 +7,28 @@ args:
- deno
- run
- --allow-env
- --allow-read=/opt/tea.xyz/var/pantry
- --allow-read
- --import-map={{ srcroot }}/import-map.json
---*/
import { Path } from "types"
import useFlags from "hooks/useFlags.ts"
import useCellar from "hooks/useCellar.ts"
const flags = useFlags()
const prefix = new Path(`${useCellar().prefix}/tea.xyz/var/pantry/projects`)
interface Entry {
project: string
path: Path
}
const prefix = new Path('/opt/tea.xyz/var/pantry/projects')
//FIXME unfortunately importing executes the script below
//------------------------------------------------------------------------- funcs
export async function* ls(): AsyncGenerator<Entry> {
for await (const path of _ls_pantry(prefix)) {
yield {
name: path.parent().relative({ to: prefix }),
path: path.string
project: path.parent().relative({ to: prefix }),
path
}
}
}
@@ -43,22 +47,23 @@ async function* _ls_pantry(dir: Path): AsyncGenerator<Path> {
}
}
interface Entry {
name: string
path: string
}
//-------------------------------------------------------------------------- main
if (import.meta.main) {
const flags = useFlags()
const rv: Entry[] = []
for await (const item of ls()) {
rv.push(item)
}
const rv: Entry[] = []
for await (const item of ls()) {
rv.push(item)
}
if (Deno.env.get("GITHUB_ACTIONS")) {
const projects = rv.map(x => x.name).join(":")
console.log(`::set-output name=projects::${projects}`)
} else if (flags.json) {
const output = JSON.stringify(rv, null, 2)
console.log(output)
} else {
console.log(rv.map(x => x.name).join("\n"))
if (Deno.env.get("GITHUB_ACTIONS")) {
const projects = rv.map(x => x.project).join(":")
console.log(`::set-output name=projects::${projects}`)
} else if (flags.json) {
const obj = rv.map(({ path, project }) => ({ path: path.string, project }))
const out = JSON.stringify(obj, null, 2)
console.log(out)
} else {
console.log(rv.map(x => x.project).join("\n"))
}
}
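
With `ls()` exported and the CLI body guarded by `import.meta.main`, other scripts can iterate the pantry without side effects, which is exactly what `bottle-all.ts` does above. A minimal consumer, assuming it lives next to `ls.ts` in `scripts/`:

```ts
// Sketch: consuming the exported generator (path assumed relative to scripts/).
import { ls } from "./ls.ts"

for await (const { project, path } of ls()) {
  console.log(project, path.string)
}
```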


@@ -6,8 +6,8 @@ args:
- deno
- run
- --allow-net
- --allow-read=/opt
- --allow-write=/opt
- --allow-read={{ tea.prefix }}
- --allow-write={{ tea.prefix }}
- --allow-run # uses `/bin/ln`
- --import-map={{ srcroot }}/import-map.json
---


@@ -6,8 +6,8 @@ args:
- run
- --allow-net
- --allow-run
- --allow-read=/opt,/Library/Developer/CommandLineTools
- --allow-write=/opt
- --allow-read
- --allow-write={{ tea.prefix }}
- --allow-env
- --import-map={{ srcroot }}/import-map.json
---*/


@@ -29,7 +29,10 @@ const pantry = usePantry()
const pkg = await (async () => {
if (magic) {
const i = await cellar.resolve(parsePackageRequirement(Deno.args[0]))
const project = Deno.args[0]
const match = project.match(/projects\/(.+)\/package.yml/)
const parsable = match ? match[1] : project
const i = await cellar.resolve(parsePackageRequirement(parsable))
return i.pkg
} else {
return parsePackage(Deno.args[0])
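
The change above lets the script accept either a bare project name or a `projects/<name>/package.yml` path, using the same regex that is tightened from `(.*)` to `(.+)` earlier in this diff. A standalone illustration with sample arguments:

```ts
// Sketch: normalising CLI arguments to project names (sample inputs).
const samples = ["projects/python.org/package.yml", "zlib.net@1.2"]
for (const arg of samples) {
  const match = arg.match(/projects\/(.+)\/package.yml/)
  console.log(match ? match[1] : arg)   // "python.org", then "zlib.net@1.2"
}
```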

scripts/uninstall.ts (new executable file, 27 lines)

@@ -0,0 +1,27 @@
#!/usr/bin/env -S tea -E
/*
---
args:
- deno
- run
- --allow-net
- --allow-read
- --allow-write={{ tea.prefix }}
- --import-map={{ srcroot }}/import-map.json
---
*/
import { parsePackageRequirement } from "types"
import useCellar from "hooks/useCellar.ts"
import repairLinks from "prefab/repair-links.ts"
const pkgs = Deno.args.map(parsePackageRequirement); console.verbose({ received: pkgs })
const { resolve } = useCellar()
for (const pkg of pkgs) {
console.info({ uninstalling: pkg })
const installation = await resolve(pkg)
installation.path.rm({ recursive: true })
await repairLinks(pkg.project) //FIXME this is overkill, be precise
}

scripts/upload-sync.ts (new executable file, 60 lines)

@@ -0,0 +1,60 @@
#!/usr/bin/env -S tea -E
/*---
args:
- deno
- run
- --allow-net
- --allow-read={{ tea.prefix }}/tea.xyz/var/www
- --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,S3_BUCKET
- --import-map={{ srcroot }}/import-map.json
---*/
import { S3 } from "s3";
import { crypto } from "deno/crypto/mod.ts";
import useCache from "hooks/useCache.ts";
import { encodeToString } from "encodeToString";
import { readAll, readerFromStreamReader } from "deno/streams/mod.ts";
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
});
const bucket = s3.getBucket(Deno.env.get("S3_BUCKET")!);
for (const pkg of await useCache().ls()) {
const key = useCache().s3Key(pkg)
const bottle = useCache().bottle(pkg)
console.log({ checking: key });
const inRepo = await bucket.headObject(key)
const repoChecksum = inRepo ? await checksum(`https://dist.tea.xyz/${key}.sha256sum`) : undefined
// path.read() returns a string; Deno.readFile gives us a Uint8Array directly
const contents = await Deno.readFile(bottle.string);
const sha256sum = encodeToString(new Uint8Array(await crypto.subtle.digest("SHA-256", contents)))
if (!inRepo || repoChecksum !== sha256sum) {
const basename = key.split("/").pop()
const body = new TextEncoder().encode(`${sha256sum} ${basename}`)
console.log({ uploading: key });
await bucket.putObject(key, contents);
await bucket.putObject(`${key}.sha256sum`, body);
console.log({ uploaded: key });
}
}
async function checksum(url: string) {
const rsp = await fetch(url)
if (!rsp.ok) throw new Error(`404-not-found: ${url}`)
const rdr = rsp.body?.getReader()
if (!rdr) throw new Error(`Couldn't read: ${url}`)
const r = await readAll(readerFromStreamReader(rdr))
return new TextDecoder().decode(r).split(' ')[0]
}


@@ -5,8 +5,7 @@ args:
- deno
- run
- --allow-net
- --allow-read=/opt
- --allow-write=/opt/tea.xyz/var/www
- --allow-read
- --allow-env
- --import-map={{ srcroot }}/import-map.json
---*/
@@ -47,8 +46,11 @@ for (const filename of Deno.args) {
const req = parsePackageRequirement(`${match[1]}@${match[2]}`)
if (path.basename().match(/\.sha256sum$/)) { checksums.add(`${req.project}@${req.constraint.raw}`) }
else { bottles.add(req) }
if (path.basename().match(/\.sha256sum$/)) {
checksums.add(`${req.project}@${req.constraint.raw}`)
} else {
bottles.add(req)
}
}
// Ensure our sets are the same: