Max Howell 2022-09-27 15:11:35 -04:00
parent 5657f04b30
commit cc1613c749
7 changed files with 78 additions and 42 deletions

View file

@@ -31,6 +31,7 @@ jobs:
run:
working-directory: pantry
outputs:
+ built: ${{ steps.build.outputs.pkgs }}
pkgs: ${{ steps.sorted.outputs.pkgs }} ${{ steps.sorted.outputs.pre-install }}
steps:
- name: co pantry
@@ -190,6 +191,9 @@ jobs:
scripts/test.ts
xz-bottles:
+ defaults:
+ run:
+ working-directory: pantry
needs: [verify-relocatable, build]
runs-on: ubuntu-latest
strategy:
@@ -227,14 +231,14 @@
name: ${{ matrix.platform }}
path: ${{ steps.tea.outputs.prefix }}
- - run: scripts/bottle.ts ${{ needs.build.outputs.pkgs }}
+ - run: scripts/bottle.ts ${{ needs.build.outputs.built }}
id: bottle
env:
COMPRESSION: xz
- name: upload bottles
run: scripts/upload.ts
- --pkgs ${{ needs.build.outputs.pkgs }}
+ --pkgs ${{ needs.build.outputs.built }}
--bottles ${{ steps.bottle.outputs.bottles }}
--checksums ${{ steps.bottle.outputs.checksums }}
env:
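
The upshot of the workflow change: the xz-bottles job now bottles exactly the set the build step emitted (`built`) rather than the sorted `pkgs` list. A minimal Deno sketch of how a script on the receiving end might consume such a space-separated package list passed as arguments (a hypothetical stand-in, not the actual scripts/bottle.ts):

// sketch: split a space-separated pkg list passed via CLI args, as a step
// like `scripts/bottle.ts ${{ needs.build.outputs.built }}` would supply
const specs = Deno.args.flatMap(arg => arg.split(/\s+/)).filter(s => s.length > 0)
for (const spec of specs) {
  console.log({ bottling: spec })
}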

View file

@@ -17,7 +17,7 @@ args:
--- */
import { Installation } from "types"
- import { useCellar, useCache, usePrefix, useFlags } from "hooks"
+ import { useCellar, usePrefix, useFlags, useCache } from "hooks"
import { run, pkg as pkgutils } from "utils"
import { crypto } from "deno/crypto/mod.ts"
import { encode } from "deno/encoding/hex.ts"
@@ -67,7 +67,7 @@ if (import.meta.main) {
//------------------------------------------------------------------------- funcs
export async function bottle({ path: kegdir, pkg }: Installation, compression: 'gz' | 'xz'): Promise<Path> {
- const tarball = useCache().bottle(pkg, compression)
+ const tarball = useCache().path({ pkg, type: 'bottle', compression })
const z = compression == 'gz' ? 'z' : 'J'
const cwd = usePrefix()
const cmd = ["tar", `c${z}f`, tarball, kegdir.relative({ to: cwd })]
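
The `z`/`J` switch above is the heart of the dual-compression support: tar's `z` flag selects gzip and `J` selects xz. A self-contained sketch of the same mapping, with plain strings standing in for the project's Path type:

// sketch: assemble a tar command for a keg; `tarball` and `kegdir` are
// plain strings here rather than the project's Path objects
function tarCmd(tarball: string, kegdir: string, compression: 'gz' | 'xz'): string[] {
  const z = compression === 'gz' ? 'z' : 'J'  // tar: z = gzip, J = xz
  return ["tar", `c${z}f`, tarball, kegdir]
}
console.log(tarCmd("/tmp/foo-1.2.3.tar.xz", "foo.com/v1.2.3", "xz"))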

View file

@@ -9,9 +9,10 @@ args:
- --import-map={{ srcroot }}/import-map.json
---*/
- import { S3 } from "s3"
+ import { S3, S3Object } from "s3"
import { Sha256 } from "deno/hash/sha256.ts"
import { readerFromStreamReader, readAll } from "deno/streams/conversion.ts"
+ import Path from "../src/vendor/Path.ts"
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
@@ -22,20 +23,29 @@ const s3 = new S3({
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);
for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
- if (!pkg.key?.endsWith('.tar.gz')) { continue }
- console.log({ checking: pkg.key });
+ const keys = get_keys(pkg)
+ if (!keys) continue
- if (!await bucket.headObject(`${pkg.key}.sha256sum`)) {
- console.log({ missingChecksum: pkg.key })
- const reader = (await bucket.getObject(pkg.key))!.body.getReader()
+ console.log({ checking: keys.checksum });
+ if (!await bucket.headObject(keys.checksum.string)) {
+ console.log({ missing: keys.checksum })
+ const reader = (await bucket.getObject(keys.bottle.string))!.body.getReader()
const contents = await readAll(readerFromStreamReader(reader))
- const basename = pkg.key.split("/").pop()
const sha256sum = new Sha256().update(contents).toString()
- const body = new TextEncoder().encode(`${sha256sum} ${basename}`)
+ const body = new TextEncoder().encode(`${sha256sum} ${keys.bottle.basename()}`)
+ await bucket.putObject(keys.checksum.string, body)
- await bucket.putObject(`${pkg.key}.sha256sum`, body);
- console.log({ uploaded: `${pkg.key}.sha256sum` });
+ console.log({ uploaded: keys.checksum })
}
}
+ function get_keys(pkg: S3Object): { bottle: Path, checksum: Path } | undefined {
+ if (!pkg.key) return
+ if (!/\.tar\.[gx]z$/.test(pkg.key)) return
+ return {
+ bottle: new Path(pkg.key),
+ checksum: new Path(`${pkg.key}.sha256sum`)
+ }
+ }
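
The body written next to each bottle is a standard checksum line: `<hex digest><space><basename>`, the shape `sha256sum --check` understands. A sketch of producing that line with the built-in Web Crypto API (rather than the deprecated std Sha256 class the script uses):

// sketch: build a "<digest> <basename>" checksum line via Web Crypto
async function checksumLine(contents: Uint8Array, basename: string): Promise<string> {
  const digest = await crypto.subtle.digest("SHA-256", contents)
  const hex = [...new Uint8Array(digest)].map(b => b.toString(16).padStart(2, "0")).join("")
  return `${hex} ${basename}`
}
console.log(await checksumLine(new TextEncoder().encode("hello"), "foo-1.2.3.tar.xz"))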

View file

@@ -10,10 +10,11 @@ args:
- --import-map={{ srcroot }}/import-map.json
---*/
- import { S3 } from "s3";
+ import { S3 } from "s3"
import { stringify as yaml } from "deno/encoding/yaml.ts"
import { stringify as csv } from "deno/encoding/csv.ts"
import { Inventory } from "hooks/useInventory.ts"
+ import * as semver from "semver"
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
@@ -21,16 +22,15 @@ const s3 = new S3({
region: "us-east-1",
});
- const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);
+ const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
const inventory: Inventory = {}
const flat = []
for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
- if (!pkg.key?.endsWith('.tar.gz')) { continue }
- const matches = pkg.key.match(new RegExp("^(.*)/(.*)/(.*)/v([0-9]+\.[0-9]+\.[0-9]+)\.tar\.gz$"))
+ if (!/\.tar\.[gx]z$/.test(pkg.key ?? '')) { continue }
+ const matches = pkg.key!.match(new RegExp(`^(.*)/(.*)/(.*)/v(${semver.regex})\.tar\.[xg]z$`))
if (!matches) { continue }
const [_, project, platform, arch, version] = matches
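
Inventory keys follow the layout `<project>/<platform>/<arch>/v<version>.tar.{gz,xz}`. A minimal sketch of pulling those fields apart, with a plain semver pattern standing in for the `semver.regex` fragment the script interpolates:

// sketch: parse "<project>/<platform>/<arch>/v<version>.tar.(gz|xz)" keys;
// \d+\.\d+\.\d+ is a simplification of semver.regex
const rx = /^(.+)\/([^/]+)\/([^/]+)\/v(\d+\.\d+\.\d+)\.tar\.[gx]z$/
const m = "deno.land/darwin/aarch64/v1.25.2.tar.xz".match(rx)
if (m) {
  const [, project, platform, arch, version] = m
  console.log({ project, platform, arch, version })
}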

View file

@@ -26,7 +26,7 @@ const output: FileInfo[] = []
for await(const obj of bucket.listAllObjects({ batchSize: 200 })) {
const { key, lastModified } = obj
- if (!key?.match(/\.tar.gz$/)) { continue }
+ if (!key?.match(/\.tar\.[gx]z$/)) { continue }
output.push({ key: key!, lastModified: lastModified! })
}
@@ -44,4 +44,4 @@ console.table(output)
interface FileInfo {
key: string
lastModified: Date
- }
+ }
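
The filter here gets the same widening as the other scripts in this commit: gzip-only becomes gzip-or-xz. A quick check of the two predicates side by side, as plain functions:

// old vs new key filter
const before = (k: string) => /\.tar.gz$/.test(k)     // "." unescaped: also matches e.g. ".tar_gz"
const after  = (k: string) => /\.tar\.[gx]z$/.test(k)
console.log(before("v1.tar.xz"), after("v1.tar.xz"))  // false true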

View file

@@ -10,10 +10,12 @@ args:
- --import-map={{ srcroot }}/import-map.json
---*/
- import { S3 } from "s3"
- import { Sha256 } from "deno/hash/sha256.ts"
- import { useCache } from "hooks"
+ import { readAll, readerFromStreamReader } from "deno/streams/mod.ts"
+ import { useCache, useOffLicense } from "hooks"
import { Package } from "types"
+ import { Sha256 } from "deno/hash/sha256.ts"
+ import { S3 } from "s3"
+ import Path from "path"
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
@@ -23,9 +25,8 @@ const s3 = new S3({
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
- for (const pkg of await useCache().ls()) {
- const key = useCache().s3Key(pkg)
- const bottle = useCache().bottle(pkg)
+ for (const stowed of await useCache().ls()) {
+ const key = useOffLicense('s3').key(stowed)
console.log({ checking: key })
@@ -33,7 +34,7 @@ for (const pkg of await useCache().ls()) {
const repoChecksum = inRepo ? await checksum(`https://dist.tea.xyz/${key}.sha256sum`) : undefined
// path.read() returns a string; this is easier to get a UInt8Array
- const contents = await Deno.readFile(bottle.string)
+ const contents = await Deno.readFile(stowed.path.string)
const sha256sum = new Sha256().update(contents).toString()
if (!inRepo || repoChecksum !== sha256sum) {
@@ -56,4 +57,6 @@ async function checksum(url: string) {
if (!rdr) throw new Error(`Couldnt read: ${url}`)
const r = await readAll(readerFromStreamReader(rdr))
return new TextDecoder().decode(r).split(' ')[0]
- }
+ }
+ type RV = Package & {bottle: Path}
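
The core decision in this script: re-upload a .sha256sum when the copy in the repo is missing or disagrees with the digest computed from the locally stowed bottle. A condensed sketch of that branch, with plain strings in place of the script's fetched values:

// sketch: decide whether a checksum object needs (re)uploading;
// `remote` is undefined when the .sha256sum doesn't exist yet
function needsUpload(remote: string | undefined, local: string): boolean {
  return remote !== local  // covers both "missing" and "stale"
}
console.log(needsUpload(undefined, "abc123"))  // true: not uploaded yet
console.log(needsUpload("abc123", "abc123"))   // false: already correct
console.log(needsUpload("def456", "abc123"))   // true: stale checksum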

View file

@@ -11,11 +11,12 @@ args:
---*/
import { S3 } from "s3"
- import { panic, pkg as pkgutils } from "utils"
- import { useCache, useFlags } from "hooks"
- import { Package } from "types"
+ import { host, pkg as pkgutils } from "utils"
+ import { useFlags, useOffLicense } from "hooks"
+ import { Package, PackageRequirement } from "types"
import SemVer, * as semver from "semver"
import { dirname, basename } from "deno/path/mod.ts"
+ import Path from "path"
useFlags()
@@ -29,11 +30,13 @@ const s3 = new S3({
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
const encode = (() => { const e = new TextEncoder(); return e.encode.bind(e) })()
+ const { arch, platform } = host()
- const pkgs = args_get("pkgs").map(pkgutils.parse).map(x => "version" in x ? x : panic<Package>())
+ const pkgs = args_get("pkgs").map(pkgutils.parse).map(assert_pkg)
const bottles = args_get("bottles")
const checksums = args_get("checksums")
function args_get(key: string): string[] {
const it = Deno.args[Symbol.iterator]()
while (true) {
@@ -51,14 +54,19 @@ function args_get(key: string): string[] {
}
for (const [index, pkg] of pkgs.entries()) {
- const bottle = bottles[index]
+ const bottle = new Path(bottles[index])
const checksum = checksums[index]
- const key = useCache().s3Key(pkg)
+ const compression = bottle.extname() == '.tar.gz' ? 'gz' : 'xz'
+ const key = useOffLicense('s3').key({
+ pkg,
+ type: 'bottle',
+ compression,
+ })
//FIXME stream it to S3
- const bottle_contents = await Deno.readFile(bottle)
- const checksum_contents = fixup_checksum(await Deno.readFile(checksum), basename(bottle))
- const versions = await get_versions(pkg)
+ const bottle_contents = await Deno.readFile(bottle.string)
+ const checksum_contents = fixup_checksum(await Deno.readFile(checksum), bottle.basename())
+ const versions = await get_versions(key, pkg)
console.log({ uploading: key })
@@ -71,8 +79,8 @@ for (const [index, pkg] of pkgs.entries()) {
//end
- async function get_versions(pkg: Package): Promise<SemVer[]> {
- const prefix = dirname(useCache().s3Key(pkg))
+ async function get_versions(key: string, pkg: Package): Promise<SemVer[]> {
+ const prefix = dirname(key)
const rsp = await bucket.listObjects({ prefix })
//FIXME? API isnt clear if these nulls indicate failure or not
@@ -94,4 +102,15 @@ async function get_versions(pkg: Package): Promise<SemVer[]> {
function fixup_checksum(data: Uint8Array, new_file_name: string) {
const checksum = new TextDecoder().decode(data).split(" ")[0]
return new TextEncoder().encode(`${checksum} ${new_file_name}`)
}
+ function assert_pkg(pkg: Package | PackageRequirement) {
+ if ("version" in pkg) {
+ return pkg
+ } else {
+ return {
+ project: pkg.project,
+ version: new SemVer(pkg.constraint)
+ }
+ }
+ }
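
assert_pkg exists because pkgutils.parse can return either a Package (exact version) or a PackageRequirement (a constraint); the uploader needs concrete versions, and the old code simply panicked on requirements. A standalone sketch of the same narrowing with simplified local types (not the project's types module):

// sketch: narrow a parsed spec to a concrete version; Pkg/PkgReq are
// simplified stand-ins for the project's Package/PackageRequirement
type Pkg = { project: string, version: string }
type PkgReq = { project: string, constraint: string }

function assertPkg(x: Pkg | PkgReq): Pkg {
  if ("version" in x) return x
  // like the original's new SemVer(pkg.constraint): assume the
  // constraint names an exact version
  return { project: x.project, version: x.constraint }
}

console.log(assertPkg({ project: "deno.land", constraint: "1.25.2" }))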