fix making tar.xz bottles (#158)

This commit is contained in:
Max Howell 2022-09-28 11:14:46 -04:00
parent dbce359eb0
commit ee504738cf
8 changed files with 103 additions and 112 deletions

View file

@ -57,7 +57,10 @@ jobs:
mkdir .git ../cli/.git
#FIXME needed for gdk-pixbuf
apt --yes install shared-mime-info curl
apt --yes install shared-mime-info
#FIXME **we provide curl** but it fails, we must figure out why
apt --yes install curl
;;
macos-11)
# screws up a lot of build scripts
@ -78,11 +81,13 @@ jobs:
*)
exit 1
esac
touch /opt/.hack
- uses: teaxyz/setup@v0
env:
TEA_SECRET: ${{ secrets.TEA_SECRET }}
VERBOSE: 1
id: tea
with:
prefix: /opt
@ -95,53 +100,23 @@ jobs:
- run: scripts/build.ts ${{ steps.sorted.outputs.pkgs }}
id: build
env:
# TODO: GITHUB_TOKEN doesn't have private access to teaxyz/cli. This can be restored once that repo is public.
# GITHUB_TOKEN: ${{ github.token }}
# GITHUB_TOKEN doesn't have private access to teaxyz/cli.
# TODO restore to ${{ github.token }} when public
GITHUB_TOKEN: ${{ secrets.TEMP_JACOBS_GITHUB_PAT }}
FORCE_UNSAFE_CONFIGURE: 1 # some configure scripts refuse to run as root
- name: test
run: echo ${{ steps.build.outputs.pkgs }} | xargs -tn1 scripts/test.ts
- name: bottle
run: scripts/bottle.ts ${{ steps.build.outputs.pkgs }}
id: bottle
# TODO only upload if all jobs succeed
# TODO only upload when we merge
# TODO upload to a staging location until we release new pantry versions
- name: upload bottles
run: scripts/upload.ts
--pkgs ${{ steps.build.outputs.pkgs }}
--bottles ${{ steps.bottle.outputs.bottles }}
--checksums ${{ steps.bottle.outputs.checksums }}
env:
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }}
path: ${{ steps.bottle.outputs.artifacts }}
path: |
${{ steps.build.outputs.paths }}
${{ steps.tea.outputs.prefix }}/.hack
# ^^ so the uploaded artifacts keep eg. foo.com/v1.2.3 as prefixes
if-no-files-found: error
invalidate-cloudfront:
test:
needs: [build]
runs-on: ubuntu-latest
steps:
#FIXME incredibly inefficient - have upload.ts tell us what to invalidate
- uses: chetan/invalidate-cloudfront-action@v2
env:
DISTRIBUTION: ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
PATHS: "/*"
AWS_REGION: "us-east-1"
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
verify-relocatable:
needs: [invalidate-cloudfront, build]
runs-on: ${{ matrix.os }}
defaults:
run:
@ -167,7 +142,6 @@ jobs:
run: |
mkdir -p ~/opt/tea.xyz/var
ln -s $GITHUB_WORKSPACE/pantry ~/opt/tea.xyz/var/pantry
mkdir ../.git
cp README.md ..
- uses: teaxyz/setup@v0
@ -175,32 +149,27 @@ jobs:
env:
TEA_SECRET: ${{ secrets.TEA_SECRET }}
- name: download bottles
uses: actions/download-artifact@v3
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.os }}
path: ${{ steps.tea.outputs.prefix }}
- run: scripts/deps.ts -i ${{ needs.build.outputs.pkgs }}
id: deps
- run: echo ${{ inputs.projects }} | xargs -tn1 scripts/test.ts
- run: ../cli/scripts/install.ts ${{ steps.deps.outputs.pkgs }}
- run:
echo ${{ inputs.projects }} | xargs -tn1
scripts/test.ts
xz-bottles:
bottle:
defaults:
run:
working-directory: pantry
needs: [verify-relocatable, build]
runs-on: ubuntu-latest
needs: [test, build]
runs-on: ${{ matrix.platform }}
strategy:
matrix:
platform:
- macos-11
- ubuntu-latest
compression:
- xz
- gz
steps:
- uses: actions/checkout@v3
with:
@ -217,7 +186,6 @@ jobs:
run: |
mkdir -p ~/opt/tea.xyz/var
ln -s $GITHUB_WORKSPACE/pantry ~/opt/tea.xyz/var/pantry
mkdir ../.git
cp README.md ..
- uses: teaxyz/setup@v0
@ -225,8 +193,7 @@ jobs:
env:
TEA_SECRET: ${{ secrets.TEA_SECRET }}
- name: download bottles
uses: actions/download-artifact@v3
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.platform }}
path: ${{ steps.tea.outputs.prefix }}
@ -234,9 +201,12 @@ jobs:
- run: scripts/bottle.ts ${{ needs.build.outputs.built }}
id: bottle
env:
COMPRESSION: xz
COMPRESSION: ${{ matrix.compression }}
- run: ls -la ${{ steps.bottle.outputs.bottles }}
- name: upload bottles
id: upload
run: scripts/upload.ts
--pkgs ${{ needs.build.outputs.built }}
--bottles ${{ steps.bottle.outputs.bottles }}
@ -246,11 +216,19 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
#TODO cloudfront invalidation
#NOTE ideally we'd invalidate all at once so this is atomic
# however GHA can't consolidate outputs from a matrix :/
- uses: chetan/invalidate-cloudfront-action@v2
env:
PATHS: ${{ steps.upload.outputs.cf-invalidation-paths }}
DISTRIBUTION: ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
AWS_REGION: us-east-1
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
notify:
if: always()
needs: [verify-relocatable]
needs: [bottle]
runs-on: ubuntu-latest
steps:
- uses: rtCamp/action-slack-notify@v2

View file

@ -1,5 +1,5 @@
distributable:
url: https://zlib.net/zlib-{{ version }}.tar.gz
url: https://zlib.net/zlib-{{version}}.tar.gz
strip-components: 1
versions:

View file

@ -21,6 +21,7 @@ import { useCellar, usePrefix, useFlags, useCache } from "hooks"
import { run, pkg as pkgutils } from "utils"
import { crypto } from "deno/crypto/mod.ts"
import { encode } from "deno/encoding/hex.ts"
import { set_output } from "./utils/gha.ts"
import Path from "path"
const cellar = useCellar()
@ -32,36 +33,24 @@ if (import.meta.main) {
useFlags()
const compression = Deno.env.get("COMPRESSION") == 'xz' ? 'xz' : 'gz'
const checksums: string[] = []
const bottles: Path[] = []
const checksums: Path[] = []
const artifacts: Path[] = []
for (const pkg of Deno.args.map(pkgutils.parse)) {
console.log({ bottling: { pkg } })
console.log({ bottling: pkg })
const installation = await cellar.resolve(pkg)
const path = await bottle(installation, compression)
const checksum = await sha256(path)
console.log({ bottled: { path } })
console.log({ bottled: path })
bottles.push(path)
checksums.push(checksum)
}
if (bottles.length === 0) throw new Error("no input provided")
const encode = (() => { const e = new TextEncoder(); return e.encode.bind(e) })()
const bottles_out = bottles.map(x => x.string).join(' ')
await Deno.stdout.write(encode(`::set-output name=bottles::${bottles_out}\n`))
const checksums_out = checksums.map(x => x.string).join(' ')
await Deno.stdout.write(encode(`::set-output name=checksums::${checksums_out}\n`))
// newline separated for the upload-artifact action
artifacts.push(...bottles, ...checksums)
await Deno.stdout.write(encode(`::set-output name=artifacts::${artifacts.join('%0A')}\n`))
await set_output("bottles", bottles)
await set_output("checksums", checksums)
}
@ -75,10 +64,8 @@ export async function bottle({ path: kegdir, pkg }: Installation, compression: '
return tarball
}
async function sha256(file: Path): Promise<Path> {
const sha = await Deno.open(file.string, { read: true })
async function sha256(file: Path): Promise<string> {
return await Deno.open(file.string, { read: true })
.then(file => crypto.subtle.digest("SHA-256", file.readable))
.then(buf => new TextDecoder().decode(encode(new Uint8Array(buf))))
const text = `${sha} ${file.basename()}`
return new Path(`${file}.sha256sum`).write({ text, force: true })
}

View file

@ -13,10 +13,11 @@ args:
---*/
import { usePantry } from "hooks"
import build from "./build/build.ts"
import { Package } from "types"
import { Installation, Package } from "types"
import { pkg as pkgutils } from "utils"
import { useFlags, usePrefix } from "hooks"
import { set_output } from "./utils/gha.ts"
import build from "./build/build.ts"
useFlags()
@ -24,7 +25,7 @@ const pantry = usePantry()
const dry = Deno.args.map(pkgutils.parse)
const gha = !!Deno.env.get("GITHUB_ACTIONS")
const group_it = gha && dry.length > 1
const rv: Package[] = []
const rv: Installation[] = []
if (usePrefix().string != "/opt") {
console.error({ TEA_PREFIX: usePrefix().string })
@ -40,14 +41,13 @@ for (const rq of dry) {
console.log({ building: pkg.project })
}
await build(pkg)
rv.push(pkg)
const install = await build(pkg)
rv.push(install)
if (group_it) {
console.log("::endgroup::")
}
}
const built_pkgs = rv.map(pkgutils.str).join(" ")
const txt = `::set-output name=pkgs::${built_pkgs}\n`
await Deno.stdout.write(new TextEncoder().encode(txt))
await set_output("pkgs", rv.map(x => pkgutils.str(x.pkg)))
await set_output("paths", rv.map(x => x.path), '%0A')

View file

@ -14,7 +14,7 @@ const { platform } = host()
export default async function _build(pkg: Package) {
try {
await __build(pkg)
return await __build(pkg)
} catch (e) {
cellar.keg(pkg).isDirectory()?.isEmpty()?.rm() // dont leave empty kegs around
throw e

View file

@ -23,14 +23,16 @@ const s3 = new S3({
region: "us-east-1",
})
const offy = useOffLicense('s3')
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
for (const stowed of await useCache().ls()) {
const url = useOffLicense('s3').url(stowed)
const url = offy.url(stowed)
const key = offy.key(stowed)
console.log({ checking: url })
const inRepo = await bucket.headObject(url.pathname)
const inRepo = await bucket.headObject(key)
const repoChecksum = inRepo ? await checksum(`${url}.sha256sum`) : undefined
// path.read() returns a string; this is easier to get a Uint8Array
@ -43,8 +45,8 @@ for (const stowed of await useCache().ls()) {
console.log({ uploading: url })
await bucket.putObject(url.pathname, contents)
await bucket.putObject(`${url.pathname}.sha256sum`, body)
await bucket.putObject(key, contents)
await bucket.putObject(`${key}.sha256sum`, body)
console.log({ uploaded: url })
}

View file

@ -12,11 +12,12 @@ args:
import { S3 } from "s3"
import { pkg as pkgutils } from "utils"
import { useFlags, useOffLicense } from "hooks"
import { useFlags, useOffLicense, useCache } from "hooks"
import { Package, PackageRequirement } from "types"
import SemVer, * as semver from "semver"
import { dirname, basename } from "deno/path/mod.ts"
import Path from "path"
import { set_output } from "./utils/gha.ts"
useFlags()
@ -30,6 +31,7 @@ const s3 = new S3({
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
const encode = (() => { const e = new TextEncoder(); return e.encode.bind(e) })()
const cache = useCache()
const pkgs = args_get("pkgs").map(pkgutils.parse).map(assert_pkg)
const bottles = args_get("bottles")
@ -52,30 +54,33 @@ function args_get(key: string): string[] {
}
}
const rv: string[] = []
const put = async (key: string, body: string | Path | Uint8Array) => {
console.log({ uploading: body, to: key })
rv.push(`/${key}`)
if (body instanceof Path) {
body = await Deno.readFile(body.string)
} else if (typeof body === "string") {
body = encode(body)
}
return bucket.putObject(key, body)
}
for (const [index, pkg] of pkgs.entries()) {
const bottle = new Path(bottles[index])
const checksum = checksums[index]
const compression = bottle.extname() == '.tar.gz' ? 'gz' : 'xz'
const key = useOffLicense('s3').key({
pkg,
type: 'bottle',
compression,
})
//FIXME stream it to S3
const bottle_contents = await Deno.readFile(bottle.string)
const checksum_contents = fixup_checksum(await Deno.readFile(checksum), bottle.basename())
const stowed = cache.decode(bottle)!
const key = useOffLicense('s3').key(stowed)
const versions = await get_versions(key, pkg)
console.log({ uploading: key })
await bucket.putObject(key, bottle_contents)
await bucket.putObject(`${key}.sha256sum`, checksum_contents)
await bucket.putObject(`${dirname(key)}/versions.txt`, encode(versions.join("\n")))
console.log({ uploaded: key })
//FIXME stream the bottle (at least) to S3
await put(key, bottle)
await put(`${key}.sha256sum`, `${checksum} ${basename(key)}`)
await put(`${dirname(key)}/versions.txt`, versions.join("\n"))
}
await set_output('cf-invalidation-paths', rv)
//end
async function get_versions(key: string, pkg: Package): Promise<SemVer[]> {

19
scripts/utils/gha.ts Normal file
View file

@ -0,0 +1,19 @@
const textEncoder = new TextEncoder()
const encode = textEncoder.encode.bind(textEncoder)

/** Writes a GitHub Actions `set-output` workflow command to stdout,
 * joining the (escaped) array items with `separator`. */
export function set_output<T>(name: string, arr: T[], separator = " ") {
  const serialized = arr.map(escape).join(separator)
  return Deno.stdout.write(encode(`::set-output name=${name}::${serialized}\n`))
}
//TODO probably needs real HTML escaping
function escape<T>(input: T): string {
  const text = `${input}`
  // wrap in quotes when the value contains angle brackets
  return /[<>]/.test(text) ? `"${text}"` : text
}