move all actions to brewkit (#546)

* move all actions to `brewkit`

* move scripts

* prep for merge

* top-level @v0 actions
Authored by Jacob Heider on 2023-03-10 21:14:21 -05:00, committed by GitHub
parent 2593cefed7
commit be290d2ccd
Signed with GPG key ID 4AEE18F83AFDEB23 (no known key found for this signature in database)
21 changed files with 18 additions and 1027 deletions


@ -1,37 +0,0 @@
name: tea/pantry/bottle
description: internal to tea.xyz at this time
inputs:
gpg-key-id:
description: gpg key id
required: true
gpg-key-passphrase:
description: gpg key passphrase
required: true
built:
description: packages to bottle
required: true
compression:
description: compression to use (gz or xz)
required: true
outputs:
bottles:
description: bottle files
value: ${{ steps.bottle.outputs.bottles }}
checksums:
description: checksum files
value: ${{ steps.bottle.outputs.checksums }}
signatures:
description: signature files
value: ${{ steps.bottle.outputs.signatures }}
runs:
using: composite
steps:
- run: ${{ github.action_path }}/bottle.ts ${{ inputs.built }}
id: bottle
shell: sh
env:
COMPRESSION: ${{ inputs.compression }}
GPG_KEY_ID: ${{ inputs.gpg-key-id }}
GPG_PASSPHRASE: ${{ inputs.gpg-key-passphrase }}


@ -1,102 +0,0 @@
#!/usr/bin/env tea
/* ---
dependencies:
gnu.org/tar: ^1.34
tukaani.org/xz: ^5
zlib.net: 1
gnupg.org: ^2
args:
- deno
- run
- --allow-net
- --allow-run
- --allow-env
- --allow-read
- --allow-write
--- */
import { Installation } from "types"
import { useCellar, usePrefix, useFlags, useCache } from "hooks"
import { backticks, panic, run } from "utils"
import { crypto } from "deno/crypto/mod.ts"
import { encode } from "deno/encoding/hex.ts"
import { encode as base64Encode } from "deno/encoding/base64.ts"
import { set_output } from "../../scripts/utils/gha.ts"
import * as ARGV from "../../scripts/utils/args.ts"
import Path from "path"
const cellar = useCellar()
//-------------------------------------------------------------------------- main
if (import.meta.main) {
useFlags()
const compression = Deno.env.get("COMPRESSION") == 'xz' ? 'xz' : 'gz'
const gpgKey = Deno.env.get("GPG_KEY_ID") ?? panic("missing GPG_KEY_ID")
const gpgPassphrase = Deno.env.get("GPG_PASSPHRASE") ?? panic("missing GPG_PASSPHRASE")
const checksums: string[] = []
const signatures: string[] = []
const bottles: Path[] = []
for await (const pkg of ARGV.pkgs()) {
console.log({ bottling: pkg })
const installation = await cellar.resolve(pkg)
const path = await bottle(installation, compression)
const checksum = await sha256(path)
const signature = await gpg(path, { gpgKey, gpgPassphrase })
console.log({ bottled: path })
bottles.push(path)
checksums.push(checksum)
signatures.push(signature)
}
await set_output("bottles", bottles.map(b => b.relative({ to: usePrefix() })))
await set_output("checksums", checksums)
await set_output("signatures", signatures)
}
//------------------------------------------------------------------------- funcs
export async function bottle({ path: kegdir, pkg }: Installation, compression: 'gz' | 'xz'): Promise<Path> {
const tarball = useCache().path({ pkg, type: 'bottle', compression })
const z = compression == 'gz' ? 'z' : 'J'
const cwd = usePrefix()
const cmd = ["tar", `c${z}f`, tarball, kegdir.relative({ to: cwd })]
await run({ cmd, cwd })
return tarball
}
export async function sha256(file: Path): Promise<string> {
return await Deno.open(file.string, { read: true })
.then(file => crypto.subtle.digest("SHA-256", file.readable))
.then(buf => new TextDecoder().decode(encode(new Uint8Array(buf))))
}
interface GPGCredentials {
gpgKey: string
gpgPassphrase: string
}
async function gpg(file: Path, { gpgKey, gpgPassphrase }: GPGCredentials): Promise<string> {
const rv = await backticks({
cmd: [
"gpg",
"--detach-sign",
"--armor",
"--output",
"-",
"--local-user",
gpgKey,
"--passphrase",
gpgPassphrase,
file.string
]
})
return base64Encode(rv)
}
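// To summarize what the loop in main() produces per package (a sketch; exact
// tarball locations depend on useCache() and the local TEA_PREFIX):
//   - a bottle tarball at the path returned by useCache().path({ pkg, type: 'bottle', compression })
//   - a hex-encoded SHA-256 digest of that tarball
//   - a base64-encoded, ASCII-armored detached GPG signature
// set_output() (scripts/utils/gha.ts) then space-joins each list into
// $GITHUB_OUTPUT, with bottle paths made relative to usePrefix().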


@ -1,28 +0,0 @@
name: tea.xyz/pantry/actions/cache
description: cache deno deps
inputs:
cache-name:
description: name of the job to use on the cache key
required: true
runs:
using: composite
steps:
- run: |
if test "$RUNNER_OS" = "macOS"; then
echo "cache=~/Library/Caches/deno" >> $GITHUB_OUTPUT
else
echo "cache=~/.cache/deno" >> $GITHUB_OUTPUT
fi
id: os-cache
shell: sh
- uses: actions/cache@v3
with:
path: |
~/.deno
${{ steps.os-cache.outputs.cache }}
# This isn't perfect (it can't hash files outside github.workspace, and if those scripts change, the hash won't),
# but it's good enough for now. It's slightly conservative, since it monitors all .ts files, but that's fine.
key: ${{ runner.os }}-deno-${{ inputs.cache-name }}-${{ hashFiles('**/deno.jsonc', '**/*.ts') }}


@ -1,98 +0,0 @@
name: Apple Codesigning
description: Codesigns macOS binaries
inputs:
p12-file-base64:
description: Base64 encoded p12 file
required: true
p12-password:
description: Password for p12 file
required: true
identity:
description: Identity to use for signing
required: true
paths:
description: paths to sign
required: true
runs:
using: composite
steps:
# Only runs on macOS
- name: Check platform
shell: sh
run: |
if [[ "$RUNNER_OS" != "macOS" ]]; then
echo "This action only runs on macOS"
exit 1
fi
# The next three steps bless our code for Apple. It might be the case that they should be
# encapsulated separately.
# FIXME: using an explicit commit in a PR isn't great, but the last release was almost 3 years
# ago, and we need bugfixes.
# FIXME: replace this with a tea script based on https://localazy.com/blog/how-to-automatically-sign-macos-apps-using-github-actions
# GitHub has a doc with similar content, but I can't find it at the moment.
# apple-actions/import-codesign-certs will fail if the keychain already exists, so we prophylactically
# delete it if it does.
- name: Delete keychain
shell: sh
run: security delete-keychain signing_temp.keychain || true
- uses: apple-actions/import-codesign-certs@d54750db52a4d3eaed0fc107a8bab3958f3f7494
with:
p12-file-base64: ${{ inputs.p12-file-base64 }}
p12-password: ${{ inputs.p12-password }}
- name: Create file list
shell: sh
id: files
run: |
echo "sign<<EOF" >> $GITHUB_OUTPUT
/usr/bin/find $PATHS \
-type f \
-not -name '*.py' \
-not -name '*.pyc' \
-not -name '*.txt' \
-not -name '*.h' | \
/usr/bin/sed -e 's/ /\\ /g' >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# `tea` won't pass strict checking due to a deno bug with the way
# MachO headers are created
# https://github.com/denoland/deno/issues/17753
echo "check<<EOF" >> $GITHUB_OUTPUT
/usr/bin/find $PATHS \
-type f \
-not -name '*.py' \
-not -name '*.pyc' \
-not -name '*.txt' \
-not -name '*.h' \
-not -name tea | \
/usr/bin/sed -e 's/ /\\ /g' >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
env:
PATHS: ${{ inputs.paths }}
- name: Codesign files
shell: sh
run: |
echo "$FILES" | \
/usr/bin/xargs /usr/bin/codesign -s "$IDENTITY" --force -v --timestamp || true
env:
FILES: ${{ steps.files.outputs.sign }}
IDENTITY: ${{ inputs.identity }}
# This isn't very informative, but even a no-op is safer than none
- name: Check codesigning
shell: sh
run: echo "$FILES" | /usr/bin/xargs /usr/bin/codesign -vvv --strict
env:
FILES: ${{ steps.files.outputs.check }}
# Needed for self-hosted runner, since it doesn't destroy itself automatically.
- name: Delete keychain
if: always()
shell: sh
run: security delete-keychain signing_temp.keychain


@ -1,35 +0,0 @@
name: tea/pantry/fetch-pr-artifacts
description: internal to tea.xyz at this time
inputs:
platform:
description: platform+arch to fetch
required: true
token:
description: github token
default: ${{ github.token }}
required: true
AWS_S3_BUCKET:
description: AWS S3 bucket to use for cache
required: true
AWS_ACCESS_KEY_ID:
description: AWS access key id
required: true
AWS_SECRET_ACCESS_KEY:
description: AWS secret access key
required: true
runs:
using: composite
steps:
- run:
${{ github.action_path }}/fetch-pr-artifacts.ts
${{ github.repository }}
${{ github.sha }}
${{ inputs.platform }} >>$GITHUB_ENV
shell: sh
env:
GITHUB_TOKEN: ${{ inputs.token }}
AWS_S3_BUCKET: ${{ inputs.AWS_S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ inputs.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.AWS_SECRET_ACCESS_KEY }}


@ -1,135 +0,0 @@
#!/usr/bin/env tea
/*---
args:
- deno
- run
- --allow-net
- --allow-env
- --allow-write=./artifacts.tgz
---*/
/// Test
/// ./scripts/fetch-pr-artifacts.ts e582b03fe6efedde80f9569403555f4513dbec91
import { S3 } from "s3";
import { panic, undent } from "utils";
/// Main
/// -------------------------------------------------------------------------------
if (import.meta.main) {
const usage = "usage: fetch-pr-artifacts.ts {REPO} {SHA} {platform+arch}"
const repo = Deno.args[0] ?? panic(usage)
const ref = Deno.args[1] ?? panic(usage)
const flavor = Deno.args[2] ?? panic(usage)
const pr = await find_pr(repo, ref)
if (!pr) throw new Error(`No PR found for commit ${ref} in ${repo}`)
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
})
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
const key = `pull-request/${repo.split("/")[1]}/${pr}/${flavor}`
const artifacts = (await bucket.getObject(key)) ?? panic("No artifacts found")
const file = await Deno.open("artifacts.tgz", { create: true, write: true })
await artifacts.body.pipeTo(file.writable)
await Deno.stdout.write(new TextEncoder().encode(`PR=${pr}`))
}
/// Functions
/// -------------------------------------------------------------------------------
export async function find_pr(repo: string, ref: string): Promise<number | undefined> {
const res = await queryGraphQL<CommitQuery>(prQuery(repo))
const node = res.repository?.ref?.target?.history?.edges.find(n => n.node.oid === ref)
const nodes = node?.node.associatedPullRequests.nodes
if (!nodes || nodes.length === 0) return
return nodes[0].number
}
async function queryGraphQL<T>(query: string): Promise<T> {
const headers: HeadersInit = {}
const token = Deno.env.get("GITHUB_TOKEN") ?? panic("GitHub GraphQL requires you set $GITHUB_TOKEN")
if (token) headers['Authorization'] = `bearer ${token}`
const rsp = await fetch('https://api.github.com/graphql', {
method: 'POST',
body: JSON.stringify({ query }),
headers
})
const json = await rsp.json()
if (!rsp.ok) {
console.error({ rsp, json })
throw new Error()
}
return json.data as T ?? panic("No `data` returned from GraphQL endpoint")
}
/// Types
/// -------------------------------------------------------------------------------
type CommitQuery = {
repository: {
ref: {
target: {
history: {
edges: Node[]
}
}
}
}
}
type Node = {
node: {
url: URL
oid: string
associatedPullRequests: { nodes: PullRequest[] }
}
}
type PullRequest = {
number: number
}
/// Queries
/// -------------------------------------------------------------------------------
function prQuery(repo: string): string {
const [owner, name] = repo.split("/")
return undent`
query {
repository(name: "${name}", owner: "${owner}") {
ref(qualifiedName: "main") {
target {
... on Commit {
history(first: 100) {
edges {
node {
url
oid
associatedPullRequests(first: 1) {
nodes {
number
}
}
}
}
}
}
}
}
}
}`
}
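// Example invocation (hypothetical values; GITHUB_TOKEN, AWS_ACCESS_KEY_ID,
// AWS_SECRET_ACCESS_KEY and AWS_S3_BUCKET must be set):
//
//   ./fetch-pr-artifacts.ts teaxyz/pantry.core e582b03fe6efedde80f9569403555f4513dbec91 darwin+aarch64
//
// This finds the PR associated with that commit on `main`, downloads
// s3://$AWS_S3_BUCKET/pull-request/pantry.core/<pr>/darwin+aarch64 to
// ./artifacts.tgz, and prints `PR=<pr>`, which the composite action above
// appends to $GITHUB_ENV.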


@ -1,39 +0,0 @@
name: tea/pantry/get-platform
description: Outputs the platform spec we need for builds
inputs:
platform:
description: >
The platform+arch to get specs for
required: true
outputs:
os:
description: the OS for general tasks
value: ${{ steps.platform.outputs.os }}
build-os:
description: the OS for build tasks
value: ${{ steps.platform.outputs.build-os }}
container:
description: the container for build tasks
value: ${{ steps.platform.outputs.container }}
test-matrix:
description: the matrix of os/containers for test tasks
value: ${{ steps.platform.outputs.test-matrix }}
runs:
using: composite
steps:
- uses: teaxyz/setup@v0
with:
srcroot: null
- uses: teaxyz/pantry.core/.github/actions/cache@main
with:
cache-name: get-platform
- run: ${{github.action_path}}/get-platform.ts
shell: sh
id: platform
env:
PLATFORM: ${{ inputs.platform }}


@ -1,104 +0,0 @@
#!/usr/bin/env tea
/*---
args:
- deno
- run
- --allow-read
- --allow-env
- --allow-write
---*/
import { panic } from "utils"
// These are only needed if we switch back to GHA runners
// const exceptions: { [project: string]: number } = {
// "deno.land": 4,
// "ziglang.org": 8,
// }
// const packages = await ARGV.toArray(ARGV.pkgs())
type Output = {
os: OS,
buildOs: OS,
container?: string,
testMatrix: { os: OS, container?: string }[]
}
type OS = string | string[]
const platform = Deno.env.get("PLATFORM") ?? panic("$PLATFORM not set")
const output: Output = (() => {
switch(platform) {
case "darwin+x86-64": {
const os = "macos-11"
return {
os,
buildOs: ["self-hosted", "macOS", "X64"],
testMatrix: [{ os }],
}
}
case "darwin+aarch64": {
const os = ["self-hosted", "macOS", "ARM64"]
return {
os,
buildOs: os,
testMatrix: [{ os }],
}
}
case "linux+aarch64": {
const os = ["self-hosted", "linux", "ARM64"]
return {
os,
buildOs: os,
testMatrix: [{ os }],
}
}
case "linux+x86-64": {
// buildOs: sizedUbuntu(packages),
const os = "ubuntu-latest"
const container = "ghcr.io/teaxyz/infuser:latest"
return {
os,
buildOs: ["self-hosted", "linux", "X64"],
// container,
testMatrix: [
{ os },
{ os, container },
{ os, container: "debian:buster-slim" }
],
}
}
default:
panic(`Invalid platform description: ${platform}`)
}})()
const rv = `os=${JSON.stringify(output.os)}\n` +
`build-os=${JSON.stringify(output.buildOs)}\n` +
`container=${JSON.stringify(output.container)}\n` +
`test-matrix=${JSON.stringify(output.testMatrix)}\n`
await Deno.stdout.write(new TextEncoder().encode(rv))
if (Deno.env.get("GITHUB_OUTPUT")) {
const envFile = Deno.env.get("GITHUB_OUTPUT")!
await Deno.writeTextFile(envFile, rv, { append: true})
}
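// For illustration, with PLATFORM=darwin+x86-64 the lines written above would be
// (each value is JSON-encoded):
//
//   os="macos-11"
//   build-os=["self-hosted","macOS","X64"]
//   container=undefined
//   test-matrix=[{"os":"macos-11"}]
//
// so consuming workflows presumably parse test-matrix with fromJSON().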
// Leaving this in case we need to switch back to GHA runners
// function sizedUbuntu(packages: (Package | PackageRequirement)[]): string {
// const size = Math.max(2, ...packages.map(p => exceptions[p.project] ?? 2))
// if (size == 2) {
// return "ubuntu-latest"
// } else if ([4, 8, 16].includes(size)) {
// return `ubuntu-latest-${size}-cores`
// } else {
// panic(`Invalid size: ${size}`)
// }
// }


@ -1,47 +0,0 @@
name: tea/pantry/has-artifacts
description: Determines if PR artifacts exist for a given SHA
inputs:
repo:
description: The repo to check for artifacts
required: true
sha:
description: The SHA to check for artifacts
required: true
token:
description: The GitHub token to use
required: true
s3-bucket:
description: The S3 bucket to use
required: true
aws-access-key-id:
description: The AWS access key ID to use
required: true
aws-secret-access-key:
description: The AWS secret access key to use
required: true
outputs:
has-artifacts:
description: whether there is a PR associated with that SHA with artifacts in staging
value: ${{ steps.has-artifacts.outputs.has-artifacts }}
runs:
using: composite
steps:
- uses: teaxyz/setup@v0
with:
srcroot: null
- uses: teaxyz/pantry.core/.github/actions/cache@main
with:
cache-name: has-artifacts
- run: ${{github.action_path}}/has-artifacts.ts ${{ inputs.repo }} ${{ inputs.sha }} >>$GITHUB_OUTPUT
shell: sh
id: has-artifacts
env:
GITHUB_TOKEN: ${{ inputs.token }}
AWS_S3_CACHE: ${{ inputs.s3-bucket }}
AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}


@ -1,45 +0,0 @@
#!/usr/bin/env tea
/*---
args:
- deno
- run
- --allow-net
- --allow-env
---*/
/// Test
/// ./scripts/has-artifacts.ts e582b03fe6efedde80f9569403555f4513dbec91
import { S3 } from "s3"
import { panic } from "utils"
import { find_pr } from "../fetch-pr-artifacts/fetch-pr-artifacts.ts"
/// Main
/// -------------------------------------------------------------------------------
if (import.meta.main) {
const usage = "usage: has-artifacts.ts {REPO} {SHA}"
const repo = Deno.args[0] ?? panic(usage)
const ref = Deno.args[1] ?? panic(usage)
const pr = await find_pr(repo, ref)
if (!pr) {
await Deno.stdout.write(new TextEncoder().encode("has-artifacts=false"))
Deno.exit(0)
}
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
})
const bucket = s3.getBucket(Deno.env.get("AWS_S3_CACHE")!)
const objects = await bucket.listObjects({ prefix: `pull-request/${repo.split("/")[1]}/${pr}/` })
const hasArtifacts = (objects?.contents?.length || 0) > 0
await Deno.stdout.write(new TextEncoder().encode(`has-artifacts=${hasArtifacts ? "true" : "false"}`))
}
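// Example (hypothetical credentials in the environment):
//
//   GITHUB_TOKEN=<token> AWS_S3_CACHE=<bucket> AWS_ACCESS_KEY_ID=<id> AWS_SECRET_ACCESS_KEY=<secret> \
//     ./has-artifacts.ts teaxyz/pantry.core e582b03fe6efedde80f9569403555f4513dbec91
//
// prints `has-artifacts=true` if any objects exist under
// pull-request/pantry.core/<pr>/, otherwise `has-artifacts=false`; the
// composite action above redirects this straight into $GITHUB_OUTPUT.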


@ -1,39 +0,0 @@
name: +tea.xyz/brewkit
description: sets up tea, tea.xyz/brewkit & caching
# inputs and outputs are `teaxyz/setup` passthrough
inputs:
prefix:
description: >
Where tea stows its packages.
Defaults to `$HOME/.tea`.
required: false
outputs:
version:
description: Your project's version.
value: ${{ steps.tea.outputs.version }}
prefix:
description: The prefix you specified.
value: ${{ steps.tea.outputs.prefix }}
runs:
using: composite
steps:
- uses: teaxyz/setup@v0
id: tea
with:
prefix: ${{ inputs.prefix }}
+: tea.xyz/brewkit^0.7.1
# prevent pantry from reassigning TEA_PREFIX etc.
srcroot: null
- uses: teaxyz/pantry.core/.github/actions/cache@main
with:
cache-name: setup
- run: |
if test -d "${{ github.workspace }}"/projects; then
echo "TEA_PANTRY_PATH=${{ github.workspace }}" >> $GITHUB_ENV
fi
shell: sh


@ -1,40 +0,0 @@
name: tea/pantry/stage-build-artifacts
description: internal to tea.xyz at this time
inputs:
platform:
description: platform+arch being staged
required: true
AWS_S3_BUCKET:
description: AWS S3 bucket to use for cache
required: true
AWS_ACCESS_KEY_ID:
description: AWS access key id
required: true
AWS_SECRET_ACCESS_KEY:
description: AWS secret access key
required: true
runs:
using: composite
steps:
- uses: actions/download-artifact@v3
with:
name: ${{ inputs.platform }}
- uses: teaxyz/setup@v0
- uses: teaxyz/pantry.core/.github/actions/cache@main
with:
cache-name: stage
- run: ${{ github.action_path }}/cache-artifacts.ts
${{github.repository}}
${{github.ref}}
${{inputs.platform}}
artifacts.tgz
shell: sh
env:
AWS_S3_BUCKET: ${{ inputs.AWS_S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ inputs.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.AWS_SECRET_ACCESS_KEY }}


@ -1,43 +0,0 @@
#!/usr/bin/env tea
/*---
args:
- deno
- run
- --allow-net
- --allow-read
- --allow-env
---*/
import { S3 } from "s3"
import { panic } from "utils"
import Path from "path"
const usage = "usage: cache-artifacts.ts {REPO} {REF} {destname} {file}"
const repo = Deno.args[0] ?? panic(usage);
const ref = Deno.args[1] ?? panic(usage);
const dest = Deno.args[2] ?? panic(usage);
const artifacts = Deno.args[3] ?? panic(usage);
if (!repo.startsWith("teaxyz/")) throw new Error(`official teaxyz repos only: ${repo}`)
const pr = parseInt(ref.replace(/refs\/pull\/(\d+)\/merge/, "$1"))
if (isNaN(pr)) throw new Error(`invalid ref: ${ref}`)
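// e.g. for ref "refs/pull/123/merge" (a hypothetical PR), pr === 123 and the
// archive is uploaded to pull-request/<repo-name>/123/<destname> below.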
console.log({artifacts})
console.log({file: Path.cwd().join(artifacts)})
console.log({exists: Path.cwd().join(artifacts).isFile()})
console.log({cwd: Path.cwd()})
const file = Path.cwd().join(artifacts).isFile() ?? panic(`invalid archive: ${Path.cwd().join(artifacts)}`)
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
})
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
const key = `pull-request/${repo.split("/")[1]}/${pr}/${dest}`
const body = await Deno.readFile(file.string)
console.log({ uploadingTo: key })
await bucket.putObject(key, body)


@ -1,52 +0,0 @@
name: tea/pantry/upload
description: internal to tea.xyz at this time
inputs:
pkgs:
description: packages to upload
required: true
srcs:
description: source tarballs
required: true
bottles:
description: bottles
required: true
checksums:
description: checksums
required: true
signatures:
description: signature files
required: true
AWS_S3_BUCKET:
description: AWS S3 bucket
required: true
AWS_ACCESS_KEY_ID:
description: AWS access key ID
required: true
AWS_SECRET_ACCESS_KEY:
description: AWS secret access key
required: true
outputs:
cf-invalidation-paths:
description: CloudFront invalidation paths
value: ${{ steps.upload.outputs.cf-invalidation-paths }}
runs:
using: composite
steps:
- uses: teaxyz/pantry.core/.github/actions/cache@main
with:
cache-name: upload
- run: ${{ github.action_path }}/upload.ts
--pkgs ${{ inputs.pkgs }}
--srcs ${{ inputs.srcs }}
--bottles ${{ inputs.bottles }}
--checksums ${{ inputs.checksums }}
--signatures ${{ inputs.signatures }}
shell: sh
id: upload
env:
AWS_S3_BUCKET: ${{ inputs.AWS_S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ inputs.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.AWS_SECRET_ACCESS_KEY }}


@ -1,145 +0,0 @@
#!/usr/bin/env tea
/*---
args:
- deno
- run
- --allow-net
- --allow-read
- --allow-env
- --allow-write
---*/
import { S3, S3Bucket } from "s3"
import { pkg as pkgutils } from "utils"
import { useCache, useFlags, useOffLicense, usePrefix } from "hooks"
import { Package, PackageRequirement } from "types"
import SemVer, * as semver from "semver"
import { basename, dirname } from "deno/path/mod.ts"
import { retry } from "deno/async/retry.ts"
import { decode as base64Decode } from "deno/encoding/base64.ts"
import Path from "path"
import { set_output } from "../../scripts/utils/gha.ts"
import { sha256 } from "../bottle/bottle.ts"
//------------------------------------------------------------------------- funcs
function args_get(key: string): string[] {
const it = Deno.args[Symbol.iterator]()
while (true) {
const { value, done } = it.next()
if (done) throw new Error()
if (value === `--${key}`) break
}
const rv: string[] = []
while (true) {
const { value, done } = it.next()
if (done) return rv
if (value.startsWith("--")) return rv
rv.push(value)
}
}
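// Example (hypothetical command line): given
//   upload.ts --pkgs <specs> --srcs a.tar.gz b.tar.gz --bottles <paths>
// args_get("srcs") returns ["a.tar.gz", "b.tar.gz"]: the helper scans Deno.args
// for the requested `--flag`, then collects values until it hits the next
// argument starting with "--" (or the end of the argument list).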
function assert_pkg(pkg: Package | PackageRequirement) {
if ("version" in pkg) {
return pkg
} else {
return {
project: pkg.project,
version: new SemVer(pkg.constraint, {tolerant: true}),
}
}
}
async function get_versions(key: string, pkg: Package, bucket: S3Bucket): Promise<SemVer[]> {
const prefix = dirname(key)
const rsp = await bucket.listObjects({ prefix })
//FIXME? the API isn't clear about whether these nulls indicate failure or not
//NOTE if this is a new package then some empty results is expected
const got = rsp
?.contents
?.compact((x) => x.key)
.map((x) => basename(x))
.filter((x) => x.match(/v.*\.tar\.gz$/))
.map((x) => x.replace(/v(.*)\.tar\.gz/, "$1")) ??
[]
// have to add pkg.version as put and get are not atomic
return [...new Set([...got, pkg.version.toString()])]
.compact(semver.parse)
.sort(semver.compare)
}
async function put(key: string, body: string | Path | Uint8Array, bucket: S3Bucket) {
console.log({ uploading: body, to: key })
rv.push(`/${key}`)
if (body instanceof Path) {
body = await Deno.readFile(body.string)
} else if (typeof body === "string") {
body = encode(body)
}
// @ts-ignore typescript doesn't narrow the types properly here
return retry(()=>bucket.putObject(key, body))
}
//------------------------------------------------------------------------- main
useFlags()
if (Deno.args.length === 0) throw new Error("no args supplied")
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
})
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
const encode = (() => {
const e = new TextEncoder()
return e.encode.bind(e)
})()
const cache = useCache()
const pkgs = args_get("pkgs").map(pkgutils.parse).map(assert_pkg)
const srcs = args_get("srcs")
const bottles = args_get("bottles")
const checksums = args_get("checksums")
const signatures = args_get("signatures")
const rv: string[] = []
for (const [index, pkg] of pkgs.entries()) {
const bottle = usePrefix().join(bottles[index])
const checksum = checksums[index]
const signature = base64Decode(signatures[index])
const stowed = cache.decode(bottle)!
const key = useOffLicense("s3").key(stowed)
const versions = await get_versions(key, pkg, bucket)
//FIXME stream the bottle (at least) to S3
await put(key, bottle, bucket)
await put(`${key}.sha256sum`, `${checksum} ${basename(key)}`, bucket)
await put(`${key}.asc`, signature, bucket)
await put(`${dirname(key)}/versions.txt`, versions.join("\n"), bucket)
// mirror the sources
if (srcs[index] != "~") {
const src = usePrefix().join(srcs[index])
if (src.isDirectory()) {
// we almost certainly expanded `~` to the user's home directory
continue
}
const srcKey = useOffLicense("s3").key({
pkg: stowed.pkg,
type: "src",
extname: src.extname(),
})
const srcChecksum = await sha256(src)
const srcVersions = await get_versions(srcKey, pkg, bucket)
await put(srcKey, src, bucket)
await put(`${srcKey}.sha256sum`, `${srcChecksum} ${basename(srcKey)}`, bucket)
await put(`${dirname(srcKey)}/versions.txt`, srcVersions.join("\n"), bucket)
}
}
await set_output("cf-invalidation-paths", rv)


@ -1,22 +0,0 @@
const e = new TextEncoder()
const encode = e.encode.bind(e)
export async function set_output<T>(name: string, arr: T[], separator = " ") {
const value = arr.map(escape).join(separator)
const txt = `${name}=${value}`
const outfile = Deno.env.get("GITHUB_OUTPUT")
if (outfile) {
await Deno.writeTextFile(outfile, `${name}=${value}\n`, { append: true})
}
return await Deno.stdout.write(encode(`${txt}\n`))
}
//TODO HTML escapes probs
function escape<T>(input: T): string {
const out = `${input}`
if (/[<>~]/.test(out)) {
return `"${out}"`
} else {
return out
}
}
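// A usage sketch (values are illustrative): from an action script,
//
//   await set_output("bottles", ["a.tar.gz", "b.tar.xz"])
//
// appends `bottles=a.tar.gz b.tar.xz` to the file named by $GITHUB_OUTPUT (when
// set) and echoes the same line to stdout, making the values available as
// `${{ steps.<id>.outputs.bottles }}` in the calling workflow.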


@ -22,7 +22,7 @@ jobs:
os: ${{ steps.platform.outputs.os }}
cache-set: ${{ steps.platform.outputs.cache-set }}
steps:
- uses: teaxyz/pantry.core/.github/actions/get-platform@main
- uses: teaxyz/brewkit/actions/get-platform@v0
id: platform
with:
platform: ${{ inputs.platform }}
@ -35,7 +35,7 @@ jobs:
built: ${{ env.built }}
pr: ${{ env.PR }}
steps:
- uses: teaxyz/pantry.core/.github/actions/setup-brewkit@main
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
id: tea
- uses: actions/download-artifact@v3
@ -43,7 +43,7 @@ jobs:
with:
name: ${{ inputs.platform }}
- uses: teaxyz/pantry.core/.github/actions/fetch-pr-artifacts@main
- uses: teaxyz/brewkit/actions/fetch-pr-artifacts@v0
if: ${{ !inputs.new-version }}
with:
platform: ${{ inputs.platform }}
@ -80,7 +80,7 @@ jobs:
env:
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
- uses: teaxyz/pantry.core/.github/actions/bottle@main
- uses: teaxyz/brewkit/actions/bottle@v0
id: bottle-xz
with:
built: ${{ env.built }}
@ -88,7 +88,7 @@ jobs:
gpg-key-id: ${{ secrets.GPG_KEY_ID }}
gpg-key-passphrase: ${{ secrets.GPG_PASSPHRASE }}
- uses: teaxyz/pantry.core/.github/actions/bottle@main
- uses: teaxyz/brewkit/actions/bottle@v0
id: bottle-gz
with:
built: ${{ env.built }}
@ -121,7 +121,7 @@ jobs:
needs: [bottle]
runs-on: ubuntu-latest
steps:
- uses: teaxyz/pantry.core/.github/actions/setup-brewkit@main
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
with:
prefix: ${{ github.workspace }}
@ -136,7 +136,7 @@ jobs:
echo "$file=$(cat $file)" >>$GITHUB_ENV
done
- uses: teaxyz/pantry.core/.github/actions/upload@main
- uses: teaxyz/brewkit/actions/upload@v0
id: upload
with:
pkgs: ${{ needs.bottle.outputs.built }} ${{ needs.bottle.outputs.built }}


@ -24,7 +24,7 @@ jobs:
test-matrix: ${{ steps.platform.outputs.test-matrix }}
cache-set: ${{ steps.platform.outputs.cache-set }}
steps:
- uses: teaxyz/pantry.core/.github/actions/get-platform@main
- uses: teaxyz/brewkit/actions/get-platform@v0
id: platform
with:
platform: ${{ inputs.platform }}
@ -36,7 +36,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: teaxyz/pantry.core/.github/actions/setup-brewkit@main
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
id: tea
with:
prefix: /opt
@ -61,7 +61,7 @@ jobs:
TEA_PREFIX: ${{ steps.tea.outputs.prefix }}
# sign macOS binaries
- uses: teaxyz/pantry.core/.github/actions/codesign@main
- uses: teaxyz/brewkit/actions/codesign@v0
if: startsWith(inputs.platform, 'darwin+') && github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name
with:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE_P12 }}
@ -104,7 +104,7 @@ jobs:
container: ${{ matrix.platform.container }}
steps:
- uses: actions/checkout@v3
- uses: teaxyz/pantry.core/.github/actions/setup-brewkit@main
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
- uses: teaxyz/setup@v0
with:
@ -169,7 +169,7 @@ jobs:
with:
name: ${{ inputs.platform }}
- uses: teaxyz/pantry.core/.github/actions/stage-build-artifacts@main
- uses: teaxyz/brewkit/actions/stage-build-artifacts@v0
with:
platform: ${{ inputs.platform }}
AWS_S3_BUCKET: ${{ secrets.AWS_S3_CACHE }}


@ -14,14 +14,15 @@ jobs:
with:
repo: teaxyz/pantry.core
- uses: teaxyz/pantry.core/.github/actions/setup-brewkit@main
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
# TODO: convert to teaxyz/brewkit/actions/map-projects-to-githubs
- run: .github/scripts/map-projects-to-githubs.ts
env:
WATCHER_URL: ${{ secrets.WATCHER_URL }}
TEA_API_TOKEN: ${{ secrets.TEA_API_TOKEN }}
- uses: teaxyz/pantry.core/.github/actions/has-artifacts@main
- uses: teaxyz/brewkit/actions/has-artifacts@v0
id: has-artifacts
with:
repo: ${{ github.repository }}


@ -13,7 +13,7 @@ jobs:
- uses: teaxyz/setup@v0
with:
srcroot: .github
- uses: teaxyz/pantry.core/.github/actions/cache@main
- uses: teaxyz/brewkit/actions/cache@v0
with:
cache-name: ci-scripts
- run: deno check --unstable **/*.ts


@ -15,7 +15,8 @@ jobs:
- uses: teaxyz/setup@v0
with:
srcroot: null
- uses: teaxyz/pantry.core/.github/actions/cache@main
- uses: teaxyz/brewkit/actions/cache@v0
# TODO: convert to teaxyz/brewkit/actions/index-packages
- run: ./.github/scripts/index-packages.ts ${{ inputs.projects }}
env:
TEA_PANTRY_PATH: ${{ github.workspace }}