use PR build artifacts on merge (#78)

* Upload artifacts to S3; bottle in a separate workflow

* Remove Slack notifications

* Be more explicit about AWS_S3_CACHE

* Revert "fix build-deps outputting stuff we need to build"

This reverts commit 972f0715f4.

Co-authored-by: Max Howell <mxcl@me.com>
Jacob Heider authored 2022-12-20 16:54:26 -05:00; committed by GitHub
commit 11c863706e (parent f39b513abb)
8 changed files with 488 additions and 243 deletions
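In short: the build workflow now stages each pull request's artifacts in S3 (scripts/cache-artifacts.ts), the new bottle workflow pulls them back down by commit (scripts/fetch-pr-artifacts.ts), and cleanup jobs remove the staged objects after upload or when the PR closes. Both scripts agree on a single S3 key layout; a minimal illustrative sketch of that convention (the helper function and the example PR number are not part of the diff):

// key layout shared by cache-artifacts.ts and fetch-pr-artifacts.ts below;
// `repo` is "owner/name", `pr` is the pull-request number, `flavor` is platform+arch
function cacheKey(repo: string, pr: number, flavor: string): string {
  return `pull-request/${repo.split("/")[1]}/${pr}/${flavor}`
}

cacheKey("teaxyz/pantry.core", 123, "darwin+aarch64")
// => "pull-request/pantry.core/123/darwin+aarch64"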

.github/workflows/bottle.yml (new file)

@@ -0,0 +1,243 @@
name: bottle
on: workflow_call
jobs:
bottle:
runs-on: ${{ matrix.platform.os }}
defaults:
run:
working-directory: tea.xyz/var/pantry
strategy:
matrix:
platform:
- os: macos-11
name: darwin+x86-64
- os: ubuntu-latest
name: linux+x86-64
- os: [self-hosted, macOS, ARM64]
name: darwin+aarch64
- os: [self-hosted, linux, ARM64]
name: linux+aarch64
outputs:
srcs: ${{ env.srcs }}
built: ${{ env.built }}
steps:
- uses: actions/checkout@v3
with:
path: tea.xyz/var/cli
repository: teaxyz/cli
- uses: actions/checkout@v3
with:
path: pantry
- uses: teaxyz/setup@v0
id: tea
with:
srcroot: tea.xyz/var/pantry
prefix: ${{ github.workspace }}
- run: |
# in case this PR contains updates to the scripts
#TODO only do for PRs
if test "$GITHUB_REPOSITORY" = "teaxyz/pantry.core"; then
cp -rv $GITHUB_WORKSPACE/pantry/scripts/* $(tea --prefix)/tea.xyz/var/pantry/scripts
fi
- uses: actions/download-artifact@v3
if: ${{ !startsWith(github.ref, 'refs/pull/')}}
with:
name: ${{ matrix.platform.name }}
path: tea.xyz/var/pantry
- run: scripts/fetch-pr-artifacts.ts ${{ github.repository }} ${{ github.sha }} ${{ matrix.platform.name }}
if: startsWith(github.ref, 'refs/pull/')
env:
GITHUB_TOKEN: ${{github.token}}
AWS_S3_CACHE: ${{ secrets.AWS_S3_CACHE }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- run: tar xzf artifacts.tgz -C ${{ steps.tea.outputs.prefix }}
- run: |
for file in built relative-paths srcs; do
echo "$file=$(cat $file)" >>$GITHUB_ENV
done
working-directory: ${{ steps.tea.outputs.prefix }}
# the next three steps bless our code for Apple. It may be that they should be
# encapsulated separately.
# FIXME: using an explicit commit in a PR isn't great, but the last release was almost 3 years
# ago, and we need bugfixes.
# FIXME: replace this with a tea script based on https://localazy.com/blog/how-to-automatically-sign-macos-apps-using-github-actions
# github has a doc with similar content, but I can't find it at the moment.
- uses: apple-actions/import-codesign-certs@d54750db52a4d3eaed0fc107a8bab3958f3f7494
if: matrix.platform.name == 'darwin+aarch64' || matrix.platform.name == 'darwin+x86-64'
with:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE_P12 }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_P12_PASSWORD }}
# Codesign libs and bins
- name: Codesign package
if: matrix.platform.name == 'darwin+aarch64' || matrix.platform.name == 'darwin+x86-64'
run: |
for PKG in ${{ env.relative-paths }}; do
find /opt/$PKG -name '*.so' -or -name '*.dylib' -print0 | \
xargs -0 codesign -s "Developer ID Application: Tea Inc. (7WV56FL599)" --force -v --deep --timestamp --preserve-metadata=entitlements -o runtime || true
codesign -s "Developer ID Application: Tea Inc. (7WV56FL599)" -v --force --deep --timestamp --preserve-metadata=entitlements -o runtime /opt/$PKG/bin/* || true
done
working-directory: ${{ steps.tea.outputs.prefix }}
# This isn't very informative, but even a no-op is safer than none
- name: Check codesigning
if: matrix.platform.name == 'darwin+aarch64' || matrix.platform.name == 'darwin+x86-64'
run: |
for PKG in ${{ env.relative-paths }}; do
for SIG in `find /opt/$PKG -name '*.so' -or -name '*.dylib'` `find /opt/$PKG/bin -type f`; do
codesign -vvv --deep --strict "$SIG"
done
done
working-directory: ${{ steps.tea.outputs.prefix }}
# Needed for self-hosted runner, since it doesn't destroy itself automatically.
- name: Delete keychain
if: always() && matrix.platform.name == 'darwin+aarch64'
run: security delete-keychain signing_temp.keychain
- run: |
tea +gnupg.org gpg-agent --daemon || true
echo $GPG_PRIVATE_KEY | \
base64 -d | \
tea +gnupg.org gpg --import --batch --yes
env:
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
- run: scripts/bottle.ts $built
id: bottle-xz
env:
COMPRESSION: xz
GPG_KEY_ID: ${{ secrets.GPG_KEY_ID }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
- run: scripts/bottle.ts $built
id: bottle-gz
env:
COMPRESSION: gz
GPG_KEY_ID: ${{ secrets.GPG_KEY_ID }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
- run: |
echo ${{ steps.bottle-gz.outputs.bottles }} ${{ steps.bottle-xz.outputs.bottles }} >bottles
echo ${{ steps.bottle-gz.outputs.checksums }} ${{ steps.bottle-xz.outputs.checksums }} >checksums
echo ${{ steps.bottle-gz.outputs.signatures }} ${{ steps.bottle-xz.outputs.signatures }} >signatures
tar cf $GITHUB_WORKSPACE/artifacts.tar \
$srcs \
${{ steps.bottle-gz.outputs.bottles }} \
${{ steps.bottle-xz.outputs.bottles }} \
bottles checksums signatures
working-directory: ${{ steps.tea.outputs.prefix }}
- name: upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.platform.name }}-bottles
path: artifacts.tar
if-no-files-found: error
upload:
needs: [bottle]
runs-on: ubuntu-latest
defaults:
run:
working-directory: tea.xyz/var/pantry
strategy:
matrix:
platform:
- os: macos-11
name: darwin+x86-64
- os: ubuntu-latest
name: linux+x86-64
- os: [self-hosted, macOS, ARM64]
name: darwin+aarch64
- os: [self-hosted, linux, ARM64]
name: linux+aarch64
steps:
- uses: actions/checkout@v3
with:
path: tea.xyz/var/cli
repository: teaxyz/cli
- uses: actions/checkout@v3
with:
path: pantry
- uses: teaxyz/setup@v0
id: tea
with:
srcroot: tea.xyz/var/pantry
prefix: ${{ github.workspace }}
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.platform.name }}-bottles
- run: |
tar xvf $GITHUB_WORKSPACE/artifacts.tar
for file in bottles checksums signatures; do
echo "$file=$(cat $file)" >>$GITHUB_ENV
done
working-directory: ${{ steps.tea.outputs.prefix }}
- run: |
# in case this PR contains updates to the scripts
#TODO only do for PRs
if test "$GITHUB_REPOSITORY" = "teaxyz/pantry.core"; then
cp -rv $GITHUB_WORKSPACE/pantry/scripts/* $(tea --prefix)/tea.xyz/var/pantry/scripts
fi
- name: upload bottles
id: upload
run: scripts/upload.ts
--pkgs ${{ needs.bottle.outputs.built }} ${{ needs.bottle.outputs.built }}
--srcs ${{ needs.bottle.outputs.srcs }} ${{ needs.bottle.outputs.srcs }}
--bottles $bottles
--checksums $checksums
--signatures $signatures
env:
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
#NOTE ideally we'd invalidate all at once so this is atomic
# however GHA can't consolidate outputs from a matrix :/
- uses: chetan/invalidate-cloudfront-action@v2
env:
PATHS: ${{ steps.upload.outputs.cf-invalidation-paths }}
DISTRIBUTION: ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
AWS_REGION: us-east-1
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
cleanup:
runs-on: ubuntu-latest
needs: [upload]
if: startsWith(github.ref, 'refs/pull/') && startsWith(github.repository, 'teaxyz/pantry.')
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- run: |
REPO=$(echo ${{github.repository}} | sed -e 's_teaxyz/__')
PR=$(echo ${{github.ref}} | sed -e 's_refs/pull/\(.*\)/merge_\1_')
aws s3 rm --recursive s3://$AWS_S3_CACHE/pull-request/$REPO/$PR
env:
AWS_S3_CACHE: ${{ secrets.AWS_S3_CACHE }}

.github/workflows/build.yml

@@ -6,35 +6,28 @@ on:
projects:
required: true
type: string
upload:
required: false
type: boolean
default: false
env:
TEA_PANTRY_PATH: ${{ github.workspace }}/pantry
jobs:
build:
runs-on: ${{ matrix.os }}
runs-on: ${{ matrix.platform.os }}
strategy:
matrix:
include:
platform:
- os: macos-11
name: darwin+x86-64
- os: ubuntu-latest
name: linux+x86-64
container:
image: debian:buster-slim
options: --memory=24g
- os: [self-hosted, macOS, ARM64]
tag: darwin-aarch64
name: darwin+aarch64
- os: [self-hosted, linux, ARM64]
tag: linux-aarch64
container: ${{ matrix.container }}
outputs:
built: ${{ steps.build.outputs.pkgs }}
relative-paths: ${{ steps.build.outputs.relative-paths }}
srcs: ${{ steps.build.outputs.srcs }}
pkgs: ${{ steps.sorted.outputs.pkgs }} ${{ steps.sorted.outputs.pre-install }}
name: linux+aarch64
container: ${{ matrix.platform.container }}
steps:
- name: co pantry
uses: actions/checkout@v3
@@ -56,7 +49,7 @@ jobs:
- name: HACKS
run: |
case ${{ matrix.os }} in
case ${{ matrix.platform.os }} in
ubuntu-latest)
#FIXME our LLVM doesn't provide c/c++ headers for some reason
apt-get update
@@ -92,38 +85,53 @@ jobs:
GITHUB_TOKEN: ${{ github.token }}
FORCE_UNSAFE_CONFIGURE: 1 # some configure scripts refuse to run as root
- name: delete `.la` files
run: find ${{ steps.tea.outputs.prefix }} -name '*.la' -delete
# cache data we'll need in the bottling job
- run: |
echo ${{ steps.build.outputs.pkgs }} >built
echo ${{ steps.build.outputs.relative-paths }} >relative-paths
echo ${{ steps.build.outputs.srcs }} >srcs
working-directory: ${{ steps.tea.outputs.prefix }}
# tarring ourselves ∵ GHA-artifacts (ludicrously) lose permissions
# /ref https://github.com/actions/upload-artifact/issues/38
- run:
tar czf $GITHUB_WORKSPACE/artifacts.tgz
${{ steps.build.outputs.relative-paths }}
${{ steps.build.outputs.srcs-relative-paths }}
built relative-paths srcs
working-directory: ${{ steps.tea.outputs.prefix }}
- name: upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag || matrix.os }}
name: ${{ matrix.platform.name }}
path: artifacts.tgz
if-no-files-found: error
test:
needs: [build]
runs-on: ${{ matrix.os }}
runs-on: ${{ matrix.platform.os }}
strategy:
matrix:
include:
platform:
- os: macos-11
name: darwin+x86-64
- os: ubuntu-latest
name: linux+x86-64
- os: ubuntu-latest
name: linux+x86-64
container: ghcr.io/teaxyz/infuser:slim-latest
- os: ubuntu-latest
name: linux+x86-64
container: debian:buster-slim
- os: [self-hosted, macOS, ARM64]
tag: darwin-aarch64
name: darwin+aarch64
- os: [self-hosted, linux, ARM64]
tag: linux-aarch64
container: ${{ matrix.container }}
name: linux+aarch64
container: ${{ matrix.platform.container }}
steps:
- uses: actions/checkout@v3
with:
@@ -149,23 +157,20 @@ jobs:
if test "$GITHUB_REPOSITORY" = "teaxyz/pantry.core"; then
cp -rv pantry/scripts/* $(tea --prefix)/tea.xyz/var/pantry/scripts
fi
if: ${{ matrix.container != '' }}
if: ${{ matrix.platform.container != '' }}
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag || matrix.os }}
name: ${{ matrix.platform.name }}
- run: tar xzf artifacts.tgz
- run: tea.xyz/var/pantry/scripts/test.ts ${{ inputs.projects }}
bottle:
needs: [test, build]
if: ${{ inputs.upload }}
runs-on: ${{ matrix.platform.os }}
defaults:
run:
working-directory: tea.xyz/var/pantry
stage:
needs: [test]
if: startsWith(github.ref, 'refs/pull/') && startsWith(github.repository, 'teaxyz/pantry.')
runs-on: ubuntu-latest
strategy:
matrix:
platform:
@@ -174,10 +179,8 @@ jobs:
- os: ubuntu-latest
name: linux+x86-64
- os: [self-hosted, macOS, ARM64]
tag: darwin-aarch64
name: darwin+aarch64
- os: [self-hosted, linux, ARM64]
tag: linux-aarch64
name: linux+aarch64
steps:
- uses: actions/checkout@v3
@@ -204,190 +207,15 @@ jobs:
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.platform.tag || matrix.platform.os }}
path: ${{ steps.tea.outputs.prefix }}
name: ${{ matrix.platform.name }}
- run: tar xzf artifacts.tgz
working-directory: ${{ steps.tea.outputs.prefix }}
- name: delete `.la` files
run: find ${{ steps.tea.outputs.prefix }} -name '*.la' -delete
# the next three steps bless our code for Apple. It may be that they should be
# encapsulated separately.
# FIXME: using an explicit commit in a PR isn't great, but the last release was almost 3 years
# ago, and we need bugfixes.
# FIXME: replace this with a tea script based on https://localazy.com/blog/how-to-automatically-sign-macos-apps-using-github-actions
# github has a doc with similar content, but I can't find it at the moment.
- uses: apple-actions/import-codesign-certs@d54750db52a4d3eaed0fc107a8bab3958f3f7494
if: matrix.platform.name == 'darwin+aarch64' || matrix.platform.name == 'darwin+x86-64'
with:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE_P12 }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_P12_PASSWORD }}
# Codesign libs and bins
- name: Codesign package
if: matrix.platform.name == 'darwin+aarch64' || matrix.platform.name == 'darwin+x86-64'
run: |
for PKG in ${{ needs.build.outputs.relative-paths }}; do
find /opt/$PKG -name '*.so' -or -name '*.dylib' -print0 | \
xargs -0 codesign -s "Developer ID Application: Tea Inc. (7WV56FL599)" --force -v --deep --timestamp --preserve-metadata=entitlements -o runtime || true
codesign -s "Developer ID Application: Tea Inc. (7WV56FL599)" -v --force --deep --timestamp --preserve-metadata=entitlements -o runtime /opt/$PKG/bin/* || true
done
working-directory: ${{ steps.tea.outputs.prefix }}
# This isn't very informative, but even a no-op is safer than none
- name: Check codesigning
if: matrix.platform.name == 'darwin+aarch64' || matrix.platform.name == 'darwin+x86-64'
run: |
for PKG in ${{ needs.build.outputs.relative-paths }}; do
for SIG in `find /opt/$PKG -name '*.so' -or -name '*.dylib'` `find /opt/$PKG/bin -type f`; do
codesign -vvv --deep --strict "$SIG"
done
done
working-directory: ${{ steps.tea.outputs.prefix }}
# Needed for self-hosted runner, since it doesn't destroy itself automatically.
- name: Delete keychain
if: always() && matrix.platform.name == 'darwin+aarch64'
run: security delete-keychain signing_temp.keychain
- run: |
tea +gnupg.org gpg-agent --daemon || true
echo $GPG_PRIVATE_KEY | \
base64 -d | \
tea +gnupg.org gpg --import --batch --yes
- run:
tea.xyz/var/pantry/scripts/cache-artifacts.ts
${{github.repository}}
${{github.ref}}
${{matrix.platform.name}}
artifacts.tgz
env:
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
- run: scripts/bottle.ts ${{ needs.build.outputs.built }}
id: bottle-xz
env:
COMPRESSION: xz
GPG_KEY_ID: ${{ secrets.GPG_KEY_ID }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
- run: scripts/bottle.ts ${{ needs.build.outputs.built }}
id: bottle-gz
env:
COMPRESSION: gz
GPG_KEY_ID: ${{ secrets.GPG_KEY_ID }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
- run: |
echo ${{ steps.bottle-gz.outputs.bottles }} ${{ steps.bottle-xz.outputs.bottles }} >bottles
echo ${{ steps.bottle-gz.outputs.checksums }} ${{ steps.bottle-xz.outputs.checksums }} >checksums
echo ${{ steps.bottle-gz.outputs.signatures }} ${{ steps.bottle-xz.outputs.signatures }} >signatures
# Don't try to archive "~"
SRCS=$(echo ${{ needs.build.outputs.srcs }} | sed -e 's/~//g')
tar cf $GITHUB_WORKSPACE/artifacts.tar \
$SRCS \
${{ steps.bottle-gz.outputs.bottles }} \
${{ steps.bottle-xz.outputs.bottles }} \
bottles checksums signatures
working-directory: ${{ steps.tea.outputs.prefix }}
- name: upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.platform.name }}-bottles
path: artifacts.tar
if-no-files-found: error
upload:
needs: [build, bottle]
if: ${{ inputs.upload }}
runs-on: ubuntu-latest
defaults:
run:
working-directory: tea.xyz/var/pantry
strategy:
matrix:
platform:
- os: macos-11
name: darwin+x86-64
- os: ubuntu-latest
name: linux+x86-64
- os: [self-hosted, macOS, ARM64]
tag: darwin-aarch64
name: darwin+aarch64
- os: [self-hosted, linux, ARM64]
tag: linux-aarch64
name: linux+aarch64
steps:
- uses: actions/checkout@v3
with:
path: tea.xyz/var/cli
repository: teaxyz/cli
- uses: actions/checkout@v3
with:
path: pantry
- uses: teaxyz/setup@v0
id: tea
with:
srcroot: tea.xyz/var/pantry
prefix: ${{ github.workspace }}
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.platform.name }}-bottles
path: ${{ steps.tea.outputs.prefix }}
- run: tar xvf artifacts.tar
working-directory: ${{ steps.tea.outputs.prefix }}
- run: |
# in case this PR contains updates to the scripts
#TODO only do for PRs
if test "$GITHUB_REPOSITORY" = "teaxyz/pantry.core"; then
cp -rv $GITHUB_WORKSPACE/pantry/scripts/* $(tea --prefix)/tea.xyz/var/pantry/scripts
fi
cd ${{ steps.tea.outputs.prefix }}
for file in bottles checksums signatures; do
echo "$file=$(cat $file)" >>$GITHUB_ENV
done
- name: upload bottles
id: upload
run: scripts/upload.ts
--pkgs ${{ needs.build.outputs.built }} ${{ needs.build.outputs.built }}
--srcs ${{ needs.build.outputs.srcs }} ${{ needs.build.outputs.srcs }}
--bottles ${{ env.bottles }}
--checksums ${{ env.checksums }}
--signatures ${{ env.signatures }}
env:
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
AWS_S3_CACHE: ${{ secrets.AWS_S3_CACHE }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
#NOTE ideally we'd invalidate all at once so this is atomic
# however GHA can't consolidate outputs from a matrix :/
- uses: chetan/invalidate-cloudfront-action@v2
env:
PATHS: ${{ steps.upload.outputs.cf-invalidation-paths }}
DISTRIBUTION: ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
AWS_REGION: us-east-1
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
notify:
if: always()
needs: [test, build, upload]
runs-on: ubuntu-latest
steps:
- uses: martialonline/workflow-status@v3
id: status
- uses: rtCamp/action-slack-notify@v2
if: ${{ env.SLACK_WEBHOOK != '' }}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_MESSAGE: build job for ${{ inputs.projects }} ${{ steps.status.outputs.status }}
SLACK_COLOR: ${{ steps.status.outputs.status }}


@@ -10,26 +10,12 @@ jobs:
steps:
- uses: actions/checkout@v3
- run: >
grep -E ' github: [^\w]+/[^\w]+' projects/**/package.yml |
grep -E ' github: [^\w]+/[^\w/]+' projects/**/package.yml |
sed -e 's|^projects/\(.*\)/package.yml: *github: \([^/]*/[^/]*\).*|{ "project": "\1", "github": "\2" }|' |
jq -sc . |
curl https://app.tea.xyz/api/receiveWatcherProjects --fail -X PUT \
-H "content-type: application/json" -H "authorization: bearer ${{ secrets.TEA_API_TOKEN }}" -d @-
get-diff:
runs-on: ubuntu-latest
outputs:
diff: ${{ steps.diff.outputs.diff }}
steps:
- uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6
id: diff
with:
PATTERNS: projects/**/package.yml
build:
needs: [get-diff]
uses: ./.github/workflows/build.yml
with:
projects: ${{ needs.get-diff.outputs.diff }}
upload: true
bottle:
#FIXME: will fail (harmlessly) on non-merge/non-new-version runs
uses: ./.github/workflows/bottle.yml
secrets: inherit
if: ${{ needs.get-diff.outputs.diff != '' }}

.github/workflows/cleanup.yml (new file)

@@ -0,0 +1,24 @@
name: cleanup
on:
pull_request:
types: [closed]
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: remove staged artifacts
run: |
REPO=$(echo ${{github.repository}} | sed -e 's_teaxyz/__')
PR=$(echo ${{github.ref}} | sed -e 's_refs/pull/\(.*\)/merge_\1_')
aws s3 rm --recursive s3://$AWS_S3_CACHE/pull-request/$REPO/$PR
if: startsWith(github.ref, 'refs/pull/') && startsWith(github.repository, 'teaxyz/pantry.')
env:
AWS_S3_CACHE: ${{ secrets.AWS_S3_CACHE }}


@@ -13,5 +13,8 @@ jobs:
uses: ./.github/workflows/build.yml
with:
projects: ${{ inputs.projects }}
upload: true
secrets: inherit
secrets: inherit
bottle:
needs: [build]
uses: ./.github/workflows/bottle.yml
secrets: inherit

scripts/build-deps.ts

@@ -32,20 +32,13 @@ const get_deps = async (pkg: Package | PackageRequirement) => {
}
}
const bootstrap_required = new Set<string>()
const set = new Set<string>()
let rv: PackageRequirement[] = []
const rv: PackageRequirement[] = []
for await (const pkg of ARGV.pkgs()) {
const deps = await get_deps(pkg)
const wet = await hydrate(deps)
rv.push(...wet.pkgs)
set.add(pkg.project)
wet.bootstrap_required.forEach(x => bootstrap_required.add(x))
}
// we don't want to pre-install packages we intend to build
rv = rv.filter(({ project }) => !set.has(project) || bootstrap_required.has(project))
const gas = rv.map(pkg.str)
if (Deno.env.get("GITHUB_ACTIONS")) {

scripts/cache-artifacts.ts (new executable file)

@@ -0,0 +1,44 @@
#!/usr/bin/env -S tea -E
/*---
args:
- deno
- run
- --allow-net
- --allow-read
- --allow-env
- --import-map={{ srcroot }}/import-map.json
---*/
import { S3 } from "s3"
import { panic } from "utils"
import Path from "path"
const usage = "usage: cache-artifacts.ts {REPO} {REF} {destname} {file}"
const repo = Deno.args[0] ?? panic(usage);
const ref = Deno.args[1] ?? panic(usage);
const dest = Deno.args[2] ?? panic(usage);
const artifacts = Deno.args[3] ?? panic(usage);
if (!repo.startsWith("teaxyz/")) throw new Error(`official teaxyz repos only: ${repo}`)
const pr = parseInt(ref.replace(/refs\/pull\/(\d+)\/merge/, "$1"))
if (isNaN(pr)) throw new Error(`invalid ref: ${ref}`)
console.log({artifacts})
console.log({file: Path.cwd().join(artifacts)})
console.log({exists: Path.cwd().join(artifacts).isFile()})
console.log({cwd: Path.cwd()})
const file = Path.cwd().join(artifacts).isFile() ?? panic(`invalid archive: ${Path.cwd().join(artifacts)}`)
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
})
const bucket = s3.getBucket(Deno.env.get("AWS_S3_CACHE")!)
const key = `pull-request/${repo.split("/")[1]}/${pr}/${dest}`
const body = await Deno.readFile(file.string)
console.log({ uploadingTo: key })
await bucket.putObject(key, body)

scripts/fetch-pr-artifacts.ts (new executable file)

@@ -0,0 +1,124 @@
#!/usr/bin/env -S tea -E
/*---
args:
- deno
- run
- --allow-net
- --allow-env
- --allow-write=./artifacts.tgz
- --import-map={{ srcroot }}/import-map.json
---*/
/// Test
/// ./scripts/fetch-pr-artifacts.ts e582b03fe6efedde80f9569403555f4513dbec91
import { S3 } from "https://deno.land/x/s3@0.5.0/mod.ts";
import { panic, undent } from "utils/index.ts";
/// Main
/// -------------------------------------------------------------------------------
const usage = "usage: fetch-pr-artifacts.ts {REPO} {SHA} {platform+arch}"
const repo = Deno.args[0] ?? panic(usage)
const ref = Deno.args[1] ?? panic(usage)
const flavor = Deno.args[2] ?? panic(usage)
const res = await queryGraphQL<CommitQuery>(prQuery(repo))
const node = res.repository?.ref?.target?.history?.edges.find(n => n.node.oid === ref)
const pr = node?.node.associatedPullRequests.nodes[0].number
const s3 = new S3({
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
region: "us-east-1",
})
const bucket = s3.getBucket(Deno.env.get("AWS_S3_CACHE")!)
const key = `pull-request/${repo.split("/")[1]}/${pr}/${flavor}`
const artifacts = (await bucket.getObject(key)) ?? panic("No artifacts found")
const file = await Deno.open("artifacts.tgz", { create: true, write: true })
await artifacts.body.pipeTo(file.writable)
/// Functions
/// -------------------------------------------------------------------------------
async function queryGraphQL<T>(query: string): Promise<T> {
const headers: HeadersInit = {}
const token = Deno.env.get("GITHUB_TOKEN") ?? panic("GitHub GraphQL requires you set $GITHUB_TOKEN")
if (token) headers['Authorization'] = `bearer ${token}`
const rsp = await fetch('https://api.github.com/graphql', {
method: 'POST',
body: JSON.stringify({ query }),
headers
})
const json = await rsp.json()
if (!rsp.ok) {
console.error({ rsp, json })
throw new Error()
}
return json.data as T ?? panic("No `data` returned from GraphQL endpoint")
}
/// Types
/// -------------------------------------------------------------------------------
type CommitQuery = {
repository: {
ref: {
target: {
history: {
edges: Node[]
}
}
}
}
}
type Node = {
node: {
url: URL
oid: string
associatedPullRequests: { nodes: PullRequest[] }
}
}
type PullRequest = {
number: number
}
/// Queries
/// -------------------------------------------------------------------------------
function prQuery(repo: string): string {
const [owner, name] = repo.split("/")
return undent`
query {
repository(name: "${name}", owner: "${owner}") {
ref(qualifiedName: "main") {
target {
... on Commit {
history(first: 100) {
edges {
node {
url
oid
associatedPullRequests(first: 1) {
nodes {
number
}
}
}
}
}
}
}
}
}
}`
}
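A note on the lookup above: if the given SHA is not among the last 100 commits on main, `node` is undefined, `pr` silently becomes undefined, and the script only fails later with "No artifacts found"; if the commit is found but has no associated pull request, `nodes[0].number` throws a TypeError instead. A minimal sketch of an earlier, clearer guard (hypothetical; not part of this commit):

// fail fast with a descriptive message before building the S3 key
const pr = node?.node.associatedPullRequests.nodes[0]?.number
  ?? panic(`no pull request found for commit ${ref} on main`)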