Compare commits

..

4 commits

Author SHA1 Message Date
Jacob Heider
562495da87 fix patch path 2023-07-24 17:26:14 +03:00
Jacob Heider
cfd5f9476a simplify script 2023-07-24 17:26:14 +03:00
124b6c39a2 github.com/adegtyarev/streebog: fix build on linux 2023-07-24 17:26:14 +03:00
70c1413e1d github.com/adegtyarev/streebog: init at 0.13 2023-07-24 17:26:14 +03:00
1590 changed files with 5732 additions and 40569 deletions

View file

@ -1,3 +1,3 @@
{
"postAttachCommand": "sh <(curl https://pkgx.sh) && pkgx --integrate && echo pkgx integrated && exec bash -i <(echo 'dev && echo dev environment loaded || echo error loading dev environment; exec $SHELL')"
"postAttachCommand": "sh <(curl https://tea.xyz) --yes && exec bash -i"
}

View file

@ -1,14 +1,20 @@
name: pkgx/pantry/complain
name: tea/pantry/complain
description: creates an issue for failure conditions
inputs:
pkg:
description: project to complain about
projects:
description: projects to complain about
required: true
platform:
description: platform key
required: true
token:
description: github token
required: true
default: ${{ github.token }}
slack-webhook:
required: false
slack-channel:
required: false
runs:
using: composite
@ -20,7 +26,7 @@ runs:
actions: 'find-issues'
token: ${{ inputs.token }}
issue-state: 'open'
title-includes: '❌ build issues: ${{ inputs.pkg }}'
title-includes: "❌ build issues: ${{ inputs.projects }}"
labels: 'build-failure'
- name: Create Issue
@ -30,8 +36,8 @@ runs:
with:
actions: 'create-issue'
token: ${{ inputs.token }}
title: '❌ build issues: ${{ inputs.pkg }}'
body: 'Running log of build failure for ${{ inputs.pkg }}'
title: "❌ build issues: ${{ inputs.projects }}"
body: "Running log of build failures for ${{ inputs.projects }}"
labels: 'build-failure'
assignees: 'jhheider'
@ -43,6 +49,18 @@ runs:
issue-number: ${{ steps.create.outputs.issue-number || fromJSON(steps.find.outputs.issues)[0].number }}
body: |
# Build failure
## ${{ inputs.pkg }}
## ${{ inputs.projects }}
### ${{ inputs.platform }}
logs: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
- uses: martialonline/workflow-status@v3
id: status
- uses: rtCamp/action-slack-notify@v2
if: ${{ inputs.slack-webhook != '' }}
env:
SLACK_WEBHOOK: ${{ inputs.slack-webhook }}
SLACK_CHANNEL: ${{ inputs.slack-channel }}
SLACK_MESSAGE: new-version:${{ inputs.projects }} (${{ inputs.platform }}) ${{ steps.status.outputs.status }}
SLACK_COLOR: ${{ steps.status.outputs.status }}

View file

@ -1,4 +1,4 @@
name: pkgx/pantry/request-qa
name: tea/pantry/request-qa
description: Requests QA for a new version of a project
inputs:

View file

@ -1,54 +0,0 @@
name: pkgx/pantry/setup-codesign
description: Codesigns macOS binaries using Apple tools

inputs:
  p12-file-base64:
    description: Base64 encoded p12 file
    required: true
  p12-password:
    description: Password for p12 file
    required: true
  APPLE_IDENTITY:
    required: false

runs:
  using: composite
  steps:
    # - name: purge tool PATH
    #   run: |
    #     if [ -d /usr/local/bin ]; then
    #       tmp=$(mktemp -d)
    #       sudo mv /usr/local/bin $tmp
    #     fi
    #   shell: bash

    - name: export APPLE_IDENTITY
      # FIX: was `${{ inputs.identity }}`, which is not a declared input and so
      # always evaluated empty — the '-' fallback was used unconditionally.
      # The declared input is `APPLE_IDENTITY`.
      run: echo 'APPLE_IDENTITY=${{ inputs.APPLE_IDENTITY || '-' }}' >> $GITHUB_ENV
      shell: bash

    # the next three steps bless our code for Apple. It might be the case they
    # should be encapsulated separately.
    # FIXME: using an explicit commit in a PR isn't great, but the last release
    #   was almost 3 years ago, and we need bugfixes.
    # FIXME: replace this with a pkgx script based on
    #   https://localazy.com/blog/how-to-automatically-sign-macos-apps-using-github-actions
    #   github has a doc with similar content, but it's not returning to me atm.

    # apple-actions/import-codesign-certs will fail if the keychain already exists,
    # so we prophylactically delete it if it does.
    # FIX: all three `if:` conditions below referenced `inputs.p12-file-password`,
    # which is not a declared input (the declared input is `p12-password`), so the
    # expressions were always falsy and these steps never ran.
    - name: Delete keychain
      shell: sh
      if: runner.os == 'macOS' && inputs.p12-password && inputs.p12-file-base64
      run: security delete-keychain signing_temp.keychain || true

    - uses: apple-actions/import-codesign-certs@v2
      if: runner.os == 'macOS' && inputs.p12-password && inputs.p12-file-base64
      with:
        p12-file-base64: ${{ inputs.p12-file-base64 }}
        p12-password: ${{ inputs.p12-password }}

    # Needed for self-hosted runner, since it doesn't destroy itself automatically.
    - name: Delete keychain
      uses: webiny/action-post-run@3.0.0
      if: runner.os == 'macOS' && inputs.p12-password && inputs.p12-file-base64
      with:
        run: security delete-keychain signing_temp.keychain

6
.github/deno.jsonc vendored
View file

@ -3,14 +3,12 @@
"allowJs": false,
"strict": true
},
"pkgx": {
"tea": {
"dependencies": {
"deno.land": "^1.30"
}
},
"imports": {
"pkgx": "https://deno.land/x/libpkgx@v0.15.1/mod.ts",
"pkgx/": "https://deno.land/x/libpkgx@v0.15.1/src/",
"is-what": "https://deno.land/x/is_what@v4.1.15/src/index.ts"
"tea": "https://deno.land/x/libtea@v0.6.2/mod.ts"
}
}

View file

@ -1,82 +0,0 @@
#!/usr/bin/env -S pkgx deno run -A

// For each pkg named on the command line, emit one matrix entry per platform
// the pkg supports. Written to $GITHUB_OUTPUT when set, otherwise
// pretty-printed to stdout.
import { hooks, utils } from "pkgx"
import { isString, isArray } from "is-what"

const rvv: Record<string, any>[] = []

for (const arg of Deno.args) {
  const pkg = utils.pkg.parse(arg)
  const config = await get_config(pkg)
  for (const platform of config.platforms) {
    const rv = {} as Record<string, any>
    rv['platform'] = get_matrix(platform)
    rv['pkg'] = arg
    rvv.push(rv)
  }
}

const ghout = Deno.env.get("GITHUB_OUTPUT")
if (ghout) {
  const json = JSON.stringify(rvv)
  // FIX: entries appended to $GITHUB_OUTPUT must be newline-terminated;
  // without the trailing "\n" a subsequent append to the same file would be
  // concatenated onto this line and corrupt both entries.
  Deno.writeTextFileSync(ghout, `matrix=${json}\n`, {append: true})
} else {
  const json = JSON.stringify(rvv, null, 2)
  console.log(json)
}

///////////////////////////////////////////////////////////////////////
//TODO should be in libpkgx!

/// Reads the pkg's pantry entry and normalizes its `platforms` node into
/// explicit `os/arch` pairs; also reports whether QA is required post-build.
async function get_config(pkg: {project: string}) {
  let { platforms, test } = await hooks.usePantry().project(pkg).yaml()

  const get_platforms = (() => {
    // no node → the pkg builds everywhere we support
    if (!platforms) return ["linux/x86-64", "linux/aarch64", "darwin/x86-64", "darwin/aarch64"]
    if (isString(platforms)) platforms = [platforms]
    if (!isArray(platforms)) throw new Error(`invalid platform node: ${platforms}`)
    const rv = []
    for (const platform of platforms) {
      if (platform.match(/^(linux|darwin)\/(aarch64|x86-64)$/)) rv.push(platform)
      // a bare OS means both architectures of that OS
      else if (platform.match(/^(linux|darwin)$/)) rv.push(`${platform}/x86-64`, `${platform}/aarch64`)
      else throw new Error(`invalid platform: ${platform}`)
    }
    return rv
  })

  const qaRequired = test?.["qa-required"] === true

  return {
    platforms: get_platforms(),
    qaRequired
  }
}

/// Maps an `os/arch` pair onto the GHA runner spec (labels/group, optional
/// container image, and a short display name) for that platform.
function get_matrix(platform: string) {
  const name = platform.replace('/', '+')
  switch (platform) {
  case 'darwin/aarch64': {
    const os = ["self-hosted", "macOS", "ARM64"]
    return {
      os, name,
      tinyname: "²"
    }}
  case 'darwin/x86-64': {
    const os = ["self-hosted", "macOS", "X64"]
    return {
      os, name,
      tinyname: "x64"
    }}
  case 'linux/x86-64': {
    const os = {group: "linux-x86-64"}
    return {
      os, name,
      container: "debian:buster-slim",
      tinyname: "*nix64"
    }}
  case 'linux/aarch64': {
    const os = ["self-hosted", "linux", "ARM64"]
    return {
      os, name,
      tinyname: "*nix·ARM64"
    }}}
}

View file

@ -1,87 +0,0 @@
#!/usr/bin/env -S pkgx deno run -A

// Emits the build/test matrix for a single pkg (first command-line argument):
// one entry per supported platform, each with the runner spec plus the
// `test-os`/`test-container` combinations. Written to $GITHUB_OUTPUT when set,
// otherwise pretty-printed to stdout.
import { hooks, utils } from "pkgx"
import { isString, isArray } from "is-what"

const pkg = utils.pkg.parse(Deno.args[0])
const config = await get_config(pkg)

const rv = {} as Record<string, any>
for (const platform of config.platforms) {
  const key = platform.replace('/', '+')
  rv[key] = get_matrix(platform)
}

const ghout = Deno.env.get("GITHUB_OUTPUT")
if (ghout) {
  const json = JSON.stringify(Object.values(rv))
  // FIX: entries appended to $GITHUB_OUTPUT must be newline-terminated;
  // without the trailing "\n" a subsequent append to the same file would be
  // concatenated onto this line and corrupt both entries.
  Deno.writeTextFileSync(ghout, `matrix=${json}\n`, {append: true})
} else {
  const json = JSON.stringify(rv, null, 2)
  console.log(json)
}

///////////////////////////////////////////////////////////////////////
//TODO should be in libpkgx!

/// Reads the pkg's pantry entry and normalizes its `platforms` node into
/// explicit `os/arch` pairs; also reports whether QA is required post-build.
async function get_config(pkg: {project: string}) {
  let { platforms, test } = await hooks.usePantry().project(pkg).yaml()

  const get_platforms = (() => {
    // no node → the pkg builds everywhere we support
    if (!platforms) return ["linux/x86-64", "linux/aarch64", "darwin/x86-64", "darwin/aarch64"]
    if (isString(platforms)) platforms = [platforms]
    if (!isArray(platforms)) throw new Error(`invalid platform node: ${platforms}`)
    const rv = []
    for (const platform of platforms) {
      if (platform.match(/^(linux|darwin)\/(aarch64|x86-64)$/)) rv.push(platform)
      // a bare OS means both architectures of that OS
      else if (platform.match(/^(linux|darwin)$/)) rv.push(`${platform}/x86-64`, `${platform}/aarch64`)
      else throw new Error(`invalid platform: ${platform}`)
    }
    return rv
  })

  const qaRequired = test?.["qa-required"] === true

  return {
    platforms: get_platforms(),
    qaRequired
  }
}

// https://github.com/actions/runner-images#available-images
/// Maps an `os/arch` pair onto the GHA runner spec for building plus the
/// runner/container combinations used for testing that platform.
function get_matrix(platform: string) {
  const name = platform.replace('/', '+')
  switch (platform) {
  case 'darwin/aarch64': {
    const os = ["self-hosted", "macOS", "ARM64"]
    return {
      os, name,
      "test-os": ["macos-13-xlarge", "macos-14"],
      "test-container": [null],
      tinyname: "²"
    }}
  case 'darwin/x86-64': {
    const os = ["self-hosted", "macOS", "X64"]
    return {
      os, name,
      "test-os": ["macos-12", "macos-13", "macos-14-large"],
      "test-container": [null],
      tinyname: "x64"
    }}
  case 'linux/x86-64': {
    const os = {group: "linux-x86-64"}
    return {
      os, name,
      container: "debian:buster-slim",
      "test-os": [os],
      "test-container": ["debian:buster-slim", "ubuntu", "archlinux"],
      tinyname: "*nix64"
    }}
  case 'linux/aarch64': {
    const os = ["self-hosted", "linux", "ARM64"]
    return {
      os, name,
      "test-os": [os],
      "test-container": [null],
      tinyname: "*nix·ARM64"
    }}}
}

View file

@ -1,23 +0,0 @@
#!/usr/bin/env -S pkgx deno run --allow-net --allow-env=GITHUB_OUTPUT --allow-write

// Collects the published versions of `project` across all four dist
// platform/arch combinations and emits the deduplicated union as a JSON array
// (to $GITHUB_OUTPUT when set, otherwise stdout).
const [project] = Deno.args

let versions: string[] = []
for (const platform of ["linux", "darwin"]) {
  for (const arch of ["x86-64", "aarch64"]) {
    const url = `https://dist.pkgx.dev/${project}/${platform}/${arch}/versions.txt`
    const rsp = await fetch(url)
    // FIX: the body was previously consumed unconditionally, so for a
    // platform with no published versions a 403/404 error page was split
    // into lines and reported as "versions".
    if (!rsp.ok) {
      console.error(`warn: ${url}: ${rsp.status}`)
      continue
    }
    const txt = await rsp.text()
    versions.push(...txt.split("\n"))
  }
}

// dedupe and drop blank/whitespace-only lines
versions = [...new Set(versions.filter(x => x.trim()))]

const ghout = Deno.env.get("GITHUB_OUTPUT")
if (ghout) {
  const json = JSON.stringify(versions)
  // FIX: entries appended to $GITHUB_OUTPUT must be newline-terminated or a
  // subsequent append would corrupt this entry.
  Deno.writeTextFileSync(ghout, `versions=${json}\n`, {append: true})
} else {
  console.log(JSON.stringify(versions))
}

View file

@ -1,9 +0,0 @@
#!/usr/bin/env -S pkgx deno run --allow-read

// Exit 0 when the named project's pantry entry marks its test as
// `qa-required: true`; exit 1 otherwise.
import { hooks } from "pkgx"

const [project] = Deno.args
const metadata = await hooks.usePantry().project(project).yaml()

if (metadata?.["test"]?.["qa-required"] === true) {
  Deno.exit(0)
} else {
  Deno.exit(1)
}

58
.github/scripts/utils/args.ts vendored Normal file
View file

@ -0,0 +1,58 @@
import { Installation, Package, PackageRequirement, hooks, utils } from "tea"
const { useCellar } = hooks
/// processes Deno.args unless STDIN is not a TTY and has input
/// Yields arguments to process: Deno.args when stdin is a TTY, otherwise the
/// whitespace-separated words piped via stdin (falling back to Deno.args when
/// stdin turns out to be empty).
export async function *args(): AsyncGenerator<string> {
  if (Deno.isatty(Deno.stdin.rid)) {
    // interactive: take non-flag command-line arguments
    for (const arg of Deno.args) {
      if (arg[0] != '-') yield arg
    }
  } else {
    let yielded_something = false
    const buf = new Uint8Array(10)
    const decode = (() => { const d = new TextDecoder(); return d.decode.bind(d) })()
    let n: number | null
    let txt = ''
    // consumes leading whitespace, captures a word, requires a trailing
    // whitespace terminator (so partial words stay buffered in `txt`)
    const rx = /\s*(.*?)\s+/
    while ((n = await Deno.stdin.read(buf)) !== null) {
      txt += decode(buf.subarray(0, n))
      while (true) {
        const match = txt.match(rx)
        if (!match) break
        const word = match[1]
        txt = txt.slice(match[0].length)
        // FIX: a chunk boundary falling inside a whitespace run makes the
        // regex match with an empty capture; previously that empty string was
        // yielded as an argument
        if (word) {
          yield word
          yielded_something = true
        }
      }
    }
    // FIX: the residue can carry leading whitespace (the regex only strips it
    // when a terminating whitespace follows) or be whitespace-only; trim
    // before deciding whether there is a final word
    const residue = txt.trim()
    if (residue) {
      yield residue
    } else if (!yielded_something) {
      for (const arg of Deno.args) {
        yield arg
      }
    }
  }
}
/// Parses each argument — a raw project spec or a pantry path like
/// `projects/<name>/package.yml` — into a Package/PackageRequirement.
export async function *pkgs(): AsyncGenerator<Package | PackageRequirement> {
  for await (const arg of args()) {
    // FIX: the dot in `package.yml` was unescaped and matched any character
    const match = arg.match(/projects\/(.*)\/package\.yml/)
    const project = match ? match[1] : arg
    yield utils.pkg.parse(project)
  }
}
/// Resolves each requested package against the local cellar, yielding its
/// installation.
export async function *installs(): AsyncGenerator<Installation> {
  const cellar = useCellar()
  for await (const pkg of pkgs()) {
    const installation = await cellar.resolve(pkg)
    yield installation
  }
}
/// Drains an async generator into a plain array.
export async function toArray<T>(input: AsyncGenerator<T>) {
  const collected: T[] = []
  for await (const item of input) collected.push(item)
  return collected
}

170
.github/workflows/bottle.yml vendored Normal file
View file

@ -0,0 +1,170 @@
name: bottle
on:
workflow_call:
inputs:
new-version:
type: boolean
required: false
default: false
platform:
required: true
type: string
projects:
required: false
type: string
outputs:
pr:
description: "The PR number"
value: ${{ jobs.bottle.outputs.pr }}
qa-required:
description: "Whether QA is required"
value: ${{ jobs.upload.outputs.qa-required }}
jobs:
get-platform:
runs-on: ubuntu-latest
outputs:
os: ${{ steps.platform.outputs.os }}
cache-set: ${{ steps.platform.outputs.cache-set }}
available: ${{ steps.platform.outputs.available }}
steps:
- uses: teaxyz/brewkit/actions/get-platform@v0
id: platform
with:
platform: ${{ inputs.platform }}
projects: ${{ inputs.projects }}
bottle:
needs: [get-platform]
if: ${{ !inputs.new-version || needs.get-platform.outputs.available != '' }}
runs-on: ${{ fromJson(needs.get-platform.outputs.os) }}
outputs:
srcs: ${{ env.srcs }}
built: ${{ env.built }}
pr: ${{ env.PR }}
steps:
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
id: tea
- uses: actions/download-artifact@v3
if: ${{ inputs.new-version }}
with:
name: ${{ inputs.platform }}
- uses: teaxyz/brewkit/actions/fetch-pr-artifacts@v0
if: ${{ !inputs.new-version }}
with:
platform: ${{ inputs.platform }}
token: ${{ github.token }}
AWS_S3_BUCKET: ${{ secrets.AWS_S3_CACHE }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: clean destination
# Note: needed when changing a directory to a symlink, for example in
# https://github.com/teaxyz/pantry/pull/435
run: |
tar tzf $GITHUB_WORKSPACE/artifacts.tgz | \
awk '{ print length, $0 }' | \
sort -n -s -r | \
cut -d" " -f2- | \
xargs rm -rf
working-directory: ${{ steps.tea.outputs.prefix }}
- run: tar xzvf $GITHUB_WORKSPACE/artifacts.tgz
working-directory: ${{ steps.tea.outputs.prefix }}
- run: |
for file in built srcs; do
echo "$file=$(cat $file)" >>$GITHUB_ENV
done
working-directory: ${{ steps.tea.outputs.prefix }}
- run: |
tea +gnupg.org gpg-agent --daemon || true
echo $GPG_PRIVATE_KEY | \
base64 -d | \
tea +gnupg.org gpg --import --batch --yes
env:
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
- uses: teaxyz/brewkit/actions/bottle@v0
id: bottle-xz
with:
built: ${{ env.built }}
compression: xz
gpg-key-id: ${{ secrets.GPG_KEY_ID }}
- uses: teaxyz/brewkit/actions/bottle@v0
id: bottle-gz
with:
built: ${{ env.built }}
compression: gz
gpg-key-id: ${{ secrets.GPG_KEY_ID }}
- run: |
echo ${{ steps.bottle-gz.outputs.bottles }} ${{ steps.bottle-xz.outputs.bottles }} >bottles
echo ${{ steps.bottle-gz.outputs.checksums }} ${{ steps.bottle-xz.outputs.checksums }} >checksums
echo ${{ steps.bottle-gz.outputs.signatures }} ${{ steps.bottle-xz.outputs.signatures }} >signatures
SRCS=$(echo $srcs | tr -d '~')
tar cf $GITHUB_WORKSPACE/artifacts.tar \
$SRCS \
${{ steps.bottle-gz.outputs.bottles }} \
${{ steps.bottle-xz.outputs.bottles }} \
bottles checksums signatures
working-directory: ${{ steps.tea.outputs.prefix }}
- name: upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.platform }}-bottles
path: artifacts.tar
if-no-files-found: error
upload:
needs: [get-platform, bottle]
if: ${{ !inputs.new-version || needs.get-platform.outputs.available != '' }}
runs-on: ubuntu-latest
outputs:
qa-required: ${{ steps.upload.outputs.qa-required }}
steps:
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
with:
prefix: ${{ github.workspace }}/.tea
- uses: actions/download-artifact@v3
with:
name: ${{ inputs.platform }}-bottles
- run: |
tar xvf artifacts.tar
for file in bottles checksums signatures; do
echo "$file=$(cat $file)" >>$GITHUB_ENV
done
- uses: teaxyz/brewkit/actions/upload@v0
  id: upload
  with:
    qa: ${{ inputs.new-version }}
    # FIX: `built` and `srcs` were each interpolated twice, passing every
    # pkg/src to the upload action in duplicate
    pkgs: ${{ needs.bottle.outputs.built }}
    srcs: ${{ needs.bottle.outputs.srcs }}
    bottles: ${{ env.bottles }}
    checksums: ${{ env.checksums }}
    signatures: ${{ env.signatures }}
    AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
    AWS_S3_STAGING_BUCKET: ${{ secrets.AWS_S3_CACHE }}
    AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
    AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- uses: chetan/invalidate-cloudfront-action@v2
if: ${{ steps.upload.outputs.cf-invalidation-paths != '' }}
env:
PATHS: ${{ steps.upload.outputs.cf-invalidation-paths }}
DISTRIBUTION: ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
AWS_REGION: us-east-1
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

153
.github/workflows/build.yml vendored Normal file
View file

@ -0,0 +1,153 @@
name: build
on:
workflow_call:
inputs:
projects:
required: true
type: string
platform:
required: true
type: string
jobs:
get-platform:
runs-on: ubuntu-latest
outputs:
os: ${{ steps.platform.outputs.os }}
build-os: ${{ steps.platform.outputs.build-os }}
container: ${{ steps.platform.outputs.container }}
test-matrix: ${{ steps.platform.outputs.test-matrix }}
cache-set: ${{ steps.platform.outputs.cache-set }}
available: ${{ steps.platform.outputs.available }}
steps:
- uses: teaxyz/brewkit/actions/get-platform@v0
id: platform
with:
platform: ${{ inputs.platform }}
projects: ${{ inputs.projects }}
build:
runs-on: ${{ fromJson(needs.get-platform.outputs.build-os) }}
container: ${{ fromJson(needs.get-platform.outputs.container) }}
needs: [get-platform]
if: ${{ needs.get-platform.outputs.available != '' }}
steps:
- uses: actions/checkout@v3
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
id: tea
with:
prefix: /opt
- name: sanitize macOS runners
if: fromJson(needs.get-platform.outputs.build-os) == 'macos-11'
run: sudo mv /usr/local/bin/* /tmp/
# setup macOS codesigning
- uses: teaxyz/brewkit/actions/setup-codesign@v0
if: startsWith(inputs.platform, 'darwin+') && github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name
with:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE_P12 }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_P12_PASSWORD }}
- run: pkg build ${{ needs.get-platform.outputs.available }}
id: build
env:
GITHUB_TOKEN: ${{ github.token }}
FORCE_UNSAFE_CONFIGURE: 1 # some configure scripts refuse to run as root
APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY || '-' }}
- run: |
ABS_PATHS=$(echo $PATHS | tr ' ' '\n' | sed -e "s_^_$TEA_PREFIX/_" | tr '\n' ' ')
echo "paths=$ABS_PATHS" >> $GITHUB_OUTPUT
if: startsWith(inputs.platform, 'darwin+')
id: absolute-paths
env:
PATHS: ${{ steps.build.outputs.relative-paths }}
TEA_PREFIX: ${{ steps.tea.outputs.prefix }}
# cache data we'll need in the bottling job
- name: assemble artifact metadata
run: |
echo ${{ steps.build.outputs.pkgs }} >built
echo ${{ steps.build.outputs.srcs-relative-paths }} >srcs
working-directory: ${{ steps.tea.outputs.prefix }}
# tarring ourselves ∵ GHA-artifacts (ludicrously) lose permissions
# /ref https://github.com/actions/upload-artifact/issues/38
- name: create artifacts.tgz
run: tar czvf $GITHUB_WORKSPACE/artifacts.tgz
${{ steps.build.outputs.relative-paths }}
${{ steps.build.outputs.srcs-relative-paths }}
built srcs
working-directory: ${{ steps.tea.outputs.prefix }}
- name: upload artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.platform }}
path: artifacts.tgz
if-no-files-found: error
test:
needs: [get-platform, build]
if: ${{ needs.get-platform.outputs.available != '' }}
runs-on: ${{ matrix.platform.os }}
strategy:
matrix:
platform: ${{ fromJson(needs.get-platform.outputs.test-matrix) }}
name: test ${{ matrix.platform.name-extra }}
outputs:
HAS_SECRETS: ${{ env.HAS_SECRETS }}
container: ${{ matrix.platform.container }}
steps:
- uses: actions/checkout@v3
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
- uses: actions/download-artifact@v3
with:
name: ${{ inputs.platform }}
- name: clean destination
# Note: needed when changing a directory to a symlink, for example in
# https://github.com/teaxyz/pantry/pull/435
run: |
tar tzf $GITHUB_WORKSPACE/artifacts.tgz | \
awk '{ print length, $0 }' | \
sort -n -s -r | \
cut -d" " -f2- | \
xargs rm -rf
working-directory: ${{ env.TEA_PREFIX }}
- name: extract bottles
run: tar xzvf artifacts.tgz -C $TEA_PREFIX
- run: pkg test ${{ needs.get-platform.outputs.available }}
env:
GITHUB_TOKEN: ${{ github.token }}
# FIXME: this shouldn't be necessary, but it currently is for the
# ubuntu+container test matrix entries. :/
TEA_PANTRY_PATH: ${{ github.workspace }}
- name: "[post]"
run: echo "HAS_SECRETS=$HAS_SECRETS" >>$GITHUB_ENV
env:
HAS_SECRETS: ${{ secrets.AWS_S3_CACHE != null }}
stage:
needs: [get-platform, test]
# this only works for PRs from our team to our repo (security! :( )
if: startsWith(github.ref, 'refs/pull/') && github.repository_owner == 'teaxyz' && needs.test.outputs.HAS_SECRETS == 'true' && needs.get-platform.outputs.available != ''
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v3
with:
name: ${{ inputs.platform }}
- uses: teaxyz/brewkit/actions/stage-build-artifacts@v0
with:
platform: ${{ inputs.platform }}
AWS_S3_BUCKET: ${{ secrets.AWS_S3_CACHE }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

View file

@ -1,35 +0,0 @@
name: cd
run-name: "cd: pantry.tgz"
on:
push:
branches: main
paths:
- projects/**/*
- .github/workflows/cd.pantry.tgz.yml
concurrency:
group: cd.pantry.tgz
cancel-in-progress: true
jobs:
tarball:
runs-on: ubuntu-latest
steps:
- uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- uses: actions/checkout@v4
with:
path: co
- run: find co -type f -not -name package.yml -delete -o -type l -delete
- run: tar -C co -czf pantry.tgz .
- run: aws s3 cp
./pantry.tgz
s3://${{ secrets.AWS_S3_BUCKET }}/pantry.tgz
- run: aws cloudfront create-invalidation
--distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
--paths /pantry.tgz

View file

@ -1,20 +1,79 @@
name: cd
run-name: "cd: ${{ github.event.head_commit.message }}"
run-name: 'cd: ${{ github.event.head_commit.message }}'
on:
push:
branches: main
paths:
- projects/**/*
- .github/workflows/cd.yml
branches: [main]
jobs:
ingest:
cd:
runs-on: ubuntu-latest
outputs:
projects: ${{ steps.diff.outputs.diff }}
has-artifacts: ${{ steps.has-artifacts.outputs.has-artifacts }}
platforms: ${{ steps.has-artifacts.outputs.platforms }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
# ^^ NOTE: probably no longer required, but I don't dare try to remove it
- uses: teaxyz/brewkit/actions/setup-brewkit@v0
- uses: teaxyz/brewkit/actions/has-artifacts@v0
id: has-artifacts
with:
repo: ${{ github.repository }}
sha: ${{ github.sha }}
token: ${{github.token}}
s3-bucket: ${{ secrets.AWS_S3_CACHE }}
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
bottle-pr:
strategy:
fail-fast: false
matrix:
platform: ${{ fromJson(needs.cd.outputs.platforms) }}
needs: [cd]
if: ${{ needs.cd.outputs.has-artifacts == 'true' }}
uses: ./.github/workflows/bottle.yml
with:
platform: ${{ matrix.platform }}
secrets: inherit
cleanup:
needs: [bottle-pr]
runs-on: ubuntu-latest
if: ${{ needs.cd.outputs.has-artifacts == 'true' }}
env:
PR: ${{ needs.bottle.outputs.pr }}
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- run: |
REPO=$(echo ${{github.repository}} | sed -e 's_teaxyz/__')
if test -z "$PR"; then
echo "no PR to clean up"
exit 0
fi
aws s3 rm --recursive s3://$AWS_S3_CACHE/pull-request/$REPO/$PR
env:
AWS_S3_CACHE: ${{ secrets.AWS_S3_CACHE }}
PR: ${{ needs.bottle.outputs.pr }}
bottle-standalone:
runs-on: ubuntu-latest
needs: [cd]
permissions:
contents: read
actions: write
if: ${{ needs.cd.outputs.has-artifacts == 'false' }}
steps:
- uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6
id: get-diff
with:
@ -22,50 +81,12 @@ jobs:
- id: diff
run: |
for x in ${{ steps.get-diff.outputs.diff }}; do
y=$(echo $x | sed 's#projects/\(.*\)/[^/]*#\1#')
RESULT="$RESULT ${y//$'\n'/}"
y=$(echo $x | sed 's#projects/\(.*\)/package.yml#\1#')
RESULT="$RESULT $y"
done
echo "diff=$RESULT" >> $GITHUB_OUTPUT
pkg:
strategy:
fail-fast: false
needs: ingest
permissions:
issues: write
uses: ./.github/workflows/new-version.yml
with:
projects: ${{ needs.ingest.outputs.projects }}
secrets: inherit
notify:
needs: [pkg, ingest]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
- name: check if new pkgs
id: check-new-files
run: |
NEW_FILES=$(git diff --name-status HEAD $(git merge-base HEAD origin/main) | grep '^A\s*projects/.+?/package\.yml$' | wc -l)
if [ "$NEW_FILES" -gt 0 ]; then
echo "New files were added in this push."
echo "::set-output name=new_files::true"
fi
- name: make payload
run: |
RV=""
for x in ${{ needs.ingest.outputs.projects }}; do
RV="${RV:+$RV,} {\"title\": \"$x\", \"url\": \"https://pkgx.dev/pkgs/$x/\"}"
done
echo "{\"embeds\": [$RV], \"content\": \"new pkgs\"}" >> payload.json
- uses: tsickert/discord-webhook@v5.4.0
if: steps.check-new-files.outputs.new_files == 'true'
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK }}
raw-data: ./payload.json
- run: gh workflow run new-version.yml -f "projects=$PROJECTS"
if: ${{ steps.diff.outputs.diff != '' }}
env:
GH_TOKEN: ${{ github.token }}
PROJECTS: ${{ steps.diff.outputs.diff }}

View file

@ -1,30 +0,0 @@
name: ci²
run-name: ci²
on:
pull_request:
paths:
- .github/workflows/pkg.yml
- .github/workflows/pkg-platform.yml
- .github/workflows/ci-squared.yml
concurrency:
group: pulls/${{ github.ref }}
cancel-in-progress: true
jobs:
ci:
name: ci²
uses: ./.github/workflows/pkg.yml
strategy:
fail-fast: false
matrix:
pkg:
- r-wos.org/gti
- github.com/ggerganov/llama.cpp # has platform restrictions
permissions:
issues: write
with:
pkg: ${{ matrix.pkg }}
dry-run: true
secrets: inherit

View file

@ -1,88 +1,39 @@
name: ci
run-name: "ci: ${{ github.event.pull_request.title }}"
run-name: 'ci: ${{ github.event.pull_request.title }}'
on:
pull_request:
paths:
- projects/**/*
- .github/workflows/ci.yml
concurrency:
group: ci/${{ github.event.pull_request.head.ref }}
cancel-in-progress: true
on: pull_request
jobs:
plan:
get-projects:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.process-diff.outputs.matrix }}
diff: ${{ steps.diff.outputs.diff }}
steps:
- uses: pkgxdev/setup@v2
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6
id: get-diff
with:
PATTERNS: projects/**/package.yml
- name: process diff
id: process-diff
env:
PKGX_PANTRY_PATH: ${{ github.workspace }}
- id: diff
run: |
if [ -n "${{ steps.get-diff.outputs.diff }}" ]; then
for x in ${{ steps.get-diff.outputs.diff }}; do
y=$(echo $x | sed 's#projects/\(.*\)/[^/]*#\1#')
RESULT="$RESULT ${y//$'\n'/}"
done
else
RESULT="zlib.net kernel.org/linux-headers"
fi
./.github/scripts/get-ci-matrix.ts $RESULT
- run: echo '${{ steps.process-diff.outputs.matrix }}' | jq
for x in ${{ steps.get-diff.outputs.diff }}; do
y=$(echo $x | sed 's#projects/\(.*\)/package.yml#\1#')
RESULT="$RESULT $y"
done
echo "diff=$RESULT" >> $GITHUB_OUTPUT
build:
needs: plan
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.plan.outputs.matrix) }}
runs-on: ${{ matrix.platform.os }}
container: ${{ matrix.platform.container }}
name: ${{ matrix.pkg }} ${{ matrix.platform.tinyname }}
env:
PKGX_PANTRY_PATH: ${{ github.workspace }}
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/setup
with:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE_P12 }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_P12_PASSWORD }}
APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }}
- uses: pkgxdev/setup@v2
with:
PKGX_DIR: /opt
- uses: pkgxdev/brewkit/build@v1
id: build
with:
pkg: ${{ matrix.pkg }}
- uses: pkgxdev/brewkit/audit@v1
with:
pkg: ${{ steps.build.outputs.pkgspec }}
# prevent tests passing because the build directory is still there
# requires `sudo` because `go` makes unremovable files…
- name: wipe builds directory
run: |
if command -v sudo >/dev/null; then
SUDO=sudo
fi
$SUDO rm -rf builds
- uses: pkgxdev/brewkit/test@v1
with:
pkg: ${{ steps.build.outputs.pkgspec }}
platform:
- darwin+x86-64
- linux+x86-64
- darwin+aarch64
- linux+aarch64
needs: [get-projects]
uses: ./.github/workflows/build.yml
name: ${{ matrix.platform }}
with:
projects: ${{ needs.get-projects.outputs.diff || 'zlib.net^1.2' }}
platform: ${{ matrix.platform }}
secrets: inherit

30
.github/workflows/cleanup.yml vendored Normal file
View file

@ -0,0 +1,30 @@
# cleans up our S3 staging area if a PR is closed without merge
name: teaxyz s3 cleanup
run-name: 'cleanup: ${{ github.event.pull_request.title }}'
on:
pull_request:
types: [closed]
jobs:
cleanup:
runs-on: ubuntu-latest
if: github.event.pull_request.merged == false
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: remove staged artifacts
run: |
REPO=$(echo ${{github.repository}} | sed -e 's_teaxyz/__')
PR=$(echo ${{github.ref}} | sed -e 's_refs/pull/\(.*\)/merge_\1_')
aws s3 rm --recursive s3://$AWS_S3_CACHE/pull-request/$REPO/$PR
if: startsWith(github.ref, 'refs/pull/') && github.repository_owner == 'teaxyz'
env:
AWS_S3_CACHE: ${{ secrets.AWS_S3_CACHE }}

View file

@ -1,34 +0,0 @@
# cleans up our issues based on tags applied
name: library bot
run-name: "handling #${{ github.event.issue.number }}: ${{ github.event.issue.title }}"
on:
issues:
types: [labeled]
jobs:
close-issue:
permissions:
issues: write
runs-on: ubuntu-latest
steps:
- name: Close reason
id: close-reason
run: |
case "${{ github.event.label.name }}" in
"old-version")
echo "reason=not_planned" >>$GITHUB_OUTPUT
;;
"gha-issue"|"404"|"version-lockstep-required")
echo "reason=completed" >>$GITHUB_OUTPUT
;;
esac
- name: Close issue
uses: actions-cool/issues-helper@v3
if: steps.close-reason.outputs.reason != ''
with:
actions: close-issue
token: ${{ github.token }}
issue-number: ${{ github.event.issue.number }}
close-reason: ${{ steps.close-reason.outputs.reason }}

View file

@ -1,47 +1,70 @@
name: build pkg version
name: new-version
run-name: building ${{ inputs.projects }}
on:
workflow_call:
inputs:
projects:
description: eg. `foo.com=1.2.3 bar.com^2.3.4`
required: true
type: string
complain:
type: boolean
default: false
workflow_dispatch:
inputs:
projects:
description: eg. `foo.com=1.2.3 bar.com^2.3.4`
required: true
type: string
complain:
type: boolean
default: false
jobs:
ingest:
runs-on: ubuntu-latest
outputs:
pkgs: ${{ steps.divide.outputs.pkgs }}
steps:
- run: |
var="$(echo -n ${{ inputs.projects }} | jq -R -s -c 'split(" ")')"
echo "pkgs=$var" >> $GITHUB_OUTPUT
id: divide
pkg:
needs: ingest
build:
strategy:
fail-fast: false
matrix:
pkg: ${{ fromJSON(needs.ingest.outputs.pkgs) }}
uses: ./.github/workflows/pkg.yml
platform:
- darwin+x86-64
- linux+x86-64
- darwin+aarch64
- linux+aarch64
uses: ./.github/workflows/build.yml
with:
projects: ${{ inputs.projects }}
platform: ${{ matrix.platform }}
secrets: inherit
bottle:
strategy:
fail-fast: false
matrix:
platform:
- darwin+x86-64
- linux+x86-64
- darwin+aarch64
- linux+aarch64
needs: [build]
uses: ./.github/workflows/bottle.yml
with:
new-version: true
platform: ${{ matrix.platform }}
projects: ${{ inputs.projects }}
secrets: inherit
request-qa:
needs: [bottle]
if: ${{ needs.bottle.outputs.qa-required != '[]' }}
runs-on: ubuntu-latest
strategy:
matrix:
project: ${{ fromJson(needs.bottle.outputs.qa-required) }}
steps:
- uses: teaxyz/pantry/.github/actions/request-qa@main
with:
project: ${{ matrix.project }}
slack-webhook: ${{ secrets.SLACK_QA_WEBHOOK }}
complain:
needs: [build, bottle]
if: failure()
permissions:
issues: write
with:
pkg: ${{ matrix.pkg }}
complain: ${{ github.event_name == 'workflow_call' || inputs.complain }}
secrets: inherit
runs-on: ubuntu-latest
steps:
- uses: teaxyz/pantry/.github/actions/complain@main
with:
projects: ${{ inputs.projects }}
platform: ${{ inputs.platform }}
slack-webhook: ${{ secrets.SLACK_WEBHOOK }}
slack-channel: ${{ secrets.SLACK_CHANNEL }}

View file

@ -1,258 +0,0 @@
name: pkg for platform
run-name: pkging ${{ fromJSON(inputs.pkg).project }} (${{ inputs.name }})
on:
workflow_call:
inputs:
name:
description: brewkit platform unique ID
type: string
tinyname:
description: >
GitHub Actions has a non resizable sidebar so we need a shorter name
or its much harder to differentiate the different jobs.
type: string
default: ${{ inputs.name }}
os:
type: string
container:
required: false
type: string
pkg:
description: eg. `example.com@1.2.3`
type: string
dry-run:
description: dry runs do not modify bottle storage
type: boolean
default: false
test-os:
description: a JSON array of runner-names
type: string
test-container:
description: >
A JSON array of docker image names or `[null]`.
Indeed! You cannot leave this as `null` or undefined.
Sorry, GHA is not flexible enough to efficiently work around this.
type: string
complain:
type: boolean
default: false
invalidate-cloudfront:
type: boolean
default: true
secrets:
APPLE_CERTIFICATE_P12: { required: false }
APPLE_CERTIFICATE_P12_PASSWORD: { required: false }
APPLE_IDENTITY: { required: false }
GPG_KEY_ID: { required: true }
GPG_PRIVATE_KEY: { required: true }
AWS_ACCESS_KEY_ID: { required: false }
AWS_S3_BUCKET: { required: true }
AWS_SECRET_ACCESS_KEY: { required: true }
AWS_CF_DISTRIBUTION_ID: { required: true }
env:
BREWKIT_PKGJSON: ${{ inputs.pkg }}
jobs:
build:
name: build ${{inputs.tinyname}}
runs-on: ${{ fromJSON(inputs.os) }}
container: ${{ inputs.container }}
permissions: {}
outputs:
project: ${{ steps.build.outputs.project }}
version: ${{ steps.build.outputs.version }}
platform: ${{ steps.build.outputs.platform }}
arch: ${{ steps.build.outputs.arch }}
env:
PKGX_PANTRY_PATH: ${{ github.workspace }}
steps:
- uses: actions/checkout@v4
- uses: pkgxdev/setup@v2
with:
PKGX_DIR: /opt
- uses: ./.github/actions/setup
with:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE_P12 }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_P12_PASSWORD }}
APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }}
- uses: pkgxdev/brewkit/build@v1
with:
pkg: ${{ inputs.pkg }}
id: build
- uses: styfle/cancel-workflow-action@0.12.0
if: steps.build.outputs.noop
- uses: pkgxdev/brewkit/audit@v1
with:
pkg: ${{ inputs.pkg }}
- uses: pkgxdev/brewkit/upload-build-artifact@v1
test:
name: test ${{inputs.tinyname}} ${{ matrix.container || ''}} ${{ matrix.container || '' }} ${{ join(matrix.os, '+') }}
needs: build
strategy:
matrix:
os: ${{ fromJSON(inputs.test-os) }}
container: ${{ fromJSON(inputs.test-container) }}
permissions: {}
runs-on: ${{ matrix.os }}
container: ${{ matrix.container }}
env:
PKGX_PANTRY_PATH: ${{ github.workspace }}
steps:
- uses: pkgxdev/setup@v2
- uses: actions/checkout@v4
- uses: pkgxdev/brewkit/download-build-artifact@v1
with:
pkg: ${{ inputs.pkg }}
- uses: pkgxdev/brewkit/test@v1
with:
pkg: ${{ inputs.pkg }}
bottle:
name: bottle (${{inputs.tinyname}}.${{matrix.compression}})
needs: [build, test]
permissions: {}
strategy:
matrix:
compression: [xz, gz]
runs-on: ubuntu-latest
env:
AWS: ${{ inputs.dry-run && 'echo' || 'aws' }}
PREFIX: ${{ needs.build.outputs.project }}/${{ needs.build.outputs.platform }}/${{ needs.build.outputs.arch }}/v${{ needs.build.outputs.version }}.tar.${{ matrix.compression }}
steps:
- uses: pkgxdev/setup@v2
- uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: import GPG key
run: echo $GPG_PRIVATE_KEY |
base64 -d |
pkgx gpg --import --batch --yes
env:
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
- uses: pkgxdev/brewkit/download-build-artifact@v1
id: dl
with:
pkg: ${{ inputs.pkg }}
platform: ${{ inputs.name }}
extract: false
- uses: pkgxdev/brewkit/bottle@v1
id: bottle
with:
file: ${{ steps.dl.outputs.filename }}
compression: ${{ matrix.compression }}
- name: gpg
run: pkgx gpg
--detach-sign
--armor
--output ${{ steps.bottle.outputs.filename }}.asc
--local-user ${{ secrets.GPG_KEY_ID }}
${{ steps.bottle.outputs.filename }}
- name: sha
run: pkgx
sha256sum
${{ steps.bottle.outputs.filename }} > ${{ steps.bottle.outputs.filename }}.sha256sum
- name: s3 put
run: |
$AWS s3 cp ${{ steps.bottle.outputs.filename }} $URL
$AWS s3 cp ${{ steps.bottle.outputs.filename }}.asc $URL.asc
$AWS s3 cp ${{ steps.bottle.outputs.filename }}.sha256sum $URL.sha256sum
echo "cf-paths=/$PREFIX /$PREFIX.asc /$PREFIX.sha256sum" >> $GITHUB_OUTPUT
env:
URL: s3://${{ secrets.AWS_S3_BUCKET }}/${{ env.PREFIX }}
id: put
- name: s3 put file listing
if: ${{ matrix.compression == 'gz' }}
id: files
run: |
PREFIX=$(dirname $PREFIX)
tar tf ${{ steps.bottle.outputs.filename }} \
| grep -v '/$' \
| grep -v '^venv/' \
> $FILENAME
$AWS s3 cp $FILENAME s3://${{ secrets.AWS_S3_BUCKET }}/$PREFIX/$FILENAME
echo "cf-paths=/$PREFIX/$FILENAME" >> $GITHUB_OUTPUT
env:
FILENAME: v${{ needs.build.outputs.version }}.files.txt
- name: invalidate cloudfront
run: $AWS cloudfront create-invalidation
--distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
--paths
${{ steps.put.outputs.cf-paths }} ${{ steps.files.outputs.cf-paths }}
if: inputs.invalidate-cloudfront
publish:
name: publish ${{inputs.tinyname}} ${{ inputs.dry-run && '(dry-run)' }}
runs-on: ubuntu-latest
needs: [bottle, build]
permissions: {}
env:
AWS: ${{ inputs.dry-run && 'echo' || 'aws' }}
DIRNAME: ${{ needs.build.outputs.project }}/${{ needs.build.outputs.platform }}/${{ needs.build.outputs.arch }}
steps:
- uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- uses: pkgxdev/setup@v2
- name: generate versions.txt
run: |
if ! aws s3 cp \
s3://${{ secrets.AWS_S3_BUCKET }}/$DIRNAME/versions.txt \
./remote-versions.txt; then
touch remote-versions.txt
fi
echo "$SCRIPT" > script.ts
pkgx deno run -A script.ts ./remote-versions.txt ${{ needs.build.outputs.version }} > versions.txt
env:
SCRIPT: |
import SemVer, { compare } from "https://raw.githubusercontent.com/pkgxdev/libpkgx/main/src/utils/semver.ts"
const versions = new Set(Deno.readTextFileSync(Deno.args[0]).trim().split("\n").filter(x => x))
versions.add(Deno.args[1])
const out = [...versions].map(x => new SemVer(x)).sort(compare).join("\n")
await Deno.stdout.write(new TextEncoder().encode(out.trim()))
- name: s3 put
run: $AWS s3 cp versions.txt s3://${{ secrets.AWS_S3_BUCKET }}/$DIRNAME/versions.txt
- name: invalidate cloudfront
run: $AWS cloudfront create-invalidation
--distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
--paths
/$DIRNAME/versions.txt
if: inputs.invalidate-cloudfront
complain:
needs: bottle
if: failure() && !inputs.dry-run && inputs.complain
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/complain
with:
pkg: ${{ fromJSON(inputs.pkg).project }}=${{ fromJSON(inputs.pkg).version.value }}

View file

@ -1,57 +0,0 @@
name: pkg
run-name: pkging ${{inputs.pkg}}
on:
workflow_call:
inputs:
pkg:
required: true
type: string
dry-run:
type: boolean
default: false
complain:
type: boolean
default: false
invalidate-cloudfront:
type: boolean
default: true
jobs:
plan:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.matrix.outputs.matrix }}
pkg: ${{ steps.id.outputs.pkg-json }}
steps:
- uses: pkgxdev/setup@v2
- uses: actions/checkout@v4
- run: .github/scripts/get-matrix.ts ${{ inputs.pkg }}
id: matrix
- uses: pkgxdev/brewkit/id@v1
id: id
with:
pkg: ${{ inputs.pkg }}
pkg:
needs: plan
name: ${{matrix.platform.tinyname}}
strategy:
fail-fast: false
matrix:
platform: ${{ fromJSON(needs.plan.outputs.matrix) }}
uses: ./.github/workflows/pkg-platform.yml
permissions:
issues: write
with:
pkg: ${{ needs.plan.outputs.pkg }}
name: ${{ matrix.platform.name }}
os: ${{ toJSON(matrix.platform.os) }}
container: ${{ matrix.platform.container }}
test-os: ${{ toJSON(matrix.platform.test-os) }}
test-container: ${{ toJSON(matrix.platform.test-container) }}
dry-run: ${{ inputs.dry-run }}
tinyname: ${{ matrix.platform.tinyname }}
complain: ${{ inputs.complain }}
invalidate-cloudfront: ${{ inputs.invalidate-cloudfront }}
secrets: inherit

View file

@ -1,60 +0,0 @@
name: restock pkg inventory
run-name: restocking ${{ inputs.project }}
on:
workflow_dispatch:
inputs:
project:
description: a single project, eg. `foo.com`
required: true
type: string
jobs:
ingest:
runs-on: ubuntu-latest
outputs:
versions: ${{ steps.inventory.outputs.versions }}
steps:
- uses: pkgxdev/setup@v2
- uses: actions/checkout@v4
- run: ./.github/scripts/inventory.ts ${{ inputs.project }}
id: inventory
pkg:
needs: ingest
strategy:
fail-fast: false
matrix:
version: ${{ fromJSON(needs.ingest.outputs.versions) }}
uses: ./.github/workflows/pkg.yml
permissions:
issues: write #FIXME we don't want this but I don't think we can alter the way permissions are inherited
with:
pkg: ${{inputs.project}}=${{ matrix.version }}
invalidate-cloudfront: false # we do it all at once below otherwise
secrets: inherit
invalidate-cloudfront:
needs: pkg
runs-on: ubuntu-latest
if: always()
# ^^ not ideal but often <5% of builds fail because we have modified the build script
# in a non backward compatible way over time and we still want to invalidate cloudfront
# for most of the builds.
steps:
- uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
# FIXME ideally we would get the exact path list from the above matrix
# but GitHub Actions has no clean way to do that. This is more ideal as
# we don't want to invalidate paths that failed and certainly want to
# avoid invalidations if all failed
- name: invalidate cloudfront
run: aws cloudfront create-invalidation
--distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }}
--paths /${{inputs.project}}/*

7
.gitignore vendored
View file

@ -1,13 +1,8 @@
/srcs
/builds
/testbeds
/homes
/prefixes
.DS_Store
/prefixes
#TODO commit after v1
/deno.lock
# IntelliJ IDE Folder
.idea/

View file

@ -3,4 +3,5 @@
"deno.lint": true,
"deno.unstable": true,
"deno.config": ".github/deno.jsonc",
"deno.importMap": "../cli/import-map.json"
}

132
README.md
View file

@ -1,45 +1,52 @@
![pkgx](https://pkgx.dev/banner.png)
![tea](https://tea.xyz/banner.png)
# What is a Pantry?
Pantries provide consistent metadata about open source packages. This
metadata shouldn't require manual collection, but at this current state in
open source it does.
It is collected and duplicated thousands of times. A huge waste of effort.
tea aims to eradicate this wasted effort, though unfortunately, the journey
there will require—to some extent—doing that duplication one more time.
## Doing it a Little Better This Time
Our format is YAML, which is at least non-proprietary and could be used by
other tools without an independent parser. And we're pulling in data from
other sources as much as possible, eg. versions are taken from the
“source” whenever possible.
&nbsp;
pkg metadata and build instructions.
# Contributing
Assuming you have `pkgx` with shell integration:
Assuming you have tea (w/magic) installed:
```sh
$ git clone https://github.com/pkgxdev/pantry
$ git clone https://github.com/teaxyz/pantry
$ cd pantry
# all the following commands operate in `./tea.out`
# your tea installation remains untouched
$ dev # https://docs.pkgx.sh/dev
# ^^ adds brewkit (ie. the `bk` command) to your devenv
# ^^ IMPORTANT! Otherwise the `bk` command will not be found
# ^^ Alternatively, you can use pkgx and prefix your commands with the ENV
# ^^ PKGX_PANTRY_PATH=$(pwd) pkgx bk [command]
$ bk init
$ pkg init
# ^^ creates a “wip” package.yml
# ^^ if you already know the name, you can pass it as an argument
$ bk edit
$ pkg edit
# ^^ opens the new package.yml in your EDITOR
$ bk build
# builds to `./builds`
$ pkg build
# ^^ needs a zero permissions GITHUB_TOKEN to use the GitHub API
# either set `GITHUB_TOKEN` or run `gh auth login`
$ pkgx yq .provides <projects/$(bk status | tr -d '[:space:]')/package.yml
- bin/foo
# ^^ purely demonstrative for the next step
$ pkgx foo
$ foo
# ^^ anything in the `provides:` key will now run
$ bk audit
# ^^ worth doing an audit to check for common pkging issues
$ bk test
$ pkg test
# ^^ you need to write a test that verifies the package works
$ gh repo fork
@ -48,79 +55,66 @@ $ git push origin my-new-package
$ gh pr create
```
> [!TIP]
> * `bk build` and `bk test` can be invoked eg. `bk docker build` to run
> inside a Docker container for Linux builds and testing
> * All commands take an optional pkg-spec eg. `bk build node@19`
> * While inside the pantry `dev` environment you can run commands from any
> built packages provided you specified their `provides:` key in the
> `package.yml`.
> * `pkg` can be run without magic via `tea -E pkg` (this dev-env provides `+tea.xyz/brewkit`).
> * `gh` can be run without magic via `tea gh`.
> * `git` can be run without magic via `tea git`.
> * `pkg build` and `pkg test` take a `-L` flag to run in a Linux Docker container
> * All commands take an optional pkg-spec eg. `pkg build zlib.net^1.1`
> [!NOTE]
> We use a special package called [`brewkit`] to build packages both here and
> in CI/CD. `brewkit` provides the `bk` command.
> [!IMPORTANT]
> brewkit installs the built products to `${PKGX_DIR:-$HOME/.pkgx}` which
> means they are installed to your user's pkgx cache.
While inside a pantry dev-env you can run commands from any built packages
provided you specified their `provides:` key.
## GitHub Codespaces
`pantry` also works in GitHub Codespaces. The default configuration
provided with the repository will install/update `pkgx` at the time
you attach, so you should be able to quickly work on or test packages
provided with the repository will install/update `tea` at the time
you attach, so you should be able to quickly work on or test packages
in a remote linux environment (or work from a device with just a web browser).
## Providers
If the package you want to add to tea can be executed simply eg. you want
`foo` to run `npx foo`, then you can add a one-line entry to
[`npmjs.com/provider.yml`].
We currently also support this for `pipx`. Adding support for other such
dependency manager execution handlers is easy and welcome.
At this time, if the package has tea dependencies or requires compilation,
it should be packaged as a `package.yml`.
## Packaging Guide
Packaging can be cumbersome.
Our [wiki] is our packaging knowledge base.
For other assistance, start a [discussion].
The best way to figure out solutions for your problems is to read other
examples from the pantry.
## After Your Contribution
# After Your Contribution
We build “bottles” (tar'd binaries) and upload them to both our centralized
bottle storage and decentralized [IPFS].
We build “bottles” (tar'd binaries) and upload them to our CDN. Thus your
contribution will be available at merge-time + build-time + CDN distribution
time.
tea automatically builds new releases of packages *as soon as they are
released* (usually starting the builds within seconds). There is no need to
submit PRs for updates.
`pkgx` should
automatically sync the pantry to your local machine if you ask for something
it doesnt know about, but in the case where that fails do a `pkgx --sync`
first.
> [!NOTE]
> The pantry automatically builds new releases of packages *as soon as they
> are released* (usually starting the builds within seconds). There is no need
> to submit PRs for updates.
Note that while in the pantry `dev` environment you can use your new package
if you built it. However this will not work outside the pantry `dev` unless
you either:
1. Set `PKGX_PANTRY_PATH`
2. Get your PR merged!
# Working on Other People's Pull Requests
## Working on Other People's Pull Requests
Packaging can be fiddly so we all pitch in. If you want to help someone else
with their pull request then you can use GitHubs CLI:
```sh
```
$ gh pr checkout 123
# or you can copy paste the URL:
$ gh pr checkout https://github.com/pkgxdev/pantry/pull/123
$ gh pr checkout https://github.com/teaxyz/pantry/pull/123
# then open for editing:
$ bk edit
$ pkg edit
```
[wiki]: https://github.com/pkgxdev/pantry/wiki
[discussion]: https://github.com/orgs/pkgxdev/discussions
[wiki]: https://github.com/teaxyz/pantry/wiki
[discussion]: https://github.com/orgs/teaxyz/discussions
[IPFS]: https://ipfs.tech
[`npmjs.com/provider.yml`]: ./projects/npmjs.com/provider.yml
[`brewkit`]: https://github.com/pkgxdev/brewkit

View file

@ -1,4 +0,0 @@
dependencies:
pkgx.sh/brewkit: ^0 || ^1
env:
PKGX_PANTRY_PATH: ${{srcroot}}

View file

@ -8,38 +8,41 @@ versions:
build:
dependencies:
cmake.org: ^3
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
script:
- |
cmake -S . -B build \
-DCMAKE_CXX_STANDARD=17 \
-DBUILD_SHARED_LIBS=ON \
-DCMAKE_INSTALL_RPATH={{ prefix }}/lib \
-DCMAKE_BINARY_DIR={{ prefix }}/bin \
-DABSL_PROPAGATE_CXX_STD=ON \
-DCMAKE_INSTALL_PREFIX={{ prefix }} \
-DCMAKE_INSTALL_LIBDIR={{ prefix }}/lib \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_FIND_FRAMEWORK=LAST \
-DCMAKE_VERBOSE_MAKEFILE=ON \
-Wno-dev \
-DBUILD_TESTING=OFF
- cmake --build build
- cmake --install build
script: |
cmake -S . -B build \
-DCMAKE_CXX_STANDARD=17 \
-DBUILD_SHARED_LIBS=ON \
-DCMAKE_INSTALL_RPATH={{ prefix }}/lib \
-DCMAKE_BINARY_DIR={{ prefix }}/bin \
-DABSL_PROPAGATE_CXX_STD=ON \
-DCMAKE_INSTALL_PREFIX={{ prefix }} \
-DCMAKE_INSTALL_LIBDIR={{ prefix }}/lib \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_FIND_FRAMEWORK=LAST \
-DCMAKE_VERBOSE_MAKEFILE=ON \
-Wno-dev \
-DBUILD_TESTING=OFF
cmake --build build
cmake --install build
# Remove bad flags in .pc files.
# https://github.com/abseil/abseil-cpp/issues/1408
if [[ "$(uname)" == "Darwin" ]]; then
sed -i '' 's/-Xarch_x86_64 -Xarch_x86_64 -Xarch_arm64 //g' {{ prefix }}/lib/pkgconfig/absl_random_internal_randen_hwaes{_impl,}.pc
fi
# Remove bad flags in .pc files.
# https://github.com/abseil/abseil-cpp/issues/1408
- run: sed -i 's/-Xarch_x86_64 -Xarch_x86_64 -Xarch_arm64 //g' *.pc
working-directory: ${{ prefix }}/lib/pkgconfig
if: darwin
- run: sed -i
-e "s:{{ pkgx.prefix }}:\$\{CMAKE_CURRENT_LIST_DIR\}/../../../../..:g"
-e "s/\+brewing//g"
cd "{{prefix}}/lib/cmake/absl"
sed -i.bak \
-e "s:$(tea --prefix):\$\{CMAKE_CURRENT_LIST_DIR\}/../../../../..:g" \
abslTargets{,-release}.cmake
working-directory: '{{prefix}}/lib/cmake/absl'
rm abslTargets{,-release}.cmake.bak
test:
dependencies:
tea.xyz/gx/cc: c99
script: |
c++ -std=c++17 -I{{ prefix }}/include -L{{ prefix }}/lib -labsl_strings test.cc
ls -l
c++ -std=c++17 -I{{ prefix }}/include -L{{ prefix }}/lib -labsl_strings test.cc
test "$(./a.out)" = "Joined string: foo-bar-baz\n"

View file

@ -1,28 +0,0 @@
distributable:
url: https://github.com/acorn-io/runtime/archive/refs/tags/v{{version}}.tar.gz
strip-components: 1
# if there's a github then we can parse the versions
versions:
github: acorn-io/runtime
build:
dependencies:
go.dev: '*'
env:
CGO_ENABLED: 0
LDFLAGS:
- -s
- -w
#- -X github.com/acorn-io/runtime/cmd.Version={{ version }}
#- -X github.com/acorn-io/runtime/cmd.GitCommit=$( git rev-parse HEAD )
linux:
LDFLAGS:
- -buildmode=pie
script: go build -v -ldflags="${LDFLAGS}" -o "{{ prefix }}"/bin/acorn
provides:
- bin/acorn
#test: test "$(acorn version)" = {{version}}
test: acorn help

View file

@ -1,20 +1,27 @@
#!/bin/sh
#FIXME a `git init` type system would be better than a single directory :/
set -e
workspace_directory="${XDG_DOCUMENTS_DIR:-$HOME/Documents}/Auto-GPT"
WD="${XDG_DATA_HOME:-$HOME/.local/share}/auto-gpt"
# if the data|plugins directories don't exist auto-gpt bails
mkdir -p "$workspace_directory"/data "$workspace_directory"/plugins
mkdir -p "$WD"/data "$WD"/plugins
if ! test -f "$workspace_directory/prompt_settings.yaml"; then
cp "$VIRTUAL_ENV"/../share/prompt_settings.yaml "$workspace_directory"
fi
PREFIX="$(cd "$(dirname "$0")/../.." && pwd)"
# change to this directory because auto-gpt assumes it is running
# in its own checkout and is not suitable to be run from anywhere
cd "$workspace_directory"
cd "$WD"
exec "$VIRTUAL_ENV"/bin/python -m autogpt --workspace-directory="$workspace_directory" "$@"
if ! test -f .env; then
cp "$PREFIX"/share/env.template .env
fi
echo "tea: auto-gpt output goes here: $WD"
echo "tea: auto-gpt config is here: $WD/.env"
echo "tea: You need to set \`OPENAI_API_KEY\` before running auto-gpt or add it to the above config"
echo "tea: unless you have access to GPT4 you need to run \`--gpt3only\`"
echo
exec "$VIRTUAL_ENV"/bin/python -m autogpt --workspace-directory="$WD" "$@"

View file

@ -1,63 +0,0 @@
#!/usr/bin/env -S tea bash
#---
# dependencies:
# charm.sh/gum: '*'
#---
set -eo pipefail
# attempt to get the key from the users shell rc files (if set)
if [ -z "$OPENAI_API_KEY" -a -n "$SHELL" ]; then
export OPENAI_API_KEY="$(env -i "$SHELL" -ic 'echo $OPENAI_API_KEY')"
fi
if [ -z "$OPENAI_API_KEY" ]; then
gum format <<EoMD
# OpenAI API key
Auto-GPT requires an OpenAI API.
> https://platform.openai.com/account/api-keys
GPT4 is recommended (but you gotta sign up for
the [waitlist](https://openai.com/waitlist/gpt-4-api))
**this key will not be persisted by tea!**
EoMD
echo # spacer
export OPENAI_API_KEY="$(gum input --placeholder 'key pls')"
fi
gum format <<EoMD
# gpt version?
which gpt version does your OpenAI API key support?
> sadly this must be specified explicitly, so we gotta ask
EoMD
echo #spacer
GPT="$(gum choose {GPT4,GPT3})"
docs="${XDG_DOCUMENTS_DIR:-$HOME/Documents}/Auto-GPT"
gum format <<EoMD
# fyi
* output goes here: \`$docs\`
# exe
running **Auto-GPT**… (*this might take a few minutes*)
EoMD
if test "$GPT" = GPT3
then
exec auto-gpt --gpt3only
else
exec auto-gpt
fi

View file

@ -10,32 +10,33 @@ versions:
platforms:
- darwin
entrypoint: tea ./entrypoint.sh
dependencies:
python.org: '>=3.10<3.12'
python.org: ^3.10
redis.io: ^7
tea.xyz: ^0
build:
dependencies:
tea.xyz/gx/cc: c99
script:
# `pip install` seems to miss some vital .json files, so we must manually copy
# we copy everything as we're not 100% sure which files are missing
# we do this first so any file movements from `pip install` takes precedence
- run: cp -R $SRCROOT/autogpt .
working-directory: ${{prefix}}/venv/lib/python{{deps.python.org.version.marketing}}/site-packages
- run:
cp -R $SRCROOT/autogpt .
working-directory:
${{prefix}}/venv/lib/python{{deps.python.org.version.marketing}}/site-packages
- python-venv.py {{prefix}}/bin/auto-gpt --requirements-txt
# still pretty new and thus provides no executable, so we made one
- cp props/auto-gpt {{prefix}}/venv/bin
- working-directory: '{{prefix}}/share'
run: |
cp $SRCROOT/.env.template env.template
cp $SRCROOT/prompt_settings.yaml .
# this is broken and means any environment variable that is set is ignored
- sed -i.bak s/^OPENAI_API_KEY=your-openai-api-key$//g .env.template
- cp props/entrypoint.sh {{prefix}}
- |
mkdir {{prefix}}/share
cp .env.template {{prefix}}/share/env.template
provides:
- bin/auto-gpt

View file

@ -1,33 +0,0 @@
distributable:
url: https://www.agwa.name/projects/git-crypt/downloads/git-crypt-{{version}}.tar.gz
strip-components: 1
versions:
url: https://www.agwa.name/projects/git-crypt/
match: /git-crypt-\d+\.\d+\.\d+\.tar\.gz/
strip:
- /^git-crypt-/
- /\.tar\.gz$/
dependencies:
openssl.org: '^1.1'
build:
dependencies:
docbook.org: '*'
docbook.org/xsl: '*'
gnome.org/libxslt: '*'
script:
- sed -i "s|http://docbook.sourceforge.net/release/xsl/current|{{deps.docbook.org/xsl.prefix}}/libexec/docbook-xsl|g" Makefile
- make ENABLE_MAN=yes PREFIX={{prefix}} install
env:
XML_CATALOG_FILES: ${{prefix}}/etc/xml/catalog
CFLAGS: $CFLAGS -DOPENSSL_API_COMPAT=0x30000000L
provides:
- bin/git-crypt
test:
- git-crypt keygen keyfile
- ls | grep keyfile
- git-crypt version | grep {{version}}

View file

@ -13,9 +13,14 @@ platforms:
- linux
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
script:
- ./configure $ARGS
- make --jobs {{ hw.concurrency }} install
env:
ARGS:
- --prefix="{{prefix}}"
@ -27,6 +32,8 @@ provides:
- bin/aserver
test:
dependencies:
tea.xyz/gx/cc: c99
script:
- cc test.c -lasound -o test
- ./test
- ./test

View file

@ -1,48 +0,0 @@
distributable:
url: https://github.com/Ph0enixKM/Amber/archive/refs/tags/{{ version.tag }}.tar.gz
strip-components: 1
provides:
- bin/amber
interprets:
extensions: ab
args: amber
versions:
github: Ph0enixKM/Amber
strip: /-alpha/ # until there's a stable release
companions:
# complex math
gnu.org/bc: 1
# string functions in `std`
gnu.org/sed: 4
# tr for `std:lower()`, `std:upper()`
gnu.org/coreutils: 9
# awk for `std:sum()`
gnu.org/gawk: 5
# this should be necessary, but compiler.rs hardcodes the
# path to /bin/bash
# https://github.com/Ph0enixKM/Amber/issues/109
# gnu.org/bash: 5
build:
dependencies:
rust-lang.org: '>=1.56'
rust-lang.org/cargo: '*'
script: cargo install --locked --path . --root {{prefix}}
test:
dependencies:
pkgx.sh: ^1
script:
- run: cat $FIXTURE >test.ab
fixture:
extname: ab
content: |
echo ((12 + 34) * 9) % 4
- test "$(amber test.ab)" = 2
- test "$(pkgx test.ab)" = 2
- amber test.ab test.sh
- test "$(./test.sh)" = 2

View file

@ -1,72 +0,0 @@
distributable:
url: https://github.com/AmrDeveloper/GQL/archive/refs/tags/{{version}}.tar.gz
strip-components: 1
display-name: gitql
versions:
github: AmrDeveloper/GQL
dependencies:
libgit2.org: ~1.7 # links to libgit2.so.1.7
build:
dependencies:
rust-lang.org: ^1.65
rust-lang.org/cargo: '*'
cmake.org: ^3
script: cargo install --path . --root {{prefix}}
provides:
- bin/gitql
test:
dependencies:
git-scm.org: ^2
gnu.org/sed: '*'
fixture: |
SELECT 1
SELECT 1 + 2
SELECT LEN("Git Query Language")
SELECT "One" IN ("One", "Two", "Three")
SELECT "Git Query Language" LIKE "%Query%"
SELECT DISTINCT title AS tt message FROM commits
SELECT name, COUNT(name) AS commit_num FROM commits GROUP BY name ORDER BY commit_num DESC LIMIT 10
SELECT commit_count FROM branches WHERE commit_count BETWEEN 0 .. 10
SELECT * FROM refs WHERE type = "branch"
SELECT * FROM refs ORDER BY type
SELECT * FROM commits
SELECT name, email FROM commits
SELECT name, email FROM commits ORDER BY name DESC, email ASC
SELECT name, email FROM commits WHERE name LIKE "%gmail%" ORDER BY name
SELECT * FROM commits WHERE LOWER(name) = "mxcl"
SELECT name FROM commits GROUP By name
SELECT name FROM commits GROUP By name having name = "mxcl"
SELECT * FROM branches
SELECT * FROM branches WHERE is_head = true
SELECT name, LEN(name) FROM branches
SELECT * FROM tags
SELECT * FROM tags OFFSET 1 LIMIT 1
script:
- git clone https://github.com/pkgxdev/pkgx
- run: |
echo 'exit' >>$FIXTURE
cat $FIXTURE | gitql --repos pkgx
if: <0.10.0
- run: |
gitql <$FIXTURE
gitql -q 'SELECT 1'
if: '>=0.10.0<0.22.0 || >=0.22.1'
working-directory: pkgx
# bug in v0.22.0, queries without FROM error out, fixed in v0.22.1
- run: |
sed -i '/FROM/!d' $FIXTURE
gitql <$FIXTURE
gitql -q 'SELECT 1 AS just_a_number FROM tags LIMIT 1'
if: 0.22.0
working-directory: pkgx

View file

@ -1,40 +0,0 @@
distributable:
url: git+https://github.com/anchore/syft.git
ref: ${{version.tag}}
versions:
github: anchore/syft
build:
dependencies:
go.dev: ^1.21
script:
go build $ARGS -ldflags="$LDFLAGS" ./cmd/syft
env:
COMMIT: $(git describe --always --abbrev=8 --dirty)
DATE: $(date -u +%FT%TZ)
ARGS:
- -trimpath
- -o={{prefix}}/bin/syft
linux:
ARGS:
- -buildmode=pie
LDFLAGS:
- -s
- -w
- -X main.version={{version}}
- -X main.gitCommit=${COMMIT}
- -X main.buildDate=${DATE}
provides:
- bin/syft
test:
dependencies:
curl.se: '*'
script:
- curl -L "${TEST_JSON}" -o micronaut.json
- syft convert micronaut.json | grep 'netty-codec-http2'
- syft --version | grep {{version}}
env:
TEST_JSON: https://raw.githubusercontent.com/anchore/syft/934644232ab115b2518acdb5d240ae31aaf55989/syft/pkg/cataloger/java/test-fixtures/graalvm-sbom/micronaut.json

View file

@ -1,62 +0,0 @@
distributable: ~
versions:
url: https://developer.android.com/studio
match: /commandlinetools-mac-\d+_latest\.zip/
strip:
- /^commandlinetools-mac-/
- /_latest\.zip/
warnings:
- vendored
dependencies:
openjdk.org: '>=17'
runtime:
env:
ANDROID_HOME: ${{prefix}}/libexec
PATH: $PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/tools:$ANDROID_HOME/tools/bin:$ANDROID_HOME/emulator
build:
dependencies:
info-zip.org/unzip: '*'
curl.se: '*'
script:
- curl -L "$DIST_URL" -o android-commandlinetools.zip
- unzip android-commandlinetools.zip
# we need to use `cmdline-tools/latest` path to avoid:
# Error: Either specify it explicitly with --sdk_root=
- run: mkdir -p libexec/cmdline-tools/latest
working-directory: ${{prefix}}
- run: cp -r * {{prefix}}/libexec/cmdline-tools/latest/
working-directory: cmdline-tools
- run: |
ln -s ../libexec/cmdline-tools/latest/bin/apkanalyzer apkanalyzer
ln -s ../libexec/cmdline-tools/latest/bin/avdmanager avdmanager
ln -s ../libexec/cmdline-tools/latest/bin/lint lint
ln -s ../libexec/cmdline-tools/latest/bin/profgen profgen
ln -s ../libexec/cmdline-tools/latest/bin/resourceshrinker resourceshrinker
ln -s ../libexec/cmdline-tools/latest/bin/retrace retrace
ln -s ../libexec/cmdline-tools/latest/bin/screenshot2 screenshot2
ln -s ../libexec/cmdline-tools/latest/bin/sdkmanager sdkmanager
working-directory: ${{prefix}}/bin
env:
linux:
DIST_URL: https://dl.google.com/android/repository/commandlinetools-linux-{{version.raw}}_latest.zip
darwin:
DIST_URL: https://dl.google.com/android/repository/commandlinetools-mac-{{version.raw}}_latest.zip
provides:
- bin/apkanalyzer
- bin/avdmanager
- bin/lint
- bin/profgen
- bin/resourceshrinker
- bin/retrace
- bin/screenshot2
- bin/sdkmanager
test:
- echo y | sdkmanager --install "platforms;android-30"
- cat $ANDROID_HOME/platforms/android-30/source.properties | grep "AndroidVersion.ApiLevel=30"

View file

@ -1,36 +0,0 @@
distributable:
url: https://registry.npmjs.org/@angular/cli/-/cli-{{version}}.tgz
strip-components: 1
versions:
npm: '@angular/cli'
dependencies:
nodejs.org: ^20
build:
dependencies:
npmjs.com: ^10
script:
- npm i $ARGS .
- run: ln -s ../libexec/bin/ng ng
working-directory: '{{prefix}}/bin'
env:
ARGS:
- -ddd
- --global
- --build-from-source
- --prefix={{prefix}}/libexec
- --install-links
- --unsafe-perm
provides:
- bin/ng
test:
script:
- ng new angular-test --skip-install --defaults
- ls -l angular-test | grep angular.json
- ng version | grep {{version}}
env:
NG_CLI_ANALYTICS: false

View file

@ -1,26 +0,0 @@
distributable:
url: https://github.com/ansible/ansible-lint/archive/refs/tags/{{version.tag}}.tar.gz
strip-components: 1
versions:
github: ansible/ansible-lint
dependencies:
pkgx.sh: ^1
build:
dependencies:
python.org: ~3.11
script:
- bkpyvenv stage {{prefix}} {{version}}
- ${{prefix}}/venv/bin/pip install .
- bkpyvenv seal {{prefix}} ansible-lint
test:
fixture: |
[defaults]
fact_caching_timeout=invalid-value
script: ansible-lint $FIXTURE
provides:
- bin/ansible-lint

View file

@ -1,33 +1,40 @@
distributable:
url: https://github.com/ansible/ansible/archive/{{version.tag}}.tar.gz
url: https://github.com/ansible/ansible/archive/v{{version}}.tar.gz
strip-components: 1
versions:
github: ansible/ansible/tags # reads github *tags*
github: ansible/ansible/tags # reads github *tags*
strip: /^v/
dependencies:
pkgx.sh: 1
python.org: '>=3.7'
build:
dependencies:
python.org: '>=3.7<3.12'
script:
- bkpyvenv stage {{prefix}} {{version}}
script: |
python-venv.sh {{prefix}}/bin/ansible
# manually register all the ansible-* executables with tea
#FIXME dirty hack, replace with a proper solution
for _EXEC in {{prefix}}/venv/bin/ansible-*; do
CMD_NAME="${_EXEC##*/}" # get text after the last slash (the command name)
TRG_BIN_NAME="{{prefix}}/bin/$CMD_NAME"
cp -v {{prefix}}/bin/ansible $TRG_BIN_NAME # copy the tea-configured executable with the correct name
# replace the original 'ansible' with the correct $CMD_NAME
sed -i.bak -e \
"s|/bin/ansible|/bin/$CMD_NAME|" \
$TRG_BIN_NAME
done
- ${{prefix}}/venv/bin/pip install .
# install paramiko, a python ssh library sometimes used with ansible
- ${{prefix}}/venv/bin/pip install paramiko
{{prefix}}/venv/bin/pip install paramiko #FIXME should we pin a version?
- bkpyvenv seal {{prefix}} ansible ansible-config ansible-connection ansible-console ansible-doc ansible-galaxy ansible-inventory ansible-playbook ansible-pull ansible-test ansible-vault
rm -v {{prefix}}/bin/ansible-*.bak
# needs libpython
- run: cp -a {{deps.python.org.prefix}}/lib/libpython* .
working-directory: ${{prefix}}/lib/
test:
script:
- ansible --version
- ansible-playbook playbook.yml -i hosts.ini
script: |
ansible --version
ansible-playbook playbook.yml -i hosts.ini
#FIXME below is a test from the brew formula, but I'm not sure what it's testing
# ^ especially considering that 'requests' is not a part of ansible's dependencies
@ -36,20 +43,22 @@ test:
# {{prefix}}/venv/bin/python -c "$py_script"
# Ensure ansible-vault can encrypt/decrypt files.
- echo $SECRET_MESSAGE > vault-test-file.txt
- echo $VAULT_PASSWORD > vault-password.txt
echo $SECRET_MESSAGE > vault-test-file.txt
echo $VAULT_PASSWORD > vault-password.txt
- ansible-vault encrypt --vault-password-file vault-password.txt vault-test-file.txt
- test "$(cat vault-test-file.txt)" != "$SECRET_MESSAGE" # encrypted
ansible-vault encrypt --vault-password-file vault-password.txt vault-test-file.txt
test "$(cat vault-test-file.txt)" != "$SECRET_MESSAGE" # encrypted
- ansible-vault decrypt --vault-password-file vault-password.txt vault-test-file.txt
- test "$(cat vault-test-file.txt)" = "$SECRET_MESSAGE" # decrypted
ansible-vault decrypt --vault-password-file vault-password.txt vault-test-file.txt
test "$(cat vault-test-file.txt)" = "$SECRET_MESSAGE" # decrypted
# ensure paramiko is installed
- ${{prefix}}/venv/bin/python -c "import paramiko"
{{prefix}}/venv/bin/python -c "import paramiko"
env:
VAULT_PASSWORD: '123456789'
SECRET_MESSAGE: 'Hello world!'
VAULT_PASSWORD: "123456789"
SECRET_MESSAGE: "Hello world!"
provides:
- bin/ansible

View file

@ -12,7 +12,9 @@ build:
dependencies:
x86-64:
nasm.us: 2
tea.xyz/gx/cc: c99
cmake.org: ^3
tea.xyz/gx/make: '*'
working-directory:
out
script: |

View file

@ -10,6 +10,8 @@ provides:
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
apache.org/apr: '*'
openssl.org: '*'
libexpat.github.io: '*'

View file

@ -6,6 +6,9 @@ versions:
github: apache/apr/tags
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
script: |
./configure $ARGS
make --jobs {{ hw.concurrency }}
@ -23,7 +26,7 @@ build:
cd ../build-1
sed -i.bak \
-e "s_{{prefix}}_\$(subst /bin/apr-{{version.major}}-config,,\$(shell command -v apr-{{version.major}}-config))_g" \
-e "s_${PKGX_DIR}_\$(subst /apache.org/apr/v{{version}}/bin/apr-{{version.major}}-config,,\$(shell command -v apr-{{version.major}}-config))_g" \
-e "s_${TEA_PREFIX}_\$(subst /apache.org/apr/v{{version}}/bin/apr-{{version.major}}-config,,\$(shell command -v apr-{{version.major}}-config))_g" \
apr_rules.mk
rm apr_rules.mk.bak
env:
@ -33,6 +36,8 @@ build:
- --disable-debug
test:
dependencies:
tea.xyz/gx/cc: c99
fixture: |
#include <stdio.h>
#include <apr-1/apr_version.h>

View file

@ -1,107 +0,0 @@
distributable:
url: https://archive.apache.org/dist/arrow/arrow-{{version}}/apache-arrow-{{version}}.tar.gz
strip-components: 1
versions:
url: https://archive.apache.org/dist/arrow/
match: /arrow-\d+\.\d+\.\d+/
strip:
- /^arrow-/
dependencies:
github.com/aws/aws-sdk-cpp: '*'
github.com/google/brotli: '*'
sourceware.org/bzip2: '*'
google.com/glog: '*'
grpc.io: '*'
lz4.org: '*'
openssl.org: '*'
protobuf.dev: 25.2.0 # match grpc.io, so gdal.org can build
rapidjson.org: '*'
github.com/google/re2: '*'
google.github.io/snappy: '*'
apache.org/thrift: '*'
github.com/JuliaStrings/utf8proc: '*'
facebook.com/zstd: '*'
build:
dependencies:
boost.org: '*'
cmake.org: '*'
llvm.org: ~16
python.org: '<3.12'
working-directory: cpp
script:
- cmake -B $BUILD_DIR $CMAKE_ARGS
- cmake --build $BUILD_DIR
- cmake --install $BUILD_DIR
- run: |
sed -i.bak "s|$PKGX_DIR|\${pcfiledir}/../../../..|g" ./*.pc
rm ./*.bak
working-directory: '{{prefix}}/lib/pkgconfig'
env:
BUILD_DIR: $(mktemp -d)
CC: clang
CXX: clang++
LD: clang
CMAKE_ARGS:
- -DCMAKE_INSTALL_PREFIX="{{prefix}}
- -DCMAKE_INSTALL_LIBDIR=lib
- -DCMAKE_BUILD_TYPE=Release
- -DCMAKE_FIND_FRAMEWORK=LAST
- -DCMAKE_VERBOSE_MAKEFILE=ON
- -Wno-dev
- -DBUILD_TESTING=OFF
- -DCMAKE_INSTALL_RPATH={{prefix}}
- -DARROW_ACERO=ON
- -DARROW_COMPUTE=ON
- -DARROW_CSV=ON
- -DARROW_DATASET=ON
- -DARROW_FILESYSTEM=ON
- -DARROW_GANDIVA=ON
- -DARROW_HDFS=ON
- -DARROW_JSON=ON
- -DARROW_ORC=ON
- -DARROW_PARQUET=ON
- -DARROW_PROTOBUF_USE_SHARED=ON
- -DARROW_S3=ON
- -DARROW_WITH_BZ2=ON
- -DARROW_WITH_ZLIB=ON
- -DARROW_WITH_ZSTD=ON
- -DARROW_WITH_LZ4=ON
- -DARROW_WITH_SNAPPY=ON
- -DARROW_WITH_BROTLI=ON
- -DARROW_WITH_UTF8PROC=ON
- -DARROW_INSTALL_NAME_RPATH=OFF
- -DPARQUET_BUILD_EXECUTABLES=ON
aarch64:
CMAKE_ARGS:
- -DARROW_MIMALLOC=ON
provides:
- bin/parquet-dump-schema
- bin/parquet-reader
- bin/parquet-scan
test:
dependencies:
freedesktop.org/pkg-config: '*'
script:
- run: |
cat << EOF > test.cpp
#include "arrow/api.h"
int main(void) {
arrow::int64();
return 0;
}
EOF
- c++ test.cpp -std=c++17 -larrow -o test
- ./test
- run: |
cat << EOF > version.cpp
#include <arrow/api.h>
#include <iostream>
int main() {
std::cout << "Apache Arrow Version: " << ARROW_VERSION_STRING << std::endl;
return 0;
}
EOF
- c++ version.cpp -std=c++17 -larrow -o version
- ./version | grep {{version}}
- pkg-config --modversion arrow | grep {{version}}

View file

@ -1,47 +0,0 @@
distributable:
url: https://github.com/apache/avro/archive/release-{{version}}.tar.gz
strip-components: 1
versions:
github: apache/avro
strip:
- /^release-/
dependencies:
digip.org/jansson: '*'
google.github.io/snappy: '*'
tukaani.org/xz: '*'
zlib.net: '*'
build:
dependencies:
cmake.org: '*'
freedesktop.org/pkg-config: '*'
linux:
gnu.org/gcc: '*'
gnu.org/make: '*'
working-directory: lang/c
script:
- cmake -S . -B build $CMAKE_ARGS
- cmake --build build
- cmake --install build
env:
CMAKE_ARGS:
- -DCMAKE_INSTALL_PREFIX="{{prefix}}
- -DCMAKE_INSTALL_LIBDIR=lib
- -DCMAKE_BUILD_TYPE=Release
- -DCMAKE_FIND_FRAMEWORK=LAST
- -DCMAKE_VERBOSE_MAKEFILE=ON
- -Wno-dev
- -DBUILD_TESTING=OFF
provides:
- bin/avroappend
- bin/avrocat
- bin/avromod
- bin/avropipe
test:
dependencies:
freedesktop.org/pkg-config: '*'
linux:
gnu.org/gcc: '*'
script:
- cc quickstop.c -o test -lavro
- ./test
- pkg-config --modversion avro-c | grep {{version}}

View file

@ -1,238 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <avro.h>
#include <stdio.h>
#include <stdlib.h>
#ifdef DEFLATE_CODEC
#define QUICKSTOP_CODEC "deflate"
#else
#define QUICKSTOP_CODEC "null"
#endif
avro_schema_t person_schema;
int64_t id = 0;
/* A simple schema for our tutorial */
const char PERSON_SCHEMA[] =
"{\"type\":\"record\",\
\"name\":\"Person\",\
\"fields\":[\
{\"name\": \"ID\", \"type\": \"long\"},\
{\"name\": \"First\", \"type\": \"string\"},\
{\"name\": \"Last\", \"type\": \"string\"},\
{\"name\": \"Phone\", \"type\": \"string\"},\
{\"name\": \"Age\", \"type\": \"int\"}]}";
/* Parse schema into a schema data structure */
void init_schema(void)
{
if (avro_schema_from_json_literal(PERSON_SCHEMA, &person_schema)) {
fprintf(stderr, "Unable to parse person schema\n");
exit(EXIT_FAILURE);
}
}
/* Create a value to match the person schema and save it */
void
add_person(avro_file_writer_t db, const char *first, const char *last,
const char *phone, int32_t age)
{
avro_value_iface_t *person_class =
avro_generic_class_from_schema(person_schema);
avro_value_t person;
avro_generic_value_new(person_class, &person);
avro_value_t id_value;
avro_value_t first_value;
avro_value_t last_value;
avro_value_t age_value;
avro_value_t phone_value;
if (avro_value_get_by_name(&person, "ID", &id_value, NULL) == 0) {
avro_value_set_long(&id_value, ++id);
}
if (avro_value_get_by_name(&person, "First", &first_value, NULL) == 0) {
avro_value_set_string(&first_value, first);
}
if (avro_value_get_by_name(&person, "Last", &last_value, NULL) == 0) {
avro_value_set_string(&last_value, last);
}
if (avro_value_get_by_name(&person, "Age", &age_value, NULL) == 0) {
avro_value_set_int(&age_value, age);
}
if (avro_value_get_by_name(&person, "Phone", &phone_value, NULL) == 0) {
avro_value_set_string(&phone_value, phone);
}
if (avro_file_writer_append_value(db, &person)) {
fprintf(stderr,
"Unable to write Person value to memory buffer\nMessage: %s\n", avro_strerror());
exit(EXIT_FAILURE);
}
/* Decrement all our references to prevent memory from leaking */
avro_value_decref(&person);
avro_value_iface_decref(person_class);
}
int print_person(avro_file_reader_t db, avro_schema_t reader_schema)
{
avro_value_iface_t *person_class =
avro_generic_class_from_schema(person_schema);
avro_value_t person;
avro_generic_value_new(person_class, &person);
int rval;
rval = avro_file_reader_read_value(db, &person);
if (rval == 0) {
int64_t id;
int32_t age;
int32_t *p;
size_t size;
avro_value_t id_value;
avro_value_t first_value;
avro_value_t last_value;
avro_value_t age_value;
avro_value_t phone_value;
if (avro_value_get_by_name(&person, "ID", &id_value, NULL) == 0) {
avro_value_get_long(&id_value, &id);
fprintf(stdout, "%"PRId64" | ", id);
}
if (avro_value_get_by_name(&person, "First", &first_value, NULL) == 0) {
avro_value_get_string(&first_value, &p, &size);
fprintf(stdout, "%15s | ", p);
}
if (avro_value_get_by_name(&person, "Last", &last_value, NULL) == 0) {
avro_value_get_string(&last_value, &p, &size);
fprintf(stdout, "%15s | ", p);
}
if (avro_value_get_by_name(&person, "Phone", &phone_value, NULL) == 0) {
avro_value_get_string(&phone_value, &p, &size);
fprintf(stdout, "%15s | ", p);
}
if (avro_value_get_by_name(&person, "Age", &age_value, NULL) == 0) {
avro_value_get_int(&age_value, &age);
fprintf(stdout, "%"PRId32" | ", age);
}
fprintf(stdout, "\n");
/* We no longer need this memory */
avro_value_decref(&person);
avro_value_iface_decref(person_class);
}
return rval;
}
int main(void)
{
int rval;
avro_file_reader_t dbreader;
avro_file_writer_t db;
avro_schema_t projection_schema, first_name_schema, phone_schema;
int64_t i;
const char *dbname = "quickstop.db";
char number[15] = {0};
/* Initialize the schema structure from JSON */
init_schema();
/* Delete the database if it exists */
remove(dbname);
/* Create a new database */
rval = avro_file_writer_create_with_codec
(dbname, person_schema, &db, QUICKSTOP_CODEC, 0);
if (rval) {
fprintf(stderr, "There was an error creating %s\n", dbname);
fprintf(stderr, " error message: %s\n", avro_strerror());
exit(EXIT_FAILURE);
}
/* Add lots of people to the database */
for (i = 0; i < 1000; i++)
{
sprintf(number, "(%d)", (int)i);
add_person(db, "Dante", "Hicks", number, 32);
add_person(db, "Randal", "Graves", "(555) 123-5678", 30);
add_person(db, "Veronica", "Loughran", "(555) 123-0987", 28);
add_person(db, "Caitlin", "Bree", "(555) 123-2323", 27);
add_person(db, "Bob", "Silent", "(555) 123-6422", 29);
add_person(db, "Jay", "???", number, 26);
}
/* Close the block and open a new one */
avro_file_writer_flush(db);
add_person(db, "Super", "Man", "123456", 31);
avro_file_writer_close(db);
fprintf(stdout, "\nNow let's read all the records back out\n");
/* Read all the records and print them */
if (avro_file_reader(dbname, &dbreader)) {
fprintf(stderr, "Error opening file: %s\n", avro_strerror());
exit(EXIT_FAILURE);
}
for (i = 0; i < id; i++) {
if (print_person(dbreader, NULL)) {
fprintf(stderr, "Error printing person\nMessage: %s\n", avro_strerror());
exit(EXIT_FAILURE);
}
}
avro_file_reader_close(dbreader);
/* You can also use projection, to only decode only the data you are
interested in. This is particularly useful when you have
huge data sets and you'll only interest in particular fields
e.g. your contacts First name and phone number */
projection_schema = avro_schema_record("Person", NULL);
first_name_schema = avro_schema_string();
phone_schema = avro_schema_string();
avro_schema_record_field_append(projection_schema, "First",
first_name_schema);
avro_schema_record_field_append(projection_schema, "Phone",
phone_schema);
/* Read only the record you're interested in */
fprintf(stdout,
"\n\nUse projection to print only the First name and phone numbers\n");
if (avro_file_reader(dbname, &dbreader)) {
fprintf(stderr, "Error opening file: %s\n", avro_strerror());
exit(EXIT_FAILURE);
}
for (i = 0; i < id; i++) {
if (print_person(dbreader, projection_schema)) {
fprintf(stderr, "Error printing person: %s\n",
avro_strerror());
exit(EXIT_FAILURE);
}
}
avro_file_reader_close(dbreader);
avro_schema_decref(first_name_schema);
avro_schema_decref(phone_schema);
avro_schema_decref(projection_schema);
/* We don't need this schema anymore */
avro_schema_decref(person_schema);
return 0;
}

View file

@ -1,9 +1,9 @@
distributable:
url: https://archive.apache.org/dist/httpd/httpd-{{version}}.tar.gz
url: https://dlcdn.apache.org/httpd/httpd-2.4.57.tar.gz
strip-components: 1
versions:
url: https://archive.apache.org/dist/httpd/
url: https://dlcdn.apache.org/httpd/
match: /httpd-\d+\.\d+\.\d+.tar.gz/
strip:
- /^httpd-/
@ -19,40 +19,49 @@ dependencies:
gnome.org/libxml2: '*'
zlib.net: '*'
libexpat.github.io: '*'
runtime:
env:
HTTPD_ROOT: '{{prefix}}'
APACHE_CONFDIR: '{{prefix}}/conf'
HTTPD_ROOT: '{{prefix}}'
APACHE_CONFDIR: '{{prefix}}/conf'
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
gnu.org/libtool: '*'
gnu.org/autoconf: '*'
script:
- ./configure $ARGS
- make
- make install
script: |
./configure $ARGS
make
make install
- HTTPD_CONF=$({{prefix}}/bin/httpd -V 2>/dev/null | grep SERVER_CONFIG_FILE | sed 's/.*"\(.*\)"/\1/')
- |
sed -i.bak 's/#ServerName www.example.com:8080/ServerName localhost:8080/g' {{prefix}}/$HTTPD_CONF
rm {{prefix}}/$HTTPD_CONF.bak
HTTPD_CONF=$(tea httpd -V 2>/dev/null | grep SERVER_CONFIG_FILE | sed 's/.*"\(.*\)"/\1/')
port=8080
while lsof -i:$port >/dev/null 2>&1; do
((port++))
done
if [ "$port" -ne 8080 ]; then
sed -i '' "s/Listen 8080/Listen $port/" {{prefix}}/$HTTPD_CONF
fi
sed -i.bak 's/#ServerName www.example.com:8080/ServerName localhost:'"$port"'/g' {{prefix}}/$HTTPD_CONF
- run: |
sed -i.bak \
-e "s_{{prefix}}_\$(cd \$(dirname \$0)/.. \&\& pwd)_g" \
-e 's/^HTTPD='\''\(.*\)'\''$/HTTPD="$(cd $(dirname $0)\/.. \&\& pwd)\/bin\/httpd"/' \
apachectl envvars envvars-std
rm apachectl.bak envvars.bak envvars-std.bak
working-directory: ${{prefix}}/bin
cd "{{prefix}}/bin"
sed -i.bak \
-e "s_{{prefix}}_\$(cd \$(dirname \$0)/.. \&\& pwd)_g" \
-e "s/^HTTPD=\'\(.*\)\'$/HTTPD=\"\1\"/" \
apachectl envvars envvars-std
sed -i.bak \
-e 's_{{prefix}}_$(cd $(dirname $0)/.. && pwd)_g' \
-e 's/^HTTPD='\''\(.*\)'\''$/HTTPD="$(cd $(dirname $0)\/.. \&\& pwd)\/bin\/httpd"/' \
apachectl envvars envvars-std
- run: |
sed -i.bak \
-e 's|{{prefix}}|\${HTTPD_ROOT}|' \
httpd.conf
rm httpd.conf.bak
working-directory: ${{prefix}}/conf
rm apachectl.bak envvars.bak envvars-std.bak
cd ../conf
sed -i.bak \
-e 's_{{prefix}}_\${HTTPD\_ROOT}_' \
httpd.conf
rm httpd.conf.bak
env:
ARGS:
@ -97,7 +106,7 @@ provides:
- bin/rotatelogs
- bin/suexec
test:
script:
- httpd -v | grep {{version}}
- apachectl -t -f "$APACHE_CONFDIR/httpd.conf"
test:
script: |
httpd -v | grep {{version}}
apachectl -t -f "$APACHE_CONFDIR/httpd.conf"

View file

@ -1,75 +0,0 @@
distributable:
url: https://dlcdn.apache.org/jmeter/binaries/apache-jmeter-{{version.raw}}.tgz
strip-components: 1
versions:
url: https://dlcdn.apache.org/jmeter/binaries/
match: /apache-jmeter-\d+\.\d+(\.\d+)*\.tgz/
strip:
- /^apache-jmeter-/
- /\.tgz/
warnings:
- vendored
dependencies:
openjdk.org: '*'
runtime:
env:
JAVA_HOME: '{{deps.openjdk.org.prefix}}'
build:
dependencies:
gnu.org/wget: '*'
script:
- rm -r bin/*.bat bin/*.cmd
- mkdir -p {{prefix}}
- mv bin docs extras lib {{prefix}}/
- run: |
wget -O $PLUGINS_MANAGER_FILE $PLUGINS_MANAGER_URL
mv $PLUGINS_MANAGER_FILE {{prefix}}/lib/ext/
working-directory: plugins-manager
env:
PLUGINS_MANAGER_URL: https://search.maven.org/remotecontent?filepath=kg/apc/jmeter-plugins-manager/1.9/jmeter-plugins-manager-1.9.jar
PLUGINS_MANAGER_FILE: jmeter-plugins-manager-1.9.jar
provides:
- bin/jmeter
- bin/jmeter-server
- bin/mirror-server
test:
# Caused by: java.nio.file.InvalidPathException: Malformed input or input contains unmappable characters: /__w/pantry/pantry/testbeds/apache.org???jmeter-5.6.3
working-directory: $(mktemp -d)
script:
- cat $FIXTURE > test.jmx
- jmeter -n -t test.jmx | grep 'end of run'
- jmeter --version | grep {{version}}
fixture: |
<?xml version="1.0" encoding="UTF-8"?>
<jmeterTestPlan version="1.2" properties="5.0" jmeter="5.5">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="Test Plan" enabled="true">
</TestPlan>
<hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="Thread Group" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">1</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">1</stringProp>
</ThreadGroup>
<hashTree>
<DebugSampler guiclass="TestBeanGUI" testclass="DebugSampler" testname="Debug Sampler" enabled="true">
</DebugSampler>
<hashTree>
<JSR223PostProcessor guiclass="TestBeanGUI" testclass="JSR223PostProcessor" testname="JSR223 PostProcessor" enabled="true">
<stringProp name="cacheKey">true</stringProp>
<stringProp name="script">import java.util.Random
Random rand = new Random();
// This will break unless Groovy accepts the current version of the JDK
int rand_int1 = rand.nextInt(1000);
</stringProp>
<stringProp name="scriptLanguage">groovy</stringProp>
</JSR223PostProcessor>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>

View file

@ -24,14 +24,16 @@ dependencies:
build:
dependencies:
freedesktop.org/pkg-config: ^0.29
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
swig.org: ^4
python.org: ~3.11
python.org: ^3.11
script: |
./configure $ARGS
make --jobs {{ hw.concurrency }}
make install
env:
CFLAGS: '$CFLAGS -I{{deps.apache.org/apr-util.prefix}}/include/apr-1'
CFLAGS: "$CFLAGS -I{{deps.apache.org/apr-util.prefix}}/include/apr-1"
ARGS:
- --prefix="{{prefix}}"
- --disable-debug
@ -59,6 +61,4 @@ provides:
- bin/svnversion
test:
script: svn --version
# svn doen't like UTF-8 directories
working-directory: $(mktemp -d)
svn --version

View file

@ -11,6 +11,8 @@ dependencies:
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
gnu.org/automake: '*'
gnu.org/autoconf: '*'
gnu.org/libtool: '*'
@ -51,16 +53,12 @@ build:
- --without-lua
- --without-rs
- --without-swift
PY_PREFIX: '{{prefix}}'
PHP_PREFIX: '{{prefix}}'
# 0.19.0 has an unused `int j = 0;`.
CXXFLAGS: $CXXFLAGS -Wno-unused-but-set-variable
linux/aarch64:
CXXFLAGS: $CXXFLAGS -Wno-unused-but-set-variable -fPIC
darwin:
MACOSX_DEPLOYMENT_TARGET: 11.0
PY_PREFIX: "{{prefix}}"
PHP_PREFIX: "{{prefix}}"
provides:
- bin/thrift
test:
script: thrift --version | grep {{version}}
script:
thrift --version | grep {{version}}

View file

@ -1,18 +0,0 @@
# Apache ZooKeeper Usage Notes
## Starting the Server
To start the ZooKeeper server, specify the path to your configuration directory using the `ZOOCFGDIR` environment variable:
```bash
export ZOOCFGDIR=/path/to/your/config
zkServer start
```
## Stopping the Server
```bash
zkServer stop
```
That's it! Happy ZooKeeping!

View file

@ -1,73 +0,0 @@
distributable:
url: https://archive.apache.org/dist/zookeeper/zookeeper-{{version}}/apache-zookeeper-{{version}}.tar.gz
strip-components: 1
versions:
url: https://archive.apache.org/dist/zookeeper/
match: /zookeeper-\d+\.\d+\.\d+/
strip:
- /^zookeeper-/
dependencies:
openjdk.org: '*'
openssl.org: '*'
runtime:
env:
ZOODIR: '{{prefix}}'
build:
dependencies:
gnu.org/autoconf: '*'
gnu.org/automake: '*'
freedesktop.org/cppunit: '*'
gnu.org/libtool: '*'
maven.apache.org: '*'
freedesktop.org/pkg-config: '*'
linux:
gnu.org/gcc: '*'
script:
- cd ..
- rm -rf ./zookeeper || true
- mv $SRCROOT zookeeper
- cd zookeeper
- run: mkdir -p etc/zookeeper var/log/zookeeper var/run/zookeeper/data
working-directory: '{{prefix}}'
- mvn install -Pfull-build -DskipTests
- tar -xf zookeeper-assembly/target/apache-zookeeper-{{version}}-bin.tar.gz
- run: |
rm -f bin/*.cmd bin/*.txt
cp -r ./* {{prefix}}/
working-directory: apache-zookeeper-{{version}}-bin
- tar -xf zookeeper-assembly/target/apache-zookeeper-{{version}}-lib.tar.gz
- run: |
cp -r usr/include {{prefix}}/
cp -r usr/lib {{prefix}}/
working-directory: apache-zookeeper-{{version}}-lib
- run: rm -f *.txt *.md
working-directory: '{{prefix}}'
- run: |
ln -s zkCleanup.sh zkCleanup
ln -s zkCli.sh zkCli
ln -s zkEnv.sh zkEnv
ln -s zkServer-initialize.sh zkServer-initialize
ln -s zkServer.sh zkServer
ln -s zkSnapshotComparer.sh zkSnapshotComparer
ln -s zkSnapshotRecursiveSummaryToolkit.sh zkSnapshotRecursiveSummaryToolkit
ln -s zkSnapShotToolkit.sh zkSnapShotToolkit
ln -s zkTxnLogToolkit.sh zkTxnLogToolkit
working-directory: '{{prefix}}/bin'
- run: |
cp zoo_sample.cfg zoo.cfg
sed -i.bak 's|dataDir=/tmp/zookeeper|dataDir=\$ZOODIR/var/run/zookeper|' zoo.cfg
rm -f zoo.cfg.bak
working-directory: '{{prefix}}/conf'
provides:
- bin/zkCleanup
- bin/zkCli
- bin/zkEnv
- bin/zkServer-initialize
- bin/zkServer
- bin/zkSnapshotComparer
- bin/zkSnapshotRecursiveSummaryToolkit
- bin/zkSnapShotToolkit
- bin/zkTxnLogToolkit
test:
script:
- zkServer version | grep {{version}}

View file

@ -1,37 +0,0 @@
distributable:
url: https://github.com/iBotPeaches/Apktool/releases/download/v{{version}}/apktool_{{version}}.jar
versions:
github: iBotPeaches/Apktool
warnings:
- vendored
dependencies:
openjdk.org: ^21
build:
script:
- run: mkdir -p bin libexec/lib
working-directory: ${{prefix}}
- cp apktool.org-{{version}}.jar {{prefix}}/libexec/lib/
- run: |
echo '#!/bin/sh' > apktool
echo 'java -jar $(dirname $0)/../libexec/lib/apktool.org-{{version}}.jar "$@"' >> apktool
chmod +x apktool
working-directory: ${{prefix}}/bin
provides:
- bin/apktool
test:
dependencies:
curl.se: '*'
script:
- apktool --version | grep {{version}}
- curl -L "$TEST_APK" -o test.apk
- apktool d test.apk
- apktool b test
- ls test/dist | grep test.apk
env:
TEST_APK: https://raw.githubusercontent.com/facebook/redex/fa32d542d4074dbd485584413d69ea0c9c3cbc98/test/instr/redex-test.apk

View file

@ -1,29 +0,0 @@
distributable:
url: https://github.com/apollographql/rover/archive/refs/tags/v{{ version }}.tar.gz
strip-components: 1
provides:
- bin/rover
dependencies:
openssl.org: ^1.1
zlib.net: ^1
libgit2.org: ~1.7 # links to libgit2.so.1.7
versions:
github: apollographql/rover
strip: /v/
build:
dependencies:
rust-lang.org: '>=1.65'
rust-lang.org/cargo: '*'
freedesktop.org/pkg-config: ^0
linux:
gnu.org/make: '*'
perl.org: ^5 # openssl mod
script: cargo install --locked --path . --root {{prefix}}
test:
script:
- test "$(rover --version)" = "Rover {{version}}"

View file

@ -1,35 +0,0 @@
distributable:
url: https://registry.npmjs.org/appium/-/appium-{{version}}.tgz
strip-components: 1
versions:
url: https://npmjs.com/package/appium?activeTab=versions
match: /v\/\d+\.\d+\.\d+/
strip:
- /^v\//
dependencies:
npmjs.com: '*'
nodejs.org: ^10.13.0 || ^12 || ^14 || ^16 || ^18 || ^20
openjdk.org: '*'
build:
dependencies:
linux/x86-64:
python.org: ^3 # node-gyp needs python to build
script:
- chmod +x lib/appium.js
# required but not added
- run: EXTRA_PACKAGES="@appium/logger"
if: '>=2.7'
- npm install . $EXTRA_PACKAGES
--global
--prefix="{{prefix}}"
--install-links
provides:
- bin/appium
test:
- appium --version | grep {{version}}
- appium driver install uiautomator2

View file

@ -1,54 +0,0 @@
distributable:
url: https://github.com/apple-oss-distributions/remote_cmds/archive/refs/tags/{{version.tag}}.tar.gz
strip-components: 1
versions:
github: apple-oss-distributions/remote_cmds/tags
strip: /^remote_cmds-/
display-name: telnet
platforms:
- darwin
build:
dependencies:
curl.se: '*'
script:
- run: |
curl -L 'https://github.com/apple-oss-distributions/libtelnet/archive/refs/tags/libtelnet-13.tar.gz' | tar -xz --strip-components=1
xcodebuild \
OBJROOT=build/Intermediates \
SYMROOT=build/Products \
DSTROOT=build/Archive \
-IDEBuildLocationStyle=Custom \
-IDECustomDerivedDataLocation=$SRCROOT \
-arch $(uname -m)
cp build/Products/Release/libtelnet.a ./
cp -r build/Products/Release/usr/local/include/libtelnet ./
working-directory: libtelnet
- |
xcodebuild \
OBJROOT=build/Intermediates \
SYMROOT=build/Products \
DSTROOT=build/Archive \
OTHER_CFLAGS="${inherited} $CFLAGS -I$SRCROOT/libtelnet" \
OTHER_LDFLAGS="${inherited} $LDFLAGS -L$SRCROOT/libtelnet" \
-IDEBuildLocationStyle=Custom \
-IDECustomDerivedDataLocation=$SRCROOT \
-sdk macosx \
-arch $(uname -m) \
-target telnet
- install -D build/Products/Release/telnet {{prefix}}/bin/telnet
provides:
- bin/telnet
test:
- server=pkgx.dev
- port=80
- |
(echo -e "GET / HTTP/1.1\nHost: $server\n\n"; sleep 1; echo "quit") | telnet $server $port > response.txt || true
- cat response.txt | grep '301 Moved Permanently'

View file

@ -1,38 +0,0 @@
distributable:
url: git+https://github.com/aquasecurity/tfsec.git
ref: ${{version.tag}}
versions:
github: aquasecurity/tfsec
build:
dependencies:
go.dev: '>=1.19'
script:
- scripts/install.sh v{{version}}
- mkdir -p {{prefix}}/bin
- install tfsec {{prefix}}/bin/
provides:
- bin/tfsec
test:
- mkdir -p good bad
- run: |
cat <<EOF > good/main.tf
resource "aws_alb_listener" "my-alb-listener" {
port = "443"
protocol = "HTTPS"
}
EOF
- run: |
cat <<EOF > bad/main.tf
resource "aws_security_group_rule" "world" {
description = "A security group triggering tfsec AWS006."
type = "ingress"
cidr_blocks = ["0.0.0.0/0"]
}
EOF
- tfsec good | grep 'No problems'
- tfsec bad || true # tfsec returns 1 on problems detected, it's ok
- tfsec --version | grep {{version}}

View file

@ -1,37 +0,0 @@
distributable:
url: git+https://github.com/aquasecurity/trivy.git
ref: ${{version.tag}}
versions:
github: aquasecurity/trivy
dependencies:
curl.se/ca-certs: '*'
build:
dependencies:
go.dev: '>=1.21'
linux:
# running gcc failed: exit status 1
gnu.org/gcc: '*'
script:
- go build $ARGS -ldflags="$LDFLAGS" ./cmd/trivy
env:
ARGS:
- -trimpath
- -o={{prefix}}/bin/trivy
LDFLAGS:
- -s
- -w
- -X github.com/aquasecurity/trivy/pkg/version.ver={{version}}
- -X github.com/aquasecurity/trivy/pkg/version/app.ver={{version}}
linux:
LDFLAGS:
- -buildmode=pie
provides:
- bin/trivy
test:
- trivy image alpine:3.10
- trivy --version | grep {{version}}

View file

@ -1,14 +1,12 @@
distributable:
- url: https://github.com/arduino/arduino-cli/archive/refs/tags/{{version}}.tar.gz
strip-components: 1
- url: https://github.com/arduino/arduino-cli/archive/refs/tags/v{{version}}.tar.gz
strip-components: 1
url: https://github.com/arduino/arduino-cli/archive/refs/tags/{{version}}.tar.gz
strip-components: 1
versions:
github: arduino/arduino-cli
warnings:
- vendored
- vendored
build:
dependencies:

View file

@ -1,28 +0,0 @@
distributable:
url: https://github.com/matejak/argbash/archive/refs/tags/{{version}}.tar.gz
strip-components: 1
versions:
github: matejak/argbash
dependencies:
gnu.org/bash: '>=3'
gnu.org/autoconf: '*'
build:
script:
- mkdir -p '{{prefix}}'
- cp -r bin src '{{prefix}}'
provides:
- bin/argbash
- bin/argbash-init
- bin/argbash-1to2
test:
- argbash --version | grep {{version}}
- argbash --help | grep 'Argbash is an argument parser generator for Bash'
- argbash-init --pos positional-arg --opt option --opt-bool print minimal.m4
- cat minimal.m4 | grep 'This is just a script template'
- argbash minimal.m4 -o minimal.sh
- ./minimal.sh --help | grep 'Usage:'

View file

@ -1,32 +0,0 @@
distributable:
url: https://github.com/argoproj/argo-cd/archive/v{{version}}.tar.gz
strip-components: 1
display-name: argo-cd
versions:
github: argoproj/argo-cd
build:
dependencies:
go.dev: '*'
nodejs.org: '*'
yarnpkg.com: '*'
linux:
git-scm.org: '*'
script:
- make --jobs {{hw.concurrency}} dep-ui-local
- run: yarn build
working-directory: ui
- make --jobs {{hw.concurrency}} cli-local
- mkdir -p {{prefix}}/bin
- install dist/argocd {{prefix}}/bin/
env:
NODE_ENV: production
NODE_ONLINE_ENV: online
LDFLAGS:
provides:
- bin/argocd
test:
script:
- argocd --help
- touch argocd-config
- chmod 0600 argocd-config
- argocd context --config ./argocd-config | grep "CURRENT NAME SERVER"

View file

@ -1,27 +0,0 @@
distributable:
url: https://github.com/argoproj/argo-workflows/archive/v{{version}}.tar.gz
strip-components: 1
display-name: argo-workflows
versions:
github: argoproj/argo-workflows
build:
dependencies:
go.dev: '*'
script:
- make GIT_TAG="v{{version}}" GIT_COMMIT="v{{version}}" RELEASE_TAG=true STATIC_FILES=false GIT_TREE_STATE=clean GOARGS= dist/argo-linux-amd64
- mkdir -p {{prefix}}/bin
- install dist/argo-linux-amd64 {{prefix}}/bin/argo
env:
LDFLAGS:
- -s
- -w
provides:
- bin/argo
test:
script:
- argo version | grep "v{{version}}"

View file

@ -17,6 +17,8 @@ dependencies:
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
gnupg.org/libgcrypt: ^1
gnupg.org/libgpg-error: ^1
freedesktop.org/pkg-config: '*'

View file

@ -1,31 +0,0 @@
distributable:
url: git+https://github.com/alexellis/arkade
ref: ${{version.tag}}
display-name: Arkade
versions:
github: alexellis/arkade/releases/tags
provides:
- bin/arkade
build:
dependencies:
go.dev: ^1.21
git-scm.org: ^2
script:
go build -v -ldflags="$LDFLAGS" -o "{{prefix}}"/bin/arkade
env:
LDFLAGS:
- -s
- -w
- -X github.com/alexellis/arkade/pkg.Version={{version}}
- -X github.com/alexellis/arkade/pkg.GitCommit=$(git rev-parse HEAD)"
linux:
LDFLAGS:
- -buildmode=pie
test:
- arkade version | grep {{version}}
- arkade info openfaas

View file

@ -12,6 +12,8 @@ dependencies:
build:
dependencies:
rubygems.org: '*'
tea.xyz/gx/make: '*'
tea.xyz/gx/cc: '*'
env:
GEM_HOME: ${{prefix}}
GEM_PATH: ${{prefix}}

View file

@ -1,24 +0,0 @@
distributable:
url: https://github.com/asciinema/agg/archive/refs/tags/v{{version}}.tar.gz
strip-components: 1
display-name: agg
versions:
github: asciinema/agg/tags # reads github tags from github
dependencies:
rust-lang.org: ^1.56
build:
dependencies:
rust-lang.org/cargo: '*'
script:
cargo install --path . --root {{prefix}}
provides:
- bin/agg
test:
script:
test "$(agg --version)" = "agg {{version}}"

View file

@ -1,21 +0,0 @@
distributable:
url: https://github.com/asciinema/asciinema/archive/refs/tags/v{{version}}.tar.gz
strip-components: 1
display-name: asciinema
versions:
github: asciinema/asciinema/tags # reads github tags from github
dependencies:
python.org: ^3.12
build:
script: python-venv.sh {{prefix}}/bin/asciinema
provides:
- bin/asciinema
test:
script:
test "$(asciinema --version)" = "asciinema {{version}}"

View file

@ -26,6 +26,8 @@ runtime:
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
curl.se: '*'
gnu.org/patch: '*'
gnu.org/sed: '*'

View file

@ -1,43 +0,0 @@
distributable:
url: https://github.com/assimp/assimp/archive/v{{version}}.tar.gz
strip-components: 1
versions:
github: assimp/assimp
dependencies:
zlib.net: '*'
build:
dependencies:
gnu.org/make: '*'
cmake.org: '*'
ninja-build.org: '*'
darwin:
curl.se: '*'
gnu.org/patch: '*'
script:
- run: curl $PATCH | patch -p1 || true
if: <5.3.0
- cmake -S . -B build -G Ninja $CMAKE_ARGS
- cmake --build build
- cmake --install build
- run: cp -a contrib {{prefix}}/include
if: '>=5.3.0'
env:
PATCH: https://github.com/assimp/assimp/commit/5a89d6fee138f8bc979b508719163a74ddc9a384.patch?full_index=1
CMAKE_ARGS:
- -DCMAKE_INSTALL_PREFIX="{{prefix}}
- -DCMAKE_INSTALL_LIBDIR=lib
- -DCMAKE_BUILD_TYPE=Release
- -DCMAKE_FIND_FRAMEWORK=LAST
- -DCMAKE_VERBOSE_MAKEFILE=ON
- -Wno-dev
- -DASSIMP_BUILD_TESTS=OFF
- -DASSIMP_BUILD_ASSIMP_TOOLS=ON
- -DCMAKE_INSTALL_RPATH="{{prefix}}"
provides:
- bin/assimp
test:
script:
- cc -std=c++11 test.cpp -lassimp -o test
- ./test
- assimp export test.obj test.ply
- assimp version | grep {{version.marketing}}

View file

@ -1,5 +0,0 @@
#include <assimp/Importer.hpp>
int main() {
Assimp::Importer importer;
return 0;
}

View file

@ -1,28 +0,0 @@
# WaveFront .obj file - a single square based pyramid
# Start a new group:
g MySquareBasedPyramid
# List of vertices:
# Front left
v -0.5 0 0.5
# Front right
v 0.5 0 0.5
# Back right
v 0.5 0 -0.5
# Back left
v -0.5 0 -0.5
# Top point (top of pyramid).
v 0 1 0
# List of faces:
# Square base (note: normals are placed anti-clockwise).
f 4 3 2 1
# Triangle on front
f 1 2 5
# Triangle on back
f 3 4 5
# Triangle on left side
f 4 1 5
# Triangle on right side
f 2 3 5

View file

@ -1,27 +0,0 @@
distributable:
url: https://github.com/ast-grep/ast-grep/archive/refs/tags/{{version}}.tar.gz
strip-components: 1
versions:
github: ast-grep/ast-grep
build:
dependencies:
rust-lang.org: '*'
rust-lang.org/cargo: '*'
script:
- cargo install $CARGO_ARGS
env:
linux/aarch64:
RUSTFLAGS: "-C linker=cc"
CARGO_ARGS:
- --locked
- --root="{{prefix}}"
- --path=crates/cli
provides:
- bin/sg
- bin/ast-grep
test:
fixture: console.log('it is me')
script:
- mv $FIXTURE hi.js
- sg run -l js -p console.log hi.js | grep 'it is me'
- ast-grep --version | grep {{version}}

View file

@ -1,43 +0,0 @@
distributable:
url: https://github.com/astral-sh/ruff/archive/refs/tags/{{ version.tag }}.tar.gz
strip-components: 1
versions:
github: charliermarsh/ruff
strip: /^v /
build:
dependencies:
rust-lang.org: '>=1.60'
rust-lang.org/cargo: '*'
script:
- run: CRATE=ruff_cli
if: <0.0.242
- run: CRATE=crates/ruff_cli
if: '>=0.0.242<0.1.14'
- run: CRATE=crates/ruff
if: '>=0.1.14'
- cargo install --locked --path $CRATE --root {{prefix}}
provides:
- bin/ruff
test:
script:
# v0.1.12 introduced this test error:
# ruff failed
# Cause: Failed to create cache file '/__w/pantry/pantry/testbeds/astral.sh__ruff-0.1.12/.ruff_cache/0.1.12/10391082687706843805'
# Cause: No such file or directory (os error 2)
- run: mkdir -p .ruff_cache/{{version}}
if: '>=0.1.12'
- run:
- ruff -e $FIXTURE | grep "\`os\` imported but unused"
- ruff --fix $FIXTURE
if: <0.5
- run:
- (ruff check $FIXTURE || true) | grep "\`os\` imported but unused"
- ruff check --fix $FIXTURE
if: '>=0.5'
- test ! -s $FIXTURE
fixture: |
import os

View file

@ -1,63 +0,0 @@
distributable:
url: https://github.com/astral-sh/uv/releases/download/{{version}}/source.tar.gz
strip-components: 1
versions:
github: astral-sh/uv
provides:
- bin/uv
companions:
python.org: '*'
dependencies:
libgit2.org: ~1.7 # links to libgit2.so.1.7
build:
dependencies:
linux:
nixos.org/patchelf: ^0.18
cmake.org: ^3.28
rust-lang.org/cargo: ^0.77
maturin.rs: ^1.4.0
info-zip.org/unzip: ^6
script:
- maturin build --locked --release --out ./out
- run:
- unzip ./uv-{{version}}-*.whl
- install -D ./uv-{{version}}.data/scripts/uv {{prefix}}/bin/uv
working-directory: out
test:
dependencies:
curl.se: '*'
script:
- uv --version | grep {{version}}
- fixture:
contents: |
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello_world():
return "<p>Hello, World!</p>"
extname: .py
run:
- |
if which flask; then
false
fi
- |
uv venv
source .venv/bin/activate
uv pip install flask
- |
mv $FIXTURE app.py
set -m
flask run &
PID=$!
# otherwise the server may not be ready
- sleep 4
- test "$(curl 127.0.0.1:5000)" = "<p>Hello, World!</p>"
# TODO need to install a signal handler or our build servers could be left with a running flask process
- kill $PID

View file

@ -1,38 +0,0 @@
distributable:
url: https://github.com/ariga/atlas/archive/refs/tags/v{{version}}.tar.gz
strip-components: 1
versions:
github: ariga/atlas
provides:
- bin/atlas
build:
dependencies:
go.dev: ^1.20
working-directory: cmd/atlas
script:
- go mod download
- go build -v -trimpath -ldflags="$LDFLAGS" -o $BUILDLOC .
env:
GOPROXY: https://proxy.golang.org,direct
GOSUMDB: sum.golang.org
GO111MODULE: on
BUILDLOC: '{{prefix}}/bin/atlas'
LDFLAGS:
- -s
- -w
- -X ariga.io/atlas/cmd/atlas/internal/cmdapi.version=v{{version}}
linux:
# or segmentation fault
# fix found here https://github.com/docker-library/golang/issues/402#issuecomment-982204575
LDFLAGS:
- -buildmode=pie
test:
script:
- run: test "$(atlas version | head -n1)" = "atlas version v{{version}}"
if: <0.23
- run: test "$(atlas version | head -n1)" = "atlas unofficial version v{{version}}"
if: '>=0.23'

View file

@ -1,16 +0,0 @@
distributable:
url: git+https://github.com/python-attrs/attrs.git
ref: ${{version.tag}}
versions:
github: python-attrs/attrs
dependencies:
python.org: ~3.11
runtime:
env:
PYTHONPATH: ${{prefix}}/lib/python{{deps.python.org.version.marketing}}/site-packages:$PYTHONPATH
build: python -m pip install --prefix={{prefix}} .
test: python -c 'import attrs; print(attrs.__version__)' | grep {{version}}

View file

@ -12,6 +12,7 @@ dependencies:
build:
dependencies:
tea.xyz/gx/make: '*'
gnu.org/autoconf: '*'
gnu.org/automake: '*'
gnu.org/bison: '*'

View file

@ -1,7 +1,7 @@
distributable:
url: https://registry.npmjs.org/aws-cdk/-/aws-cdk-{{version}}.tgz
strip-components: 1
display-name: aws/cdk
display-name: aws-cdk
versions:
url: https://www.npmjs.com/package/aws-cdk?activeTab=versions
match: /v\/\d+\.\d+\.\d+/
@ -31,4 +31,4 @@ provides:
- bin/cdk
test:
script:
- cdk --version | grep {{version}}
- cdk --version | grep {{version}}

View file

@ -8,18 +8,17 @@ versions:
github: aws/aws-cli/tags
dependencies:
python.org: ">=3.7"
sourceware.org/libffi: ^3
pkgx.sh: ^1
build:
dependencies:
rust-lang.org: '>=1.48.0' # needed for cryptography
rust-lang.org/cargo: '*'
python.org: '>=3.7<3.12'
script:
- bkpyvenv stage {{prefix}} {{version}}
- ${{prefix}}/venv/bin/pip install .
- bkpyvenv seal {{prefix}} aws
tea.xyz/gx/cc: c99
tea.xyz/gx/make: "*"
rust-lang.org: ">=1.48.0" # needed for cryptography
rust-lang.org/cargo: "*"
script: |
python-venv.sh {{prefix}}/bin/aws
env:
# python includes are subdirectoried under the version
# frankly, python's versioning system causes a lot of
@ -28,7 +27,7 @@ build:
# instead of the beginning.
CPATH: $CPATH:{{deps.python.org.prefix}}/include/python{{deps.python.org.version.marketing}}
test:
test: |
# Pretty much anything else appears to require AWS credentials
aws --version

View file

@ -1,25 +0,0 @@
distributable:
url: https://github.com/aws/aws-sam-cli/archive/refs/tags/{{version.tag}}.tar.gz
strip-components: 1
versions:
github: aws/aws-sam-cli
dependencies:
pkgx.sh: ^1
build:
dependencies:
python.org: ^3.11
rust-lang.org: '*'
script:
- bkpyvenv stage {{prefix}} {{version}}
- ${{prefix}}/venv/bin/pip install .
- bkpyvenv seal {{prefix}} sam
provides:
- bin/sam
test:
- sam --version | grep {{version}}
- sam validate --region us-east-1 2>&1 | grep 'is a valid SAM Template'

View file

@ -1,17 +0,0 @@
AWSTemplateFormatVersion: '2010-09-09'
Transform: AWS::Serverless-2016-10-31
Description: A simple SAM template for a Lambda function triggered by API Gateway
Resources:
HelloWorldFunction:
Type: AWS::Serverless::Function
Properties:
Handler: index.handler
Runtime: nodejs14.x
CodeUri: .
Events:
HelloWorldApi:
Type: Api
Properties:
Path: /hello
Method: get

View file

@ -1,68 +0,0 @@
distributable:
url: https://github.com/DannyBen/bashly/archive/v{{version}}.tar.gz
strip-components: 1
versions:
github: DannyBen/bashly
dependencies:
ruby-lang.org: ^3.1
rubygems.org: '*'
companions:
gnu.org/bash: '>=4' # requires newers bashes to work properly
build:
dependencies:
rubygems.org: '*'
env:
GEM_HOME: ${{prefix}}
GEM_PATH: ${{prefix}}
script:
- gem build bashly.gemspec
- gem install
--no-user-install
--bindir={{prefix}}/gems/bin
--no-document
bashly-{{version}}.gem
- run: |
cat $PROP >bashly
chmod +x bashly
working-directory: ${{prefix}}/bin
prop: |
#!/bin/sh
export GEM_HOME="$(cd "$(dirname "$0")"/.. && pwd)"
export GEM_PATH="$GEM_HOME"
export PATH="$GEM_HOME/gems/bin:$PATH"
exec "$GEM_HOME"/gems/bin/bashly "$@"
provides:
- bin/bashly
test:
- bashly init --minimal
- run: cp $FIXTURE bashly.yml
fixture: |
name: download
help: Sample minimal application without commands
version: 0.1.0
args:
- name: source
required: true
help: URL to download from
- name: target
help: "Target filename (default: same as source)"
flags:
- long: --force
short: -f
help: Overwrite existing files
examples:
- download example.com
- download example.com ./output -f
- bashly generate
- ./download --help

View file

@ -8,7 +8,12 @@ versions:
dependencies:
zlib.net: '*'
build: make LDFLAGS=-lz PREFIX={{prefix}} install
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
script: |
make LDFLAGS=-lz PREFIX={{prefix}} install
provides:
- bin/bcrypt
@ -20,3 +25,4 @@ test:
mv test.txt.bfe test.out.txt.bfe
printf '12345678\n' | bcrypt -r test.out.txt.bfe
cat test.out.txt

View file

@ -1,26 +0,0 @@
distributable:
url: https://github.com/biomejs/biome/archive/refs/tags/cli/v{{ version }}.tar.gz
strip-components: 1
provides:
- bin/biome
versions:
github: biomejs/biome
strip: /cli/v/
build:
working-directory: crates/biome_cli
dependencies:
rust-lang.org: '>=1.65'
rust-lang.org/cargo: '*'
script:
cargo install --locked --path . --root {{prefix}}
env:
RUSTFLAGS: "-C strip=symbols"
BIOME_VERSION: 'v{{version}}'
test:
script:
- |
test "$(biome --version)" = "Version: v{{version}}"

View file

@ -12,29 +12,25 @@ dependencies:
libevent.org: ^2
zeromq.org: ^4
sqlite.org: ^3
linux:
gnu.org/gcc/libstdcxx: '*' # c++20 support and libc++
build:
dependencies:
gnu.org/autoconf: ^2
gnu.org/automake: ^1
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
freedesktop.org/pkg-config: ^0.29
gnu.org/libtool: ^2
linux:
gnu.org/gcc: '*' # c++20 support and libc++
script:
- ./autogen.sh
- ./configure $ARGS
- make --jobs {{ hw.concurrency }} install
script: |
./autogen.sh
./configure $ARGS
make --jobs {{ hw.concurrency }} install
env:
linux:
LDFLAGS: $LDFLAGS -lstdc++fs
ARGS:
- --prefix={{prefix}}
- --disable-debug
- --disable-tests # needs: `hexedit`
- --disable-bench # fails: linux/aarch64/v24.0.1
- --disable-tests # needs: `hexedit`
- --disable-bench # fails: linux/aarch64/v24.0.1
provides:
- bin/bitcoin-cli
@ -43,4 +39,5 @@ provides:
- bin/bitcoin-wallet
- bin/bitcoind
test: bitcoind -version
test:
bitcoind -version

View file

@ -1,25 +0,0 @@
distributable:
url: https://github.com/opentensor/bittensor/archive/refs/tags/{{version.tag}}.tar.gz
strip-components: 1
display-name: Bittensor
versions:
github: opentensor/bittensor
dependencies:
pkgx.sh: ^1
build:
dependencies:
python.org: ~3.11
script:
- bkpyvenv stage {{prefix}} {{version}}
- ${{prefix}}/venv/bin/pip install .
- bkpyvenv seal {{prefix}} btcli
test:
btcli --help | grep {{version}}
provides:
- bin/btcli

View file

@ -1,41 +0,0 @@
distributable:
url: https://registry.npmjs.org/@bitwarden/cli/-/cli-{{version}}.tgz
strip-components: 1
versions:
npm: '@bitwarden/cli'
dependencies:
nodejs.org: ^20
build:
dependencies:
npmjs.com: '*'
linux/x86-64:
python.org: ^3 # needed to build some native modules
script:
- npm i husky
- git init
- npm i $ARGS .
- run: ln -s ../libexec/bin/bw bw
working-directory: ${{prefix}}/bin
env:
linux:
CC: clang
CXX: clang++
LD: clang
ARGS:
- -ddd
- --global
- --build-from-source
- --prefix={{prefix}}/libexec
- --install-links
- --unsafe-perm
provides:
- bin/bw
test:
- bw generate --length 10
- echo "Testing" | bw encode | grep 'VGVzdGluZw'
- bw --version | grep {{version}}

View file

@ -9,6 +9,7 @@ versions:
build:
dependencies:
gnu.org/gcc: '*'
tea.xyz/gx/make: '*'
script: |
./configure $ARGS
make --jobs {{ hw.concurrency }} install

View file

@ -6,7 +6,7 @@ versions:
github: bloomreach/s4cmd/tags
dependencies:
python.org: '>=3<3.12'
python.org: ^3
build:
python-venv.sh {{prefix}}/bin/s4cmd

View file

@ -10,31 +10,24 @@ dependencies:
facebook.com/zstd: ^1
build:
script:
- ./bootstrap.sh --prefix={{ prefix }}
- ./b2 $ARGS
# boost.org has libs that end up with name @rpath/libboost_atomic.dylib (offset 24)
# so we need to add @loader_path to the rpath
- run: |
for LIB in *.dylib; do
install_name_tool -add_rpath @loader_path $LIB
done
working-directory: ${{prefix}}/lib
if: darwin
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
script: |
./bootstrap.sh --prefix={{ prefix }}
./b2 $ARGS
env:
ARGS:
- install
- --prefix={{ prefix }}
darwin:
ARGS:
- linkflags=-Wl,-headerpad_max_install_names
linux:
linux/x86-64:
ARGS:
- cxxflags=-fPIC
- linkflags=-fPIC
test:
dependencies:
tea.xyz/gx/cc: c99
script: |
c++ test.cpp -std=c++14 -lboost_iostreams -lzstd
./a.out

View file

@ -1,23 +0,0 @@
distributable:
url: https://github.com/ekzhang/bore/archive/refs/tags/v{{ version }}.tar.gz
strip-components: 1
provides:
- bin/bore
versions:
github: ekzhang/bore
strip: /v/
build:
dependencies:
rust-lang.org: '>=1.65'
rust-lang.org/cargo: '*'
script:
# The --locked flag was causing trouble:
# It complained about proc-macro2 (proc_macro_span_shrink)
- cargo install --path . --root {{prefix}}
test:
script:
- test "$(bore --version)" = "bore-cli {{version}}"

View file

@ -1,50 +0,0 @@
distributable:
url: https://breakfastquay.com/files/releases/rubberband-{{version}}.tar.bz2
strip-components: 1
display-name: rubberband
versions:
url: https://breakfastquay.com/files/releases/
match: /rubberband-\d+\.\d+\.\d+\.tar\.bz2/
strip:
- /^rubberband-/
- /\.tar\.bz2$/
dependencies:
github.com/libsndfile/libsamplerate: ^0.2
github.com/libsndfile/libsndfile: ^1.2
linux:
fftw.org: ^3.3
ladspa.org: ^1.17
vamp-plugins.org: ^2.9
build:
dependencies:
mesonbuild.com: ^1.3.2
ninja-build.org: '*'
script:
- meson setup build $ARGS
- meson compile -C build --verbose
- meson install -C build
env:
ARGS:
- --prefix={{prefix}}
- --libdir={{prefix}}/lib
- --buildtype=release
- --wrap-mode=nofallback
- -Dresampler=libsamplerate
linux:
# undefined reference to `main'
# error: ld returned 1 exit status
LDFLAGS: -fPIC
ARGS:
- -Dfft=fftw
provides:
- bin/rubberband
test:
- rubberband -V 2>&1 | grep {{version}}
- rubberband -t2 test.wav out.wav 2>&1 | grep 'Processing...'
- ls | grep out.wav

View file

@ -1,22 +0,0 @@
distributable:
url: https://github.com/brxken128/dexios/archive/refs/tags/v{{ version }}.tar.gz
strip-components: 1
provides:
- bin/dexios
versions:
github: brxken128/dexios
strip: /v/
build:
working-directory: dexios
dependencies:
rust-lang.org: '>=1.65'
rust-lang.org/cargo: '*'
script:
cargo install --locked --path . --root {{prefix}}
test:
script:
- test "$(dexios --version)" = "dexios {{version}}"

View file

@ -1,31 +0,0 @@
distributable:
url: https://github.com/budimanjojo/talhelper/archive/refs/tags/{{version.tag}}.tar.gz
strip-components: 1
versions:
github: budimanjojo/talhelper
build:
dependencies:
go.dev: ^1.21
script: go build $ARGS -ldflags="$LDFLAGS"
env:
ARGS:
- -trimpath
- -o={{prefix}}/bin/talhelper
LDFLAGS:
- -s
- -w
- -X github.com/budimanjojo/talhelper/cmd.version={{version}}
linux:
LDFLAGS:
- -buildmode=pie
provides:
- bin/talhelper
test:
# removed in v3, oddly
- run: talhelper --version | grep {{version}}
if: <3
- talhelper gensecret | grep 'bootstraptoken'

View file

@ -1,34 +0,0 @@
distributable:
url: https://github.com/bufbuild/buf/archive/refs/tags/v{{version}}.tar.gz
strip-components: 1
versions:
github: bufbuild/buf
provides:
- bin/buf
build:
script: |
go mod download
mkdir -p "{{ prefix }}"/bin
go build -v -trimpath -ldflags="$LDFLAGS" -o $BUILDLOC ./cmd/buf
dependencies:
go.dev: ^1.20
env:
GO111MODULE: on
CGO_ENABLED: 0
BUILDLOC: '{{prefix}}/bin/buf'
LDFLAGS:
- -s
- -w
- -X main.version={{version}}
- -X main.debugMode=false
linux:
# or segmentation fault
# fix found here https://github.com/docker-library/golang/issues/402#issuecomment-982204575
LDFLAGS:
- -buildmode=pie
test: |
buf --version | grep {{version}}

View file

@ -1,24 +0,0 @@
distributable:
url: https://github.com/buildpacks/pack/archive/refs/tags/{{version.tag}}.tar.gz
strip-components: 1
versions:
github: buildpacks/pack
build:
dependencies:
go.dev: ^1.22
script: go build -v -ldflags="${GO_LDFLAGS}" -o "{{ prefix }}"/bin/pack ./cmd/pack
env:
GO_LDFLAGS:
- -s
- -w
- -X github.com/buildpacks/pack.Version={{ version }}
linux:
GO_LDFLAGS:
- -buildmode=pie
provides:
- bin/pack
test: test "$(pack version)" = "{{ version }}"

View file

@ -5,8 +5,8 @@ warnings:
- vendored
versions:
github: oven-sh/bun
strip: /^bun-/
github: oven-sh/bun/releases
strip: /^Bun /
#FIXME proper system for re-using pre-built binaries
# we must require the vendor to provide signatures against a published public

View file

@ -1,59 +0,0 @@
distributable:
url: https://github.com/bytebase/bytebase/archive/refs/tags/{{version.tag}}.tar.gz
strip-components: 1
versions:
github: bytebase/bytebase
dependencies:
nodejs.org: '>=20.10'
platforms:
- linux
- darwin/aarch64
# this changed in 2.16.0:
# case runtime.GOOS == "darwin" && runtime.GOARCH == "arm64":
# tarName = "mongoutil-1.6.1-darwin-arm64.txz"
# case runtime.GOOS == "linux" && runtime.GOARCH == "amd64":
# tarName = "mongoutil-1.6.1-linux-amd64.txz"
# case runtime.GOOS == "linux" && runtime.GOARCH == "arm64":
# tarName = "mongoutil-1.6.1-linux-arm64.txz"
# default:
# return "", "", errors.Errorf("unsupported platform: %s/%s", runtime.GOOS, runtime.GOARCH)
build:
dependencies:
go.dev: ~1.22
pnpm.io: '*'
linux:
kerberos.org: 1 # added in 2.16.0
# error: invalid linker name in argument '-fuse-ld=gold'
gnu.org/gcc: '*'
script:
# otherwise segfaults
- run: sed -i 's/-ldflags "/-ldflags "-buildmode=pie /' build_bytebase.sh
if: linux
working-directory: scripts
- ./scripts/build_bytebase.sh {{prefix}}/bin
provides:
- bin/bytebase
test:
dependencies:
curl.se: '*'
pkgx.sh: ^1
script:
- bytebase version | grep {{version}}
# we'd love to test this more on linux, but we have issues running initdb as root;
# so, we'll satisfy ourselves with the above and testing on darwin
- run: exit 0
if: linux
- FREE_PORT=$(pkgx get-port | tail -n1)
- bytebase --port $FREE_PORT > out.log 2>&1 &
- PID=$!
- sleep 35
- curl -L http://localhost:$FREE_PORT | grep 'Bytebase' || cat out.log
- kill $PID
- cat out.log | grep "has started on port $FREE_PORT"

View file

@ -10,6 +10,9 @@ versions:
- /.tar.gz/
build:
dependencies:
tea.xyz/gx/cc: c99
tea.xyz/gx/make: '*'
script: |
./configure --prefix={{ prefix }}
make --jobs {{hw.concurrency}}
@ -21,6 +24,8 @@ build:
LDFLAGS: -headerpad_max_install_names $LDFLAGS
LDXXFLAGS: -headerpad_max_install_names $LDXXFLAGS
test:
dependencies:
tea.xyz/gx/cc: c99
script: |
cc test.c -o test -lmpdec
./test

Some files were not shown because too many files have changed in this diff Show more