mirror of
https://github.com/ivabus/pantry
synced 2024-11-26 18:25:08 +03:00
+cJSON using +tea.xyz/brewkit
This commit is contained in:
parent
faa280a0ea
commit
35f8578d04
25 changed files with 159 additions and 1437 deletions
12
.github/workflows/build.yml
vendored
12
.github/workflows/build.yml
vendored
|
@ -62,10 +62,7 @@ jobs:
|
||||||
echo "$PWD/scripts:$TEA_PREFIX/tea.xyz/var/pantry/scripts" >> $GITHUB_PATH
|
echo "$PWD/scripts:$TEA_PREFIX/tea.xyz/var/pantry/scripts" >> $GITHUB_PATH
|
||||||
echo "TEA_PANTRY_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
|
echo "TEA_PANTRY_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
|
||||||
|
|
||||||
- run: sort.ts ${{ inputs.projects }}
|
- run: tea +tea.xyz/brewkit build ${{ inputs.projects }}
|
||||||
id: sorted
|
|
||||||
|
|
||||||
- run: build.ts ${{ steps.sorted.outputs.pkgs }}
|
|
||||||
id: build
|
id: build
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ github.token }}
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
@ -112,14 +109,13 @@ jobs:
|
||||||
container: ${{ matrix.platform.container }}
|
container: ${{ matrix.platform.container }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- uses: actions/cache@v2
|
- uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: ${{ needs.get-platform.outputs.cache-set }}
|
path: ${{ needs.get-platform.outputs.cache-set }}
|
||||||
key: ${{ runner.os }}-deno-test-${{ hashFiles('deno.jsonc')}}
|
key: ${{ runner.os }}-deno-test-${{ hashFiles('deno.jsonc')}}
|
||||||
- uses: teaxyz/setup@v0
|
|
||||||
|
|
||||||
- name: configure scripts PATH
|
- uses: teaxyz/setup@v0
|
||||||
run: echo "$PWD/scripts:$TEA_PREFIX/tea.xyz/var/pantry/scripts" >> $GITHUB_PATH
|
|
||||||
|
|
||||||
- uses: actions/download-artifact@v3
|
- uses: actions/download-artifact@v3
|
||||||
with:
|
with:
|
||||||
|
@ -128,7 +124,7 @@ jobs:
|
||||||
- name: extract bottles
|
- name: extract bottles
|
||||||
run: tar xzf artifacts.tgz -C $TEA_PREFIX
|
run: tar xzf artifacts.tgz -C $TEA_PREFIX
|
||||||
|
|
||||||
- run: test.ts ${{ inputs.projects }}
|
- run: tea +tea.xyz/brewkit test ${{ inputs.projects }}
|
||||||
env:
|
env:
|
||||||
TEA_PANTRY_PATH: ${{ github.workspace }}
|
TEA_PANTRY_PATH: ${{ github.workspace }}
|
||||||
|
|
||||||
|
|
6
.github/workflows/ci.yml
vendored
6
.github/workflows/ci.yml
vendored
|
@ -9,9 +9,13 @@ jobs:
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- uses: technote-space/get-diff-action@v6
|
- uses: technote-space/get-diff-action@v6
|
||||||
id: diff
|
id: get-diff
|
||||||
with:
|
with:
|
||||||
PATTERNS: projects/**/package.yml
|
PATTERNS: projects/**/package.yml
|
||||||
|
- id: diff
|
||||||
|
run: |
|
||||||
|
RESULT=$(echo ${{ steps.get-diff.outputs.diff }} | sed 's#projects/\(.*\)/.*#\1#')
|
||||||
|
echo "diff=$RESULT" >> $GITHUB_OUTPUT
|
||||||
build:
|
build:
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
|
|
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -1,2 +1,4 @@
|
||||||
# for pre 1!
|
/tea.out
|
||||||
|
|
||||||
|
#TODO commit after v1
|
||||||
/deno.lock
|
/deno.lock
|
||||||
|
|
97
README.md
97
README.md
|
@ -9,13 +9,102 @@ request against [pantry.extra] first.
|
||||||
|
|
||||||
# Use with tea/cli
|
# Use with tea/cli
|
||||||
|
|
||||||
tea/cli clones/updates this pantry and [pantry.extra] when installed with the
|
[tea/cli] clones/updates this pantry and [pantry.extra] when installed with
|
||||||
installer or when you run `tea --sync`. At this time pantries are not
|
the installer or when you run `tea --sync`. At this time pantries are not
|
||||||
versioned.
|
versioned.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Contributing
|
# Contributing
|
||||||
|
|
||||||
See the contributing guide in [pantry.zero].
|
```sh
|
||||||
|
$ git clone https://github.com/teaxyz/pantry.core
|
||||||
|
|
||||||
[pantry.zero]: https://github.com/teaxyz/pantry.zero#contributing
|
$ cd pantry.core
|
||||||
|
# all the following commands operate in `./tea.out`
|
||||||
|
# your tea installation remains untouched
|
||||||
|
|
||||||
|
$ xc init
|
||||||
|
# ^^ creates a “wip” package.yml
|
||||||
|
|
||||||
|
$ xc edit
|
||||||
|
# ^^ opens the new package.yml in your EDITOR
|
||||||
|
|
||||||
|
$ xc build
|
||||||
|
# ^^ will probably require a (zero permissions) GitHub [PAT].
|
||||||
|
# Using `gh auth login` is the easiest way to set this up.
|
||||||
|
|
||||||
|
$ xc test
|
||||||
|
# ^^ you need to write a test that verifies the package works
|
||||||
|
|
||||||
|
$ gh repo fork
|
||||||
|
$ git branch -m my-new-package
|
||||||
|
$ git push origin my-new-package
|
||||||
|
$ gh pr create
|
||||||
|
```
|
||||||
|
|
||||||
|
## Packaging Guide
|
||||||
|
|
||||||
|
Packaging can be cumbersome.
|
||||||
|
Our [wiki] is our packaging knowledge base.
|
||||||
|
For other assistance, start a [discussion].
|
||||||
|
|
||||||
|
## After Your Contribution
|
||||||
|
|
||||||
|
We build “bottles” (tar’d binaries) and upload them to both our centralized
|
||||||
|
bottle storage and decentralized [IPFS].
|
||||||
|
|
||||||
|
tea automatically builds new releases of packages *as soon as they are
|
||||||
|
released* (usually starting the builds within seconds). There is no need to
|
||||||
|
submit PRs for updates.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Tasks
|
||||||
|
|
||||||
|
The following can all be run with `xc`, eg. `xc init`.
|
||||||
|
|
||||||
|
## Init
|
||||||
|
|
||||||
|
Creates a new package at `./projects/wip/$RANDOM_TEA_BLEND/package.yml`.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
tea -E +tea.xyz/brewkit init
|
||||||
|
```
|
||||||
|
|
||||||
|
## Edit
|
||||||
|
|
||||||
|
Opens all wip packages in `$EDITOR`.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
tea -E +tea.xyz/brewkit edit
|
||||||
|
```
|
||||||
|
|
||||||
|
## Build
|
||||||
|
|
||||||
|
Builds all wip packages to `./tea.out`.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
tea -E +tea.xyz/brewkit build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Test
|
||||||
|
|
||||||
|
Tests all wip packages.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
tea -E +tea.xyz/brewkit test
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
[pantry.zero]: https://github.com/teaxyz/pantry.zero
|
||||||
[pantry.extra]: https://github.com/teaxyz/pantry.extra
|
[pantry.extra]: https://github.com/teaxyz/pantry.extra
|
||||||
|
[wiki]: https://github.com/teaxyz/pantry.zero/wiki
|
||||||
|
[tea/cli]: https://github.com/teaxyz/cli
|
||||||
|
[discussion]: https://github.com/orgs/teaxyz/discussions
|
||||||
|
[PAT]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token
|
||||||
|
[IPFS]: https://ipfs.tech
|
||||||
|
|
|
@ -17,7 +17,13 @@
|
||||||
"tea": {
|
"tea": {
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"deno.land": "^1.30"
|
"deno.land": "^1.30"
|
||||||
|
},
|
||||||
|
"env": {
|
||||||
|
// if your primary tea.prefix is somewhere else then you’ll
|
||||||
|
// need to `tea --sync` in this dev-env
|
||||||
|
"TEA_PANTRY_PATH": "{{srcroot}}:{{home}}/.tea/tea.xyz/var/pantry",
|
||||||
|
"TEA_PREFIX": "{{srcroot}}/tea.out"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"importMap": "https://raw.githubusercontent.com/teaxyz/cli/v0.22.1/import-map.json"
|
"importMap": "https://raw.githubusercontent.com/teaxyz/cli/v0.22/import-map.json"
|
||||||
}
|
}
|
||||||
|
|
47
projects/github.com/DaveGamble/cJSON/package.yml
Normal file
47
projects/github.com/DaveGamble/cJSON/package.yml
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
distributable:
|
||||||
|
url: https://github.com/DaveGamble/cJSON/archive/v1.7.15.tar.gz
|
||||||
|
strip-components: 1
|
||||||
|
|
||||||
|
versions:
|
||||||
|
github: DaveGamble/cJSON
|
||||||
|
|
||||||
|
build:
|
||||||
|
dependencies:
|
||||||
|
tea.xyz/gx/cc: c99
|
||||||
|
tea.xyz/gx/make: '*'
|
||||||
|
cmake.org: ^3
|
||||||
|
working-directory: build
|
||||||
|
script: |
|
||||||
|
cmake .. $ARGS
|
||||||
|
make --jobs {{ hw.concurrency }} install
|
||||||
|
env:
|
||||||
|
ARGS:
|
||||||
|
- -DENABLE_CJSON_UTILS=On
|
||||||
|
- -DENABLE_CJSON_TEST=Off
|
||||||
|
- -DBUILD_SHARED_AND_STATIC_LIBS=On
|
||||||
|
- -DCMAKE_INSTALL_PREFIX="{{ prefix }}"
|
||||||
|
- -DCMAKE_BUILD_TYPE=Release
|
||||||
|
|
||||||
|
test:
|
||||||
|
dependencies:
|
||||||
|
tea.xyz/gx/cc: c99
|
||||||
|
script: |
|
||||||
|
mv $FIXTURE $FIXTURE.c
|
||||||
|
cc $FIXTURE.c -lcjson
|
||||||
|
./a.out
|
||||||
|
fixture: |
|
||||||
|
#include <cjson/cJSON.h>
|
||||||
|
int main()
|
||||||
|
{
|
||||||
|
char *s = "{\"key\":\"value\"}";
|
||||||
|
cJSON *json = cJSON_Parse(s);
|
||||||
|
if (!json) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
cJSON *item = cJSON_GetObjectItem(json, "key");
|
||||||
|
if (!item) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
cJSON_Delete(json);
|
||||||
|
return 0;
|
||||||
|
}
|
|
@ -1,14 +0,0 @@
|
||||||
# build all
|
|
||||||
|
|
||||||
```sh
|
|
||||||
scripts/ls.ts | xargs scripts/sort.ts | xargs scripts/build.ts
|
|
||||||
```
|
|
||||||
|
|
||||||
# test all
|
|
||||||
|
|
||||||
`each.ts` reduces output for each input to a concise ✅ or ❌ based on exit
|
|
||||||
status.
|
|
||||||
|
|
||||||
```sh
|
|
||||||
scripts/ls.ts | xargs scripts/each.ts scripts/test.ts
|
|
||||||
```
|
|
|
@ -1,171 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-run
|
|
||||||
- --allow-env
|
|
||||||
- --allow-read
|
|
||||||
- --allow-write={{tea.prefix}}
|
|
||||||
dependencies:
|
|
||||||
nixos.org/patchelf: '*'
|
|
||||||
darwinsys.com/file: 5
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { useCellar } from "hooks"
|
|
||||||
import { PackageRequirement, Installation, Package } from "types"
|
|
||||||
import { backticks, run, host, pkg as pkgutils } from "utils"
|
|
||||||
import Path from "path"
|
|
||||||
|
|
||||||
|
|
||||||
if (import.meta.main) {
|
|
||||||
const cellar = useCellar()
|
|
||||||
const [installation, ...pkgs] = Deno.args
|
|
||||||
await fix_rpaths(
|
|
||||||
await cellar.resolve(new Path(installation)),
|
|
||||||
pkgs.map(pkgutils.parse)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
//TODO this is not resilient to upgrades (obv)
|
|
||||||
//NOTE solution is to have the rpath reference major version (or more specific if poss)
|
|
||||||
|
|
||||||
/// fix rpaths or install names for executables and dynamic libraries
|
|
||||||
export default async function fix_rpaths(installation: Installation, pkgs: (Package | PackageRequirement)[]) {
|
|
||||||
const skip_rpaths = [
|
|
||||||
"go.dev", // skipping because for some reason patchelf breaks the go binary resulting in the only output being: `Segmentation Fault`
|
|
||||||
"tea.xyz", // this causes tea to pass -E/--version (and everything else?) directly to deno, making it _too_ much of a wrapper.
|
|
||||||
]
|
|
||||||
if (skip_rpaths.includes(installation.pkg.project)) {
|
|
||||||
console.info(`skipping rpath fixes for ${installation.pkg.project}`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
console.info("doing SLOW rpath fixes…")
|
|
||||||
for await (const [exename] of exefiles(installation.path)) {
|
|
||||||
await set_rpaths(exename, pkgs, installation)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
//TODO it's an error if any binary has bad rpaths before bottling
|
|
||||||
//NOTE we should have a `safety-inspector` step before bottling to check for this sort of thing
|
|
||||||
// and then have virtual env manager be more specific via (DY)?LD_LIBRARY_PATH
|
|
||||||
//FIXME somewhat inefficient for eg. git since git is mostly hardlinks to the same file
|
|
||||||
async function set_rpaths(exename: Path, pkgs: (Package | PackageRequirement)[], installation: Installation) {
|
|
||||||
if (host().platform != 'linux') throw new Error()
|
|
||||||
|
|
||||||
const cellar = useCellar()
|
|
||||||
const our_rpaths = await Promise.all(pkgs.map(pkg => prefix(pkg)))
|
|
||||||
|
|
||||||
const cmd = await (async () => {
|
|
||||||
//FIXME we need this for perl
|
|
||||||
// however really we should just have an escape hatch *just* for stuff that sets its own rpaths
|
|
||||||
const their_rpaths = (await backticks({
|
|
||||||
cmd: ["patchelf", "--print-rpath", exename],
|
|
||||||
}))
|
|
||||||
.split(":")
|
|
||||||
.compact(x => x.chuzzle())
|
|
||||||
//^^ split has ridiculous empty string behavior
|
|
||||||
|
|
||||||
const rpaths = [...their_rpaths, ...our_rpaths]
|
|
||||||
.map(x => {
|
|
||||||
const transformed = transform(x, installation)
|
|
||||||
if (transformed.startsWith("$ORIGIN")) {
|
|
||||||
console.warn("has own special rpath", transformed)
|
|
||||||
return transformed
|
|
||||||
} else {
|
|
||||||
const rel_path = new Path(transformed).relative({ to: exename.parent() })
|
|
||||||
return `$ORIGIN/${rel_path}`
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.uniq()
|
|
||||||
.join(':')
|
|
||||||
?? []
|
|
||||||
|
|
||||||
//FIXME use runtime-path since then LD_LIBRARY_PATH takes precedence which our virtual env manager requires
|
|
||||||
return ["patchelf", "--force-rpath", "--set-rpath", rpaths, exename]
|
|
||||||
})()
|
|
||||||
|
|
||||||
if (cmd.length) {
|
|
||||||
try {
|
|
||||||
await run({ cmd })
|
|
||||||
} catch (err) {
|
|
||||||
console.warn(err)
|
|
||||||
//FIXME allowing this error because on Linux:
|
|
||||||
// patchelf: cannot find section '.dynamic'. The input file is most likely statically linked
|
|
||||||
// happens with eg. gofmt
|
|
||||||
// and we don't yet have a good way to detect and skip such files
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function prefix(pkg: Package | PackageRequirement) {
|
|
||||||
return (await cellar.resolve(pkg)).path.join("lib").string
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//FIXME pretty slow since we execute `file` for every file
|
|
||||||
// eg. perl has hundreds of `.pm` files in its `lib`
|
|
||||||
async function* exefiles(prefix: Path): AsyncGenerator<[Path, 'exe' | 'lib']> {
|
|
||||||
for (const basename of ["bin", "lib", "libexec"]) {
|
|
||||||
const d = prefix.join(basename).isDirectory()
|
|
||||||
if (!d) continue
|
|
||||||
for await (const [exename, { isFile, isSymlink }] of d.walk()) {
|
|
||||||
if (!isFile || isSymlink) continue
|
|
||||||
const type = await exetype(exename)
|
|
||||||
if (type) yield [exename, type]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//FIXME lol use https://github.com/sindresorhus/file-type when we can
|
|
||||||
export async function exetype(path: Path): Promise<'exe' | 'lib' | false> {
|
|
||||||
// speed this up a bit
|
|
||||||
switch (path.extname()) {
|
|
||||||
case ".py":
|
|
||||||
case ".pyc":
|
|
||||||
case ".pl":
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
const out = await backticks({
|
|
||||||
cmd: ["file", "--mime-type", path.string]
|
|
||||||
})
|
|
||||||
const lines = out.split("\n")
|
|
||||||
const line1 = lines[0]
|
|
||||||
if (!line1) throw new Error()
|
|
||||||
const match = line1.match(/: (.*)$/)
|
|
||||||
if (!match) throw new Error()
|
|
||||||
const mime = match[1]
|
|
||||||
|
|
||||||
console.debug(mime)
|
|
||||||
|
|
||||||
switch (mime) {
|
|
||||||
case 'application/x-pie-executable':
|
|
||||||
case 'application/x-mach-binary':
|
|
||||||
case 'application/x-executable':
|
|
||||||
return 'exe'
|
|
||||||
|
|
||||||
case 'application/x-sharedlib':
|
|
||||||
return 'lib'
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// convert a full version path to a major’d version path
|
|
||||||
// this so we are resilient to upgrades without requiring us to rewrite binaries on install
|
|
||||||
// since rewriting binaries would invalidate our signatures
|
|
||||||
function transform(input: string, installation: Installation) {
|
|
||||||
if (input.startsWith("$ORIGIN")) {
|
|
||||||
// we leave these alone, trusting the build tool knew what it was doing
|
|
||||||
return input
|
|
||||||
} else if (input.startsWith(installation.path.parent().string)) {
|
|
||||||
// don’t transform stuff that links to this actual package
|
|
||||||
return input
|
|
||||||
} else {
|
|
||||||
//FIXME not very robust lol
|
|
||||||
return input.replace(/v(\d+)\.\d+\.\d+/, 'v$1')
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,204 +0,0 @@
|
||||||
#!/usr/bin/env ruby
|
|
||||||
# tea brewed ruby works with a tea shebang
|
|
||||||
# but normal ruby does not, macOS comes with ruby so we just use it
|
|
||||||
# ---
|
|
||||||
# dependencies:
|
|
||||||
# ruby-lang.org: '>=2'
|
|
||||||
# args: [ruby]
|
|
||||||
# ---
|
|
||||||
|
|
||||||
require 'bundler/inline'
|
|
||||||
|
|
||||||
gemfile do
|
|
||||||
source 'https://rubygems.org'
|
|
||||||
gem 'ruby-macho', '~> 3'
|
|
||||||
end
|
|
||||||
|
|
||||||
require 'fileutils'
|
|
||||||
require 'pathname'
|
|
||||||
require 'macho'
|
|
||||||
require 'find'
|
|
||||||
|
|
||||||
#TODO lazy & memoized
|
|
||||||
$tea_prefix = ENV['TEA_PREFIX'] || `tea --prefix`.chomp
|
|
||||||
abort "set TEA_PREFIX" if $tea_prefix.empty?
|
|
||||||
|
|
||||||
$pkg_prefix = ARGV.shift
|
|
||||||
abort "arg1 should be pkg-prefix" if $pkg_prefix.empty?
|
|
||||||
$pkg_prefix = Pathname.new($pkg_prefix).realpath.to_s
|
|
||||||
|
|
||||||
$inodes = Hash.new
|
|
||||||
|
|
||||||
|
|
||||||
def arm?
|
|
||||||
def type
|
|
||||||
case RUBY_PLATFORM
|
|
||||||
when /arm/, /aarch64/ then true
|
|
||||||
else false
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
class Fixer
|
|
||||||
def initialize(file)
|
|
||||||
@file = MachO.open(file)
|
|
||||||
@changed = false
|
|
||||||
end
|
|
||||||
|
|
||||||
def fix
|
|
||||||
case @file.filetype
|
|
||||||
when :dylib
|
|
||||||
fix_id
|
|
||||||
fix_rpaths
|
|
||||||
fix_install_names
|
|
||||||
when :execute
|
|
||||||
fix_rpaths
|
|
||||||
fix_install_names
|
|
||||||
when :bundle
|
|
||||||
fix_rpaths
|
|
||||||
fix_install_names
|
|
||||||
when :object
|
|
||||||
# noop
|
|
||||||
else
|
|
||||||
throw Error("unknown filetype: #{file.filetype}: #{file.filename}")
|
|
||||||
end
|
|
||||||
|
|
||||||
# M1 binaries must be signed
|
|
||||||
# changing the macho stuff invalidates the signature
|
|
||||||
# this resigns with the default adhoc signing profile
|
|
||||||
MachO.codesign!(@file.filename) if @changed and arm?
|
|
||||||
end
|
|
||||||
|
|
||||||
def fix_id
|
|
||||||
rel_path = Pathname.new(@file.filename).relative_path_from(Pathname.new($tea_prefix))
|
|
||||||
id = "@rpath/#{rel_path}"
|
|
||||||
if @file.dylib_id != id
|
|
||||||
# only do work if we must
|
|
||||||
@file.change_dylib_id id
|
|
||||||
write
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def write
|
|
||||||
stat = File.stat(@file.filename)
|
|
||||||
if not stat.writable?
|
|
||||||
File.chmod(0644, @file.filename)
|
|
||||||
chmoded = true
|
|
||||||
end
|
|
||||||
@file.write!
|
|
||||||
@changed = true
|
|
||||||
ensure
|
|
||||||
File.chmod(stat.mode, @file.filename) if chmoded
|
|
||||||
end
|
|
||||||
|
|
||||||
def links_to_other_tea_libs?
|
|
||||||
@file.linked_dylibs.each do |lib|
|
|
||||||
# starts_with? @rpath is not enough lol
|
|
||||||
# this because we are setting `id` to @rpath now so it's a reasonable indication
|
|
||||||
# that we link to tea libs, but the build system for the pkg may well do this for its
|
|
||||||
# own libs
|
|
||||||
return true if lib.start_with? $tea_prefix or lib.start_with? '@rpath'
|
|
||||||
end
|
|
||||||
return false
|
|
||||||
end
|
|
||||||
|
|
||||||
def fix_rpaths
|
|
||||||
#TODO remove spurious rpaths
|
|
||||||
|
|
||||||
dirty = false
|
|
||||||
rel_path = Pathname.new($tea_prefix).relative_path_from(Pathname.new(@file.filename).parent)
|
|
||||||
rpath = "@loader_path/#{rel_path}"
|
|
||||||
|
|
||||||
if not @file.rpaths.include? rpath and links_to_other_tea_libs?
|
|
||||||
@file.add_rpath rpath
|
|
||||||
dirty = true
|
|
||||||
end
|
|
||||||
|
|
||||||
while @file.rpaths.include? $tea_prefix
|
|
||||||
@file.delete_rpath $tea_prefix
|
|
||||||
dirty = true
|
|
||||||
end
|
|
||||||
|
|
||||||
write if dirty
|
|
||||||
end
|
|
||||||
|
|
||||||
def bad_install_names
|
|
||||||
@file.linked_dylibs.map do |lib|
|
|
||||||
if lib.start_with? '/'
|
|
||||||
if Pathname.new(lib).cleanpath.to_s.start_with? $tea_prefix
|
|
||||||
lib
|
|
||||||
end
|
|
||||||
elsif lib.start_with? '@rpath'
|
|
||||||
path = Pathname.new(lib.sub(%r{^@rpath}, $tea_prefix))
|
|
||||||
if path.exist?
|
|
||||||
lib
|
|
||||||
else
|
|
||||||
puts "warn:#{@file.filename}:#{lib}"
|
|
||||||
end
|
|
||||||
elsif lib.start_with? '@'
|
|
||||||
puts "warn:#{@file.filename}:#{lib}"
|
|
||||||
# noop
|
|
||||||
else
|
|
||||||
lib
|
|
||||||
end
|
|
||||||
end.compact
|
|
||||||
end
|
|
||||||
|
|
||||||
def fix_install_names
|
|
||||||
bad_names = bad_install_names
|
|
||||||
return if bad_names.empty?
|
|
||||||
|
|
||||||
def fix_tea_prefix s
|
|
||||||
s = Pathname.new(s).relative_path_from(Pathname.new($tea_prefix))
|
|
||||||
s = s.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
|
|
||||||
s = s.sub(%r{/(\.\d+)+\.dylib$}, '/.dylib')
|
|
||||||
s = "@rpath/#{s}"
|
|
||||||
return s
|
|
||||||
end
|
|
||||||
|
|
||||||
bad_names.each do |old_name|
|
|
||||||
if old_name.start_with? $pkg_prefix
|
|
||||||
new_name = Pathname.new(old_name).relative_path_from(Pathname.new(@file.filename).parent)
|
|
||||||
new_name = "@loader_path/#{new_name}"
|
|
||||||
elsif old_name.start_with? '/'
|
|
||||||
new_name = fix_tea_prefix old_name
|
|
||||||
elsif old_name.start_with? '@rpath'
|
|
||||||
# so far we only feed bad @rpaths that are relative to the tea-prefix
|
|
||||||
new_name = fix_tea_prefix old_name.sub(%r{^@rpath}, $tea_prefix)
|
|
||||||
else
|
|
||||||
# assume they are meant to be relative to lib dir
|
|
||||||
new_name = Pathname.new($pkg_prefix).join("lib").relative_path_from(Pathname.new(@file.filename).parent)
|
|
||||||
new_name = "@loader_path/#{new_name}/#{old_name}"
|
|
||||||
end
|
|
||||||
|
|
||||||
@file.change_install_name old_name, new_name
|
|
||||||
end
|
|
||||||
|
|
||||||
write
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
ARGV.each do |arg|
|
|
||||||
Find.find(arg) do |file|
|
|
||||||
next unless File.file? file and !File.symlink? file
|
|
||||||
abs = Pathname.getwd.join(file).to_s
|
|
||||||
inode = File.stat(abs).ino
|
|
||||||
if $inodes[inode]
|
|
||||||
if arm?
|
|
||||||
# we have to code-sign on arm AND codesigning breaks the hard link
|
|
||||||
# so now we have to re-hardlink
|
|
||||||
puts "re-hardlinking #{abs} to #{$inodes[inode]}"
|
|
||||||
FileUtils.ln($inodes[inode], abs, :force => true)
|
|
||||||
end
|
|
||||||
# stuff like git has hardlinks to the same files
|
|
||||||
# avoid the work if we already did this inode
|
|
||||||
next
|
|
||||||
end
|
|
||||||
Fixer.new(abs).fix
|
|
||||||
$inodes[inode] = abs
|
|
||||||
rescue MachO::MagicError
|
|
||||||
#noop: not a Mach-O file
|
|
||||||
rescue MachO::TruncatedFileError
|
|
||||||
#noop: file can’t be a Mach-O file
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -1,73 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/* ---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-run
|
|
||||||
- --allow-env
|
|
||||||
- --allow-read
|
|
||||||
- --allow-write={{tea.prefix}}
|
|
||||||
--- */
|
|
||||||
|
|
||||||
import Path from "path"
|
|
||||||
import { undent } from "utils"
|
|
||||||
import { useFlags } from "hooks"
|
|
||||||
|
|
||||||
useFlags()
|
|
||||||
|
|
||||||
const has_shebang = (() => {
|
|
||||||
const encoder = new TextDecoder()
|
|
||||||
return (buf: Uint8Array) => {
|
|
||||||
return encoder.decode(buf) == '#!'
|
|
||||||
}
|
|
||||||
})()
|
|
||||||
|
|
||||||
for (const path of Deno.args) {
|
|
||||||
if (!Path.cwd().join(path).isFile()) continue
|
|
||||||
|
|
||||||
console.debug({ path })
|
|
||||||
|
|
||||||
const rid = await Deno.open(path, { read: true })
|
|
||||||
try {
|
|
||||||
const buf = new Uint8Array(2)
|
|
||||||
await rid.read(buf)
|
|
||||||
if (!has_shebang(buf)) continue
|
|
||||||
} finally {
|
|
||||||
rid.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
//FIXME this could be pretty damn efficient if we can find the time
|
|
||||||
//NOTE as it stands this is HIDEOUSLY inefficient
|
|
||||||
|
|
||||||
const contents = await Deno.readFile(path)
|
|
||||||
const txt = new TextDecoder().decode(contents)
|
|
||||||
const [line0, ...lines] = txt.split("\n") //lol
|
|
||||||
|
|
||||||
const match = line0.match(/^#!\s*(\/[^\s]+)/)
|
|
||||||
if (!match) throw new Error()
|
|
||||||
const interpreter = match[1]
|
|
||||||
|
|
||||||
switch (interpreter) {
|
|
||||||
case "/usr/bin/env":
|
|
||||||
case "/bin/sh":
|
|
||||||
console.verbose({ line0, path })
|
|
||||||
console.verbose("^^ skipped acceptable shebang")
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
const shebang = `#!/usr/bin/env ${new Path(interpreter).basename()}`
|
|
||||||
|
|
||||||
const rewrite = undent`
|
|
||||||
${shebang}
|
|
||||||
${lines.join("\n")}
|
|
||||||
`
|
|
||||||
|
|
||||||
console.verbose({rewrote: path, to: `#!/usr/bin/env ${interpreter}`})
|
|
||||||
|
|
||||||
const stat = Deno.lstatSync(path)
|
|
||||||
const needs_chmod = stat.mode && !(stat.mode & 0o200)
|
|
||||||
if (needs_chmod) Deno.chmodSync(path, 0o666)
|
|
||||||
await Deno.writeFile(path, new TextEncoder().encode(rewrite))
|
|
||||||
if (needs_chmod) Deno.chmodSync(path, stat.mode!)
|
|
||||||
}
|
|
|
@ -1,50 +0,0 @@
|
||||||
#!/bin/sh
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
CMD_NAME=$(basename "$1")
|
|
||||||
PREFIX="$(dirname "$(dirname "$1")")"
|
|
||||||
PROJECT_NAME=$(basename "$(dirname "$PREFIX")")
|
|
||||||
VERSION=$(basename "$PREFIX")
|
|
||||||
PYTHON_VERSION=$(python --version | cut -d' ' -f2)
|
|
||||||
PYTHON_VERSION_MAJ=$(echo $PYTHON_VERSION | cut -d. -f1)
|
|
||||||
PYTHON_VERSION_MIN=$(echo $PYTHON_VERSION | cut -d. -f1,2)
|
|
||||||
|
|
||||||
python -m venv $PREFIX/libexec
|
|
||||||
|
|
||||||
cd "$PREFIX"
|
|
||||||
|
|
||||||
libexec/bin/pip install -v --no-binary :all: --ignore-installed $CMD_NAME
|
|
||||||
mkdir bin
|
|
||||||
|
|
||||||
cat <<EOF >bin/$CMD_NAME
|
|
||||||
#!/usr/bin/env bash
|
|
||||||
self="\${BASH_SOURCE[0]}"
|
|
||||||
LIBEXEC="\$(cd "\$(dirname "\$self")"/../libexec/bin && pwd)"
|
|
||||||
source "\$LIBEXEC/activate"
|
|
||||||
exec "\$LIBEXEC"/$CMD_NAME "\$@"
|
|
||||||
EOF
|
|
||||||
chmod +x bin/$CMD_NAME
|
|
||||||
|
|
||||||
cd libexec/bin
|
|
||||||
fix-shebangs.ts *
|
|
||||||
|
|
||||||
rm Activate.ps1 activate.csh activate.fish
|
|
||||||
|
|
||||||
sed -i.bak 's|VIRTUAL_ENV=".*"|VIRTUAL_ENV="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. \&\& pwd)"|' activate
|
|
||||||
rm activate.bak
|
|
||||||
|
|
||||||
# FIXME a lot: this "updates" the `venv` on each run for relocatability
|
|
||||||
cat <<EOF >>activate
|
|
||||||
|
|
||||||
sed -i.bak \\
|
|
||||||
-e "s|$TEA_PREFIX/python.org/v$PYTHON_VERSION|\$TEA_PREFIX/python.org/v$PYTHON_VERSION_MAJ|" \\
|
|
||||||
-e 's|bin/python$PYTHON_VERSION_MAJ.$PYTHON_VERSION_MIN|bin/python|' \\
|
|
||||||
-e "s|$PREFIX/libexec|\$TEA_PREFIX/$PROJECT_NAME/$VERSION/libexec|" \\
|
|
||||||
\$VIRTUAL_ENV/pyvenv.cfg
|
|
||||||
rm \$VIRTUAL_ENV/pyvenv.cfg.bak
|
|
||||||
EOF
|
|
||||||
|
|
||||||
for x in python*; do
|
|
||||||
ln -sf ../../../../python.org/v$PYTHON_VERSION_MAJ/bin/$x $x
|
|
||||||
done
|
|
|
@ -1,47 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-env
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { Package, PackageRequirement } from "types"
|
|
||||||
import { usePantry, useFlags } from "hooks"
|
|
||||||
import { hydrate } from "prefab"
|
|
||||||
import * as ARGV from "./utils/args.ts"
|
|
||||||
import { set_output } from "./utils/gha.ts"
|
|
||||||
import { pkg } from "utils"
|
|
||||||
|
|
||||||
const pantry = usePantry()
|
|
||||||
|
|
||||||
useFlags()
|
|
||||||
|
|
||||||
const mode: 'build' | 'install' = 'build' //Deno.args.includes("-b") ? 'build' : 'install'
|
|
||||||
|
|
||||||
const get_deps = async (pkg: Package | PackageRequirement) => {
|
|
||||||
const deps = await pantry.getDeps(pkg)
|
|
||||||
switch (mode) {
|
|
||||||
case 'build':
|
|
||||||
return [...deps.build, ...deps.runtime]
|
|
||||||
// case 'install':
|
|
||||||
// return deps.runtime
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const rv: PackageRequirement[] = []
|
|
||||||
for await (const pkg of ARGV.pkgs()) {
|
|
||||||
const deps = await get_deps(pkg)
|
|
||||||
const wet = await hydrate(deps)
|
|
||||||
rv.push(...wet.pkgs)
|
|
||||||
}
|
|
||||||
|
|
||||||
const gas = rv.map(pkg.str)
|
|
||||||
|
|
||||||
if (Deno.env.get("GITHUB_ACTIONS")) {
|
|
||||||
set_output("pkgs", gas)
|
|
||||||
} else {
|
|
||||||
console.log(gas.join("\n"))
|
|
||||||
}
|
|
|
@ -1,51 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-net
|
|
||||||
- --allow-run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-write={{tea.prefix}}
|
|
||||||
- --allow-env
|
|
||||||
- --unstable
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { usePantry } from "hooks"
|
|
||||||
import { pkg as pkgutils } from "utils"
|
|
||||||
import { useFlags, usePrefix } from "hooks"
|
|
||||||
import { set_output } from "./utils/gha.ts"
|
|
||||||
import build, { BuildResult } from "./build/build.ts"
|
|
||||||
import * as ARGV from "./utils/args.ts"
|
|
||||||
|
|
||||||
useFlags()
|
|
||||||
|
|
||||||
const pantry = usePantry()
|
|
||||||
const dry = await ARGV.toArray(ARGV.pkgs())
|
|
||||||
const gha = !!Deno.env.get("GITHUB_ACTIONS")
|
|
||||||
const group_it = gha && dry.length > 1
|
|
||||||
const rv: BuildResult[] = []
|
|
||||||
|
|
||||||
for (const rq of dry) {
|
|
||||||
const pkg = await pantry.resolve(rq)
|
|
||||||
|
|
||||||
if (group_it) {
|
|
||||||
console.log("::group::", pkgutils.str(pkg))
|
|
||||||
} else {
|
|
||||||
console.log({ building: pkg.project })
|
|
||||||
}
|
|
||||||
|
|
||||||
rv.push(await build(pkg))
|
|
||||||
|
|
||||||
if (group_it) {
|
|
||||||
console.log("::endgroup::")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const to = usePrefix()
|
|
||||||
await set_output("pkgs", rv.map(x => pkgutils.str(x.installation.pkg)))
|
|
||||||
await set_output("paths", rv.map(x => x.installation.path), '%0A')
|
|
||||||
await set_output("relative-paths", rv.map(x => x.installation.path.relative({ to })))
|
|
||||||
await set_output("srcs", rv.map(x => x.src?.relative({ to }) ?? "~"))
|
|
||||||
await set_output("srcs-relative-paths", rv.compact(x => x.src?.relative({ to })))
|
|
|
@ -1,56 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-net
|
|
||||||
- --allow-run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-write={{tea.prefix}}
|
|
||||||
- --allow-env
|
|
||||||
- --unstable
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { usePantry, useFlags, useCellar, useInventory, usePrefix } from "hooks"
|
|
||||||
import { hydrate, install, link } from "prefab"
|
|
||||||
import { str as pkgstr } from "utils/pkg.ts"
|
|
||||||
import * as ARGV from "./utils/args.ts"
|
|
||||||
import { panic } from "utils/error.ts"
|
|
||||||
import build, { BuildResult } from "./build/build.ts"
|
|
||||||
import { set_output } from "./utils/gha.ts"
|
|
||||||
import { pkg as pkgutils } from "utils"
|
|
||||||
|
|
||||||
useFlags()
|
|
||||||
|
|
||||||
const pantry = usePantry()
|
|
||||||
const cellar = useCellar()
|
|
||||||
const inventory = useInventory()
|
|
||||||
const raw = await ARGV.toArray(ARGV.pkgs())
|
|
||||||
const rv: BuildResult[] = []
|
|
||||||
|
|
||||||
for (const rq of raw) {
|
|
||||||
const dry = await pantry.getDeps(rq)
|
|
||||||
const wet = await hydrate([...dry.runtime, ...dry.build])
|
|
||||||
|
|
||||||
for (const pkg of wet.pkgs) {
|
|
||||||
if (!await cellar.has(pkg)) {
|
|
||||||
const version = await inventory.select(pkg) ?? panic(`${pkgstr(pkg)} not found`)
|
|
||||||
const installation = await install({ project: pkg.project, version })
|
|
||||||
await link(installation)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const pkg = await pantry.resolve(rq)
|
|
||||||
rv.push(await build(pkg))
|
|
||||||
await link(pkg)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Deno.env.get("GITHUB_ACTIONS")) {
|
|
||||||
const to = usePrefix()
|
|
||||||
await set_output("pkgs", rv.map(x => pkgutils.str(x.installation.pkg)))
|
|
||||||
await set_output("paths", rv.map(x => x.installation.path), '%0A')
|
|
||||||
await set_output("relative-paths", rv.map(x => x.installation.path.relative({ to })))
|
|
||||||
await set_output("srcs", rv.map(x => x.src?.relative({ to }) ?? "~"))
|
|
||||||
await set_output("srcs-relative-paths", rv.compact(x => x.src?.relative({ to })))
|
|
||||||
}
|
|
|
@ -1,167 +0,0 @@
|
||||||
import { useCellar, usePantry, usePrefix } from "hooks"
|
|
||||||
import { link, hydrate } from "prefab"
|
|
||||||
import { Installation, Package } from "types"
|
|
||||||
import useShellEnv, { expand } from "hooks/useShellEnv.ts"
|
|
||||||
import { run, undent, host, tuplize } from "utils"
|
|
||||||
import { str as pkgstr } from "utils/pkg.ts"
|
|
||||||
import fix_pkg_config_files from "./fix-pkg-config-files.ts"
|
|
||||||
import Path from "path"
|
|
||||||
|
|
||||||
// module-level singletons shared by every function in this file
const cellar = useCellar()
const pantry = usePantry()
// host platform (e.g. 'darwin' / 'linux' — see fix_binaries below)
const { platform } = host()
|
|
||||||
|
|
||||||
/** What a successful build produces. */
export interface BuildResult {
  // the keg the package was installed into
  installation: Installation
  // source tarball used for the build, when the package had one
  src?: Path
}
|
|
||||||
|
|
||||||
export default async function _build(pkg: Package): Promise<BuildResult> {
|
|
||||||
try {
|
|
||||||
return await __build(pkg)
|
|
||||||
} catch (e) {
|
|
||||||
cellar.keg(pkg).isDirectory()?.isEmpty()?.rm() // don’t leave empty kegs around
|
|
||||||
throw e
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Performs the actual build: resolves deps, optionally cleans the destination
 * keg, assembles a bash build script from the pantry recipe, runs it, links
 * the result and applies post-build fixups (Mach-O/ELF, pkg-config).
 */
async function __build(pkg: Package): Promise<BuildResult> {
  const [deps, wet, resolved] = await calc_deps()
  await clean()
  const env = await mkenv()
  const dst = cellar.keg(pkg).mkpath()
  // fetch_src may return undefined (no tarball) — then src/src_tarball are undefined
  const [src, src_tarball] = await fetch_src(pkg) ?? []
  const installation = await build()
  await link(installation)
  await fix_binaries(installation)
  await fix_pkg_config_files(installation)
  return { installation, src: src_tarball }

  //////// utils

  // hydrate build+runtime deps and resolve them against the cellar
  async function calc_deps() {
    const deps = await pantry.getDeps(pkg)
    const wet = await hydrate([...deps.runtime, ...deps.build])
    deps.runtime.push(...wet.pkgs)
    const resolved = await Promise.all(wet.pkgs.map(pkg => cellar.resolve(pkg)))
    return tuplize(deps, wet, resolved)
  }

  // empty out a pre-existing installation of this package (when safe to do so)
  async function clean() {
    const installation = await should_clean()
    // If we clean deno.land, it breaks the rest of the process.
    if (installation && installation.pkg.project !== "deno.land") {
      console.log({ cleaning: installation.path })
      for await (const [path] of installation.path.ls()) {
        // we delete contents rather than the directory itself to prevent broken vx.y symlinks
        path.rm({ recursive: true })
      }
    }

    // returns the existing installation iff it is safe to wipe it
    async function should_clean() {
      if (pkg.project == 'tea.xyz') return
      // only required as we aren't passing everything into hydrate
      const depends_on_self = () => deps.build.some(x => x.project === pkg.project)
      const wet_dep = () => wet.pkgs.some(x => x.project === pkg.project)

      // provided this package doesn't transitively depend on itself (yes this happens)
      // clean out the destination prefix first
      if (!wet.bootstrap_required.has(pkg.project) && !depends_on_self() && !wet_dep()) {
        return await cellar.has(pkg)
      }
    }
  }

  // shell environment for the build script, derived from resolved deps
  async function mkenv() {
    const env = await useShellEnv({ installations: resolved})

    if (platform == 'darwin') {
      env['MACOSX_DEPLOYMENT_TARGET'] = ['11.0']
    }

    // keep the caller’s PATH reachable after ours
    env['PATH'].push("$PATH")

    return env
  }

  // write build.sh next to the working directory and execute it
  async function build() {
    // no source tarball → build in a scratch working directory
    const bld = src ?? Path.mktmp({ prefix: pkg.project }).join("wd").mkdir()
    const sh = await pantry.getScript(pkg, 'build', resolved)

    // brewkit scripts directory, located relative to this module
    const brewkit = new URL(import.meta.url).path().parent().parent().join("brewkit")

    const cmd = bld.parent().join("build.sh").write({ force: true, text: undent`
      #!/bin/bash

      set -e
      set -o pipefail
      set -x
      cd "${bld}"

      export SRCROOT="${bld}"
      ${expand(env)}

      export PATH=${brewkit}:"$PATH"

      ${sh}
      `
    }).chmod(0o700)

    // copy in auxillary files from pantry directory
    for await (const [path, {isFile}] of pantry.getYAML(pkg).path.parent().ls()) {
      if (isFile) {
        path.cp({ into: bld.join("props").mkdir() })
      }
    }

    await run({ cmd }) // WELCOME TO THE BUILD

    return { path: dst, pkg }
  }

  // rewrite install names / rpaths so binaries relocate with the prefix
  async function fix_binaries(installation: Installation) {
    const prefix = usePrefix().join("tea.xyz/var/pantry/scripts/brewkit")
    const env = {
      TEA_PREFIX: usePrefix().string,
    }
    switch (host().platform) {
    case 'darwin':
      return await run({
        cmd: [
          prefix.join('fix-machos.rb'),
          installation.path,
          ...['bin', 'lib', 'libexec'].map(x => installation.path.join(x)).filter(x => x.isDirectory())
        ],
        env
      })
    case 'linux':
      return await run({
        cmd: [
          prefix.join('fix-elf.ts'),
          installation.path,
          ...[...deps.runtime, pkg].map(pkgstr)
        ],
        env
      })
    }
  }
}
|
|
||||||
|
|
||||||
async function fetch_src(pkg: Package): Promise<[Path, Path] | undefined> {
|
|
||||||
console.log('fetching', pkgstr(pkg))
|
|
||||||
|
|
||||||
// we run this as a script because we don’t want these deps imported into *this* env
|
|
||||||
// since that leads to situations where we depend on things we didn’t expect to
|
|
||||||
const script = new URL(import.meta.url).path().parent().parent().join('fetch.ts')
|
|
||||||
const proc = Deno.run({
|
|
||||||
cmd: [script.string, pkgstr(pkg)],
|
|
||||||
stdout: 'piped'
|
|
||||||
})
|
|
||||||
const [out, status] = await Promise.all([proc.output(), proc.status()])
|
|
||||||
if (!status.success) throw new Error()
|
|
||||||
const [dstdir, tarball] = new TextDecoder().decode(out).split("\n")
|
|
||||||
if (!tarball) {
|
|
||||||
// no tarball, e.g. tea.xyz/gx/cc
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
return [new Path(dstdir), new Path(tarball)]
|
|
||||||
}
|
|
|
@ -1,28 +0,0 @@
|
||||||
import { Installation } from "types"
|
|
||||||
import Path from "path"
|
|
||||||
import "utils"
|
|
||||||
|
|
||||||
export default async function fix_pkg_config_files(installation: Installation) {
|
|
||||||
for await (const pcfile of find_pkg_config_files(installation)) {
|
|
||||||
const orig = await pcfile.read()
|
|
||||||
const relative_path = installation.path.relative({ to: pcfile.parent() })
|
|
||||||
const text = orig.replace(installation.path.string, `\${pcfiledir}/${relative_path}`)
|
|
||||||
if (orig !== text) {
|
|
||||||
console.verbose({ fixed: pcfile })
|
|
||||||
pcfile.write({text, force: true})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//NOTE currently we only support pc files in lib/pkgconfig
|
|
||||||
// we aim to standardize on this but will relent if a package is found
|
|
||||||
// that uses share and other tools that build against it only accept that
|
|
||||||
async function *find_pkg_config_files(installation: Installation): AsyncIterable<Path> {
|
|
||||||
const pcdir = installation.path.join("lib/pkgconfig")
|
|
||||||
if (!pcdir.isDirectory()) return
|
|
||||||
for await (const [path, { isFile }] of pcdir.ls()) {
|
|
||||||
if (isFile && path.extname() == ".pc") {
|
|
||||||
yield path
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,26 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-env
|
|
||||||
---*/
|
|
||||||
|
|
||||||
const args = [...Deno.args]
|
|
||||||
const via = args.shift()
|
|
||||||
|
|
||||||
for (const arg of args) {
|
|
||||||
const proc = Deno.run({
|
|
||||||
stdout: "null", stderr: "null",
|
|
||||||
cmd: [via!, arg]
|
|
||||||
})
|
|
||||||
const status = await proc.status()
|
|
||||||
if (status.code !== 0) {
|
|
||||||
console.error(`${arg} ❌`)
|
|
||||||
} else {
|
|
||||||
console.info(`${arg} ✅`)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,63 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
dependencies:
|
|
||||||
gnu.org/tar: 1
|
|
||||||
tukaani.org/xz: 5
|
|
||||||
sourceware.org/bzip2: 1
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-net
|
|
||||||
- --allow-run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-write={{ tea.prefix }}
|
|
||||||
- --allow-env
|
|
||||||
---*/
|
|
||||||
|
|
||||||
//TODO verify the sha
|
|
||||||
|
|
||||||
import { usePantry, useCache, useDownload, useCellar, useSourceUnarchiver, useOffLicense, useFlags} from "hooks"
|
|
||||||
import { print } from "utils"
|
|
||||||
import { Stowage, Package } from "types"
|
|
||||||
import * as ARGV from "./utils/args.ts"
|
|
||||||
import Path from "path"
|
|
||||||
|
|
||||||
// parse common CLI flags (side-effectful)
useFlags()

// module-level singletons used by fetch_src and the CLI entry point
const pantry = usePantry()
const { download } = useDownload()
|
|
||||||
|
|
||||||
export async function fetch_src(pkg: Package): Promise<[Path, Path] | undefined> {
|
|
||||||
const dstdir = useCellar().shelf(pkg.project).join("src", `v${pkg.version}`)
|
|
||||||
const dist = await pantry.getDistributable(pkg)
|
|
||||||
if (!dist) return
|
|
||||||
const { url, stripComponents } = dist
|
|
||||||
const stowage: Stowage = { pkg, type: 'src', extname: url.path().extname() }
|
|
||||||
const dst = useCache().path(stowage)
|
|
||||||
const zipfile = await (async () => {
|
|
||||||
try {
|
|
||||||
// first try our mirror
|
|
||||||
const src = useOffLicense('s3').url(stowage)
|
|
||||||
return await download({ dst, src })
|
|
||||||
} catch {
|
|
||||||
// oh well, try original location then
|
|
||||||
return await download({ dst, src: url })
|
|
||||||
}
|
|
||||||
})()
|
|
||||||
await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
|
|
||||||
return [dstdir, zipfile]
|
|
||||||
}
|
|
||||||
|
|
||||||
if (import.meta.main) {
|
|
||||||
for await (let pkg of ARGV.pkgs()) {
|
|
||||||
pkg = await pantry.resolve(pkg)
|
|
||||||
const rv = await fetch_src(pkg)
|
|
||||||
if (rv) {
|
|
||||||
// a package doesn’t require a source tarball
|
|
||||||
//FIXME is this dumb tho? In theory a package could just be a build script that generates itself
|
|
||||||
// in practice this is rare and pkgs could just specify some dummy tarball
|
|
||||||
await print(rv.join("\n") + "\n")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,33 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-env
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { useCellar, useFlags } from "hooks"
|
|
||||||
import * as ARGV from "./utils/args.ts"
|
|
||||||
|
|
||||||
useFlags()
|
|
||||||
|
|
||||||
/// filters out everything that is already installed
|
|
||||||
|
|
||||||
const cellar = useCellar()
|
|
||||||
const desired_filter = !!Deno.env.get("INVERT")
|
|
||||||
|
|
||||||
const rv: string[] = []
|
|
||||||
for await (const pkg of ARGV.pkgs()) {
|
|
||||||
const isInstalled = !!await cellar.has(pkg)
|
|
||||||
if (isInstalled == desired_filter) {
|
|
||||||
rv.push(pkg.project)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Deno.env.get("GITHUB_ACTIONS")) {
|
|
||||||
console.log(`::set-output name=pkgs::${rv.join(" ")}\n`)
|
|
||||||
} else {
|
|
||||||
console.log(rv.join("\n"))
|
|
||||||
}
|
|
|
@ -1,51 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E

/*---
args:
  - deno
  - run
  - --allow-net
  - --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_S3_BUCKET
---*/

// Backfills missing `.sha256sum` companion objects for every bottle tarball
// in the S3 bucket.

import { S3, S3Object } from "s3"
import { crypto, toHashString } from "deno/crypto/mod.ts";
import { readerFromStreamReader } from "deno/streams/reader_from_stream_reader.ts"
import { readAll } from "deno/streams/read_all.ts"
import Path from "path"

const s3 = new S3({
  accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
  region: "us-east-1",
});

const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);

for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
  // skip anything that isn’t a bottle tarball
  const keys = get_keys(pkg)
  if (!keys) continue

  console.log({ checking: keys.checksum });

  // only upload a checksum if one doesn’t already exist
  if (!await bucket.headObject(keys.checksum.string)) {
    console.log({ missing: keys.checksum })

    // download the bottle, hash it, upload `<sha256>  <basename>`
    // NOTE(review): reads the whole object into memory — fine for bottles,
    // confirm sizes stay modest
    const reader = (await bucket.getObject(keys.bottle.string))!.body.getReader()
    const contents = await readAll(readerFromStreamReader(reader))
    const sha256sum = toHashString(await crypto.subtle.digest("SHA-256", contents))
    const body = new TextEncoder().encode(`${sha256sum} ${keys.bottle.basename()}`)
    await bucket.putObject(keys.checksum.string, body)

    console.log({ uploaded: keys.checksum })
  }
}
|
|
||||||
|
|
||||||
function get_keys(pkg: S3Object): { bottle: Path, checksum: Path } | undefined {
|
|
||||||
if (!pkg.key) return
|
|
||||||
if (!/\.tar\.[gx]z$/.test(pkg.key)) return
|
|
||||||
return {
|
|
||||||
bottle: new Path(pkg.key),
|
|
||||||
checksum: new Path(`${pkg.key}.sha256sum`)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,80 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-net
|
|
||||||
- --allow-read
|
|
||||||
- --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_S3_BUCKET
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { S3 } from "s3"
|
|
||||||
import { stringify as yaml } from "deno/encoding/yaml.ts"
|
|
||||||
import { stringify as csv } from "deno/encoding/csv.ts"
|
|
||||||
import { Inventory } from "hooks/useInventory.ts"
|
|
||||||
import SemVer, * as semver from "semver"
|
|
||||||
|
|
||||||
const s3 = new S3({
|
|
||||||
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
|
|
||||||
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
|
|
||||||
region: "us-east-1",
|
|
||||||
});
|
|
||||||
|
|
||||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
|
|
||||||
|
|
||||||
const inventory: Inventory = {}
|
|
||||||
const flat = []
|
|
||||||
|
|
||||||
for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
|
|
||||||
if (!/\.tar\.[gx]z$/.test(pkg.key ?? '')) { continue }
|
|
||||||
|
|
||||||
const matches = pkg.key!.match(new RegExp(`^(.*)/(.*)/(.*)/v(${semver.regex.source})\.tar\.[xg]z$`))
|
|
||||||
if (!matches) { continue }
|
|
||||||
|
|
||||||
const [_, project, platform, arch, version] = matches
|
|
||||||
|
|
||||||
if (!inventory[project]) inventory[project] = {}
|
|
||||||
if (!inventory[project][platform]) inventory[project][platform] = {}
|
|
||||||
if (!inventory[project][platform]) inventory[project][platform] = {}
|
|
||||||
inventory[project][platform][arch] = [...(inventory[project]?.[platform]?.[arch] ?? []), version]
|
|
||||||
flat.push({ project, platform, arch, version })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// For ultimate user-friendliness, we store this data 4 ways:
|
|
||||||
/// YAML, JSON, CSV, flat text
|
|
||||||
|
|
||||||
const te = new TextEncoder()
|
|
||||||
|
|
||||||
// YAML: type Inventory
|
|
||||||
|
|
||||||
const yml = te.encode(yaml(inventory))
|
|
||||||
|
|
||||||
bucket.putObject("versions.yml", yml)
|
|
||||||
|
|
||||||
// JSON: type Inventory
|
|
||||||
|
|
||||||
const json = te.encode(JSON.stringify(inventory))
|
|
||||||
|
|
||||||
bucket.putObject("versions.json", json)
|
|
||||||
|
|
||||||
// CSV: project,platform,arch,version
|
|
||||||
|
|
||||||
const csvData = te.encode(csv(flat, { columns: ["project", "platform", "arch", "version"]}))
|
|
||||||
|
|
||||||
bucket.putObject("versions.csv", csvData)
|
|
||||||
|
|
||||||
// TXT: per project/platform/arch, newline-delimited
|
|
||||||
|
|
||||||
for(const [project, platforms] of Object.entries(inventory)) {
|
|
||||||
for (const [platform, archs] of Object.entries(platforms)) {
|
|
||||||
for (const [arch, versions] of Object.entries(archs)) {
|
|
||||||
const v = versions.map(x => new SemVer(x)).sort(semver.compare)
|
|
||||||
const txt = te.encode(v.join("\n"))
|
|
||||||
console.log(project, platform, arch, v)
|
|
||||||
bucket.putObject(`${project}/${platform}/${arch}/versions.txt`, txt)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//end
|
|
|
@ -1,94 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-env
|
|
||||||
- --allow-net
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { S3 } from "s3"
|
|
||||||
import SemVer, * as semver from "semver"
|
|
||||||
import { format }from "deno/datetime/mod.ts"
|
|
||||||
|
|
||||||
const sortByModified = Deno.args.includes("-m")
|
|
||||||
const reverse = Deno.args.includes("-r")
|
|
||||||
const fullMatrix = Deno.args.includes("-x")
|
|
||||||
const source = Deno.args.includes("-s")
|
|
||||||
|
|
||||||
if (source && fullMatrix) {
|
|
||||||
throw new Error("incompatible flags (-x -s)")
|
|
||||||
}
|
|
||||||
|
|
||||||
const s3 = new S3({
|
|
||||||
accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
|
|
||||||
secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
|
|
||||||
region: "us-east-1",
|
|
||||||
})
|
|
||||||
|
|
||||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
|
|
||||||
|
|
||||||
let output: FileInfo[] = []
|
|
||||||
|
|
||||||
for await(const obj of bucket.listAllObjects({ batchSize: 200 })) {
|
|
||||||
const { key, lastModified } = obj
|
|
||||||
if (!key?.match(/\.tar\.[gx]z$/)) { continue }
|
|
||||||
output.push({ key: key!, lastModified: lastModified! })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (fullMatrix) {
|
|
||||||
produceMatrix(output)
|
|
||||||
} else {
|
|
||||||
output = output.filter(x => {
|
|
||||||
const match = x.key.match(new RegExp("/(darwin|linux)/(aarch64|x86-64)/v.*\.tar\.(x|g)z"))
|
|
||||||
switch (source) {
|
|
||||||
case true: return !match
|
|
||||||
case false: return match
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
output.sort((a, b) => {
|
|
||||||
switch (sortByModified) {
|
|
||||||
case true: return a.lastModified.valueOf() - b.lastModified.valueOf()
|
|
||||||
case false: return a.key < b.key ? -1 : 1
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (reverse) { output.reverse() }
|
|
||||||
console.table(output)
|
|
||||||
}
|
|
||||||
|
|
||||||
/** One S3 object of interest: its key and last-modified timestamp. */
interface FileInfo {
  key: string
  lastModified: Date
}
|
|
||||||
|
|
||||||
function produceMatrix(objects: FileInfo[]): void {
|
|
||||||
const matrix = new Map()
|
|
||||||
for (const { key, lastModified } of objects) {
|
|
||||||
const match = key.match(new RegExp("(.*)/(darwin|linux)/(aarch64|x86-64)/v(.*)\.tar\.(x|g)z"))
|
|
||||||
if (!match) continue
|
|
||||||
const [_, project, _platform, _arch, _v] = match
|
|
||||||
const flavor = `${_platform}/${_arch}`
|
|
||||||
const version = semver.parse(_v)
|
|
||||||
if (!version) continue
|
|
||||||
const stats = matrix.get(project) || { project }
|
|
||||||
|
|
||||||
if (version.gt(stats[flavor]?.[0] || new SemVer([0,0,0]))) {
|
|
||||||
stats[flavor] = [version, format(lastModified, "yyyy-MM-dd HH:mm")]
|
|
||||||
}
|
|
||||||
|
|
||||||
matrix.set(project, stats)
|
|
||||||
}
|
|
||||||
|
|
||||||
const output = [...matrix.values()].map(o => ({
|
|
||||||
project: o.project,
|
|
||||||
'darwin/aarch64': `${o['darwin/aarch64']?.join(": ")}`,
|
|
||||||
'darwin/x86-64': `${o['darwin/x86-64']?.join(": ")}`,
|
|
||||||
'linux/aarch64': `${o['linux/aarch64']?.join(": ")}`,
|
|
||||||
'linux/x86-64': `${o['linux/x86-64']?.join(": ")}`
|
|
||||||
}))
|
|
||||||
output.sort((a, b) => a.project < b.project ? -1: 1)
|
|
||||||
console.table(output)
|
|
||||||
}
|
|
|
@ -1,68 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/// returns all pantry entries as `[{ name, path }]`
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-env
|
|
||||||
- --allow-read
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import Path from "path"
|
|
||||||
import { useFlags, usePrefix } from "hooks"
|
|
||||||
|
|
||||||
//FIXME needs to get actual paths from usePantry
// root of the pantry project definitions; each project dir holds a package.yml
const prefix = new Path(`${usePrefix()}/tea.xyz/var/pantry/projects`)

/** One pantry entry. */
interface Entry {
  // project dir path relative to `prefix` (see ls() below)
  project: string
  // absolute path to the entry’s package.yml
  path: Path
}
|
|
||||||
|
|
||||||
|
|
||||||
//------------------------------------------------------------------------- funcs
|
|
||||||
export async function* ls(): AsyncGenerator<Entry> {
|
|
||||||
for await (const path of _ls_pantry(prefix)) {
|
|
||||||
yield {
|
|
||||||
project: path.parent().relative({ to: prefix }),
|
|
||||||
path
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function* _ls_pantry(dir: Path): AsyncGenerator<Path> {
|
|
||||||
if (!dir.isDirectory()) throw new Error()
|
|
||||||
|
|
||||||
for await (const [path, { name, isDirectory }] of dir.ls()) {
|
|
||||||
if (isDirectory) {
|
|
||||||
for await (const x of _ls_pantry(path)) {
|
|
||||||
yield x
|
|
||||||
}
|
|
||||||
} else if (name === "package.yml") {
|
|
||||||
yield path
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//-------------------------------------------------------------------------- main
|
|
||||||
if (import.meta.main) {
|
|
||||||
const flags = useFlags()
|
|
||||||
|
|
||||||
const rv: Entry[] = []
|
|
||||||
for await (const item of ls()) {
|
|
||||||
rv.push(item)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Deno.env.get("GITHUB_ACTIONS")) {
|
|
||||||
const projects = rv.map(x => x.project).join(":")
|
|
||||||
console.log(`::set-output name=projects::${projects}`)
|
|
||||||
// } else if (flags.json) {
|
|
||||||
// const obj = rv.map(({ path, project }) => ({ path: path.string, project }))
|
|
||||||
// const out = JSON.stringify(obj, null, 2)
|
|
||||||
// console.log(out)
|
|
||||||
} else {
|
|
||||||
console.log(rv.map(x => x.project).join("\n"))
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,40 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-env
|
|
||||||
---*/
|
|
||||||
|
|
||||||
// sorts input for building
|
|
||||||
// does a full hydration, but only returns ordered, dry packages
|
|
||||||
|
|
||||||
|
|
||||||
import { pkg } from "utils"
|
|
||||||
import { usePantry, useFlags } from "hooks"
|
|
||||||
import { hydrate } from "prefab"
|
|
||||||
import * as ARGV from "./utils/args.ts"
|
|
||||||
import { set_output } from "./utils/gha.ts";
|
|
||||||
|
|
||||||
// const flags = useFlags()
|
|
||||||
const pantry = usePantry()
|
|
||||||
|
|
||||||
const dry = await ARGV.toArray(ARGV.pkgs())
|
|
||||||
|
|
||||||
const wet = await hydrate(dry, async (pkg, dry) => {
|
|
||||||
const deps = await pantry.getDeps(pkg)
|
|
||||||
return dry ? [...deps.build, ...deps.runtime] : deps.runtime
|
|
||||||
})
|
|
||||||
|
|
||||||
if (Deno.env.get("GITHUB_ACTIONS")) {
|
|
||||||
await set_output('pkgs', wet.dry.map(pkg.str))
|
|
||||||
} else {
|
|
||||||
const gas = wet.dry.map(pkg.str)
|
|
||||||
// if (flags.json) {
|
|
||||||
// console.log(gas)
|
|
||||||
// } else {
|
|
||||||
console.log(gas.join("\n"))
|
|
||||||
// }
|
|
||||||
}
|
|
106
scripts/test.ts
106
scripts/test.ts
|
@ -1,106 +0,0 @@
|
||||||
#!/usr/bin/env -S tea -E
|
|
||||||
|
|
||||||
/*---
|
|
||||||
args:
|
|
||||||
- deno
|
|
||||||
- run
|
|
||||||
- --allow-net
|
|
||||||
- --allow-run
|
|
||||||
- --allow-read
|
|
||||||
- --allow-write
|
|
||||||
- --allow-env
|
|
||||||
- --unstable
|
|
||||||
---*/
|
|
||||||
|
|
||||||
import { Installation, Package, PackageRequirement } from "types"
|
|
||||||
import { usePantry, useFlags, usePrefix } from "hooks"
|
|
||||||
import useShellEnv, { expand } from "hooks/useShellEnv.ts"
|
|
||||||
import { run, undent, pkg as pkgutils } from "utils"
|
|
||||||
import { resolve, install, hydrate, link } from "prefab"
|
|
||||||
import * as ARGV from "./utils/args.ts"
|
|
||||||
import Path from "path"
|
|
||||||
|
|
||||||
useFlags()
|
|
||||||
const pantry = usePantry()
|
|
||||||
|
|
||||||
for await (const pkg of ARGV.installs()) {
|
|
||||||
await test(pkg)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Runs the pantry `test:` script for an installed package inside a temporary
 * working directory, with test deps installed/linked and the shell env set up.
 * The tmp dir is removed on success and kept (with a notice) on failure.
 */
async function test(self: Installation) {
  const yml = await pantry.getYAML(self.pkg).parse()
  const deps = await deps4(self.pkg)
  const installations = await prepare(deps)

  // if we are testing multiple packages, they might not
  // get linked when they're tested.
  await link(self)

  const env = await useShellEnv({ installations })

  // keep the caller’s PATH reachable after ours
  env['PATH'].push("$PATH")

  // assemble the test shell script incrementally
  let text = undent`
    #!/bin/bash

    set -e
    set -o pipefail
    set -x

    export TEA_PREFIX=${usePrefix()}

    ${expand(env)}

    `

  const tmp = Path.mktmp({ prefix: pkgutils.str(self.pkg) })

  try {
    // optional fixture declared in package.yml’s test section
    if (yml.test.fixture) {
      const fixture = tmp.join("fixture.tea").write({ text: yml.test.fixture.toString() })
      text += `export FIXTURE="${fixture}"\n\n`
    }

    const cwd = tmp.join("wd").mkdir()

    text += `cd "${cwd}"\n\n`

    text += await pantry.getScript(self.pkg, 'test', installations)
    text += "\n"

    // copy the recipe’s auxiliary files (everything except package.yml)
    // into the working directory
    for await (const [path, {name, isFile}] of pantry.getYAML(self.pkg).path.parent().ls()) {
      if (isFile && name != 'package.yml') {
        path.cp({ into: cwd })
      }
    }

    const cmd = tmp
      .join("test.sh")
      .write({ text, force: true })
      .chmod(0o500)
    await run({ cmd, cwd })
    tmp.rm({ recursive: true })
  } catch (e) {
    // keep the tmp dir around for post-mortem debugging
    console.info("due to error, didn’t delete:", tmp)
    throw e
  }
}
|
|
||||||
|
|
||||||
|
|
||||||
//TODO install step in CI should do this for test requirements also
|
|
||||||
async function prepare(reqs: (Package | PackageRequirement)[]) {
|
|
||||||
const { pending, installed } = await resolve(reqs)
|
|
||||||
for await (const pkg of pending) {
|
|
||||||
const installation = await install(pkg)
|
|
||||||
await link(installation)
|
|
||||||
installed.push(installation)
|
|
||||||
}
|
|
||||||
return installed
|
|
||||||
}
|
|
||||||
|
|
||||||
async function deps4(pkg: Package) {
|
|
||||||
return (await hydrate(pkg, async (pkg, dry) => {
|
|
||||||
const { runtime, test } = await pantry.getDeps(pkg)
|
|
||||||
return dry ? [...runtime, ...test] : runtime
|
|
||||||
})).pkgs
|
|
||||||
}
|
|
Loading…
Reference in a new issue