diff --git a/scripts/README.md b/scripts/README.md
deleted file mode 100644
index cc57ba97..00000000
--- a/scripts/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# build all
-
-```sh
-scripts/ls.ts | xargs scripts/sort.ts | xargs scripts/build.ts
-```
-
-# test all
-
-`each.ts` reduces output for each input to a concise ✅ or ❌ based on exit
-status.
-
-```sh
-scripts/ls.ts | xargs scripts/each.ts scripts/test.ts
-```
diff --git a/scripts/bottle.ts b/scripts/bottle.ts
deleted file mode 100755
index dd711909..00000000
--- a/scripts/bottle.ts
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/* ---
-dependencies:
-  gnu.org/tar: ^1.34
-  tukaani.org/xz: ^5
-  zlib.net: 1
-args:
-  - deno
-  - run
-  - --allow-net
-  - --allow-run
-  - --allow-env
-  - --allow-read
-  - --allow-write
-  - --import-map={{ srcroot }}/import-map.json
---- */
-
-import { Installation } from "types"
-import { useCellar, usePrefix, useFlags, useCache } from "hooks"
-import { run } from "utils"
-import { crypto } from "deno/crypto/mod.ts"
-import { encode } from "deno/encoding/hex.ts"
-import { set_output } from "./utils/gha.ts"
-import * as ARGV from "./utils/args.ts"
-import Path from "path"
-
-const cellar = useCellar()
-
-
-//-------------------------------------------------------------------------- main
-
-if (import.meta.main) {
-  useFlags()
-
-  const compression = Deno.env.get("COMPRESSION") == 'xz' ? 'xz' : 'gz'
-  const checksums: string[] = []
-  const bottles: Path[] = []
-
-  for await (const pkg of ARGV.pkgs()) {
-    console.log({ bottling: pkg })
-
-    const installation = await cellar.resolve(pkg)
-    const path = await bottle(installation, compression)
-    const checksum = await sha256(path)
-
-    console.log({ bottled: path })
-
-    bottles.push(path)
-    checksums.push(checksum)
-  }
-
-  await set_output("bottles", bottles)
-  await set_output("checksums", checksums)
-}
-
-
-//------------------------------------------------------------------------- funcs
-export async function bottle({ path: kegdir, pkg }: Installation, compression: 'gz' | 'xz'): Promise<Path> {
-  const tarball = useCache().path({ pkg, type: 'bottle', compression })
-  const z = compression == 'gz' ? 'z' : 'J'
-  const cwd = usePrefix()
-  const cmd = ["tar", `c${z}f`, tarball, kegdir.relative({ to: cwd })]
-  await run({ cmd, cwd })
-  return tarball
-}
-
-export async function sha256(file: Path): Promise<string> {
-  return await Deno.open(file.string, { read: true })
-    .then(file => crypto.subtle.digest("SHA-256", file.readable))
-    .then(buf => new TextDecoder().decode(encode(new Uint8Array(buf))))
-}
diff --git a/scripts/brewkit/fix-elf.ts b/scripts/brewkit/fix-elf.ts
deleted file mode 100755
index f7b48d4b..00000000
--- a/scripts/brewkit/fix-elf.ts
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-run
-  - --allow-env
-  - --allow-read
-  - --allow-write={{tea.prefix}}
-  - --import-map={{ srcroot }}/import-map.json
-dependencies:
-  nixos.org/patchelf: '*'
-  darwinsys.com/file: 5
----*/
-
-import { useCellar } from "hooks"
-import { PackageRequirement, Installation, Package } from "types"
-import { backticks, run, host, pkg as pkgutils } from "utils"
-import Path from "path"
-
-
-if (import.meta.main) {
-  const cellar = useCellar()
-  const [installation, ...pkgs] = Deno.args
-  await fix_rpaths(
-    await cellar.resolve(new Path(installation)),
-    pkgs.map(pkgutils.parse)
-  )
-}
-
-
-//TODO this is not resilient to upgrades (obv)
-//NOTE solution is to have the rpath reference major version (or more specific if poss)
-
-/// fix rpaths or install names for executables and dynamic libraries
-export default async function fix_rpaths(installation: Installation, pkgs: (Package | PackageRequirement)[]) {
-  const skip_rpaths = [
-    "go.dev",   // skipping because for some reason patchelf breaks the go binary resulting in the only output being: `Segmentation Fault`
-    "tea.xyz",  // this causes tea to pass -E/--version (and everything else?) directly to deno, making it _too_ much of a wrapper.
-  ]
-  if (skip_rpaths.includes(installation.pkg.project)) {
-    console.info(`skipping rpath fixes for ${installation.pkg.project}`)
-    return
-  }
-  console.info("doing SLOW rpath fixes…")
-  for await (const [exename] of exefiles(installation.path)) {
-    await set_rpaths(exename, pkgs, installation)
-  }
-}
-
-
-//TODO it's an error if any binary has bad rpaths before bottling
-//NOTE we should have a `safety-inspector` step before bottling to check for this sort of thing
-//  and then have virtual env manager be more specific via (DY)?LD_LIBRARY_PATH
-//FIXME somewhat inefficient for eg. git since git is mostly hardlinks to the same file
-async function set_rpaths(exename: Path, pkgs: (Package | PackageRequirement)[], installation: Installation) {
-  if (host().platform != 'linux') throw new Error()
-
-  const cellar = useCellar()
-  const our_rpaths = await Promise.all(pkgs.map(pkg => prefix(pkg)))
-
-  const cmd = await (async () => {
-    //FIXME we need this for perl
-    // however really we should just have an escape hatch *just* for stuff that sets its own rpaths
-    const their_rpaths = (await backticks({
-        cmd: ["patchelf", "--print-rpath", exename],
-      }))
-      .split(":")
-      .compact(x => x.chuzzle())
-      //^^ split has ridiculous empty string behavior
-
-    const rpaths = [...their_rpaths, ...our_rpaths]
-      .map(x => {
-        const transformed = transform(x, installation)
-        if (transformed.startsWith("$ORIGIN")) {
-          console.warn("has own special rpath", transformed)
-          return transformed
-        } else {
-          const rel_path = new Path(transformed).relative({ to: exename.parent() })
-          return `$ORIGIN/${rel_path}`
-        }
-      })
-      .uniq()
-      .join(':')
-
-    //FIXME use runtime-path since then LD_LIBRARY_PATH takes precedence which our virtual env manager requires
-    return ["patchelf", "--force-rpath", "--set-rpath", rpaths, exename]
-  })()
-
-  if (cmd.length) {
-    try {
-      await run({ cmd })
-    } catch (err) {
-      console.warn(err)
-      //FIXME allowing this error because on Linux:
-      //  patchelf: cannot find section '.dynamic'. The input file is most likely statically linked
-      // happens with eg. gofmt
-      // and we don't yet have a good way to detect and skip such files
-    }
-  }
-
-  async function prefix(pkg: Package | PackageRequirement) {
-    return (await cellar.resolve(pkg)).path.join("lib").string
-  }
-}
-
-//FIXME pretty slow since we execute `file` for every file
-// eg. perl has hundreds of `.pm` files in its `lib`
-async function* exefiles(prefix: Path): AsyncGenerator<[Path, 'exe' | 'lib']> {
-  for (const basename of ["bin", "lib", "libexec"]) {
-    const d = prefix.join(basename).isDirectory()
-    if (!d) continue
-    for await (const [exename, { isFile, isSymlink }] of d.walk()) {
-      if (!isFile || isSymlink) continue
-      const type = await exetype(exename)
-      if (type) yield [exename, type]
-    }
-  }
-}
-
-//FIXME lol use https://github.com/sindresorhus/file-type when we can
-export async function exetype(path: Path): Promise<'exe' | 'lib' | false> {
-  // speed this up a bit
-  switch (path.extname()) {
-  case ".py":
-  case ".pyc":
-  case ".pl":
-    return false
-  }
-
-  const out = await backticks({
-    cmd: ["file", "--mime-type", path.string]
-  })
-  const lines = out.split("\n")
-  const line1 = lines[0]
-  if (!line1) throw new Error()
-  const match = line1.match(/: (.*)$/)
-  if (!match) throw new Error()
-  const mime = match[1]
-
-  console.debug(mime)
-
-  switch (mime) {
-  case 'application/x-pie-executable':
-  case 'application/x-mach-binary':
-  case 'application/x-executable':
-    return 'exe'
-
-  case 'application/x-sharedlib':
-    return 'lib'
-  default:
-    return false
-  }
-}
-
-// convert a full version path to a major’d version path
-// this so we are resilient to upgrades without requiring us to rewrite binaries on install
-// since rewriting binaries would invalidate our signatures
-function transform(input: string, installation: Installation) {
-  if (input.startsWith("$ORIGIN")) {
-    // we leave these alone, trusting the build tool knew what it was doing
-    return input
-  } else if (input.startsWith(installation.path.parent().string)) {
-    // don’t transform stuff that links to this actual package
-    return input
-  } else {
-    //FIXME not very robust lol
-    return input.replace(/v(\d+)\.\d+\.\d+/, 'v$1')
-  }
-}
diff --git a/scripts/brewkit/fix-machos.rb b/scripts/brewkit/fix-machos.rb
deleted file mode 100755
index 1fa2f1dd..00000000
--- a/scripts/brewkit/fix-machos.rb
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/env ruby
-# tea brewed ruby works with a tea shebang
-# but normal ruby does not, macOS comes with ruby so we just use it
-# ---
-# dependencies:
-#   ruby-lang.org: '>=2'
-# args: [ruby]
-# ---
-
-require 'bundler/inline'
-
-gemfile do
-  source 'https://rubygems.org'
-  gem 'ruby-macho', '~> 3'
-end
-
-require 'fileutils'
-require 'pathname'
-require 'macho'
-require 'find'
-
-#TODO lazy & memoized
-$tea_prefix = ENV['TEA_PREFIX'] || `tea --prefix`.chomp
-abort "set TEA_PREFIX" if $tea_prefix.empty?
-
-$pkg_prefix = ARGV.shift
-abort "arg1 should be pkg-prefix" if $pkg_prefix.to_s.empty?
-$pkg_prefix = Pathname.new($pkg_prefix).realpath.to_s
-
-$inodes = Hash.new
-
-
-def arm?
-  case RUBY_PLATFORM
-  when /arm/, /aarch64/ then true
-  else false
-  end
-end
-
-class Fixer
-  def initialize(file)
-    @file = MachO::MachOFile.new(file)
-    @changed = false
-  end
-
-  def fix
-    case @file.filetype
-    when :dylib
-      fix_id
-      fix_rpaths
-      fix_install_names
-    when :execute
-      fix_rpaths
-      fix_install_names
-    when :bundle
-      fix_rpaths
-      fix_install_names
-    when :object
-      # noop
-    else
-      raise "unknown filetype: #{@file.filetype}: #{@file.filename}"
-    end
-
-    # M1 binaries must be signed
-    # changing the macho stuff invalidates the signature
-    # this resigns with the default adhoc signing profile
-    MachO.codesign!(@file.filename) if @changed and arm?
-  end
-
-  def fix_id
-    rel_path = Pathname.new(@file.filename).relative_path_from(Pathname.new($tea_prefix))
-    id = "@rpath/#{rel_path}"
-    if @file.dylib_id != id
-      # only do work if we must
-      @file.change_dylib_id id
-      write
-    end
-  end
-
-  def write
-    @file.write!
-    @changed = true
-  end
-
-  def links_to_other_tea_libs?
-    @file.linked_dylibs.each do |lib|
-      # starts_with? @rpath is not enough lol
-      # this because we are setting `id` to @rpath now so it's a reasonable indication
-      # that we link to tea libs, but the build system for the pkg may well do this for its
-      # own libs
-      return true if lib.start_with? $tea_prefix or lib.start_with? '@rpath'
-    end
-    return false
-  end
-
-  def fix_rpaths
-    #TODO remove spurious rpaths
-
-    dirty = false
-    rel_path = Pathname.new($tea_prefix).relative_path_from(Pathname.new(@file.filename).parent)
-    rpath = "@loader_path/#{rel_path}"
-
-    if not @file.rpaths.include? rpath and links_to_other_tea_libs?
-      @file.add_rpath rpath
-      dirty = true
-    end
-
-    while @file.rpaths.include? $tea_prefix
-      @file.delete_rpath $tea_prefix
-      dirty = true
-    end
-
-    write if dirty
-  end
-
-  def bad_install_names
-    @file.linked_dylibs.map do |lib|
-      if lib.start_with? '/'
-        if Pathname.new(lib).cleanpath.to_s.start_with? $tea_prefix
-          lib
-        end
-      elsif lib.start_with? '@rpath'
-        path = Pathname.new(lib.sub(%r{^@rpath}, $tea_prefix))
-        if path.exist?
-          lib
-        else
-          puts "warn:#{@file.filename}:#{lib}"
-        end
-      elsif lib.start_with? '@'
-        puts "warn:#{@file.filename}:#{lib}"
-        # noop
-      else
-        lib
-      end
-    end.compact
-  end
-
-  def fix_install_names
-    bad_names = bad_install_names
-    return if bad_names.empty?
-
-    def fix_tea_prefix s
-      s = Pathname.new(s).relative_path_from(Pathname.new($tea_prefix))
-      s = s.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
-      s = s.sub(%r{/(.+)\.(\d+)\.\d+\.\d+\.dylib$}, '/\1.dylib')
-      s = "@rpath/#{s}"
-      return s
-    end
-
-    bad_names.each do |old_name|
-      if old_name.start_with? $pkg_prefix
-        new_name = Pathname.new(old_name).relative_path_from(Pathname.new(@file.filename).parent)
-        new_name = "@loader_path/#{new_name}"
-      elsif old_name.start_with? '/'
-        new_name = fix_tea_prefix old_name
-      elsif old_name.start_with? '@rpath'
-        # so far we only feed bad @rpaths that are relative to the tea-prefix
-        new_name = fix_tea_prefix old_name.sub(%r{^@rpath}, $tea_prefix)
-      else
-        # assume they are meant to be relative to lib dir
-        new_name = Pathname.new($pkg_prefix).join("lib").relative_path_from(Pathname.new(@file.filename).parent)
-        new_name = "@loader_path/#{new_name}/#{old_name}"
-      end
-
-      @file.change_install_name old_name, new_name
-    end
-
-    write
-  end
-end
-
-ARGV.each do |arg|
-  Find.find(arg) do |file|
-    next unless File.file? file and !File.symlink? file
-    abs = Pathname.getwd.join(file).to_s
-    inode = File.stat(abs).ino
-    if $inodes[inode]
-      if arm?
-        # we have to code-sign on arm AND codesigning breaks the hard link
-        # so now we have to re-hardlink
-        puts "re-hardlinking #{abs} to #{$inodes[inode]}"
-        FileUtils.ln($inodes[inode], abs, :force => true)
-      end
-      # stuff like git has hardlinks to the same files
-      # avoid the work if we already did this inode
-      next
-    end
-    Fixer.new(abs).fix
-    $inodes[inode] = abs
-  rescue MachO::MagicError
-    #noop: not a Mach-O file
-  rescue MachO::TruncatedFileError
-    #noop: file can’t be a Mach-O file
-  end
-end
diff --git a/scripts/brewkit/fix-shebangs.ts b/scripts/brewkit/fix-shebangs.ts
deleted file mode 100755
index 725d532d..00000000
--- a/scripts/brewkit/fix-shebangs.ts
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/* ---
-args:
-  - deno
-  - run
-  - --allow-run
-  - --allow-env
-  - --allow-read
-  - --allow-write={{tea.prefix}}
-  - --import-map={{ srcroot }}/import-map.json
---- */
-
-import Path from "path"
-import { undent } from "utils"
-import { useFlags } from "hooks"
-
-useFlags()
-
-const has_shebang = (() => {
-  const decoder = new TextDecoder()
-  return (buf: Uint8Array) => {
-    return decoder.decode(buf) == '#!'
-  }
-})()
-
-for (const path of Deno.args) {
-  if (!Path.cwd().join(path).isFile()) continue
-
-  console.debug({ path })
-
-  const rid = await Deno.open(path, { read: true })
-  try {
-    const buf = new Uint8Array(2)
-    await rid.read(buf)
-    if (!has_shebang(buf)) continue
-  } finally {
-    rid.close()
-  }
-
-  //FIXME this could be pretty damn efficient if we can find the time
-  //NOTE as it stands this is HIDEOUSLY inefficient
-
-  const contents = await Deno.readFile(path)
-  const txt = new TextDecoder().decode(contents)
-  const [line0, ...lines] = txt.split("\n")  //lol
-
-  const match = line0.match(/^#!\s*(\/[^\s]+)/)
-  if (!match) throw new Error()
-  const interpreter = match[1]
-
-  switch (interpreter) {
-  case "/usr/bin/env":
-  case "/bin/sh":
-    console.verbose({ line0, path })
-    console.verbose("^^ skipped acceptable shebang")
-    continue
-  }
-
-  const shebang = `#!/usr/bin/env ${new Path(interpreter).basename()}`
-
-  const rewrite = undent`
-    ${shebang}
-    ${lines.join("\n")}
-    `
-
-  console.verbose({ rewrote: path, to: shebang })
-
-  await Deno.writeFile(path, new TextEncoder().encode(rewrite))
-}
diff --git a/scripts/build.ts b/scripts/build.ts
deleted file mode 100755
index 1badd2e7..00000000
--- a/scripts/build.ts
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-dependencies:
-  gnu.org/tar: 1
-  tukaani.org/xz: 5
-  sourceware.org/bzip2: 1
-args:
-  - deno
-  - run
-  - --allow-net
-  - --allow-run
-  - --allow-read
-  - --allow-write={{tea.prefix}}
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { usePantry } from "hooks"
-import { Installation } from "types"
-import { pkg as pkgutils } from "utils"
-import { useFlags, usePrefix } from "hooks"
-import { set_output } from "./utils/gha.ts"
-import build, { BuildResult } from "./build/build.ts"
-import * as ARGV from "./utils/args.ts"
-import Path from "path"
-
-useFlags()
-
-const pantry = usePantry()
-const dry = await ARGV.toArray(ARGV.pkgs())
-const gha = !!Deno.env.get("GITHUB_ACTIONS")
-const group_it = gha && dry.length > 1
-const rv: BuildResult[] = []
-
-if (usePrefix().string != "/opt") {
-  console.error({ TEA_PREFIX: usePrefix().string })
-  throw new Error("builds must be performed in /opt (try TEA_PREFIX=/opt)")
-}
-
-for (const rq of dry) {
-  const pkg = await pantry.resolve(rq)
-
-  if (group_it) {
-    console.log("::group::", pkgutils.str(pkg))
-  } else {
-    console.log({ building: pkg.project })
-  }
-
-  rv.push(await build(pkg))
-
-  if (group_it) {
-    console.log("::endgroup::")
-  }
-}
-
-const to = usePrefix()
-await set_output("pkgs", rv.map(x => pkgutils.str(x.installation.pkg)))
-await set_output("paths", rv.map(x => x.installation.path), '%0A')
-await set_output("relative-paths", rv.map(x => x.installation.path.relative({ to })))
-await set_output("srcs", rv.map(x => x.src?.relative({ to }) ?? "~"))
-await set_output("srcs-relative-paths", rv.compact(x => x.src?.relative({ to })))
-
-interface InstallationPlus extends Installation {
-  src: Path
-}
diff --git a/scripts/build/build.ts b/scripts/build/build.ts
deleted file mode 100644
index 8d875a73..00000000
--- a/scripts/build/build.ts
+++ /dev/null
@@ -1,143 +0,0 @@
-import { useCellar, usePantry, usePrefix } from "hooks"
-import { link, hydrate } from "prefab"
-import { Installation, Package } from "types"
-import useShellEnv, { expand } from "hooks/useShellEnv.ts"
-import { run, undent, host, tuplize, panic } from "utils"
-import { str as pkgstr } from "utils/pkg.ts"
-import fix_pkg_config_files from "./fix-pkg-config-files.ts"
-import Path from "path"
-import { fetch_src } from "../fetch.ts";
-
-const cellar = useCellar()
-const pantry = usePantry()
-const { platform } = host()
-
-export interface BuildResult {
-  installation: Installation
-  src?: Path
-}
-
-export default async function _build(pkg: Package): Promise<BuildResult> {
-  try {
-    return await __build(pkg)
-  } catch (e) {
-    cellar.keg(pkg).isDirectory()?.isEmpty()?.rm()  // don’t leave empty kegs around
-    throw e
-  }
-}
-
-async function __build(pkg: Package): Promise<BuildResult> {
-  const [deps, wet, resolved] = await calc_deps()
-  await clean()
-  const env = mkenv()
-  const dst = cellar.keg(pkg).mkpath()
-  const [src, src_tarball] = await fetch_src(pkg) ?? []
-  const installation = await build()
-  await link(installation)
-  await fix_binaries(installation)
-  await fix_pkg_config_files(installation)
-  return { installation, src: src_tarball }
-
-//////// utils
-  async function calc_deps() {
-    const deps = await pantry.getDeps(pkg)
-    const wet = await hydrate([...deps.runtime, ...deps.build], pkg => pantry.getDeps(pkg).then(x => x.runtime))
-    deps.runtime.push(...wet.pkgs)
-    const resolved = await Promise.all(wet.pkgs.map(pkg => cellar.resolve(pkg)))
-    return tuplize(deps, wet, resolved)
-  }
-
-  async function clean() {
-    const installation = await should_clean()
-    if (installation) {
-      console.log({ cleaning: installation.path })
-      for await (const [path] of installation.path.ls()) {
-        // we delete contents rather than the directory itself to prevent broken vx.y symlinks
-        path.rm({ recursive: true })
-      }
-    }
-
-    async function should_clean() {
-      // only required as we aren't passing everything into hydrate
-      const depends_on_self = () => deps.build.some(x => x.project === pkg.project)
-      const wet_dep = () => wet.pkgs.some(x => x.project === pkg.project)
-
-      // provided this package doesn't transitively depend on itself (yes this happens)
-      // clean out the destination prefix first
-      if (!wet.bootstrap_required.has(pkg.project) && !depends_on_self() && !wet_dep()) {
-        return await cellar.has(pkg)
-      }
-    }
-  }
-
-  function mkenv() {
-    const env = useShellEnv({ installations: resolved })
-
-    if (platform == 'darwin') {
-      env['MACOSX_DEPLOYMENT_TARGET'] = ['11.0']
-    }
-
-    return env
-  }
-
-  async function build() {
-    const bld = src ?? Path.mktmp({ prefix: pkg.project }).join("wd").mkdir()
-    const sh = await pantry.getScript(pkg, 'build', resolved)
-
-    const cmd = bld.parent().join("build.sh").write({ force: true, text: undent`
-      #!/bin/bash
-
-      set -e
-      set -o pipefail
-      set -x
-      cd "${bld}"
-
-      export SRCROOT="${bld}"
-      ${expand(env)}
-
-      ${/*FIXME hardcoded paths*/ ''}
-      export PATH=/opt/tea.xyz/var/pantry/scripts/brewkit:"$PATH"
-
-      ${sh}
-      `
-    }).chmod(0o500)
-
-    // copy in auxiliary files from pantry directory
-    for await (const [path, { isFile }] of pantry.getYAML(pkg).path.parent().ls()) {
-      if (isFile) {
-        path.cp({ into: bld.join("props").mkdir() })
-      }
-    }
-
-    await run({ cmd })  // THE BUILD
-
-    return { path: dst, pkg }
-  }
-
-  async function fix_binaries(installation: Installation) {
-    const prefix = usePrefix().join("tea.xyz/var/pantry/scripts/brewkit")
-    const env = {
-      TEA_PREFIX: usePrefix().string,
-    }
-    switch (host().platform) {
-    case 'darwin':
-      return await run({
-        cmd: [
-          prefix.join('fix-machos.rb'),
-          installation.path,
-          ...['bin', 'lib', 'libexec'].map(x => installation.path.join(x)).filter(x => x.isDirectory())
-        ],
-        env
-      })
-    case 'linux':
-      return await run({
-        cmd: [
-          prefix.join('fix-elf.ts'),
-          installation.path,
-          ...[...deps.runtime, pkg].map(pkgstr)
-        ],
-        env
-      })
-    }
-  }
-}
diff --git a/scripts/build/fix-pkg-config-files.ts b/scripts/build/fix-pkg-config-files.ts
deleted file mode 100644
index cea260db..00000000
--- a/scripts/build/fix-pkg-config-files.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { Installation } from "types"
-import Path from "path"
-import "utils"
-
-export default async function fix_pkg_config_files(installation: Installation) {
-  for await (const pcfile of find_pkg_config_files(installation)) {
-    const orig = await pcfile.read()
-    const relative_path = installation.path.relative({ to: pcfile.parent() })
-    const text = orig.replace(installation.path.string, `\${pcfiledir}/${relative_path}`)
-    if (orig !== text) {
-      console.verbose({ fixed: pcfile })
-      pcfile.write({ text, force: true })
-    }
-  }
-}
-
-//NOTE currently we only support pc files in lib/pkgconfig
-// we aim to standardize on this but will relent if a package is found
-// that uses share and other tools that build against it only accept that
-async function *find_pkg_config_files(installation: Installation): AsyncIterable<Path> {
-  const pcdir = installation.path.join("lib/pkgconfig")
-  if (!pcdir.isDirectory()) return
-  for await (const [path, { isFile }] of pcdir.ls()) {
-    if (isFile && path.extname() == ".pc") {
-      yield path
-    }
-  }
-}
diff --git a/scripts/deps.ts b/scripts/deps.ts
deleted file mode 100755
index ee69c0de..00000000
--- a/scripts/deps.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-read
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { PackageRequirement } from "types"
-import { usePantry, useFlags } from "hooks"
-import { hydrate } from "prefab"
-import { pkg } from "utils"
-
-const pantry = usePantry()
-
-useFlags()
-
-const mode: 'build' | 'install' = Deno.args.includes("-b") ? 'build' : 'install'
-const get_deps = async (pkg: PackageRequirement) => {
-  const deps = await pantry.getDeps(pkg)
-  switch (mode) {
-  case 'build':
-    return [...deps.build, ...deps.runtime]
-  case 'install':
-    return deps.runtime
-  }
-}
-
-const dry = Deno.args.compact(arg => !arg.startsWith('-') && pkg.parse(arg))
-const explicit = new Set(dry.map(x => x.project))
-const wet = await hydrate(dry, get_deps)
-const gas = wet.pkgs.compact(({ project }) => {
-  if (Deno.args.includes('-i')) {
-    return project
-  } else if (!explicit.has(project)) {
-    return project
-  }
-})
-
-if (Deno.env.get("GITHUB_ACTIONS")) {
-  console.log(`::set-output name=pkgs::${gas.join(" ")}\n`)
-} else {
-  console.log(gas.join("\n"))
-}
diff --git a/scripts/each.ts b/scripts/each.ts
deleted file mode 100755
index 548acd29..00000000
--- a/scripts/each.ts
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-run
-  - --allow-read
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-const args = [...Deno.args]
-const via = args.shift()
-
-for (const arg of args) {
-  const proc = Deno.run({
-    stdout: "null", stderr: "null",
-    cmd: [via!, arg]
-  })
-  const status = await proc.status()
-  if (status.code !== 0) {
-    console.error(`${arg} ❌`)
-  } else {
-    console.info(`${arg} ✅`)
-  }
-}
diff --git a/scripts/fetch.ts b/scripts/fetch.ts
deleted file mode 100755
index 501e258b..00000000
--- a/scripts/fetch.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-net
-  - --allow-run
-  - --allow-read
-  - --allow-write={{ tea.prefix }}
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-//TODO verify the sha
-
-import { usePantry, useCache, useDownload, useCellar, useSourceUnarchiver, useOffLicense } from "hooks"
-import { panic, print } from "utils"
-import { Stowage, Package } from "types"
-import * as ARGV from "./utils/args.ts"
-import Path from "path"
-
-const pantry = usePantry()
-const { download } = useDownload()
-
-export async function fetch_src(pkg: Package): Promise<[Path, Path] | undefined> {
-  const dstdir = useCellar().shelf(pkg.project).join("src", `v${pkg.version}`)
-  const dist = await pantry.getDistributable(pkg)
-  if (!dist) return
-  const { url, stripComponents } = dist
-  const stowage: Stowage = { pkg, type: 'src', extname: url.path().extname() }
-  const dst = useCache().path(stowage)
-  const zipfile = await (async () => {
-    try {
-      // first try our mirror
-      const src = useOffLicense('s3').url(stowage)
-      return await download({ dst, src })
-    } catch {
-      // oh well, try original location then
-      return await download({ dst, src: url })
-    }
-  })()
-  await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
-  return [dstdir, zipfile]
-}
-
-if (import.meta.main) {
-  for await (let pkg of ARGV.pkgs()) {
-    pkg = await pantry.resolve(pkg)
-    const [dstdir] = await fetch_src(pkg) ?? panic()
-    await print(`${dstdir}\n`)
-  }
-}
diff --git a/scripts/filter.ts b/scripts/filter.ts
deleted file mode 100755
index fdc46a41..00000000
--- a/scripts/filter.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-read
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { useCellar, useFlags } from "hooks"
-import * as ARGV from "./utils/args.ts"
-
-useFlags()
-
-/// filters out everything that is already installed
-
-const cellar = useCellar()
-const desired_filter = !!Deno.env.get("INVERT")
-
-const rv: string[] = []
-for await (const pkg of ARGV.pkgs()) {
-  const isInstalled = !!await cellar.has(pkg)
-  if (isInstalled == desired_filter) {
-    rv.push(pkg.project)
-  }
-}
-
-if (Deno.env.get("GITHUB_ACTIONS")) {
-  console.log(`::set-output name=pkgs::${rv.join(" ")}\n`)
-} else {
-  console.log(rv.join("\n"))
-}
diff --git a/scripts/fixup-checksums.ts b/scripts/fixup-checksums.ts
deleted file mode 100755
index 3eb663d3..00000000
--- a/scripts/fixup-checksums.ts
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-net
-  - --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_S3_BUCKET
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { S3, S3Object } from "s3"
-import { Sha256 } from "deno/hash/sha256.ts"
-import { readerFromStreamReader, readAll } from "deno/streams/conversion.ts"
-import Path from "../src/vendor/Path.ts"
-
-const s3 = new S3({
-  accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
-  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
-  region: "us-east-1",
-});
-
-const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);
-
-for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
-  const keys = get_keys(pkg)
-  if (!keys) continue
-
-  console.log({ checking: keys.checksum });
-
-  if (!await bucket.headObject(keys.checksum.string)) {
-    console.log({ missing: keys.checksum })
-
-    const reader = (await bucket.getObject(keys.bottle.string))!.body.getReader()
-    const contents = await readAll(readerFromStreamReader(reader))
-    const sha256sum = new Sha256().update(contents).toString()
-    const body = new TextEncoder().encode(`${sha256sum} ${keys.bottle.basename()}`)
-    await bucket.putObject(keys.checksum.string, body)
-
-    console.log({ uploaded: keys.checksum })
-  }
-}
-
-function get_keys(pkg: S3Object): { bottle: Path, checksum: Path } | undefined {
-  if (!pkg.key) return
-  if (!/\.tar\.[gx]z$/.test(pkg.key)) return
-  return {
-    bottle: new Path(pkg.key),
-    checksum: new Path(`${pkg.key}.sha256sum`)
-  }
-}
\ No newline at end of file
diff --git a/scripts/inventory.ts b/scripts/inventory.ts
deleted file mode 100755
index 9a1a0a0d..00000000
--- a/scripts/inventory.ts
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-net
-  - --allow-read
-  - --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_S3_BUCKET
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { S3 } from "s3"
-import { stringify as yaml } from "deno/encoding/yaml.ts"
-import { stringify as csv } from "deno/encoding/csv.ts"
-import { Inventory } from "hooks/useInventory.ts"
-import SemVer, * as semver from "semver"
-
-const s3 = new S3({
-  accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
-  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
-  region: "us-east-1",
-});
-
-const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
-
-const inventory: Inventory = {}
-const flat = []
-
-for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
-  if (!/\.tar\.[gx]z$/.test(pkg.key ?? '')) { continue }
-
-  const matches = pkg.key!.match(new RegExp(`^(.*)/(.*)/(.*)/v(${semver.regex.source})\\.tar\\.[xg]z$`))
-  if (!matches) { continue }
-
-  const [_, project, platform, arch, version] = matches
-
-  if (!inventory[project]) inventory[project] = {}
-  if (!inventory[project][platform]) inventory[project][platform] = {}
-  inventory[project][platform][arch] = [...(inventory[project]?.[platform]?.[arch] ?? []), version]
-  flat.push({ project, platform, arch, version })
-}
-
-/// For ultimate user-friendliness, we store this data 4 ways:
-/// YAML, JSON, CSV, flat text
-
-const te = new TextEncoder()
-
-// YAML: type Inventory
-
-const yml = te.encode(yaml(inventory))
-
-bucket.putObject("versions.yml", yml)
-
-// JSON: type Inventory
-
-const json = te.encode(JSON.stringify(inventory))
-
-bucket.putObject("versions.json", json)
-
-// CSV: project,platform,arch,version
-
-const csvData = te.encode(csv(flat, { columns: ["project", "platform", "arch", "version"] }))
-
-bucket.putObject("versions.csv", csvData)
-
-// TXT: per project/platform/arch, newline-delimited
-
-for (const [project, platforms] of Object.entries(inventory)) {
-  for (const [platform, archs] of Object.entries(platforms)) {
-    for (const [arch, versions] of Object.entries(archs)) {
-      const v = versions.map(x => new SemVer(x)).sort(semver.compare)
-      const txt = te.encode(v.join("\n"))
-      console.log(project, platform, arch, v)
-      bucket.putObject(`${project}/${platform}/${arch}/versions.txt`, txt)
-    }
-  }
-}
-
-//end
diff --git a/scripts/ls-aws-s3.ts b/scripts/ls-aws-s3.ts
deleted file mode 100755
index b3007616..00000000
--- a/scripts/ls-aws-s3.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-env
-  - --allow-net
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { S3 } from "s3"
-import SemVer, * as semver from "semver"
-import { format } from "deno/datetime/mod.ts"
-
-const sortByModified = Deno.args.includes("-m")
-const reverse = Deno.args.includes("-r")
-const fullMatrix = Deno.args.includes("-x")
-const source = Deno.args.includes("-s")
-
-if (source && fullMatrix) {
-  throw new Error("incompatible flags (-x -s)")
-}
-
-const s3 = new S3({
-  accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
-  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
-  region: "us-east-1",
-})
-
-const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
-
-let output: FileInfo[] = []
-
-for await (const obj of bucket.listAllObjects({ batchSize: 200 })) {
-  const { key, lastModified } = obj
-  if (!key?.match(/\.tar\.[gx]z$/)) { continue }
-  output.push({ key: key!, lastModified: lastModified! })
-}
-
-if (fullMatrix) {
-  produceMatrix(output)
-} else {
-  output = output.filter(x => {
-    const match = x.key.match(new RegExp("/(darwin|linux)/(aarch64|x86-64)/v.*\\.tar\\.(x|g)z"))
-    switch (source) {
-    case true: return !match
-    case false: return match
-    }
-  })
-
-  output.sort((a, b) => {
-    switch (sortByModified) {
-    case true: return a.lastModified.valueOf() - b.lastModified.valueOf()
-    case false: return a.key < b.key ? -1 : 1
-    }
-  })
-
-  if (reverse) { output.reverse() }
-  console.table(output)
-}
-
-interface FileInfo {
-  key: string
-  lastModified: Date
-}
-
-function produceMatrix(objects: FileInfo[]): void {
-  const matrix = new Map()
-  for (const { key, lastModified } of objects) {
-    const match = key.match(new RegExp("(.*)/(darwin|linux)/(aarch64|x86-64)/v(.*)\\.tar\\.(x|g)z"))
-    if (!match) continue
-    const [_, project, _platform, _arch, _v] = match
-    const flavor = `${_platform}/${_arch}`
-    const version = semver.parse(_v)
-    if (!version) continue
-    const stats = matrix.get(project) || { project }
-
-    if (version.gt(stats[flavor]?.[0] || new SemVer([0, 0, 0]))) {
-      stats[flavor] = [version, format(lastModified, "yyyy-MM-dd HH:mm")]
-    }
-
-    matrix.set(project, stats)
-  }
-
-  const output = [...matrix.values()].map(o => ({
-    project: o.project,
-    'darwin/aarch64': `${o['darwin/aarch64']?.join(": ")}`,
-    'darwin/x86-64': `${o['darwin/x86-64']?.join(": ")}`,
-    'linux/aarch64': `${o['linux/aarch64']?.join(": ")}`,
-    'linux/x86-64': `${o['linux/x86-64']?.join(": ")}`
-  }))
-  output.sort((a, b) => a.project < b.project ? -1 : 1)
-  console.table(output)
-}
diff --git a/scripts/ls.ts b/scripts/ls.ts
deleted file mode 100755
index b58659dc..00000000
--- a/scripts/ls.ts
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-// returns all pantry entries as `[{ name, path }]`
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-env
-  - --allow-read
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import Path from "path"
-import { useFlags, usePrefix } from "hooks"
-
-const prefix = new Path(`${usePrefix()}/tea.xyz/var/pantry/projects`)
-
-interface Entry {
-  project: string
-  path: Path
-}
-
-
-//------------------------------------------------------------------------- funcs
-export async function* ls(): AsyncGenerator<Entry> {
-  for await (const path of _ls_pantry(prefix)) {
-    yield {
-      project: path.parent().relative({ to: prefix }),
-      path
-    }
-  }
-}
-
-async function* _ls_pantry(dir: Path): AsyncGenerator<Path> {
-  if (!dir.isDirectory()) throw new Error()
-
-  for await (const [path, { name, isDirectory }] of dir.ls()) {
-    if (isDirectory) {
-      for await (const x of _ls_pantry(path)) {
-        yield x
-      }
-    } else if (name === "package.yml") {
-      yield path
-    }
-  }
-}
-
-//-------------------------------------------------------------------------- main
-if (import.meta.main) {
-  const flags = useFlags()
-
-  const rv: Entry[] = []
-  for await (const item of ls()) {
-    rv.push(item)
-  }
-
-  if (Deno.env.get("GITHUB_ACTIONS")) {
-    const projects = rv.map(x => x.project).join(":")
-    console.log(`::set-output name=projects::${projects}`)
-  } else if (flags.json) {
-    const obj = rv.map(({ path, project }) => ({ path: path.string, project }))
-    const out = JSON.stringify(obj, null, 2)
-    console.log(out)
-  } else {
-    console.log(rv.map(x => x.project).join("\n"))
-  }
-}
diff --git a/scripts/sort.ts b/scripts/sort.ts
deleted file mode 100755
index bec06617..00000000
--- a/scripts/sort.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-read
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-// sorts input for building
-// does a full hydration, but only returns ordered, dry packages
-
-import { pkg } from "utils"
-import { usePantry, useFlags } from "hooks"
-import { hydrate } from "prefab"
-import { PackageRequirement } from "types"
-import * as ARGV from "./utils/args.ts"
-
-const flags = useFlags()
-const pantry = usePantry()
-
-const dry = await ARGV.toArray(ARGV.pkgs())
-
-const wet = await hydrate(dry, async (pkg, dry) => {
-  const deps = await pantry.getDeps(pkg)
-  return dry ? [...deps.build, ...deps.runtime] : deps.runtime
-})
-
-if (Deno.env.get("GITHUB_ACTIONS")) {
-  const massage = (input: PackageRequirement[]) =>
-    input.map(p => {
-      let out = pkg.str(p)
-      // shell quoting via GHA is weird and we don’t fully understand it
-      if (/[<>]/.test(out)) out = `"${out}"`
-      return out
-    }).join(" ")
-
-  console.log(`::set-output name=pkgs::${massage(wet.dry)}`)
-  console.log(`::set-output name=pre-install::${massage(wet.wet)}`)
-} else {
-  const gas = wet.dry.map(x => pkg.str(x))
-  if (flags.json) {
-    console.log(gas)
-  } else {
-    console.log(gas.join("\n"))
-  }
-}
diff --git a/scripts/test.ts b/scripts/test.ts
deleted file mode 100755
index 868daf88..00000000
--- a/scripts/test.ts
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-net
-  - --allow-run
-  - --allow-read
-  - --allow-write
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { Installation, Package, PackageRequirement } from "types"
-import { usePantry, useFlags, usePrefix } from "hooks"
-import useShellEnv, { expand } from "hooks/useShellEnv.ts"
-import { run, undent, pkg as pkgutils } from "utils"
-import { resolve, install, hydrate, link } from "prefab"
-import Path from "path"
-import * as ARGV from "./utils/args.ts"
-
-const { debug } = useFlags()
-
-const pantry = usePantry()
-
-for await (const pkg of ARGV.installs()) {
-  await test(pkg)
-}
-
-async function test(self: Installation) {
-  const yml = await pantry.getYAML(self.pkg).parse()
-  const deps = await deps4(self.pkg)
-  const installations = await prepare(deps)
-
-  // if we are testing multiple packages, they might not
-  // get linked when they're tested.
-  await link(self)
-
-  const env = useShellEnv({ installations: [self, ...installations] })
-
-  let text = undent`
-    #!/bin/bash
-
-    set -e
-    set -o pipefail
-    set -x
-
-    export TEA_PREFIX=${usePrefix()}
-
-    ${expand(env)}
-
-    `
-
-  const tmp = Path.mktmp({ prefix: pkgutils.str(self.pkg) })
-
-  try {
-    if (yml.test.fixture) {
-      const fixture = tmp.join("fixture.tea").write({ text: yml.test.fixture.toString() })
-      text += `export FIXTURE="${fixture}"\n\n`
-    }
-
-    const cwd = tmp.join("wd").mkdir()
-
-    text += `cd "${cwd}"\n\n`
-
-    text += await pantry.getScript(self.pkg, 'test', installations)
-    text += "\n"
-
-    for await (const [path, { name, isFile }] of pantry.getYAML(self.pkg).path.parent().ls()) {
-      if (isFile && name != 'package.yml') path.cp({ into: cwd })
-    }
-
-    const cmd = tmp
-      .join("test.sh")
-      .write({ text, force: true })
-      .chmod(0o500)
-    await run({ cmd, cwd })
-    tmp.rm({ recursive: true })
-  } catch (e) {
-    console.info("due to error, didn’t delete:", tmp)
-    throw e
-  }
-}
-
-
-//TODO install step in CI should do this for test requirements also
-async function prepare(reqs: (Package | PackageRequirement)[]) {
-  const { pending, installed } = await resolve(reqs)
-  for await (const pkg of pending) {
-    const installation = await install(pkg)
-    await link(installation)
-    installed.push(installation)
-  }
-  return installed
-}
-
-async function deps4(pkg: Package) {
-  return (await hydrate(pkg, async (pkg, dry) => {
-    const { runtime, test } = await pantry.getDeps(pkg)
-    return dry ? [...runtime, ...test] : runtime
-  })).pkgs
-}
diff --git a/scripts/upload-sync.ts b/scripts/upload-sync.ts
deleted file mode 100755
index d759b6ee..00000000
--- a/scripts/upload-sync.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-read
-  - --allow-net
-  - --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_S3_BUCKET,TEA_PREFIX
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { readAll, readerFromStreamReader } from "deno/streams/mod.ts"
-import { useCache, useOffLicense } from "hooks"
-import { Package } from "types"
-import { Sha256 } from "deno/hash/sha256.ts"
-import { S3 } from "s3"
-import Path from "path"
-
-const s3 = new S3({
-  accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
-  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
-  region: "us-east-1",
-})
-
-const offy = useOffLicense('s3')
-const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
-
-for (const stowed of await useCache().ls()) {
-  const url = offy.url(stowed)
-  const key = offy.key(stowed)
-
-  console.log({ checking: url })
-
-  const inRepo = await bucket.headObject(key)
-  const repoChecksum = inRepo ? await checksum(`${url}.sha256sum`) : undefined
-
-  // path.read() returns a string; this is easier to get a UInt8Array
-  const contents = await Deno.readFile(stowed.path.string)
-  const sha256sum = new Sha256().update(contents).toString()
-
-  if (!inRepo || repoChecksum !== sha256sum) {
-    const basename = url.path().basename()
-    const body = new TextEncoder().encode(`${sha256sum} ${basename}`)
-
-    console.log({ uploading: url })
-
-    await bucket.putObject(key, contents)
-    await bucket.putObject(`${key}.sha256sum`, body)
-
-    console.log({ uploaded: url })
-  }
-}
-
-async function checksum(url: string) {
-  const rsp = await fetch(url)
-  if (!rsp.ok) throw new Error(`404-not-found: ${url}`)
-  const rdr = rsp.body?.getReader()
-  if (!rdr) throw new Error(`Couldn’t read: ${url}`)
-  const r = await readAll(readerFromStreamReader(rdr))
-  return new TextDecoder().decode(r).split(' ')[0]
-}
-
-type RV = Package & { bottle: Path }
diff --git a/scripts/upload.ts b/scripts/upload.ts
deleted file mode 100755
index e1ec1466..00000000
--- a/scripts/upload.ts
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env -S tea -E
-
-/*---
-args:
-  - deno
-  - run
-  - --allow-net
-  - --allow-read
-  - --allow-env
-  - --import-map={{ srcroot }}/import-map.json
----*/
-
-import { S3 } from "s3"
-import { pkg as pkgutils } from "utils"
-import { useFlags, useOffLicense, useCache, usePrefix } from "hooks"
-import { Package, PackageRequirement } from "types"
-import SemVer, * as semver from "semver"
-import { dirname, basename } from "deno/path/mod.ts"
-import Path from "path"
-import { set_output } from "./utils/gha.ts"
-import { sha256 } from "./bottle.ts"
-
-useFlags()
-
-if (Deno.args.length === 0) throw new Error("no args supplied")
-
-const s3 = new S3({
-  accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
-  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
-  region: "us-east-1",
-})
-
-const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
-const encode = (() => { const e = new TextEncoder(); return e.encode.bind(e) })()
-const cache = useCache()
-
-const pkgs = args_get("pkgs").map(pkgutils.parse).map(assert_pkg)
-const srcs = args_get("srcs")
-const bottles = args_get("bottles")
-const checksums = args_get("checksums")
-
-function args_get(key: string): string[] {
-  const it = Deno.args[Symbol.iterator]()
-  while (true) {
-    const { value, done } = it.next()
-    if (done) throw new Error()
-    if (value === `--${key}`) break
-  }
-  const rv: string[] = []
-  while (true) {
-    const { value, done } = it.next()
-    if (done) return rv
-    if (value.startsWith('--')) return rv
-    rv.push(value)
-  }
-}
-
-const rv: string[] = []
-const put = async (key: string, body: string | Path | Uint8Array) => {
-  console.log({ uploading: body, to: key })
-  rv.push(`/${key}`)
-  if (body instanceof Path) {
-    body = await Deno.readFile(body.string)
-  } else if (typeof body === "string") {
-    body = encode(body)
-  }
-  return bucket.putObject(key, body)
-}
-
-for (const [index, pkg] of pkgs.entries()) {
-  const bottle = new Path(bottles[index])
-  const checksum = checksums[index]
-  const stowed = cache.decode(bottle)!
-  const key = useOffLicense('s3').key(stowed)
-  const versions = await get_versions(key, pkg)
-
-  //FIXME stream the bottle (at least) to S3
-  await put(key, bottle)
-  await put(`${key}.sha256sum`, `${checksum} ${basename(key)}`)
-  await put(`${dirname(key)}/versions.txt`, versions.join("\n"))
-
-  // mirror the sources
-  if (srcs[index] != "~") {
-    const src = usePrefix().join(srcs[index])
-    const srcKey = useOffLicense('s3').key({
-      pkg: stowed.pkg,
-      type: "src",
-      extname: src.extname()
-    })
-    const srcChecksum = await sha256(src)
-    const srcVersions = await get_versions(srcKey, pkg)
-    await put(srcKey, src)
-    await put(`${srcKey}.sha256sum`, `${srcChecksum} ${basename(srcKey)}`)
-    await put(`${dirname(srcKey)}/versions.txt`, srcVersions.join("\n"))
-  }
-}
-
-await set_output('cf-invalidation-paths', rv)
-
-//end
-
-async function get_versions(key: string, pkg: Package): Promise<SemVer[]> {
-  const prefix = dirname(key)
-  const rsp = await bucket.listObjects({ prefix })
-
-  //FIXME? API isn’t clear if these nulls indicate failure or not
-  //NOTE if this is a new package then some empty results is expected
-  const got = rsp
-    ?.contents
-    ?.compact(x => x.key)
-    .map(x => basename(x))
-    .filter(x => x.match(/v.*\.tar\.gz$/))
-    .map(x => x.replace(/v(.*)\.tar\.gz/, "$1"))
-    ?? []
-
-  // have to add pkg.version as put and get are not atomic
-  return [...new Set([...got, pkg.version.toString()])]
-    .compact(semver.parse)
-    .sort(semver.compare)
-}
-
-function assert_pkg(pkg: Package | PackageRequirement) {
-  if ("version" in pkg) {
-    return pkg
-  } else {
-    return {
-      project: pkg.project,
-      version: new SemVer(pkg.constraint)
-    }
-  }
-}
diff --git a/scripts/utils/args.ts b/scripts/utils/args.ts
deleted file mode 100644
index bd4ba459..00000000
--- a/scripts/utils/args.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-import { Installation, Package, PackageRequirement } from "types"
-import { useCellar } from "hooks"
-import { parse } from "utils/pkg.ts"
-
-/// processes Deno.args unless STDIN is not a TTY and has input
-export async function *args(): AsyncGenerator<string> {
-  if (Deno.isatty(Deno.stdin.rid)) {
-    for (const arg of Deno.args) {
-      yield arg
-    }
-  } else {
-    let yielded_something = false
-    const buf = new Uint8Array(10)
-    const decode = (() => { const d = new TextDecoder(); return d.decode.bind(d) })()
-    let n: number | null
-    let txt = ''
-    const rx = /\s*(.*?)\s+/
-    while ((n = await Deno.stdin.read(buf)) !== null) {
-      txt += decode(buf.subarray(0, n))
-      while (true) {
-        const match = txt.match(rx)
-        if (!match) break
-        yield match[1]
-        txt = txt.slice(match[0].length)
-        yielded_something = true
-      }
-    }
-    if (txt) {
-      yield txt
-    } else if (!yielded_something) {
-      for (const arg of Deno.args) {
-        yield arg
-      }
-    }
-  }
-}
-
-export async function *pkgs(): AsyncGenerator<PackageRequirement> {
-  for await (const arg of args()) {
-    const match = arg.match(/projects\/(.*)\/package.yml/)
-    const project = match ? match[1] : arg
-    yield parse(project)
-  }
-}
-
-export async function *installs(): AsyncGenerator<Installation> {
-  const cellar = useCellar()
-  for await (const pkg of pkgs()) {
-    yield await cellar.resolve(pkg)
-  }
-}
-
-export async function toArray<T>(input: AsyncGenerator<T>) {
-  const rv: T[] = []
-  for await (const i of input) {
-    rv.push(i)
-  }
-  return rv
-}
diff --git a/scripts/utils/gha.ts b/scripts/utils/gha.ts
deleted file mode 100644
index 3f7926d3..00000000
--- a/scripts/utils/gha.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-
-const e = new TextEncoder()
-const encode = e.encode.bind(e)
-
-export function set_output<T>(name: string, arr: T[], separator = " ") {
-  const value = arr.map(escape).join(separator)
-  const txt = `::set-output name=${name}::${value}`
-  return Deno.stdout.write(encode(`${txt}\n`))
-}
-
-//TODO HTML escapes probs
-function escape<T>(input: T): string {
-  const out = `${input}`
-  if (/[<>~]/.test(out)) {
-    return `"${out}"`
-  } else {
-    return out
-  }
-}
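
For context, a minimal sketch of what the deleted `set_output` helper prints when the other scripts call it. The package names and paths here are hypothetical, but the `%0A` separator is what `build.ts` actually passes for its `paths` output, and values containing `<`, `>`, or `~` are quoted by `escape` exactly as shown in `gha.ts` above:

```ts
import { set_output } from "./utils/gha.ts"

// hypothetical values: a plain project and a constraint containing `>`
await set_output("pkgs", ["zlib.net^1.2", "openssl.org>=1.1"])
// prints: ::set-output name=pkgs::zlib.net^1.2 "openssl.org>=1.1"

// build.ts joins multi-line outputs with %0A (GHA's encoded newline)
await set_output("paths", ["/opt/zlib.net/v1.2.13", "/opt/curl.se/v8.1.2"], "%0A")
// prints: ::set-output name=paths::/opt/zlib.net/v1.2.13%0A/opt/curl.se/v8.1.2
```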