mirror of https://github.com/ivabus/pantry
synced 2024-11-26 10:25:05 +03:00

“superenv” (#185)

* fixes for dylib ids on darwin (sadly elaborate)
* wip

This commit is contained in: parent 115e2fd642, commit f836b167a1
12 changed files with 237 additions and 88 deletions
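Most of the darwin work below rewrites Mach-O dylib ids and rpaths via the ruby-macho gem that fix-machos.rb already pulls in. As a minimal sketch of the idea (this is not the pantry script itself; the prefix fallback and the example dylib path are invented):

  #!/usr/bin/env ruby
  # Sketch: point a dylib's id at an @rpath-relative location under the tea prefix.
  require 'macho'
  require 'pathname'

  tea_prefix = ENV['TEA_PREFIX'] || '/opt'                            # assumed default
  dylib = ARGV[0] || "#{tea_prefix}/zlib.net/v1.2.13/lib/libz.dylib"  # made-up example

  file = MachO.open(dylib)
  rel  = Pathname.new(dylib).relative_path_from(Pathname.new(tea_prefix))
  id   = "@rpath/#{rel}"

  if file.dylib_id != id        # only do work if we must
    file.change_dylib_id id
    file.write!                 # persist the rewritten load commands
  end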

.github/workflows/build.yml (vendored): 5 changed lines

@@ -96,7 +96,10 @@ jobs:
       # tarring ourselves ∵ GHA-artifacts (ludicrously) lose permissions
       # /ref https://github.com/actions/upload-artifact/issues/38
-      - run: tar czf $GITHUB_WORKSPACE/artifacts.tgz ${{ steps.build.outputs.relative-paths }} ${{ steps.build.outputs.srcs }}
+      - run:
+          tar czf $GITHUB_WORKSPACE/artifacts.tgz
+          ${{ steps.build.outputs.relative-paths }}
+          ${{ steps.build.outputs.srcs-actual }}
         working-directory: ${{ steps.tea.outputs.prefix }}

       - name: upload artifacts

README.md: 28 changed lines

@@ -4,13 +4,23 @@ tea is a decentralized package manager—this requires a decentralized package
 registry. We’re releasing our testnet later this year. In the meantime the
 pantry is our stop-gap solution.

-# Entry Requirements
+# Getting Started
+
+You’ll need a `GITHUB_TOKEN` in your environment since we use the GitHub
+GraphQL API to fetch versions.
+
+
+
+# Meta
+
+## Entry Requirements

 This pantry only accepts devtools that we feel confident we can maintain.
 Quality and robustness are our goals. If you want other tools you can maintain
 your own pantry and we’ll build the binaries.

-# Philosophy
+## Philosophy

 Fundamentally we're coming at this from the perspective that the maintainer
 should decide how their software is distributed and we’re making the tools so

@@ -18,12 +28,20 @@ they can do that in cross platform way.

 This repo is a bootstrap and is stubs.

-# Naming
+## Naming

 We use fully-qualified names. Naming is hard, and the world has spent a while
 trying to get it right. In this kind of domain the *correct choice* is
 to namespace.

+## Packaging Knowledgebase
+
+Our [wiki](/wiki) is our knowledgebase. Fill it with the fruits of your
+knowledge. Please keep it tidy.
+
 # Coming Soon

 ## Maintaining Your Own Pantry

@@ -39,10 +57,8 @@ If you have a website you can host your own `package.yml` there and we will
 build binaries for you. This feature is coming soon and will require
 signed, versioned tags and signed source tarballs.

-# Packaging Knowledgebase
-
-Our [wiki](/wiki) is our knowledgebase. Fill it with the fruits of your
-knowledge. Please keep it tidy.
-
 # Dependencies

projects/tea.xyz/gx/cc/cc.rb: new executable file, 30 lines

@@ -0,0 +1,30 @@
+#!/usr/bin/ruby
+
+# - we inject our rpath to ensure our libs our found
+# - for bottles we replace that in fix-machos.rb with a relocatable prefix
+# - in general usage we don’t, so if the user needs to distribute their artifacts,
+#   they will need to fix them first, but that's typical anyway.
+# - for tea-envs the user probably won’t use tea.xyz/gx/cc even though they *should*
+#   and thus we set LDFLAGS in the hope that they will be picked up and the rpath set
+
+$tea_prefix = ENV['TEA_PREFIX'] || `tea --prefix`.chomp
+exe = File.basename($0)
+
+# remove duplicates since this in fact embeds the rpath multiple times
+args = ARGV.map do |arg|
+  arg unless arg == "-Wl,-rpath,#$tea_prefix"
+end.compact
+
+for arg in args do
+  # figuring out what “mode” we are operating in is hard
+  # we don’t want to add this linker command always because it causes a warning to be
+  # output if we are not outputing executables/dylibs and this warning can break
+  # configure scripts, however the below is not fully encompassing
+  # we aren't sure what the rules are TBH, possibly it is as simple as if the output (`-o`)
+  # is a .o then we don’t add the rpath
+  if arg.start_with? '-l' or arg.end_with? '.dylib'
+    exec "/usr/bin/#{exe}", *args, "-Wl,-rpath,#$tea_prefix"
+  end
+end
+
+exec "/usr/bin/#{exe}", *args
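As a rough illustration of the wrapper’s link-step heuristic (not part of the commit; the argument lists and the prefix below are invented), only invocations that mention a -l flag or a .dylib get the extra rpath argument:

  # Illustration only: mirrors cc.rb's decision about when to append -Wl,-rpath.
  tea_prefix = ENV['TEA_PREFIX'] || '/opt'

  def link_step?(args)
    # same heuristic as the wrapper: a -l flag or a .dylib on the command line
    args.any? { |a| a.start_with?('-l') || a.end_with?('.dylib') }
  end

  [
    %w[-c foo.c -o foo.o],   # compile-only step: no rpath appended
    %w[foo.o -lz -o foo],    # links against libz: rpath appended
  ].each do |args|
    args += ["-Wl,-rpath,#{tea_prefix}"] if link_step?(args)
    puts "/usr/bin/cc #{args.join(' ')}"
  end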

projects/tea.xyz/gx/cc/ld: new executable file, 11 lines

@@ -0,0 +1,11 @@
+#!/bin/sh
+
+exe="$(basename $0)"
+
+if test -z "$TEA_PREFIX"
+then
+  echo '$TEA_PREFIX mysteriously unset' >&2
+  exit 1
+else
+  exec /usr/bin/"$exe" "$@" -rpath "$TEA_PREFIX"
+fi

projects/tea.xyz/gx/cc/package.yml: new file, 43 lines

@@ -0,0 +1,43 @@
+distributable: ~
+
+# FIXME we want the c version eg. c99
+# or should that be some kind of option? so you specify you want a cc that support c99
+versions:
+  - 0.1.0
+
+dependencies:
+  linux:
+    llvm.org: '*'
+  darwin:
+    apple.com/xcode/clt: '*'
+
+build:
+  working-directory:
+    ${{prefix}}/bin
+  script: |
+    if test {{ hw.platform }} = darwin; then
+      cp "$SRCROOT"/props/ld .
+      cp "$SRCROOT"/props/cc.rb cc
+    else
+      ln -s "$LLVM"/clang cc
+      ln -s "$LLVM"/lld ld
+      for x in ar as strip objcopy nm objdump ranlib readelf strings; do
+        ln -sf "$LLVM"/llvm-$x $x
+      done
+    fi
+
+    for x in clang gcc clang++ c++ g++; do
+      ln -s cc $x
+    done
+    for x in ldd lld-link lld ld64.lld; do
+      ln -s ld $x
+    done
+
+    # dunno why we gotta do this, but we do
+    chmod 777 *
+  env:
+    LLVM: ../../../../../llvm.org/v*/bin
+
+test: |
+  cc --version
+  ld --help

fix-machos.rb

@@ -1,9 +1,9 @@
 #!/usr/bin/env ruby
-# ^^ we have to specify ruby or ruby refuses to run the script
-# as an aside, what kind of feature *is* that exactly?
+# tea brewed ruby works with a tea shebang
+# but normal ruby does not, macOS comes with ruby so we just use it
 # ---
 # dependencies:
-#   ruby-lang.org: 3
+#   ruby-lang.org: '>=2'
 #   args: [ruby]
 # ---

@@ -14,9 +14,6 @@ gemfile do
   gem 'ruby-macho', '~> 3'
 end

-
-#TODO file.stat.ino where file is Pathname
-
 require 'fileutils'
 require 'pathname'
 require 'macho'

@@ -73,9 +70,11 @@ class Fixer
   end

   def fix_id
-    if @file.dylib_id != @file.filename
+    rel_path = Pathname.new(@file.filename).relative_path_from(Pathname.new($tea_prefix))
+    id = "@rpath/#{rel_path}"
+    if @file.dylib_id != id
       # only do work if we must
-      @file.change_dylib_id @file.filename
+      @file.change_dylib_id id
       write
     end
   end

@@ -87,7 +86,11 @@ class Fixer

   def links_to_other_tea_libs?
     @file.linked_dylibs.each do |lib|
-      return true if lib.start_with? $tea_prefix
+      # starts_with? @rpath is not enough lol
+      # this because we are setting `id` to @rpath now so it's a reasonable indication
+      # that we link to tea libs, but the build system for the pkg may well do this for its
+      # own libs
+      return true if lib.start_with? $tea_prefix or lib.start_with? '@rpath'
    end
    return false
  end

@@ -95,14 +98,21 @@ class Fixer
   def fix_rpaths
     #TODO remove spurious rpaths

+    dirty = false
     rel_path = Pathname.new($tea_prefix).relative_path_from(Pathname.new(@file.filename).parent)
     rpath = "@loader_path/#{rel_path}"

-    return if @file.rpaths.include? rpath
-    return unless links_to_other_tea_libs?
+    if not @file.rpaths.include? rpath and links_to_other_tea_libs?
+      @file.add_rpath rpath
+      dirty = true
+    end

-    @file.add_rpath rpath
-    write
+    while @file.rpaths.include? $tea_prefix
+      @file.delete_rpath $tea_prefix
+      dirty = true
+    end
+
+    write if dirty
   end

   def bad_install_names
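For reference, the @loader_path rpath that fix_rpaths computes above works out like this; the prefix and keg path are made-up, but the arithmetic is the same:

  require 'pathname'

  tea_prefix = '/opt'                                      # assumed prefix
  filename   = '/opt/zlib.net/v1.2.13/lib/libz.dylib'      # assumed keg file
  rel = Pathname.new(tea_prefix).relative_path_from(Pathname.new(filename).parent)
  puts "@loader_path/#{rel}"    # => @loader_path/../../..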

@@ -111,6 +121,13 @@ class Fixer
         if Pathname.new(lib).cleanpath.to_s.start_with? $tea_prefix
           lib
         end
+      elsif lib.start_with? '@rpath'
+        path = Pathname.new(lib.sub(%r{^@rpath}, $tea_prefix))
+        if path.exist?
+          lib
+        else
+          puts "warn:#{@file.filename}:#{lib}"
+        end
       elsif lib.start_with? '@'
         puts "warn:#{@file.filename}:#{lib}"
         # noop

@@ -124,14 +141,23 @@ class Fixer
     bad_names = bad_install_names
     return if bad_names.empty?

+    def fix_tea_prefix s
+      s = Pathname.new(s).relative_path_from(Pathname.new($tea_prefix))
+      s = s.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
+      s = s.sub(%r{/(.+)\.(\d+)\.\d+\.\d+\.dylib$}, '/\1.dylib')
+      s = "@rpath/#{s}"
+      return s
+    end
+
     bad_names.each do |old_name|
       if old_name.start_with? $pkg_prefix
         new_name = Pathname.new(old_name).relative_path_from(Pathname.new(@file.filename).parent)
         new_name = "@loader_path/#{new_name}"
       elsif old_name.start_with? '/'
-        new_name = Pathname.new(old_name).relative_path_from(Pathname.new($tea_prefix))
-        new_name = new_name.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
-        new_name = "@rpath/#{new_name}"
+        new_name = fix_tea_prefix old_name
+      elsif old_name.start_with? '@rpath'
+        # so far we only feed bad @rpaths that are relative to the tea-prefix
+        new_name = fix_tea_prefix old_name.sub(%r{^@rpath}, $tea_prefix)
       else
         # assume they are meant to be relative to lib dir
         new_name = Pathname.new($pkg_prefix).join("lib").relative_path_from(Pathname.new(@file.filename).parent)
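The new fix_tea_prefix helper squashes a fully versioned install name down to a major-version directory and an unversioned dylib name before making it @rpath-relative. A worked example with an invented input path:

  require 'pathname'

  tea_prefix = '/opt'                                              # assumed prefix
  old_name   = '/opt/zlib.net/v1.2.13/lib/libz.1.2.13.dylib'       # assumed bad install name

  s = Pathname.new(old_name).relative_path_from(Pathname.new(tea_prefix)).to_s
  s = s.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')                       # v1.2.13 -> v1
  s = s.sub(%r{/(.+)\.(\d+)\.\d+\.\d+\.dylib$}, '/\1.dylib')       # libz.1.2.13.dylib -> libz.dylib
  puts "@rpath/#{s}"            # => @rpath/zlib.net/v1/lib/libz.dylib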
|
@ -16,12 +16,12 @@ args:
|
||||||
- --import-map={{ srcroot }}/import-map.json
|
- --import-map={{ srcroot }}/import-map.json
|
||||||
---*/
|
---*/
|
||||||
|
|
||||||
import { useCache, usePantry } from "hooks"
|
import { usePantry } from "hooks"
|
||||||
import { Installation } from "types"
|
import { Installation } from "types"
|
||||||
import { pkg as pkgutils } from "utils"
|
import { pkg as pkgutils } from "utils"
|
||||||
import { useFlags, usePrefix } from "hooks"
|
import { useFlags, usePrefix } from "hooks"
|
||||||
import { set_output } from "./utils/gha.ts"
|
import { set_output } from "./utils/gha.ts"
|
||||||
import build from "./build/build.ts"
|
import build, { BuildResult } from "./build/build.ts"
|
||||||
import * as ARGV from "./utils/args.ts"
|
import * as ARGV from "./utils/args.ts"
|
||||||
import Path from "path"
|
import Path from "path"
|
||||||
|
|
||||||
|
@ -31,7 +31,7 @@ const pantry = usePantry()
|
||||||
const dry = await ARGV.toArray(ARGV.pkgs())
|
const dry = await ARGV.toArray(ARGV.pkgs())
|
||||||
const gha = !!Deno.env.get("GITHUB_ACTIONS")
|
const gha = !!Deno.env.get("GITHUB_ACTIONS")
|
||||||
const group_it = gha && dry.length > 1
|
const group_it = gha && dry.length > 1
|
||||||
const rv: InstallationPlus[] = []
|
const rv: BuildResult[] = []
|
||||||
|
|
||||||
if (usePrefix().string != "/opt") {
|
if (usePrefix().string != "/opt") {
|
||||||
console.error({ TEA_PREFIX: usePrefix().string })
|
console.error({ TEA_PREFIX: usePrefix().string })
|
||||||
|
@ -47,21 +47,18 @@ for (const rq of dry) {
|
||||||
console.log({ building: pkg.project })
|
console.log({ building: pkg.project })
|
||||||
}
|
}
|
||||||
|
|
||||||
const install = await build(pkg)
|
rv.push(await build(pkg))
|
||||||
const { url } = await pantry.getDistributable(pkg)
|
|
||||||
const extname = url.path().extname()
|
|
||||||
const src = useCache().path({ pkg, type: "src", extname })
|
|
||||||
rv.push({...install, src })
|
|
||||||
|
|
||||||
if (group_it) {
|
if (group_it) {
|
||||||
console.log("::endgroup::")
|
console.log("::endgroup::")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await set_output("pkgs", rv.map(x => pkgutils.str(x.pkg)))
|
await set_output("pkgs", rv.map(x => pkgutils.str(x.installation.pkg)))
|
||||||
await set_output("paths", rv.map(x => x.path), '%0A')
|
await set_output("paths", rv.map(x => x.installation.path), '%0A')
|
||||||
await set_output("relative-paths", rv.map(x => x.path.relative({ to: usePrefix() })))
|
await set_output("relative-paths", rv.map(x => x.installation.path.relative({ to: usePrefix() })))
|
||||||
await set_output("srcs", rv.map(x => x.src.relative({ to: usePrefix() })))
|
await set_output("srcs", rv.map(x => x.src?.relative({ to: usePrefix() }) ?? "~"))
|
||||||
|
await set_output("srcs-actual", rv.compact(x => x.src?.relative({ to: usePrefix() })))
|
||||||
|
|
||||||
interface InstallationPlus extends Installation {
|
interface InstallationPlus extends Installation {
|
||||||
src: Path
|
src: Path
|
||||||
|
|
|

./build/build.ts

@@ -1,17 +1,23 @@
-import { useSourceUnarchiver, useCellar, usePantry, useCache, usePrefix } from "hooks"
+import { useCellar, usePantry, usePrefix } from "hooks"
 import { link, hydrate } from "prefab"
 import { Installation, Package } from "types"
 import useShellEnv, { expand } from "hooks/useShellEnv.ts"
-import { run, undent, host, tuplize } from "utils"
+import { run, undent, host, tuplize, panic } from "utils"
 import { str as pkgstr } from "utils/pkg.ts"
 import fix_pkg_config_files from "./fix-pkg-config-files.ts"
 import Path from "path"
+import { fetch_src } from "../fetch.ts";

 const cellar = useCellar()
 const pantry = usePantry()
 const { platform } = host()

-export default async function _build(pkg: Package) {
+export interface BuildResult {
+  installation: Installation
+  src?: Path
+}
+
+export default async function _build(pkg: Package): Promise<BuildResult> {
   try {
     return await __build(pkg)
   } catch (e) {

@@ -20,17 +26,17 @@ export default async function _build(pkg: Package) {
   }
 }

-async function __build(pkg: Package) {
+async function __build(pkg: Package): Promise<BuildResult> {
   const [deps, wet, resolved] = await calc_deps()
   await clean()
-  const env = await mkenv()
+  const env = mkenv()
   const dst = cellar.keg(pkg).mkpath()
-  const src = await fetch_src(pkg)
+  const [src, src_tarball] = await fetch_src(pkg) ?? []
   const installation = await build()
   await link(installation)
   await fix_binaries(installation)
   await fix_pkg_config_files(installation)
-  return installation
+  return { installation, src: src_tarball }

 //////// utils
 async function calc_deps() {

@@ -75,17 +81,18 @@ async function __build(pkg: Package) {
   }

   async function build() {
+    const bld = src ?? Path.mktmp({ prefix: pkg.project }).join("wd").mkdir()
     const sh = await pantry.getScript(pkg, 'build', resolved)

-    const cmd = src.parent().join("build.sh").write({ force: true, text: undent`
+    const cmd = bld.parent().join("build.sh").write({ force: true, text: undent`
       #!/bin/bash

       set -e
       set -o pipefail
       set -x
-      cd "${src}"
+      cd "${bld}"

-      export SRCROOT="${src}"
+      export SRCROOT="${bld}"
       ${expand(env)}

       ${/*FIXME hardcoded paths*/ ''}

@@ -98,7 +105,7 @@ async function __build(pkg: Package) {
     // copy in auxillary files from pantry directory
     for await (const [path, {isFile}] of pantry.getYAML(pkg).path.parent().ls()) {
       if (isFile) {
-        path.cp({ into: src.join("props").mkdir() })
+        path.cp({ into: bld.join("props").mkdir() })
       }
     }

@@ -116,7 +123,6 @@ async function __build(pkg: Package) {
     case 'darwin':
       return await run({
         cmd: [
-          'tea',
           prefix.join('fix-machos.rb'),
           installation.path,
           ...['bin', 'lib', 'libexec'].map(x => installation.path.join(x)).filter(x => x.isDirectory())

@@ -135,11 +141,3 @@ async function __build(pkg: Package) {
     }
   }
 }

-async function fetch_src(pkg: Package): Promise<Path> {
-  const dstdir = usePrefix().join(pkg.project, "src", `v${pkg.version}`)
-  const { url, stripComponents } = await pantry.getDistributable(pkg)
-  const zipfile = await useCache().download({ pkg, url, type: 'src' })
-  await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
-  return dstdir
-}

fetch.ts

@@ -12,23 +12,42 @@ args:
 - --import-map={{ srcroot }}/import-map.json
 ---*/

-import { usePantry, useCache, useCellar, useSourceUnarchiver } from "hooks"
-import { Command } from "cliffy/command/mod.ts"
-import { print, pkg as pkgutils } from "utils"
-
-const { args } = await new Command()
-  .name("tea-fetch-src")
-  .arguments("<pkgspec:string>")
-  .parse(Deno.args)
-
+//TODO verify the sha
+
+import { usePantry, useCache, useDownload, useCellar, useSourceUnarchiver, useOffLicense } from "hooks"
+import { panic, print } from "utils"
+import { Stowage, Package } from "types"
+import * as ARGV from "./utils/args.ts"
+import Path from "path"
+
 const pantry = usePantry()
-const req = pkgutils.parse(args[0])
-const pkg = await pantry.resolve(req); console.debug(pkg)
-
-const dstdir = useCellar().keg(pkg).join("src")
-const { url, stripComponents } = await pantry.getDistributable(pkg)
-const { download } = useCache()
-const zipfile = await download({ pkg, url, type: 'src' })
-await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
-
-await print(`${dstdir}\n`)
+const { download } = useDownload()
+
+export async function fetch_src(pkg: Package): Promise<[Path, Path] | undefined> {
+  const dstdir = useCellar().shelf(pkg.project).parent().join("src", `v${pkg.version}`)
+  const dist = await pantry.getDistributable(pkg)
+  if (!dist) return
+  const { url, stripComponents } = dist
+  const stowage: Stowage = { pkg, type: 'src', extname: url.path().extname() }
+  const dst = useCache().path(stowage)
+  const zipfile = await (async () => {
+    try {
+      // first try our mirror
+      const src = useOffLicense('s3').url(stowage)
+      return await download({ dst, src })
+    } catch {
+      // oh well, try original location then
+      return await download({ dst, src: url })
+    }
+  })()
+  await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
+  return [dstdir, zipfile]
+}
+
+if (import.meta.main) {
+  for await (let pkg of ARGV.pkgs()) {
+    pkg = await pantry.resolve(pkg)
+    const [dstdir] = await fetch_src(pkg) ?? panic()
+    await print(`${dstdir}\n`)
+  }
+}
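The rewritten fetch_src above downloads from the project’s S3 mirror first and only falls back to the upstream URL when that fails. The same shape, sketched outside Deno; the URLs, destination, and open-uri usage are placeholders rather than the real useDownload/useOffLicense hooks:

  require 'open-uri'

  def fetch_with_fallback(dst, mirror_url, upstream_url)
    begin
      # first try our mirror
      URI.open(mirror_url) { |io| File.binwrite(dst, io.read) }
    rescue OpenURI::HTTPError, SocketError, Errno::ECONNREFUSED
      # oh well, try the original location then
      URI.open(upstream_url) { |io| File.binwrite(dst, io.read) }
    end
    dst
  end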

(test runner script; file name not preserved)

@@ -13,7 +13,7 @@ args:
 ---*/

 import { Installation, Package, PackageRequirement } from "types"
-import { usePantry, useFlags } from "hooks"
+import { usePantry, useFlags, usePrefix } from "hooks"
 import useShellEnv, { expand } from "hooks/useShellEnv.ts"
 import { run, undent, pkg as pkgutils } from "utils"
 import { resolve, install, hydrate, link } from "prefab"

@@ -41,6 +41,8 @@ async function test(self: Installation) {
     set -o pipefail
     set -x

+    export TEA_PREFIX=${usePrefix()}
+
     ${expand(env)}

     `

@@ -69,8 +71,10 @@ async function test(self: Installation) {
       .write({ text, force: true })
       .chmod(0o500)
     await run({ cmd, cwd })
-  } finally {
-    tmp.rm({ recursive: true })
+    if (!debug) tmp.rm({ recursive: true })
+  } catch (e) {
+    console.info("due to error, didn’t delete:", tmp)
+    throw e
   }
 }

(bottle upload script; file name not preserved)

@@ -12,7 +12,7 @@ args:

 import { S3 } from "s3"
 import { pkg as pkgutils } from "utils"
-import { useFlags, useOffLicense, useCache, usePrefix } from "hooks"
+import { useFlags, useOffLicense, useCache } from "hooks"
 import { Package, PackageRequirement } from "types"
 import SemVer, * as semver from "semver"
 import { dirname, basename } from "deno/path/mod.ts"

@@ -80,18 +80,20 @@ for (const [index, pkg] of pkgs.entries()) {
   await put(`${key}.sha256sum`, `${checksum} ${basename(key)}`)
   await put(`${dirname(key)}/versions.txt`, versions.join("\n"))

-  // Store sources
-  const src = usePrefix().join(srcs[index])
-  const srcKey = useOffLicense('s3').key({
-    pkg: stowed.pkg,
-    type: "src",
-    extname: src.extname()
-  })
-  const srcChecksum = await sha256(src)
-  const srcVersions = await get_versions(srcKey, pkg)
-  await put(srcKey, src)
-  await put(`${srcKey}.sha256sum`, `${srcChecksum} ${basename(srcKey)}`)
-  await put(`${dirname(srcKey)}/versions.txt`, srcVersions.join("\n"))
+  // mirror the sources
+  if (srcs[index] != "~") {
+    const src = new Path(srcs[index])
+    const srcKey = useOffLicense('s3').key({
+      pkg: stowed.pkg,
+      type: "src",
+      extname: src.extname()
+    })
+    const srcChecksum = await sha256(src)
+    const srcVersions = await get_versions(srcKey, pkg)
+    await put(srcKey, src)
+    await put(`${srcKey}.sha256sum`, `${srcChecksum} ${basename(srcKey)}`)
+    await put(`${dirname(srcKey)}/versions.txt`, srcVersions.join("\n"))
+  }
 }

 await set_output('cf-invalidation-paths', rv)

./utils/gha.ts

@@ -11,7 +11,7 @@ export function set_output<T>(name: string, arr: T[], separator = " ") {
 //TODO HTML escapes probs
 function escape<T>(input: T): string {
   const out = `${input}`
-  if (/[<>]/.test(out)) {
+  if (/[<>~]/.test(out)) {
     return `"${out}"`
   } else {
     return out