“superenv” (#185)

* fixes for dylib ids on darwin (sadly elaborate)

* wip
This commit is contained in:
Max Howell 2022-10-17 13:45:32 -04:00 committed by GitHub
parent 115e2fd642
commit f836b167a1
12 changed files with 237 additions and 88 deletions

View file

@ -96,7 +96,10 @@ jobs:
# tarring ourselves ∵ GHA-artifacts (ludicrously) lose permissions
# /ref https://github.com/actions/upload-artifact/issues/38
- run: tar czf $GITHUB_WORKSPACE/artifacts.tgz ${{ steps.build.outputs.relative-paths }} ${{ steps.build.outputs.srcs }}
- run:
tar czf $GITHUB_WORKSPACE/artifacts.tgz
${{ steps.build.outputs.relative-paths }}
${{ steps.build.outputs.srcs-actual }}
working-directory: ${{ steps.tea.outputs.prefix }}
- name: upload artifacts

View file

@ -4,13 +4,23 @@ tea is a decentralized package manager—this requires a decentralized package
registry. We're releasing our testnet later this year. In the meantime the
pantry is our stop-gap solution.
# Entry Requirements
# Getting Started
You'll need a `GITHUB_TOKEN` in your environment since we use the GitHub
GraphQL API to fetch versions.
 
# Meta
## Entry Requirements
This pantry only accepts devtools that we feel confident we can maintain.
Quality and robustness are our goals. If you want other tools you can maintain
your own pantry and we'll build the binaries.
# Philosophy
## Philosophy
Fundamentally we're coming at this from the perspective that the maintainer
should decide how their software is distributed and we're making the tools so
@ -18,12 +28,20 @@ they can do that in cross platform way.
This repo is a bootstrap and is stubs.
# Naming
## Naming
We use fully-qualified names. Naming is hard, and the world has spent a while
trying to get it right. In this kind of domain the *correct choice* is
to namespace.
## Packaging Knowledgebase
Our [wiki](/wiki) is our knowledgebase. Fill it with the fruits of your
knowledge. Please keep it tidy.
 
# Coming Soon
## Maintaining Your Own Pantry
@ -39,10 +57,8 @@ If you have a website you can host your own `package.yml` there and we will
build binaries for you. This feature is coming soon and will require
signed, versioned tags and signed source tarballs.
# Packaging Knowledgebase
Our [wiki](/wiki) is our knowledgebase. Fill it with the fruits of your
knowledge. Please keep it tidy.
 
# Dependencies

30
projects/tea.xyz/gx/cc/cc.rb Executable file
View file

@ -0,0 +1,30 @@
#!/usr/bin/ruby
# Shim around the system compiler that injects our rpath so our libs are found.
# - for bottles we replace that rpath in fix-machos.rb with a relocatable prefix
# - in general usage we don't, so if the user needs to distribute their artifacts,
#   they will need to fix them first, but that's typical anyway.
# - for tea-envs the user probably won't use tea.xyz/gx/cc even though they *should*
#   and thus we set LDFLAGS in the hope that they will be picked up and the rpath set

$tea_prefix = ENV['TEA_PREFIX'] || `tea --prefix`.chomp

# an empty prefix would make us pass a malformed `-Wl,-rpath,` — fail loudly
abort "#{File.basename($0)}: TEA_PREFIX unset and `tea --prefix` printed nothing" if $tea_prefix.empty?

exe = File.basename($0)

# strip any pre-existing copies of our rpath flag, otherwise it gets embedded
# in the output multiple times
args = ARGV.reject { |arg| arg == "-Wl,-rpath,#$tea_prefix" }

# figuring out what “mode” we are operating in is hard
# we don't want to add this linker command always because it causes a warning to be
# output if we are not outputting executables/dylibs and this warning can break
# configure scripts, however the below is not fully encompassing
# we aren't sure what the rules are TBH, possibly it is as simple as if the output (`-o`)
# is a .o then we don't add the rpath
if args.any? { |arg| arg.start_with?('-l') or arg.end_with?('.dylib') }
  exec "/usr/bin/#{exe}", *args, "-Wl,-rpath,#$tea_prefix"
else
  exec "/usr/bin/#{exe}", *args
end

11
projects/tea.xyz/gx/cc/ld Executable file
View file

@ -0,0 +1,11 @@
#!/bin/sh
# Shim around the system linker that appends our rpath so tea libs resolve
# at load time. Requires TEA_PREFIX in the environment.

# NB: "$0" must be quoted or a prefix containing spaces word-splits here
exe="$(basename "$0")"

if test -z "$TEA_PREFIX"; then
  echo '$TEA_PREFIX mysteriously unset' >&2
  exit 1
fi

exec /usr/bin/"$exe" "$@" -rpath "$TEA_PREFIX"

View file

@ -0,0 +1,43 @@
distributable: ~
# ^^ no upstream tarball: this package is generated entirely by its build script

# FIXME we want the c version eg. c99
# or should that be some kind of option? so you specify you want a cc that support c99
versions:
- 0.1.0

# darwin: thin shims (ld, cc.rb) over the Xcode CLT toolchain
# linux: symlinks straight into the llvm.org package
dependencies:
linux:
llvm.org: '*'
darwin:
apple.com/xcode/clt: '*'

# the build only populates ${{prefix}}/bin with shims/symlinks; props/ holds
# the auxiliary files (ld, cc.rb) copied in from this pantry entry
build:
working-directory:
${{prefix}}/bin
script: |
if test {{ hw.platform }} = darwin; then
cp "$SRCROOT"/props/ld .
cp "$SRCROOT"/props/cc.rb cc
else
ln -s "$LLVM"/clang cc
ln -s "$LLVM"/lld ld
for x in ar as strip objcopy nm objdump ranlib readelf strings; do
ln -sf "$LLVM"/llvm-$x $x
done
fi
for x in clang gcc clang++ c++ g++; do
ln -s cc $x
done
for x in ldd lld-link lld ld64.lld; do
ln -s ld $x
done
# dunno why we gotta do this, but we do
chmod 777 *
env:
LLVM: ../../../../../llvm.org/v*/bin

# smoke test: both shims must at least run and print something
test: |
cc --version
ld --help

View file

@ -1,9 +1,9 @@
#!/usr/bin/env ruby
# ^^ we have to specify ruby or ruby refuses to run the script
# as an aside, what kind of feature *is* that exactly?
# tea brewed ruby works with a tea shebang
# but normal ruby does not, macOS comes with ruby so we just use it
# ---
# dependencies:
# ruby-lang.org: 3
# ruby-lang.org: '>=2'
# args: [ruby]
# ---
@ -14,9 +14,6 @@ gemfile do
gem 'ruby-macho', '~> 3'
end
#TODO file.stat.ino where file is Pathname
require 'fileutils'
require 'pathname'
require 'macho'
@ -73,9 +70,11 @@ class Fixer
end
def fix_id
if @file.dylib_id != @file.filename
rel_path = Pathname.new(@file.filename).relative_path_from(Pathname.new($tea_prefix))
id = "@rpath/#{rel_path}"
if @file.dylib_id != id
# only do work if we must
@file.change_dylib_id @file.filename
@file.change_dylib_id id
write
end
end
@ -87,7 +86,11 @@ class Fixer
def links_to_other_tea_libs?
@file.linked_dylibs.each do |lib|
return true if lib.start_with? $tea_prefix
# starts_with? @rpath is not enough lol
# this because we are setting `id` to @rpath now so it's a reasonable indication
# that we link to tea libs, but the build system for the pkg may well do this for its
# own libs
return true if lib.start_with? $tea_prefix or lib.start_with? '@rpath'
end
return false
end
@ -95,14 +98,21 @@ class Fixer
def fix_rpaths
#TODO remove spurious rpaths
dirty = false
rel_path = Pathname.new($tea_prefix).relative_path_from(Pathname.new(@file.filename).parent)
rpath = "@loader_path/#{rel_path}"
return if @file.rpaths.include? rpath
return unless links_to_other_tea_libs?
if not @file.rpaths.include? rpath and links_to_other_tea_libs?
@file.add_rpath rpath
dirty = true
end
@file.add_rpath rpath
write
while @file.rpaths.include? $tea_prefix
@file.delete_rpath $tea_prefix
dirty = true
end
write if dirty
end
def bad_install_names
@ -111,6 +121,13 @@ class Fixer
if Pathname.new(lib).cleanpath.to_s.start_with? $tea_prefix
lib
end
elsif lib.start_with? '@rpath'
path = Pathname.new(lib.sub(%r{^@rpath}, $tea_prefix))
if path.exist?
lib
else
puts "warn:#{@file.filename}:#{lib}"
end
elsif lib.start_with? '@'
puts "warn:#{@file.filename}:#{lib}"
# noop
@ -124,14 +141,23 @@ class Fixer
bad_names = bad_install_names
return if bad_names.empty?
def fix_tea_prefix s
s = Pathname.new(s).relative_path_from(Pathname.new($tea_prefix))
s = s.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
s = s.sub(%r{/(.+)\.(\d+)\.\d+\.\d+\.dylib$}, '/\1.dylib')
s = "@rpath/#{s}"
return s
end
bad_names.each do |old_name|
if old_name.start_with? $pkg_prefix
new_name = Pathname.new(old_name).relative_path_from(Pathname.new(@file.filename).parent)
new_name = "@loader_path/#{new_name}"
elsif old_name.start_with? '/'
new_name = Pathname.new(old_name).relative_path_from(Pathname.new($tea_prefix))
new_name = new_name.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
new_name = "@rpath/#{new_name}"
new_name = fix_tea_prefix old_name
elsif old_name.start_with? '@rpath'
# so far we only feed bad @rpaths that are relative to the tea-prefix
new_name = fix_tea_prefix old_name.sub(%r{^@rpath}, $tea_prefix)
else
# assume they are meant to be relative to lib dir
new_name = Pathname.new($pkg_prefix).join("lib").relative_path_from(Pathname.new(@file.filename).parent)

View file

@ -16,12 +16,12 @@ args:
- --import-map={{ srcroot }}/import-map.json
---*/
import { useCache, usePantry } from "hooks"
import { usePantry } from "hooks"
import { Installation } from "types"
import { pkg as pkgutils } from "utils"
import { useFlags, usePrefix } from "hooks"
import { set_output } from "./utils/gha.ts"
import build from "./build/build.ts"
import build, { BuildResult } from "./build/build.ts"
import * as ARGV from "./utils/args.ts"
import Path from "path"
@ -31,7 +31,7 @@ const pantry = usePantry()
const dry = await ARGV.toArray(ARGV.pkgs())
const gha = !!Deno.env.get("GITHUB_ACTIONS")
const group_it = gha && dry.length > 1
const rv: InstallationPlus[] = []
const rv: BuildResult[] = []
if (usePrefix().string != "/opt") {
console.error({ TEA_PREFIX: usePrefix().string })
@ -47,22 +47,19 @@ for (const rq of dry) {
console.log({ building: pkg.project })
}
const install = await build(pkg)
const { url } = await pantry.getDistributable(pkg)
const extname = url.path().extname()
const src = useCache().path({ pkg, type: "src", extname })
rv.push({...install, src })
rv.push(await build(pkg))
if (group_it) {
console.log("::endgroup::")
}
}
await set_output("pkgs", rv.map(x => pkgutils.str(x.pkg)))
await set_output("paths", rv.map(x => x.path), '%0A')
await set_output("relative-paths", rv.map(x => x.path.relative({ to: usePrefix() })))
await set_output("srcs", rv.map(x => x.src.relative({ to: usePrefix() })))
await set_output("pkgs", rv.map(x => pkgutils.str(x.installation.pkg)))
await set_output("paths", rv.map(x => x.installation.path), '%0A')
await set_output("relative-paths", rv.map(x => x.installation.path.relative({ to: usePrefix() })))
await set_output("srcs", rv.map(x => x.src?.relative({ to: usePrefix() }) ?? "~"))
await set_output("srcs-actual", rv.compact(x => x.src?.relative({ to: usePrefix() })))
interface InstallationPlus extends Installation {
src: Path
}
}

View file

@ -1,17 +1,23 @@
import { useSourceUnarchiver, useCellar, usePantry, useCache, usePrefix } from "hooks"
import { useCellar, usePantry, usePrefix } from "hooks"
import { link, hydrate } from "prefab"
import { Installation, Package } from "types"
import useShellEnv, { expand } from "hooks/useShellEnv.ts"
import { run, undent, host, tuplize } from "utils"
import { run, undent, host, tuplize, panic } from "utils"
import { str as pkgstr } from "utils/pkg.ts"
import fix_pkg_config_files from "./fix-pkg-config-files.ts"
import Path from "path"
import { fetch_src } from "../fetch.ts";
const cellar = useCellar()
const pantry = usePantry()
const { platform } = host()
export default async function _build(pkg: Package) {
export interface BuildResult {
installation: Installation
src?: Path
}
export default async function _build(pkg: Package): Promise<BuildResult> {
try {
return await __build(pkg)
} catch (e) {
@ -20,17 +26,17 @@ export default async function _build(pkg: Package) {
}
}
async function __build(pkg: Package) {
async function __build(pkg: Package): Promise<BuildResult> {
const [deps, wet, resolved] = await calc_deps()
await clean()
const env = await mkenv()
const env = mkenv()
const dst = cellar.keg(pkg).mkpath()
const src = await fetch_src(pkg)
const [src, src_tarball] = await fetch_src(pkg) ?? []
const installation = await build()
await link(installation)
await fix_binaries(installation)
await fix_pkg_config_files(installation)
return installation
return { installation, src: src_tarball }
//////// utils
async function calc_deps() {
@ -75,17 +81,18 @@ async function __build(pkg: Package) {
}
async function build() {
const bld = src ?? Path.mktmp({ prefix: pkg.project }).join("wd").mkdir()
const sh = await pantry.getScript(pkg, 'build', resolved)
const cmd = src.parent().join("build.sh").write({ force: true, text: undent`
const cmd = bld.parent().join("build.sh").write({ force: true, text: undent`
#!/bin/bash
set -e
set -o pipefail
set -x
cd "${src}"
cd "${bld}"
export SRCROOT="${src}"
export SRCROOT="${bld}"
${expand(env)}
${/*FIXME hardcoded paths*/ ''}
@ -98,7 +105,7 @@ async function __build(pkg: Package) {
// copy in auxillary files from pantry directory
for await (const [path, {isFile}] of pantry.getYAML(pkg).path.parent().ls()) {
if (isFile) {
path.cp({ into: src.join("props").mkdir() })
path.cp({ into: bld.join("props").mkdir() })
}
}
@ -116,7 +123,6 @@ async function __build(pkg: Package) {
case 'darwin':
return await run({
cmd: [
'tea',
prefix.join('fix-machos.rb'),
installation.path,
...['bin', 'lib', 'libexec'].map(x => installation.path.join(x)).filter(x => x.isDirectory())
@ -135,11 +141,3 @@ async function __build(pkg: Package) {
}
}
}
async function fetch_src(pkg: Package): Promise<Path> {
const dstdir = usePrefix().join(pkg.project, "src", `v${pkg.version}`)
const { url, stripComponents } = await pantry.getDistributable(pkg)
const zipfile = await useCache().download({ pkg, url, type: 'src' })
await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
return dstdir
}

View file

@ -12,23 +12,42 @@ args:
- --import-map={{ srcroot }}/import-map.json
---*/
import { usePantry, useCache, useCellar, useSourceUnarchiver } from "hooks"
import { Command } from "cliffy/command/mod.ts"
import { print, pkg as pkgutils } from "utils"
//TODO verify the sha
const { args } = await new Command()
.name("tea-fetch-src")
.arguments("<pkgspec:string>")
.parse(Deno.args)
import { usePantry, useCache, useDownload, useCellar, useSourceUnarchiver, useOffLicense } from "hooks"
import { panic, print } from "utils"
import { Stowage, Package } from "types"
import * as ARGV from "./utils/args.ts"
import Path from "path"
const pantry = usePantry()
const req = pkgutils.parse(args[0])
const pkg = await pantry.resolve(req); console.debug(pkg)
const { download } = useDownload()
const dstdir = useCellar().keg(pkg).join("src")
const { url, stripComponents } = await pantry.getDistributable(pkg)
const { download } = useCache()
const zipfile = await download({ pkg, url, type: 'src' })
await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
export async function fetch_src(pkg: Package): Promise<[Path, Path] | undefined> {
const dstdir = useCellar().shelf(pkg.project).parent().join("src", `v${pkg.version}`)
const dist = await pantry.getDistributable(pkg)
if (!dist) return
const { url, stripComponents } = dist
const stowage: Stowage = { pkg, type: 'src', extname: url.path().extname() }
const dst = useCache().path(stowage)
const zipfile = await (async () => {
try {
// first try our mirror
const src = useOffLicense('s3').url(stowage)
return await download({ dst, src })
} catch {
// oh well, try original location then
return await download({ dst, src: url })
}
})()
await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
return [dstdir, zipfile]
}
await print(`${dstdir}\n`)
if (import.meta.main) {
for await (let pkg of ARGV.pkgs()) {
pkg = await pantry.resolve(pkg)
const [dstdir] = await fetch_src(pkg) ?? panic()
await print(`${dstdir}\n`)
}
}

View file

@ -13,7 +13,7 @@ args:
---*/
import { Installation, Package, PackageRequirement } from "types"
import { usePantry, useFlags } from "hooks"
import { usePantry, useFlags, usePrefix } from "hooks"
import useShellEnv, { expand } from "hooks/useShellEnv.ts"
import { run, undent, pkg as pkgutils } from "utils"
import { resolve, install, hydrate, link } from "prefab"
@ -41,6 +41,8 @@ async function test(self: Installation) {
set -o pipefail
set -x
export TEA_PREFIX=${usePrefix()}
${expand(env)}
`
@ -69,8 +71,10 @@ async function test(self: Installation) {
.write({ text, force: true })
.chmod(0o500)
await run({ cmd, cwd })
} finally {
if (!debug) tmp.rm({ recursive: true })
tmp.rm({ recursive: true })
} catch (e) {
console.info("due to error, didnt delete:", tmp)
throw e
}
}

View file

@ -12,7 +12,7 @@ args:
import { S3 } from "s3"
import { pkg as pkgutils } from "utils"
import { useFlags, useOffLicense, useCache, usePrefix } from "hooks"
import { useFlags, useOffLicense, useCache } from "hooks"
import { Package, PackageRequirement } from "types"
import SemVer, * as semver from "semver"
import { dirname, basename } from "deno/path/mod.ts"
@ -80,18 +80,20 @@ for (const [index, pkg] of pkgs.entries()) {
await put(`${key}.sha256sum`, `${checksum} ${basename(key)}`)
await put(`${dirname(key)}/versions.txt`, versions.join("\n"))
// Store sources
const src = usePrefix().join(srcs[index])
const srcKey = useOffLicense('s3').key({
pkg: stowed.pkg,
type: "src",
extname: src.extname()
})
const srcChecksum = await sha256(src)
const srcVersions = await get_versions(srcKey, pkg)
await put(srcKey, src)
await put(`${srcKey}.sha256sum`, `${srcChecksum} ${basename(srcKey)}`)
await put(`${dirname(srcKey)}/versions.txt`, srcVersions.join("\n"))
// mirror the sources
if (srcs[index] != "~") {
const src = new Path(srcs[index])
const srcKey = useOffLicense('s3').key({
pkg: stowed.pkg,
type: "src",
extname: src.extname()
})
const srcChecksum = await sha256(src)
const srcVersions = await get_versions(srcKey, pkg)
await put(srcKey, src)
await put(`${srcKey}.sha256sum`, `${srcChecksum} ${basename(srcKey)}`)
await put(`${dirname(srcKey)}/versions.txt`, srcVersions.join("\n"))
}
}
await set_output('cf-invalidation-paths', rv)

View file

@ -11,7 +11,7 @@ export function set_output<T>(name: string, arr: T[], separator = " ") {
//TODO HTML escapes probs
function escape<T>(input: T): string {
const out = `${input}`
if (/[<>]/.test(out)) {
if (/[<>~]/.test(out)) {
return `"${out}"`
} else {
return out