mirror of
https://github.com/ivabus/pantry
synced 2024-11-26 18:25:08 +03:00
“superenv” (#185)
* fixes for dylib ids on darwin (sadly elaborate) * wip
This commit is contained in:
parent
c970127d11
commit
fe002f9b9f
8 changed files with 149 additions and 87 deletions
28
README.md
28
README.md
|
@ -4,13 +4,23 @@ tea is a decentralized package manager—this requires a decentralized package
|
|||
registry. We’re releasing our testnet later this year. In the meantime the
|
||||
pantry is our stop-gap solution.
|
||||
|
||||
# Entry Requirements
|
||||
# Getting Started
|
||||
|
||||
You’ll need a `GITHUB_TOKEN` in your environment since we use the GitHub
|
||||
GraphQL API to fetch versions.
|
||||
|
||||
|
||||
|
||||
|
||||
# Meta
|
||||
|
||||
## Entry Requirements
|
||||
|
||||
This pantry only accepts devtools that we feel confident we can maintain.
|
||||
Quality and robustness are our goals. If you want other tools you can maintain
|
||||
your own pantry and we’ll build the binaries.
|
||||
|
||||
# Philosophy
|
||||
## Philosophy
|
||||
|
||||
Fundamentally we're coming at this from the perspective that the maintainer
|
||||
should decide how their software is distributed and we’re making the tools so
|
||||
|
@ -18,12 +28,20 @@ they can do that in cross platform way.
|
|||
|
||||
This repo is a bootstrap and is stubs.
|
||||
|
||||
# Naming
|
||||
## Naming
|
||||
|
||||
We use fully-qualified names. Naming is hard, and the world has spent a while
|
||||
trying to get it right. In this kind of domain the *correct choice* is
|
||||
to namespace.
|
||||
|
||||
## Packaging Knowledgebase
|
||||
|
||||
Our [wiki](/wiki) is our knowledgebase. Fill it with the fruits of your
|
||||
knowledge. Please keep it tidy.
|
||||
|
||||
|
||||
|
||||
|
||||
# Coming Soon
|
||||
|
||||
## Maintaining Your Own Pantry
|
||||
|
@ -39,10 +57,8 @@ If you have a website you can host your own `package.yml` there and we will
|
|||
build binaries for you. This feature is coming soon and will require
|
||||
signed, versioned tags and signed source tarballs.
|
||||
|
||||
# Packaging Knowledgebase
|
||||
|
||||
Our [wiki](/wiki) is our knowledgebase. Fill it with the fruits of your
|
||||
knowledge. Please keep it tidy.
|
||||
|
||||
|
||||
# Dependencies
|
||||
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
#!/usr/bin/env ruby
|
||||
# ^^ we have to specify ruby or ruby refuses to run the script
|
||||
# as an aside, what kind of feature *is* that exactly?
|
||||
# tea brewed ruby works with a tea shebang
|
||||
# but normal ruby does not, macOS comes with ruby so we just use it
|
||||
# ---
|
||||
# dependencies:
|
||||
# ruby-lang.org: 3
|
||||
# ruby-lang.org: '>=2'
|
||||
# args: [ruby]
|
||||
# ---
|
||||
|
||||
|
@ -14,9 +14,6 @@ gemfile do
|
|||
gem 'ruby-macho', '~> 3'
|
||||
end
|
||||
|
||||
|
||||
#TODO file.stat.ino where file is Pathname
|
||||
|
||||
require 'fileutils'
|
||||
require 'pathname'
|
||||
require 'macho'
|
||||
|
@ -73,9 +70,11 @@ class Fixer
|
|||
end
|
||||
|
||||
def fix_id
|
||||
if @file.dylib_id != @file.filename
|
||||
rel_path = Pathname.new(@file.filename).relative_path_from(Pathname.new($tea_prefix))
|
||||
id = "@rpath/#{rel_path}"
|
||||
if @file.dylib_id != id
|
||||
# only do work if we must
|
||||
@file.change_dylib_id @file.filename
|
||||
@file.change_dylib_id id
|
||||
write
|
||||
end
|
||||
end
|
||||
|
@ -87,7 +86,11 @@ class Fixer
|
|||
|
||||
def links_to_other_tea_libs?
|
||||
@file.linked_dylibs.each do |lib|
|
||||
return true if lib.start_with? $tea_prefix
|
||||
# start_with? @rpath is not enough lol
|
||||
# this because we are setting `id` to @rpath now so it's a reasonable indication
|
||||
# that we link to tea libs, but the build system for the pkg may well do this for its
|
||||
# own libs
|
||||
return true if lib.start_with? $tea_prefix or lib.start_with? '@rpath'
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
@ -95,14 +98,21 @@ class Fixer
|
|||
def fix_rpaths
|
||||
#TODO remove spurious rpaths
|
||||
|
||||
dirty = false
|
||||
rel_path = Pathname.new($tea_prefix).relative_path_from(Pathname.new(@file.filename).parent)
|
||||
rpath = "@loader_path/#{rel_path}"
|
||||
|
||||
return if @file.rpaths.include? rpath
|
||||
return unless links_to_other_tea_libs?
|
||||
|
||||
if not @file.rpaths.include? rpath and links_to_other_tea_libs?
|
||||
@file.add_rpath rpath
|
||||
write
|
||||
dirty = true
|
||||
end
|
||||
|
||||
while @file.rpaths.include? $tea_prefix
|
||||
@file.delete_rpath $tea_prefix
|
||||
dirty = true
|
||||
end
|
||||
|
||||
write if dirty
|
||||
end
|
||||
|
||||
def bad_install_names
|
||||
|
@ -111,6 +121,13 @@ class Fixer
|
|||
if Pathname.new(lib).cleanpath.to_s.start_with? $tea_prefix
|
||||
lib
|
||||
end
|
||||
elsif lib.start_with? '@rpath'
|
||||
path = Pathname.new(lib.sub(%r{^@rpath}, $tea_prefix))
|
||||
if path.exist?
|
||||
lib
|
||||
else
|
||||
puts "warn:#{@file.filename}:#{lib}"
|
||||
end
|
||||
elsif lib.start_with? '@'
|
||||
puts "warn:#{@file.filename}:#{lib}"
|
||||
# noop
|
||||
|
@ -124,14 +141,23 @@ class Fixer
|
|||
bad_names = bad_install_names
|
||||
return if bad_names.empty?
|
||||
|
||||
def fix_tea_prefix s
|
||||
s = Pathname.new(s).relative_path_from(Pathname.new($tea_prefix))
|
||||
s = s.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
|
||||
s = s.sub(%r{/(.+)\.(\d+)\.\d+\.\d+\.dylib$}, '/\1.dylib')
|
||||
s = "@rpath/#{s}"
|
||||
return s
|
||||
end
|
||||
|
||||
bad_names.each do |old_name|
|
||||
if old_name.start_with? $pkg_prefix
|
||||
new_name = Pathname.new(old_name).relative_path_from(Pathname.new(@file.filename).parent)
|
||||
new_name = "@loader_path/#{new_name}"
|
||||
elsif old_name.start_with? '/'
|
||||
new_name = Pathname.new(old_name).relative_path_from(Pathname.new($tea_prefix))
|
||||
new_name = new_name.sub(%r{/v(\d+)\.\d+\.\d+/}, '/v\1/')
|
||||
new_name = "@rpath/#{new_name}"
|
||||
new_name = fix_tea_prefix old_name
|
||||
elsif old_name.start_with? '@rpath'
|
||||
# so far we only feed bad @rpaths that are relative to the tea-prefix
|
||||
new_name = fix_tea_prefix old_name.sub(%r{^@rpath}, $tea_prefix)
|
||||
else
|
||||
# assume they are meant to be relative to lib dir
|
||||
new_name = Pathname.new($pkg_prefix).join("lib").relative_path_from(Pathname.new(@file.filename).parent)
|
||||
|
|
|
@ -16,12 +16,12 @@ args:
|
|||
- --import-map={{ srcroot }}/import-map.json
|
||||
---*/
|
||||
|
||||
import { useCache, usePantry } from "hooks"
|
||||
import { usePantry } from "hooks"
|
||||
import { Installation } from "types"
|
||||
import { pkg as pkgutils } from "utils"
|
||||
import { useFlags, usePrefix } from "hooks"
|
||||
import { set_output } from "./utils/gha.ts"
|
||||
import build from "./build/build.ts"
|
||||
import build, { BuildResult } from "./build/build.ts"
|
||||
import * as ARGV from "./utils/args.ts"
|
||||
import Path from "path"
|
||||
|
||||
|
@ -31,7 +31,7 @@ const pantry = usePantry()
|
|||
const dry = await ARGV.toArray(ARGV.pkgs())
|
||||
const gha = !!Deno.env.get("GITHUB_ACTIONS")
|
||||
const group_it = gha && dry.length > 1
|
||||
const rv: InstallationPlus[] = []
|
||||
const rv: BuildResult[] = []
|
||||
|
||||
if (usePrefix().string != "/opt") {
|
||||
console.error({ TEA_PREFIX: usePrefix().string })
|
||||
|
@ -47,21 +47,18 @@ for (const rq of dry) {
|
|||
console.log({ building: pkg.project })
|
||||
}
|
||||
|
||||
const install = await build(pkg)
|
||||
const { url } = await pantry.getDistributable(pkg)
|
||||
const extname = url.path().extname()
|
||||
const src = useCache().path({ pkg, type: "src", extname })
|
||||
rv.push({...install, src })
|
||||
rv.push(await build(pkg))
|
||||
|
||||
if (group_it) {
|
||||
console.log("::endgroup::")
|
||||
}
|
||||
}
|
||||
|
||||
await set_output("pkgs", rv.map(x => pkgutils.str(x.pkg)))
|
||||
await set_output("paths", rv.map(x => x.path), '%0A')
|
||||
await set_output("relative-paths", rv.map(x => x.path.relative({ to: usePrefix() })))
|
||||
await set_output("srcs", rv.map(x => x.src.relative({ to: usePrefix() })))
|
||||
await set_output("pkgs", rv.map(x => pkgutils.str(x.installation.pkg)))
|
||||
await set_output("paths", rv.map(x => x.installation.path), '%0A')
|
||||
await set_output("relative-paths", rv.map(x => x.installation.path.relative({ to: usePrefix() })))
|
||||
await set_output("srcs", rv.map(x => x.src?.relative({ to: usePrefix() }) ?? "~"))
|
||||
await set_output("srcs-actual", rv.compact(x => x.src?.relative({ to: usePrefix() })))
|
||||
|
||||
interface InstallationPlus extends Installation {
|
||||
src: Path
|
||||
|
|
|
@ -1,17 +1,23 @@
|
|||
import { useSourceUnarchiver, useCellar, usePantry, useCache, usePrefix } from "hooks"
|
||||
import { useCellar, usePantry, usePrefix } from "hooks"
|
||||
import { link, hydrate } from "prefab"
|
||||
import { Installation, Package } from "types"
|
||||
import useShellEnv, { expand } from "hooks/useShellEnv.ts"
|
||||
import { run, undent, host, tuplize } from "utils"
|
||||
import { run, undent, host, tuplize, panic } from "utils"
|
||||
import { str as pkgstr } from "utils/pkg.ts"
|
||||
import fix_pkg_config_files from "./fix-pkg-config-files.ts"
|
||||
import Path from "path"
|
||||
import { fetch_src } from "../fetch.ts";
|
||||
|
||||
const cellar = useCellar()
|
||||
const pantry = usePantry()
|
||||
const { platform } = host()
|
||||
|
||||
export default async function _build(pkg: Package) {
|
||||
export interface BuildResult {
|
||||
installation: Installation
|
||||
src?: Path
|
||||
}
|
||||
|
||||
export default async function _build(pkg: Package): Promise<BuildResult> {
|
||||
try {
|
||||
return await __build(pkg)
|
||||
} catch (e) {
|
||||
|
@ -20,17 +26,17 @@ export default async function _build(pkg: Package) {
|
|||
}
|
||||
}
|
||||
|
||||
async function __build(pkg: Package) {
|
||||
async function __build(pkg: Package): Promise<BuildResult> {
|
||||
const [deps, wet, resolved] = await calc_deps()
|
||||
await clean()
|
||||
const env = await mkenv()
|
||||
const env = mkenv()
|
||||
const dst = cellar.keg(pkg).mkpath()
|
||||
const src = await fetch_src(pkg)
|
||||
const [src, src_tarball] = await fetch_src(pkg) ?? []
|
||||
const installation = await build()
|
||||
await link(installation)
|
||||
await fix_binaries(installation)
|
||||
await fix_pkg_config_files(installation)
|
||||
return installation
|
||||
return { installation, src: src_tarball }
|
||||
|
||||
//////// utils
|
||||
async function calc_deps() {
|
||||
|
@ -75,17 +81,18 @@ async function __build(pkg: Package) {
|
|||
}
|
||||
|
||||
async function build() {
|
||||
const bld = src ?? Path.mktmp({ prefix: pkg.project }).join("wd").mkdir()
|
||||
const sh = await pantry.getScript(pkg, 'build', resolved)
|
||||
|
||||
const cmd = src.parent().join("build.sh").write({ force: true, text: undent`
|
||||
const cmd = bld.parent().join("build.sh").write({ force: true, text: undent`
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -o pipefail
|
||||
set -x
|
||||
cd "${src}"
|
||||
cd "${bld}"
|
||||
|
||||
export SRCROOT="${src}"
|
||||
export SRCROOT="${bld}"
|
||||
${expand(env)}
|
||||
|
||||
${/*FIXME hardcoded paths*/ ''}
|
||||
|
@ -98,7 +105,7 @@ async function __build(pkg: Package) {
|
|||
// copy in auxiliary files from pantry directory
|
||||
for await (const [path, {isFile}] of pantry.getYAML(pkg).path.parent().ls()) {
|
||||
if (isFile) {
|
||||
path.cp({ into: src.join("props").mkdir() })
|
||||
path.cp({ into: bld.join("props").mkdir() })
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -116,7 +123,6 @@ async function __build(pkg: Package) {
|
|||
case 'darwin':
|
||||
return await run({
|
||||
cmd: [
|
||||
'tea',
|
||||
prefix.join('fix-machos.rb'),
|
||||
installation.path,
|
||||
...['bin', 'lib', 'libexec'].map(x => installation.path.join(x)).filter(x => x.isDirectory())
|
||||
|
@ -135,11 +141,3 @@ async function __build(pkg: Package) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function fetch_src(pkg: Package): Promise<Path> {
|
||||
const dstdir = usePrefix().join(pkg.project, "src", `v${pkg.version}`)
|
||||
const { url, stripComponents } = await pantry.getDistributable(pkg)
|
||||
const zipfile = await useCache().download({ pkg, url, type: 'src' })
|
||||
await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
|
||||
return dstdir
|
||||
}
|
||||
|
|
|
@ -12,23 +12,42 @@ args:
|
|||
- --import-map={{ srcroot }}/import-map.json
|
||||
---*/
|
||||
|
||||
import { usePantry, useCache, useCellar, useSourceUnarchiver } from "hooks"
|
||||
import { Command } from "cliffy/command/mod.ts"
|
||||
import { print, pkg as pkgutils } from "utils"
|
||||
//TODO verify the sha
|
||||
|
||||
const { args } = await new Command()
|
||||
.name("tea-fetch-src")
|
||||
.arguments("<pkgspec:string>")
|
||||
.parse(Deno.args)
|
||||
import { usePantry, useCache, useDownload, useCellar, useSourceUnarchiver, useOffLicense } from "hooks"
|
||||
import { panic, print } from "utils"
|
||||
import { Stowage, Package } from "types"
|
||||
import * as ARGV from "./utils/args.ts"
|
||||
import Path from "path"
|
||||
|
||||
const pantry = usePantry()
|
||||
const req = pkgutils.parse(args[0])
|
||||
const pkg = await pantry.resolve(req); console.debug(pkg)
|
||||
const { download } = useDownload()
|
||||
|
||||
const dstdir = useCellar().keg(pkg).join("src")
|
||||
const { url, stripComponents } = await pantry.getDistributable(pkg)
|
||||
const { download } = useCache()
|
||||
const zipfile = await download({ pkg, url, type: 'src' })
|
||||
await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
|
||||
export async function fetch_src(pkg: Package): Promise<[Path, Path] | undefined> {
|
||||
const dstdir = useCellar().shelf(pkg.project).parent().join("src", `v${pkg.version}`)
|
||||
const dist = await pantry.getDistributable(pkg)
|
||||
if (!dist) return
|
||||
const { url, stripComponents } = dist
|
||||
const stowage: Stowage = { pkg, type: 'src', extname: url.path().extname() }
|
||||
const dst = useCache().path(stowage)
|
||||
const zipfile = await (async () => {
|
||||
try {
|
||||
// first try our mirror
|
||||
const src = useOffLicense('s3').url(stowage)
|
||||
return await download({ dst, src })
|
||||
} catch {
|
||||
// oh well, try original location then
|
||||
return await download({ dst, src: url })
|
||||
}
|
||||
})()
|
||||
await useSourceUnarchiver().unarchive({ dstdir, zipfile, stripComponents })
|
||||
return [dstdir, zipfile]
|
||||
}
|
||||
|
||||
await print(`${dstdir}\n`)
|
||||
if (import.meta.main) {
|
||||
for await (let pkg of ARGV.pkgs()) {
|
||||
pkg = await pantry.resolve(pkg)
|
||||
const [dstdir] = await fetch_src(pkg) ?? panic()
|
||||
await print(`${dstdir}\n`)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ args:
|
|||
---*/
|
||||
|
||||
import { Installation, Package, PackageRequirement } from "types"
|
||||
import { usePantry, useFlags } from "hooks"
|
||||
import { usePantry, useFlags, usePrefix } from "hooks"
|
||||
import useShellEnv, { expand } from "hooks/useShellEnv.ts"
|
||||
import { run, undent, pkg as pkgutils } from "utils"
|
||||
import { resolve, install, hydrate, link } from "prefab"
|
||||
|
@ -41,6 +41,8 @@ async function test(self: Installation) {
|
|||
set -o pipefail
|
||||
set -x
|
||||
|
||||
export TEA_PREFIX=${usePrefix()}
|
||||
|
||||
${expand(env)}
|
||||
|
||||
`
|
||||
|
@ -69,8 +71,10 @@ async function test(self: Installation) {
|
|||
.write({ text, force: true })
|
||||
.chmod(0o500)
|
||||
await run({ cmd, cwd })
|
||||
} finally {
|
||||
if (!debug) tmp.rm({ recursive: true })
|
||||
tmp.rm({ recursive: true })
|
||||
} catch (e) {
|
||||
console.info("due to error, didn’t delete:", tmp)
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ args:
|
|||
|
||||
import { S3 } from "s3"
|
||||
import { pkg as pkgutils } from "utils"
|
||||
import { useFlags, useOffLicense, useCache, usePrefix } from "hooks"
|
||||
import { useFlags, useOffLicense, useCache } from "hooks"
|
||||
import { Package, PackageRequirement } from "types"
|
||||
import SemVer, * as semver from "semver"
|
||||
import { dirname, basename } from "deno/path/mod.ts"
|
||||
|
@ -80,8 +80,9 @@ for (const [index, pkg] of pkgs.entries()) {
|
|||
await put(`${key}.sha256sum`, `${checksum} ${basename(key)}`)
|
||||
await put(`${dirname(key)}/versions.txt`, versions.join("\n"))
|
||||
|
||||
// Store sources
|
||||
const src = usePrefix().join(srcs[index])
|
||||
// mirror the sources
|
||||
if (srcs[index] != "~") {
|
||||
const src = new Path(srcs[index])
|
||||
const srcKey = useOffLicense('s3').key({
|
||||
pkg: stowed.pkg,
|
||||
type: "src",
|
||||
|
@ -92,6 +93,7 @@ for (const [index, pkg] of pkgs.entries()) {
|
|||
await put(srcKey, src)
|
||||
await put(`${srcKey}.sha256sum`, `${srcChecksum} ${basename(srcKey)}`)
|
||||
await put(`${dirname(srcKey)}/versions.txt`, srcVersions.join("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
await set_output('cf-invalidation-paths', rv)
|
||||
|
|
|
@ -11,7 +11,7 @@ export function set_output<T>(name: string, arr: T[], separator = " ") {
|
|||
//TODO HTML escapes probs
|
||||
function escape<T>(input: T): string {
|
||||
const out = `${input}`
|
||||
if (/[<>]/.test(out)) {
|
||||
if (/[<>~]/.test(out)) {
|
||||
return `"${out}"`
|
||||
} else {
|
||||
return out
|
||||
|
|
Loading…
Reference in a new issue