Mirror of https://github.com/ivabus/pantry, synced 2024-09-20 00:30:48 +03:00.
Commit 0240a4bfb1 — "+portable git (#126)". Parent commit: bf3c2033d2.
6
.github/workflows/build.yml
vendored
6
.github/workflows/build.yml
vendored
|
@ -51,7 +51,7 @@ jobs:
|
|||
ubuntu-latest)
|
||||
rm -rf /opt/tea.xyz/var/pantry
|
||||
ln -s $GITHUB_WORKSPACE/pantry /opt/tea.xyz/var/pantry
|
||||
mkdir .git # no git in our image
|
||||
mkdir .git # no git in our image, needed for tea finding SRCROOT
|
||||
|
||||
#FIXME needed for gdk-pixbuf
|
||||
apt --yes install shared-mime-info
|
||||
|
@ -106,9 +106,9 @@ jobs:
|
|||
# TODO only upload when we merge
|
||||
# TODO upload to a staging location until we release new pantry versions
|
||||
- name: upload bottles
|
||||
run: ./scripts/upload.ts ${{ steps.bottle.outputs.bottles }}
|
||||
run: scripts/upload.ts ${{ steps.bottle.outputs.bottles }}
|
||||
env:
|
||||
AWS_S3: ${{ secrets.AWS_S3 }}
|
||||
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
||||
|
|
|
@ -9,17 +9,28 @@ dependencies:
|
|||
zlib.net: 1
|
||||
gnu.org/gettext: ^0.21
|
||||
curl.se: '>=5'
|
||||
perl.org: '*'
|
||||
|
||||
build:
|
||||
dependencies:
|
||||
tea.xyz/gx/cc: c99
|
||||
tea.xyz/gx/make: '*'
|
||||
script: |
|
||||
./configure --prefix={{prefix}}
|
||||
make --jobs {{hw.concurrency}} install $ARGS
|
||||
# installs to eg. libexec rather than libexec/git-core
|
||||
sed -i.bak 's|/git-core||g' config.mak.in
|
||||
|
||||
./configure $CONF
|
||||
make install $BAKE
|
||||
|
||||
cd "{{prefix}}"
|
||||
DEBUG=1 fix-shebangs.ts bin/* libexec/*
|
||||
env:
|
||||
V: 1
|
||||
ARGS:
|
||||
BAKE:
|
||||
- --jobs {{hw.concurrency}}
|
||||
- NO_TCLTK=1
|
||||
CONF:
|
||||
- --prefix={{prefix}}
|
||||
- --with-perl=/opt/perl.org/v*/bin/perl
|
||||
|
||||
test: git --version #FIXME better
|
||||
|
|
71
scripts/brewkit/fix-shebangs.ts
Executable file
71
scripts/brewkit/fix-shebangs.ts
Executable file
|
@ -0,0 +1,71 @@
|
|||
#!/usr/bin/env -S tea -E

/* ---
args:
  - deno
  - run
  - --allow-net
  - --allow-run
  - --allow-env
  - --allow-read
  - --allow-write={{tea.prefix}}
  - --import-map={{ srcroot }}/import-map.json
--- */

// Rewrites absolute-path shebangs in the files given as CLI arguments to the
// portable `#!/usr/bin/env <interpreter>` form (see the loop below).

import { Path } from "types"
import { undent } from "utils"
import useFlags from "hooks/useFlags.ts"

// NOTE(review): presumably parses global CLI flags and configures verbosity
// (console.verbose is used below) — confirm against hooks/useFlags.ts.
useFlags()
||||
const has_shebang = (() => {
|
||||
const encoder = new TextDecoder()
|
||||
return (buf: Uint8Array) => {
|
||||
return encoder.decode(buf) == '#!'
|
||||
}
|
||||
})()
|
||||
|
||||
for (const path of Deno.args) {
|
||||
if (!Path.cwd().join(path).isFile()) continue
|
||||
|
||||
console.debug({ path })
|
||||
|
||||
const rid = await Deno.open(path, { read: true })
|
||||
try {
|
||||
const buf = new Uint8Array(2)
|
||||
await rid.read(buf)
|
||||
if (!has_shebang(buf)) continue
|
||||
} finally {
|
||||
rid.close()
|
||||
}
|
||||
|
||||
//FIXME this could be pretty damn efficient if we can find the time
|
||||
//NOTE as it stands this is HIDEOUSLY inefficient
|
||||
|
||||
const contents = await Deno.readFile(path)
|
||||
const txt = new TextDecoder().decode(contents)
|
||||
const [line0, ...lines] = txt.split("\n") //lol
|
||||
|
||||
const match = line0.match(/^#!\s*(\/[^\s]+)/)
|
||||
if (match) {
|
||||
switch (match[1]) {
|
||||
case "/usr/bin/env":
|
||||
case "/bin/sh":
|
||||
console.verbose({ line0, path })
|
||||
console.verbose("^^ skipped acceptable shebang")
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
const interpreter = new Path(line0.slice(2)).basename()
|
||||
const shebang = `#!/usr/bin/env ${interpreter}`
|
||||
|
||||
const rewrite = undent`
|
||||
${shebang}
|
||||
${lines.join("\n")}
|
||||
`
|
||||
|
||||
console.verbose({rewrote: path, to: `#!/usr/bin/env ${interpreter}`})
|
||||
|
||||
await Deno.writeFile(path, new TextEncoder().encode(rewrite))
|
||||
}
|
|
@ -20,7 +20,7 @@ const s3 = new S3({
|
|||
region: "us-east-1",
|
||||
});
|
||||
|
||||
const bucket = s3.getBucket(Deno.env.get("S3_BUCKET")!);
|
||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);
|
||||
|
||||
for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
|
||||
if (!pkg.key?.endsWith('.tar.gz')) { continue }
|
||||
|
|
|
@ -20,7 +20,7 @@ const s3 = new S3({
|
|||
region: "us-east-1",
|
||||
});
|
||||
|
||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3") ?? Deno.env.get("S3_BUCKET")!);
|
||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);
|
||||
|
||||
const inventory: Inventory = {}
|
||||
const flat = []
|
||||
|
|
|
@ -22,7 +22,7 @@ const s3 = new S3({
|
|||
region: "us-east-1",
|
||||
});
|
||||
|
||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3") ?? Deno.env.get("S3_BUCKET")!);
|
||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);
|
||||
|
||||
for (const pkg of await useCache().ls()) {
|
||||
const key = useCache().s3Key(pkg)
|
||||
|
|
|
@ -26,7 +26,7 @@ const s3 = new S3({
|
|||
region: "us-east-1",
|
||||
})
|
||||
|
||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3") ?? Deno.env.get("S3_BUCKET")!)
|
||||
const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!)
|
||||
|
||||
const encode = (() => { const e = new TextEncoder(); return e.encode.bind(e) })()
|
||||
|
||||
|
|
Loading…
Reference in a new issue