#!/usr/bin/env -S tea -E

/*---
args:
  - deno
  - run
  - --allow-net
  - --allow-read
  - --allow-env=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_S3_BUCKET
  - --import-map={{ srcroot }}/import-map.json
---*/
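
// Assumption: tea's `-E` mode reads the YAML front matter above and invokes
// `deno run` with those flags (net access for S3, read access, and only the
// three AWS_* env vars), so the script can be run directly once marked executable.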

import { S3 } from "s3";
import { stringify as yaml } from "deno/encoding/yaml.ts"
import { stringify as csv } from "deno/encoding/csv.ts"
import { Inventory } from "hooks/useInventory.ts"
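
// `Inventory` is assumed (from its use below) to be a nested record of
// project → platform → arch → version strings, e.g. (hypothetical data):
//
//   { "example.org/foo": { "linux": { "x86-64": ["1.2.3", "1.2.4"] } } }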

// Credentials and the target bucket come from the environment (the same
// variables whitelisted by --allow-env in the front matter).
const s3 = new S3({
  accessKeyID: Deno.env.get("AWS_ACCESS_KEY_ID")!,
  secretKey: Deno.env.get("AWS_SECRET_ACCESS_KEY")!,
  region: "us-east-1",
});

const bucket = s3.getBucket(Deno.env.get("AWS_S3_BUCKET")!);
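
// listAllObjects is an async iterator over every key in the bucket; batchSize
// is assumed to control how many keys each underlying list request fetches.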

const inventory: Inventory = {}
const flat: { project: string, platform: string, arch: string, version: string }[] = []

// Only release tarballs shaped like <project>/<platform>/<arch>/v<semver>.tar.gz
// contribute to the inventory.
for await (const pkg of bucket.listAllObjects({ batchSize: 200 })) {
  if (!pkg.key?.endsWith('.tar.gz')) { continue }

  const matches = pkg.key.match(/^(.*)\/(.*)\/(.*)\/v(\d+\.\d+\.\d+)\.tar\.gz$/)
  if (!matches) { continue }

  const [_, project, platform, arch, version] = matches
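  // e.g. a (hypothetical) key "example.org/foo/linux/x86-64/v1.2.3.tar.gz"
  // yields project "example.org/foo", platform "linux", arch "x86-64",
  // version "1.2.3"; the greedy first group keeps multi-segment project names intact.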

  if (!inventory[project]) inventory[project] = {}
  if (!inventory[project][platform]) inventory[project][platform] = {}

  inventory[project][platform][arch] = [...(inventory[project]?.[platform]?.[arch] ?? []), version]
  flat.push({ project, platform, arch, version })
}

/// For ultimate user-friendliness, we store this data 4 ways:
/// YAML, JSON, CSV, flat text
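
// For a single (hypothetical) entry the four artifacts look roughly like:
//
//   versions.yml   →  example.org/foo:
//                       linux:
//                         x86-64:
//                           - 1.2.3
//   versions.json  →  {"example.org/foo":{"linux":{"x86-64":["1.2.3"]}}}
//   versions.csv   →  project,platform,arch,version
//                     example.org/foo,linux,x86-64,1.2.3
//   <project>/<platform>/<arch>/versions.txt  →  1.2.3   (one version per line)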

const te = new TextEncoder()

// YAML: type Inventory
const yml = te.encode(yaml(inventory))
await bucket.putObject("versions.yml", yml)

// JSON: type Inventory
const json = te.encode(JSON.stringify(inventory))
await bucket.putObject("versions.json", json)

// CSV: project,platform,arch,version
const csvData = te.encode(csv(flat, { columns: ["project", "platform", "arch", "version"] }))
await bucket.putObject("versions.csv", csvData)

// TXT: per project/platform/arch, newline-delimited
for (const [project, platforms] of Object.entries(inventory)) {
  for (const [platform, archs] of Object.entries(platforms)) {
    for (const [arch, versions] of Object.entries(archs)) {
      const txt = te.encode(versions.join("\n"))
      await bucket.putObject(`${project}/${platform}/${arch}/versions.txt`, txt)
    }
  }
}

//end