#!/usr/bin/env -S pkgx bash
# shellcheck shell=bash
#
# Entrypoint for text-generation-webui via pkgx:
#  1. fetch the llama.cpp model,
#  2. start the web UI in the background,
#  3. poll until it answers HTTP, then open (or announce) its URL,
#  4. foreground the server so it owns the terminal.

PORT=$(pkgx get-port)

# -e exit on error · -m job control (required for `fg` at the bottom)
# -f disable globbing · -o pipefail fail pipelines on any stage
set -emfo pipefail

pkgx gum format <<EoMD
# loading llama.cpp model…
this may take a while
EoMD
echo # spacer

pkgx llama.cpp --fetch

# absolute directory containing this script
d="$(cd "$(dirname "$0")" && pwd)"

XDG="${XDG_DATA_HOME:-$HOME/.local/share}"

"$d"/bin/text-generation-webui \
  --listen-port "$PORT" \
  --model-dir "$XDG/models" \
  --model OpenLLaMA \
  &
PID=$!

# poll until a HEAD request succeeds; bail out if the server died first
while ! curl -Is "http://127.0.0.1:$PORT" | grep -q "HTTP/1.1 200 OK"; do
  if ! kill -0 "$PID"; then
    echo "webui process died!" >&2
    exit 1
  fi
  sleep 1
done

# open the URL once the HEAD request succeeds
# shellcheck disable=SC2154 — pkgx_GUI is injected by the pkgx GUI host
if test -n "$pkgx_GUI"; then
  echo "{\"xyz.pkgx\":{\"gui\":\"http://127.0.0.1:$PORT\"}}" >&2
else
  open "http://127.0.0.1:$PORT"
fi

pkgx gum format <<EoMD
# text generation web UI
this package has been modified for your convenience:
* download additional models to \`$XDG/models\`
> bugs reports to our [tracker](https://github.com/pkgxxyz/pantry/issues). thanks!
enjoy!
EoMD
echo # spacer

fg >/dev/null # unbackground the webui process