+text-generation-webui (#2721)

parent bc3aca83e4
commit 888dd724fb

4 changed files with 122 additions and 1 deletion

@@ -34,7 +34,7 @@ else
 fi

 tea gum format <<EoMD
-# Stable Diffusion WEBUI
+# Stable Diffusion web UI

 this package has been modified for your convenience:

@@ -0,0 +1,60 @@
#!/usr/bin/env -S tea bash

PORT=$(tea get-port)

set -emfo pipefail

tea gum format <<EoMD
# loading llama.cpp model…

this may take a while…

EoMD

echo # spacer

tea llama.cpp --fetch

d="$(cd "$(dirname $0)" && pwd)"

XDG="${XDG_DATA_HOME:-$HOME/.local/share}"

"$d"/bin/text-generation-webui \
  --listen-port $PORT \
  --model-dir "$XDG/models" \
  --model OpenLLaMA \
  &

PID=$!

# poll until a HEAD request succeeds
while ! curl -Is http://127.0.0.1:$PORT | grep -q "HTTP/1.1 200 OK"; do
  if ! kill -0 $PID; then
    echo "webui process died!"
    exit 1
  fi
  sleep 1
done

# open the URL once the HEAD request succeeds
if test -n "$TEA_GUI"; then
  echo "{\"xyz.tea\":{\"gui\":\"http://127.0.0.1:$PORT\"}}" >&2
else
  open "http://127.0.0.1:$PORT"
fi

tea gum format <<EoMD
# text generation web UI

this package has been modified for your convenience:

* download additional models to \`$XDG/models\`

> bug reports to our [tracker](https://github.com/teaxyz/pantry/issues). thanks!

enjoy!
EoMD

echo # spacer

fg >/dev/null # unbackground the webui process
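
The entrypoint fetches the OpenLLaMA weights with "tea llama.cpp --fetch" and points the web UI at $XDG/models, the same directory the closing message tells users to fill with additional models. A minimal sketch of adding one, assuming a downloaded GGML model file (the filename below is only an illustration):

    # hypothetical extra model; any file the web UI understands works the same way
    XDG="${XDG_DATA_HOME:-$HOME/.local/share}"
    mkdir -p "$XDG/models"
    cp ~/Downloads/my-model.ggml.bin "$XDG/models/"
    # restart the entrypoint, or pick the new model from the web UI's model menu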

@@ -0,0 +1,59 @@
distributable:
  url: https://github.com/oobabooga/text-generation-webui/archive/refs/tags/{{version.tag}}.tar.gz

provides:
  - bin/text-generation-webui

versions:
  github: oobabooga/text-generation-webui

dependencies:
  python.org: ~3.10
  tea.xyz: ^0

entrypoint: tea ./entrypoint.sh

platforms:
  darwin

# TODO https://github.com/oobabooga/text-generation-webui/blob/385229313fd728f6e7573895564253d98b9826da/docs/llama.cpp.md?plain=1#L4
# TODO entry
build:
  dependencies:
    gnu.org/coreutils: '*'
  working-directory:
    text-generation-webui-{{version.raw}}
  script:
    # pkg expects all the files from its checkout
    - |
      mkdir -p {{prefix}}/venv/bin
      cp -R . {{prefix}}/venv/bin
    - working-directory: '{{prefix}}/venv/bin'
      run:
        rm -rf docker .github docs .gitignore *.md

    - |
      python -m venv {{prefix}}/venv
      source {{prefix}}/venv/bin/activate

    # these requirements are separate as they vary by platform
    - pip install torch torchvision torchaudio

    - pip install -r requirements.txt

    # pkg expects to be run with CWD set to its checkout
    - working-directory: '{{prefix}}/venv/bin'
      run: |
        echo '#!/usr/bin/env python' > text-generation-webui
        echo 'import os' >> text-generation-webui
        echo 'os.chdir(os.path.dirname(os.path.abspath(__file__)))' >> text-generation-webui
        cat server.py >> text-generation-webui
        chmod +x text-generation-webui
        rm server.py

    - python-venv-stubber.sh text-generation-webui

    - cp ../props/entrypoint.sh {{prefix}}

test:
  text-generation-webui --help
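
The last run block above builds the text-generation-webui wrapper by prepending a shebang and a chdir to server.py; as the comment notes, the package expects to run with its checkout as the working directory, so the wrapper changes to its own location before server.py's code executes. After the echo/cat steps the generated file begins roughly like this, the remainder being the unmodified server.py:

    #!/usr/bin/env python
    import os
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    # ...the original contents of server.py follow here...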

@@ -7,6 +7,8 @@ cmds:
   - http-server
   - vercel
   - changelogithub
+  - get-port
 args:
   chalk: npx --yes --package=chalk-cli --
+  get-port: npx --yes --package=get-port-cli --
   ...: [npx, --yes, --]
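
This change adds a get-port shim alongside the existing ones, forwarding the command to "npx --yes --package=get-port-cli --", which prints an unused TCP port on stdout; presumably it backs the PORT=$(tea get-port) call in the new entrypoint. A minimal sketch of the round trip, assuming that mapping is active (the port number is only an example):

    PORT=$(tea get-port)          # resolved through npx / get-port-cli
    echo "will listen on $PORT"   # e.g. "will listen on 51123"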