#!/bin/sh
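#
# Fetches the LLaMA model weights and runs the bundled llama.cpp binary:
# with "chat" as the first argument it starts an interactive chat session;
# otherwise all arguments are passed through to llama.cpp unchanged.
#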

# Exit on the first failed command; setting VERBOSE traces each command.
set -e
test -n "$VERBOSE" && set -x

# The installation root is the parent of this script's directory;
# its basename is passed to llama-fetch as the version.
D="$(cd "$(dirname "$0")"/.. && pwd)"
VERSION="$(basename "$D")"

# Model weights live under the XDG data directory by default.
MODEL_DIR="${XDG_DATA_HOME:-$HOME/.local/share}/models/LLaMA"

# Put the bundled tools (llama-fetch, llama.cpp) on PATH.
export PATH="$D/tbin:$PATH"

# Fetch the model files into MODEL_DIR.
llama-fetch "$MODEL_DIR" "$VERSION"

if test "$1" = chat; then
    # Interactive chat mode, seeded with the bundled example prompt.
    exec "$D"/tbin/llama.cpp \
        --model "$MODEL_DIR"/7B/ggml-model-q4_0.bin \
        -n 256 \
        --repeat_penalty 1.0 \
        --color \
        -i \
        -r "User:" \
        -f "$D"/share/prompts/chat-with-bob.txt
else
    # Pass the caller's arguments straight through to llama.cpp.
    exec "$D"/tbin/llama.cpp \
        --color \
        --model "$MODEL_DIR"/7B/ggml-model-q4_0.bin \
        "$@"
fi