Update start.sh
start.sh (changed)
@@ -1,10 +1,9 @@
 #!/usr/bin/env bash
 set -euo pipefail
 
-# 1) Start the Ollama service
 ollama serve &
 
-#
+# Wait until the Ollama API is available
 for i in {1..60}; do
   if curl -fsS http://127.0.0.1:11434/api/tags >/dev/null; then
     break
@@ -12,11 +11,11 @@ for i in {1..60}; do
   sleep 1
 done
 
-#
+# Pull the embedding model and, if needed, the generation model
 ollama pull "${EMBEDDING_MODEL:-nomic-embed-text}" || true
 if [ "${LLM_BINDING:-}" = "ollama" ]; then
   ollama pull "${LLM_MODEL:-llama2}" || true
 fi
 
-#
+# Start LightRAG (if the platform requires a custom port, change this to --port "${PORT:-9621}")
 exec python -m lightrag.api.lightrag_server
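A note on the readiness loop above: the curl probe runs inside an if condition, so set -euo pipefail does not abort the script on failed probes, and after 60 unsuccessful one-second attempts the loop simply falls through to the pull step. A minimal, optional hard-fail variant is sketched below; it is not part of this commit, only an illustration of how the wait could be made strict.

# Optional stricter wait (illustrative only, not in this commit):
# exit non-zero if Ollama never becomes reachable within ~60 seconds.
ready=false
for i in {1..60}; do
  if curl -fsS http://127.0.0.1:11434/api/tags >/dev/null; then
    ready=true
    break
  fi
  sleep 1
done
if [ "$ready" != true ]; then
  echo "Ollama did not become ready in time" >&2
  exit 1
fi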
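The model choices come from environment variables with fallback defaults taken from the script: EMBEDDING_MODEL (default nomic-embed-text), LLM_BINDING, and LLM_MODEL (default llama2). A minimal usage sketch follows; the exported values are examples, and only the two defaults above come from start.sh itself.

# Hypothetical invocation with explicit overrides; values are illustrative.
export LLM_BINDING=ollama                 # enables the second ollama pull
export LLM_MODEL=llama2                   # generation model to pull
export EMBEDDING_MODEL=nomic-embed-text   # embedding model to pull
./start.sh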
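The final comment points at a port override for platforms that inject a PORT variable. A sketch of that variant, assuming lightrag.api.lightrag_server accepts a --port flag as the comment implies:

# Variant of the last line; falls back to 9621 when PORT is unset.
exec python -m lightrag.api.lightrag_server --port "${PORT:-9621}"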