Spaces:
Running
Running
Dan Flower
committed on
Commit
·
d5b6359
1
Parent(s):
2a0db8e
dockerfile env model path case error
Browse files- Dockerfile +1 -1
- model/download_model.py +1 -1
- streamlit_app.py +2 -2
Dockerfile
CHANGED
|
@@ -43,6 +43,6 @@ ENV BROWSER_GATHER_USAGE_STATS=false
|
|
| 43 |
RUN echo "[browser]\ngatherUsageStats = false" > /tmp/.streamlit/config.toml
|
| 44 |
|
| 45 |
# Path where your downloader stores the model
|
| 46 |
-
ENV MODEL_PATH=/tmp/models/
|
| 47 |
|
| 48 |
ENTRYPOINT ["streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
|
|
|
|
| 43 |
RUN echo "[browser]\ngatherUsageStats = false" > /tmp/.streamlit/config.toml
|
| 44 |
|
| 45 |
# Path where your downloader stores the model
|
| 46 |
+
ENV MODEL_PATH=/tmp/models/TinyLlama-1.1B-Chat-v1.0.Q4_K_M.gguf
|
| 47 |
|
| 48 |
ENTRYPOINT ["streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
|
model/download_model.py
CHANGED
|
@@ -4,7 +4,7 @@ from huggingface_hub import hf_hub_download
|
|
| 4 |
|
| 5 |
def main():
|
| 6 |
MODEL_REPO = os.getenv("MODEL_REPO", "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF")
|
| 7 |
-
MODEL_FILE = os.getenv("MODEL_FILE", "
|
| 8 |
MODEL_DIR = pathlib.Path(os.getenv("MODEL_DIR", "/tmp/models"))
|
| 9 |
MODEL_DIR.mkdir(parents=True, exist_ok=True)
|
| 10 |
|
|
|
|
| 4 |
|
| 5 |
def main():
|
| 6 |
MODEL_REPO = os.getenv("MODEL_REPO", "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF")
|
| 7 |
+
MODEL_FILE = os.getenv("MODEL_FILE", "TinyLlama-1.1B-Chat-v1.0.Q4_K_M.gguf") # <- fixed case
|
| 8 |
MODEL_DIR = pathlib.Path(os.getenv("MODEL_DIR", "/tmp/models"))
|
| 9 |
MODEL_DIR.mkdir(parents=True, exist_ok=True)
|
| 10 |
|
streamlit_app.py
CHANGED
|
@@ -8,7 +8,7 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
|
| 8 |
# Ensure /app is in sys.path so we can import utils.* from anywhere
|
| 9 |
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
|
| 10 |
# Environment setup
|
| 11 |
-
os.environ["MODEL_PATH"] = "/tmp/models/
|
| 12 |
os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
|
| 13 |
os.environ["XDG_CONFIG_HOME"] = "/tmp/.streamlit"
|
| 14 |
os.environ["BROWSER_GATHER_USAGE_STATS"] = "false"
|
|
@@ -21,7 +21,7 @@ os.makedirs("/tmp/models", exist_ok=True)
|
|
| 21 |
|
| 22 |
# Runtime model download if needed
|
| 23 |
|
| 24 |
-
MODEL_PATH = "/tmp/models/
|
| 25 |
if not os.path.exists(MODEL_PATH):
|
| 26 |
st.warning("Model not found. Downloading...")
|
| 27 |
try:
|
|
|
|
| 8 |
# Ensure /app is in sys.path so we can import utils.* from anywhere
|
| 9 |
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
|
| 10 |
# Environment setup
|
| 11 |
+
os.environ["MODEL_PATH"] = "/tmp/models/TinyLlama-1.1B-Chat-v1.0.Q4_K_M.gguf"
|
| 12 |
os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
|
| 13 |
os.environ["XDG_CONFIG_HOME"] = "/tmp/.streamlit"
|
| 14 |
os.environ["BROWSER_GATHER_USAGE_STATS"] = "false"
|
|
|
|
| 21 |
|
| 22 |
# Runtime model download if needed
|
| 23 |
|
| 24 |
+
MODEL_PATH = "/tmp/models/TinyLlama-1.1B-Chat-v1.0.Q4_K_M.gguf"
|
| 25 |
if not os.path.exists(MODEL_PATH):
|
| 26 |
st.warning("Model not found. Downloading...")
|
| 27 |
try:
|