Dan Flower committed
Commit 32a7395 · 1 Parent(s): 9344a7a

remove troubleshooting markers

Files changed (1)
  1. streamlit_app.py +36 -36
streamlit_app.py CHANGED
@@ -23,46 +23,46 @@ os.makedirs("/tmp/models", exist_ok=True)
 
 MODEL_PATH = "/tmp/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
 # Always run download_model.py for troubleshooting
-st.warning("Running model download step...")
-
-'''try:
-    result = subprocess.run(
-        ["python3", "model/download_model.py"],
-        check=True,
-        capture_output=True,
-        text=True  # ensures stdout/stderr are strings
-    )
-    st.success("Model download attempted.")
-    st.text("STDOUT:")
-    st.text(result.stdout)
-    st.text("STDERR:")
-    st.text(result.stderr)
-except subprocess.CalledProcessError as e:
-    st.error("Model download failed. Check HF_TOKEN or permissions.")
-    st.text(f"Exit code: {e.returncode}")
-    st.text(f"Command: {e.cmd}")
-    st.text("STDOUT:")
-    st.text(e.stdout or "No stdout")
-    st.text("STDERR:")
-    st.text(e.stderr or "No stderr")
-    st.stop()
+#st.warning("Running model download step...")
+
+#try:
+#    result = subprocess.run(
+#        ["python3", "model/download_model.py"],
+#        check=True,
+#        capture_output=True,
+#        text=True  # ensures stdout/stderr are strings
+#    )
+#    st.success("Model download attempted.")
+#    st.text("STDOUT:")
+#    st.text(result.stdout)
+#    st.text("STDERR:")
+#    st.text(result.stderr)
+#except subprocess.CalledProcessError as e:
+#    st.error("Model download failed. Check HF_TOKEN or permissions.")
+#    st.text(f"Exit code: {e.returncode}")
+#    st.text(f"Command: {e.cmd}")
+#    st.text("STDOUT:")
+#    st.text(e.stdout or "No stdout")
+#    st.text("STDERR:")
+#    st.text(e.stderr or "No stderr")
+#    st.stop()
 
 #end of temp code'''
-'''if not os.path.exists(MODEL_PATH):
-    st.warning("Model not found. Downloading...")
-    try:
-        subprocess.run(["python3", "model/download_model.py"], check=True, capture_output=True)
-        st.success("Model downloaded successfully.")
-    except subprocess.CalledProcessError as e:
-        st.error("Model download failed. Check HF_TOKEN or permissions.")
-        st.text(f"Exit code: {e.returncode}")
-        st.text(f"Command: {e.cmd}")
-        st.text(f"Output: {e.output if hasattr(e, 'output') else 'N/A'}")
-        st.stop()
+#if not os.path.exists(MODEL_PATH):
+#    st.warning("Model not found. Downloading...")
+#    try:
+#        subprocess.run(["python3", "model/download_model.py"], check=True, capture_output=True)
+#        st.success("Model downloaded successfully.")
+#    except subprocess.CalledProcessError as e:
+#        st.error("Model download failed. Check HF_TOKEN or permissions.")
+#        st.text(f"Exit code: {e.returncode}")
+#        st.text(f"Command: {e.cmd}")
+#        st.text(f"Output: {e.output if hasattr(e, 'output') else 'N/A'}")
+#        st.stop()
 
 #st.markdown("## 📁 /tmp/models content:")
 #st.text('\n'.join(os.listdir("/tmp/models")))
-'''
+
 # Add local subdirectories to Python path
 sys.path.append(os.path.join(os.path.dirname(__file__), "modules"))
 sys.path.append(os.path.join(os.path.dirname(__file__), "model"))
@@ -127,4 +127,4 @@ else:
 - **Detailed logging**
 - **Optional RAG integration** where applicable
 """)
-st.markdown("Built using Zephyr-7B + llama.cpp")
+
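
For reference, the behaviour the commented-out block describes (download the model only when the file is missing, and surface the subprocess output on failure) could look roughly like the sketch below. This is an illustration under the assumptions visible in the diff, namely the model/download_model.py script and the MODEL_PATH above; it is not the committed code, and it swaps the hard-coded python3 for sys.executable so the same interpreter that runs the app runs the download script.

```python
# Minimal sketch only, not the committed code: guarded model download
# using the path and script name shown in the diff above.
import os
import subprocess
import sys

import streamlit as st

MODEL_PATH = "/tmp/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
os.makedirs("/tmp/models", exist_ok=True)

if not os.path.exists(MODEL_PATH):
    st.warning("Model not found. Downloading...")
    try:
        result = subprocess.run(
            [sys.executable, "model/download_model.py"],  # same interpreter as the app
            check=True,
            capture_output=True,
            text=True,  # stdout/stderr come back as strings
        )
        st.success("Model downloaded successfully.")
        if result.stdout:
            st.text(result.stdout)
    except subprocess.CalledProcessError as e:
        # Show failure details in the UI so the deploy logs are not the only clue.
        st.error("Model download failed. Check HF_TOKEN or permissions.")
        st.text(f"Exit code: {e.returncode}")
        st.text("STDERR:")
        st.text(e.stderr or "No stderr")
        st.stop()
```

Guarding on os.path.exists keeps the troubleshooting output available on first start without leaving large commented-out blocks in the app.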