import os
import platform
import time
from pathlib import Path
from typing import Dict

import pandas as pd
import streamlit as st

# Only the two helpers this page calls are imported; the individual directory
# constants in src.paths are read via describe_paths() below instead.
from src.paths import describe_paths, initialize_environment

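# Streamlit requires set_page_config() to be the first Streamlit command on the page.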
st.set_page_config(page_title="AI‑Native E‑Consult — Health Check", page_icon="🩺", layout="wide")

st.title("AI‑Native E‑Consult Prototype (V1)")
st.caption("Step 0 — Environment Setup & Health Check")
st.warning("Demo only — de‑identified data. Prototype for feedback; **not for clinical use**.", icon="🛑")

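# initialize_environment() (from src.paths) is assumed to create the data
# directories and export cache-related environment variables, returning them
# as a dict; stash it in session_state so later pages can reuse it.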
env = initialize_environment()
st.session_state.setdefault("_app_env", env)

with st.expander("Environment variables (runtime)", expanded=False):
    st.json(env)

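# Probe one module with __import__ and report (importable, version, error).
# When __version__ is missing (faiss builds do not always expose it), the
# probe falls back to a FAISS_VERSION attribute.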
def _probe_import(modname: str):
    try:
        m = __import__(modname)
        ver = getattr(m, "__version__", "")
        if modname == "faiss" and not ver:
            ver = getattr(m, "FAISS_VERSION", "") or ""
        return True, ver, ""
    except Exception as e:
        return False, "", f"{type(e).__name__}: {e}"

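# Packages required by the pipeline. Note the import names: the PyPI
# distributions faiss-cpu/faiss-gpu and sentence-transformers import as
# "faiss" and "sentence_transformers".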
mods = [
    "torch", "accelerate", "transformers", "bitsandbytes", "faiss",
    "sentence_transformers", "pypdf", "huggingface_hub", "numpy", "pandas",
]

rows = []
for name in mods:
    ok, ver, err = _probe_import(name)
    rows.append({
        "package": name,
        "status": "✅" if ok else "❌",
        "version": ver,
        "error": err,
    })

st.subheader("Python packages")
st.dataframe(pd.DataFrame(rows), use_container_width=True)

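# GPU probe: torch import failures are reported in the table rather than
# raised, so the health page still renders on machines without a working
# torch install.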
cuda_txt = "Not checked"
gpu_name = ""
try:
    import torch

    has_cuda = torch.cuda.is_available()
    cuda_txt = "✅ Available" if has_cuda else "❌ Not available"
    if has_cuda:
        try:
            gpu_name = torch.cuda.get_device_name(0)
        except Exception:
            gpu_name = "CUDA detected (name unavailable)"
except Exception as e:
    has_cuda = False
    cuda_txt = f"⚠️ Torch import error: {e}"

colA, colB = st.columns(2)
with colA:
    st.subheader("System")
    st.write({
        "python": platform.python_version(),
        "platform": platform.platform(),
        "cwd": str(Path.cwd()),
        "time": time.strftime("%Y-%m-%d %H:%M:%S"),
        "CUDA": cuda_txt,
        "GPU": gpu_name,
    })

with colB:
    st.subheader("Paths")
    pinfo: Dict[str, str] = describe_paths()
    st.write(pinfo)

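# _count_pdfs counts PDFs recursively (guidelines may sit in subfolders);
# _human_bytes renders a byte count in the largest unit under 1024,
# e.g. _human_bytes(1536) -> "1.5 KB".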
def _count_pdfs(p: Path) -> int:
    return sum(1 for _ in p.glob("**/*.pdf"))


def _human_bytes(n: int) -> str:
    for u in ["B", "KB", "MB", "GB", "TB"]:
        if n < 1024:
            return f"{n:.1f} {u}"
        n /= 1024
    return f"{n:.1f} PB"

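# Total size of the Hugging Face model cache. os.walk plus a per-file
# try/except tolerates files that disappear or can't be stat'ed mid-scan.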
cache = Path(pinfo["hf_cache_dir"])
size = 0
try:
    for root, _, files in os.walk(cache):
        for f in files:
            try:
                size += (Path(root) / f).stat().st_size
            except Exception:
                pass
except Exception:
    pass

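# The index counts as present only if all three artifacts exist; Step 1 is
# assumed to write faiss.index, chunks.jsonl, and index_info.json together.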
st.write({
    "guideline_pdfs": _count_pdfs(Path(pinfo["guidelines_dir"])),
    "index_present": (
        (Path(pinfo["faiss_index_dir"]) / "faiss.index").exists()
        and (Path(pinfo["faiss_index_dir"]) / "chunks.jsonl").exists()
        and (Path(pinfo["faiss_index_dir"]) / "index_info.json").exists()
    ),
    "hf_cache_size": _human_bytes(size),
})

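# Model selection is driven by environment variables; the values shown below
# are the hard-coded defaults used when MODEL_ID / MODEL_FALLBACK_ID are unset.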
st.info(
    "**Model selection**\n\n"
    f"- Primary: `{os.getenv('MODEL_ID', 'google/medgemma-27b-text-it')}` (GPU / 4-bit)\n"
    f"- Fallback: `{os.getenv('MODEL_FALLBACK_ID', 'google/medgemma-4b-it')}` (CPU)\n"
    "- Stub mode (`E2E_STUB=1`): returns deterministic output for UI tests.",
    icon="⚙️",
)

st.success("Health page loaded. Proceed to **Step 1 — RAG Corpus Prep** from the sidebar when ready.", icon="➡️")
|