File size: 1,320 Bytes
6331e04
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
import os, json, requests, streamlit as st
from backend.rag_engine import get_embedder,get_chroma,retrieve,seed_index
from backend.soap_generator import compose_soap
from utils.constants import DOCS_DIR,CHAT_ENDPOINT

# Page config must be the first Streamlit call in the script.
st.set_page_config(page_title='MediAssist v13', page_icon='🩺', layout='wide')


@st.cache_resource
def emb():
    """Return the sentence embedder, created once per server process."""
    return get_embedder()


@st.cache_resource
def col():
    """Return the Chroma collection (second element of get_chroma()), cached."""
    return get_chroma()[1]

def chat(prompt):
    """Send *prompt* to the hosted chat model and return the generated text.

    Always returns a string (never raises), so the Streamlit UI can render
    something even when the token is missing, the HTTP call fails, or the
    response has an unexpected shape.
    """
    token = os.getenv('HF_API_TOKEN')
    if not token:
        return 'Missing HF_API_TOKEN'
    try:
        r = requests.post(
            CHAT_ENDPOINT,
            headers={"Authorization": f"Bearer {token}"},
            json={"inputs": prompt},
            timeout=200,
        )
        # Surface 4xx/5xx explicitly instead of trying to parse an error page.
        r.raise_for_status()
        d = r.json()  # may raise ValueError on a non-JSON body
    except (requests.RequestException, ValueError) as e:
        return f"Chat request failed: {e}"
    # Guard against an empty list / non-dict element before indexing — the
    # original `d[0]` raised IndexError when the API returned [].
    if isinstance(d, list) and d and isinstance(d[0], dict) and "generated_text" in d[0]:
        return d[0]["generated_text"]
    return str(d)

st.title("🩺 MediAssist v13 — AI Gynae Assistant")

with st.sidebar:
    # One-click (re)build of the vector index from the documents folder.
    if st.button("Seed Index"):
        chunk_count = seed_index(col(), emb(), DOCS_DIR)
        st.success(f"Indexed {chunk_count} chunks")

txt = st.text_area("Patient narrative")
if st.button("Generate Report"):
    # Pull the 5 most relevant knowledge chunks to ground the LLM draft.
    hits = retrieve(col(), emb(), txt, 5)
    soap_note = compose_soap(txt, hits)
    context = "\n".join(hit["text"] for hit in hits)
    llm_prompt = (
        "Use this context to create a refined clinical report:\n"
        f"{context}\nPatient: {txt}"
    )
    draft = chat(llm_prompt)
    st.subheader("AI Draft Report")
    st.write(draft)
    st.subheader("SOAP")
    st.json(soap_note)