KarthikMuraliM commited on
Commit
bf1736b
·
1 Parent(s): c6acebd

Initial FastAPI setup

Browse files
Files changed (4) hide show
  1. .gitignore +29 -0
  2. Dockerfile +18 -0
  3. app.py +43 -0
  4. requirements.txt +3 -0
.gitignore ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # Virtual environments
7
+ venv/
8
+ .env/
9
+ .envs/
10
+ .venv/
11
+ ENV/
12
+ env/
13
+
14
+ # Distribution / packaging
15
+ build/
16
+ dist/
17
+ *.egg-info/
18
+ .eggs/
19
+
20
+ # PyInstaller
21
+ *.manifest
22
+ *.spec
23
+
24
+ # IDEs
25
+ .vscode/
26
+ .idea/
27
+
28
+ # Logs
29
+ *.log
Dockerfile ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Use an official Python runtime as a parent image
FROM python:3.10-slim

# Set the working directory in the container
WORKDIR /app

# Copy only the dependency manifest first so the pip-install layer is
# cached and re-runs only when requirements.txt changes, not on every
# source edit.
COPY requirements.txt /app/

# Install any needed packages specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application code into the container at /app
COPY . /app

# Make port 7860 available to the world outside this container
EXPOSE 7860

# Run the uvicorn server when the container launches
# Hugging Face Spaces automatically maps this to your public URL
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# app.py
# Module entry point for the Data Analyst Agent HTTP API (FastAPI).
from fastapi import FastAPI, File, UploadFile, Form
from typing import List
import os
# NOTE(review): `os` and `Form` are imported but not used anywhere in this
# file yet — presumably reserved for upcoming features; confirm before removing.

# The ASGI application object; uvicorn serves it as "app:app"
# (see the CMD line in the accompanying Dockerfile).
app = FastAPI()
# A simple root endpoint to confirm the app is running
@app.get("/")
async def read_root():
    """Health-check endpoint: report that the API is up and reachable."""
    payload = {"message": "Data Analyst Agent API is running!"}
    return payload
# Our main API endpoint for data analysis tasks
@app.post("/api/")
async def analyze_data(
    questions_file: UploadFile = File(..., alias="questions.txt"),
    files: List[UploadFile] = File([], alias="files"),  # This will catch other files if sent
):
    """Accept a mandatory questions.txt upload plus optional extra files.

    Currently a stub: it echoes back what was received. The actual analysis
    (LLM processing of the questions against the attached files) is not
    implemented yet.
    """
    # Read and decode the content of questions.txt
    raw = await questions_file.read()
    questions_text = raw.decode("utf-8")

    response_messages = [f"Received questions:\n{questions_text}"]

    # Acknowledge each additional upload. A real implementation would
    # persist these to a temporary location before processing, e.g.:
    #   with open(f"/tmp/{upload.filename}", "wb") as f:
    #       f.write(await upload.read())
    for upload in files:
        response_messages.append(f"Received file: {upload.filename} (Content-Type: {upload.content_type})")

    # Placeholder for the core logic: the LLM will process questions_text
    # and make use of the other uploaded files here.
    return {"status": "Processing initiated", "details": response_messages}
# Allow running the API directly with `python app.py`.
if __name__ == "__main__":
    import uvicorn

    # Hugging Face Spaces typically use port 7860.
    uvicorn.run(app, host="0.0.0.0", port=7860)
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ fastapi
2
+ uvicorn
3
+ python-multipart # Required for FastAPI to handle file uploads