usag1e committed
Commit 32777f1 · 1 Parent(s): a931f78

Add debugging logs to predict endpoint

Files changed (1):
  app.py  +18 -6
app.py CHANGED
@@ -1,5 +1,10 @@
-from fastapi import FastAPI, HTTPException
+from fastapi import FastAPI, HTTPException, Request
 from pydantic import BaseModel
+import logging
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("LLM-API")
 
 app = FastAPI()
 
@@ -9,11 +14,18 @@ class InputText(BaseModel):
 
 @app.get("/")
 def root():
+    logger.info("Root endpoint called.")
     return {"message": "Welcome to the LLM API"}
 
 @app.post("/predict")
-def predict(data: InputText):
-    # Access the input_text from the request body
-    input_text = data.input_text
-    # For now, return the input text as a response
-    return {"response": f"The input was: {input_text}"}
+async def predict(data: InputText, request: Request):
+    logger.info("Received request: %s", await request.body())
+    try:
+        # Log the received input
+        input_text = data.input_text
+        logger.info(f"Processing input: {input_text}")
+        # Return a mock response for now
+        return {"response": f"The input was: {input_text}"}
+    except Exception as e:
+        logger.error(f"Error occurred: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
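
A quick way to exercise the new logging locally is a minimal sketch like the one below. It assumes the file above is saved as app.py, that InputText exposes a single input_text field as shown in the diff, and that FastAPI's TestClient (backed by httpx) is installed; the exact log text may differ slightly.

# Smoke test for the updated /predict endpoint (sketch; assumes app.py from the diff above).
from fastapi.testclient import TestClient

from app import app

client = TestClient(app)

response = client.post("/predict", json={"input_text": "hello"})
print(response.status_code)  # expected: 200
print(response.json())       # expected: {"response": "The input was: hello"}

# With logging.basicConfig(level=logging.INFO), the console should also show
# lines roughly like:
#   INFO:LLM-API:Received request: b'{"input_text": "hello"}'
#   INFO:LLM-API:Processing input: hello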