from fastapi import FastAPI, Query
from transformers import pipeline

app = FastAPI()

# Load a different model, replace "bert-base-uncased" with your desired model
pipe = pipeline(model="openai-community/gpt2")

@app.get("/")
def read_root():
    return {"message": "API is live. Use the /predict endpoint."}

@app.get("/predict")
def predict(text: str = Query(..., description="Input text for model inference")):
    result = pipe(text)
    return {"predictions": result}