# docker-demo / main.py
# Minimal FastAPI service exposing a Hugging Face text-generation pipeline.
from fastapi import FastAPI, Query
from transformers import pipeline
app = FastAPI()
# Load the text-generation pipeline once at import time so every request
# reuses the same model. Replace "openai-community/gpt2" with any other
# Hugging Face model id to serve a different model.
pipe = pipeline(model="openai-community/gpt2")
@app.get("/")
def read_root():
    """Health-check endpoint: confirms the service is up and points callers at /predict."""
    status_message = "API is live. Use the /predict endpoint."
    return {"message": status_message}
@app.get("/predict")
def predict(text: str = Query(..., description="Input text for model inference")):
    """Run the module-level text-generation pipeline on *text*.

    Returns the pipeline's raw output list under the "predictions" key.
    """
    model_output = pipe(text)
    return {"predictions": model_output}