from fastapi import FastAPI, Query
from transformers import pipeline

app = FastAPI()

# Load a Hugging Face pipeline; replace "openai-community/gpt2" with your desired model
pipe = pipeline(model="openai-community/gpt2")

@app.get("/")
def read_root():
    return {"message": "API is live. Use the /predict endpoint."}

@app.get("/predict")
def predict(text: str = Query(..., description="Input text for model inference")):
    # Run inference on the query text and return the raw pipeline output
    result = pipe(text)
    return {"predictions": result}
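# A minimal sketch of how to serve and query this app, assuming the code above is
# saved as main.py and the uvicorn package is installed:
#
#   uvicorn main:app --host 0.0.0.0 --port 8000
#
# Then call the prediction endpoint, e.g.:
#
#   curl "http://127.0.0.1:8000/predict?text=Hello%20world"

if __name__ == "__main__":
    # Optional: run the server directly with `python main.py` instead of the
    # uvicorn CLI (assumes uvicorn is installed alongside fastapi).
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)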