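# Gradio Space entry point: loads the soufyane/gemma_data_science Gemma preset
# with KerasNLP and serves a simple text-to-text interface for data science questions.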
import warnings

warnings.filterwarnings('ignore')

import gradio as gr
import keras_nlp

# Load the fine-tuned Gemma model from its Hugging Face Hub preset.
model = keras_nlp.models.CausalLM.from_preset("hf://soufyane/gemma_data_science")
def process_text_gemma(input_text):
    # Generate a completion for the prompt, capped at 256 tokens.
    response = model.generate(f"question: {input_text}", max_length=256)
    return response
def main(input_text):
    # Gradio passes the textbox value as a string, so forward it directly
    # (indexing with [0] would only pass the first character).
    return process_text_gemma(input_text)
gr.Interface(
    fn=main,
    inputs=["text"],
    outputs=["text"],
    title="Gemma Data Science Model",
    description="This is a text-to-text model for data science tasks.",
    live=True,
).launch()