Update app.py
app.py CHANGED
@@ -5,7 +5,7 @@ from dotenv import load_dotenv
 from httpx import Client
 from huggingface_hub import HfApi
 import pandas as pd
-from transformers import pipeline
+#from transformers import pipeline
 import spaces
 from llama_cpp import Llama
 
@@ -22,11 +22,13 @@ headers = {
 }
 client = Client(headers=headers)
 api = HfApi(token=HF_TOKEN)
-pipe = pipeline("text-generation", model="motherduckdb/DuckDB-NSQL-7B-v0.1", device="cuda")
+#pipe = pipeline("text-generation", model="motherduckdb/DuckDB-NSQL-7B-v0.1", device="cuda")
 llama = Llama(
     model_path="DuckDB-NSQL-7B-v0.1-q8_0.gguf",
     n_ctx=2048,
+    n_gpu_layers=50
 )
+
 @spaces.GPU
 def generate_sql(prompt):
     # pred = pipe(prompt, max_length=1000)
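In short, the commit comments out the transformers pipeline so the model is no longer loaded twice, and adds n_gpu_layers=50 so most of the GGUF model's layers are offloaded to the GPU that @spaces.GPU allocates. The body of generate_sql is not visible in this diff; the snippet below is a minimal sketch of how the llama object configured above might be called, with the max_tokens, temperature, and ";" stop token chosen for illustration only.

import spaces
from llama_cpp import Llama

# Same constructor arguments as in the diff above.
llama = Llama(
    model_path="DuckDB-NSQL-7B-v0.1-q8_0.gguf",
    n_ctx=2048,        # context window
    n_gpu_layers=50,   # offload layers to the GPU (the change in this commit)
)

@spaces.GPU  # request a ZeroGPU device for the duration of the call
def generate_sql(prompt: str) -> str:
    # llama-cpp-python's __call__ runs a text completion; the sampling
    # parameters and the ";" stop token here are illustrative assumptions.
    out = llama(prompt, max_tokens=512, temperature=0.1, stop=[";"])
    return out["choices"][0]["text"].strip() + ";"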