akhaliq (HF staff) committed
Commit c6c5393 · 1 Parent(s): 8570eb1

add sambanova coder

Files changed (5):
  1. app.py +2 -0
  2. app_sambanova.py +15 -24
  3. app_sambanova_coder.py +20 -0
  4. pyproject.toml +1 -1
  5. requirements.txt +1 -1
app.py CHANGED
@@ -29,6 +29,7 @@ from app_qwen import demo as demo_qwen
 from app_qwen_coder import demo as demo_qwen_coder
 from app_nvidia_coder import demo as demo_nvidia_coder
 from app_openai import demo as demo_openai
+from app_sambanova_coder import demo as demo_sambanova_coder
 from utils import get_app
 import gradio as gr
 
@@ -60,6 +61,7 @@ gr.load(
 # Create mapping of providers to their demos
 PROVIDERS = {
     "OpenAI Coder": demo_openai_coder,
+    "Sambanova Coder": demo_sambanova_coder,
     "Gemini Coder": demo_gemini_coder,
     "OpenAI": demo_openai,
     "NVIDIA Coder": demo_nvidia_coder,
app_sambanova.py CHANGED
@@ -1,28 +1,19 @@
-import os
-
-import sambanova_gradio
-
-from utils import get_app
-
-demo = get_app(
-    models=[
-        "Meta-Llama-3.2-1B-Instruct",
-        "Meta-Llama-3.2-3B-Instruct",
-        "Llama-3.2-11B-Vision-Instruct",
-        "Llama-3.2-90B-Vision-Instruct",
-        "Meta-Llama-3.1-8B-Instruct",
-        "Meta-Llama-3.1-70B-Instruct",
-        "Meta-Llama-3.1-405B-Instruct",
-        "Qwen2.5-72B-Instruct",
-        "Qwen2.5-Coder-32B-Instruct",
-        "Meta-Llama-3.3-70B-Instruct",
-        "QwQ-32B-Preview",
-    ],
-    default_model="QwQ-32B-Preview",
-    src=sambanova_gradio.registry,
-    accept_token=not os.getenv("SAMBANOVA_API_KEY"),
-    multimodal=True,
-)
-
-if __name__ == "__main__":
-    demo.launch()
+import ai_gradio
+
+from utils_ai_gradio import get_app
+
+# Get the Sambanova models but keep their full names for loading
+SAMBANOVA_MODELS_FULL = [k for k in ai_gradio.registry.keys() if k.startswith("sambanova:")]
+
+# Create display names without the prefix
+SAMBANOVA_MODELS_DISPLAY = [k.replace("sambanova:", "") for k in SAMBANOVA_MODELS_FULL]
+
+
+# Create and launch the interface using get_app utility
+demo = get_app(
+    models=SAMBANOVA_MODELS_FULL,  # Use the full names with prefix
+    default_model=SAMBANOVA_MODELS_FULL[-1],
+    dropdown_label="Select Sambanova Model",
+    choices=SAMBANOVA_MODELS_DISPLAY,  # Display names without prefix
+    fill_height=True,
+)
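
Note: the rewritten module discovers models by filtering ai_gradio.registry keys on the "sambanova:" prefix and strips that prefix for display. A self-contained illustration of that pattern with made-up keys (the real registry contents depend on the installed ai-gradio version):

registry_keys = ["sambanova:Meta-Llama-3.1-8B-Instruct", "openai:gpt-4o-mini"]  # hypothetical keys
full = [k for k in registry_keys if k.startswith("sambanova:")]
display = [k.replace("sambanova:", "") for k in full]
print(full)     # ['sambanova:Meta-Llama-3.1-8B-Instruct']
print(display)  # ['Meta-Llama-3.1-8B-Instruct']
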
app_sambanova_coder.py ADDED
@@ -0,0 +1,20 @@
+import ai_gradio
+
+from utils_ai_gradio import get_app
+
+# Get the Sambanova models but keep their full names for loading
+SAMBANOVA_MODELS_FULL = [k for k in ai_gradio.registry.keys() if k.startswith("sambanova:")]
+
+# Create display names without the prefix
+SAMBANOVA_MODELS_DISPLAY = [k.replace("sambanova:", "") for k in SAMBANOVA_MODELS_FULL]
+
+
+# Create and launch the interface using get_app utility
+demo = get_app(
+    models=SAMBANOVA_MODELS_FULL,  # Use the full names with prefix
+    default_model=SAMBANOVA_MODELS_FULL[-1],
+    dropdown_label="Select Sambanova Model",
+    choices=SAMBANOVA_MODELS_DISPLAY,  # Display names without prefix
+    fill_height=True,
+    coder=True,
+)
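
Note: both new modules drop the old __main__ launch block; each demo is meant to be imported into app.py through the PROVIDERS mapping. For testing the coder demo on its own, a guard like the one removed from app_sambanova.py could be re-added, assuming a SAMBANOVA_API_KEY (or whatever credential ai_gradio expects) is set in the environment:

if __name__ == "__main__":
    demo.launch()
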
pyproject.toml CHANGED
@@ -38,7 +38,7 @@ dependencies = [
     "langchain>=0.3.14",
     "chromadb>=0.5.23",
     "openai>=1.55.0",
-    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen]>=0.2.41",
+    "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral,minimax,nvidia, qwen]>=0.2.42",
 ]
 
 [tool.uv.sources]
requirements.txt CHANGED
@@ -2,7 +2,7 @@
 # uv pip compile pyproject.toml -o requirements.txt
 accelerate==1.2.1
     # via ai-gradio
-ai-gradio==0.2.41
+ai-gradio==0.2.42
     # via anychat (pyproject.toml)
 aiofiles==23.2.1
     # via gradio