ngxson HF staff committed on
Commit
db1f459
·
verified ·
1 Parent(s): 76b7c8d

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +70 -0
app.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import requests
3
+ import json
4
+
5
+ def fetch_manifest(model_id, tag_name):
6
+ try:
7
+ model_id = model_id.replace("hf.co", "").replace("://", "").replace("/", " ").strip().replace(" ", "/")
8
+
9
+ # Fetch manifest
10
+ manifest_url = f"https://huggingface.co/v2/{model_id}/manifests/{tag_name}"
11
+ manifest_response = requests.get(manifest_url)
12
+ manifest_response.raise_for_status()
13
+ manifest_data = manifest_response.json()
14
+
15
+ # Initialize output
16
+ output = f"Manifest for {model_id}:{tag_name}\n"
17
+ output += json.dumps(manifest_data, indent=2) + "\n\n"
18
+
19
+ # Find template and params layers
20
+ system_layer = next((layer for layer in manifest_data["layers"]
21
+ if layer["mediaType"] == "application/vnd.ollama.image.system"), None)
22
+ template_layer = next((layer for layer in manifest_data["layers"]
23
+ if layer["mediaType"] == "application/vnd.ollama.image.template"), None)
24
+ params_layer = next((layer for layer in manifest_data["layers"]
25
+ if layer["mediaType"] == "application/vnd.ollama.image.params"), None)
26
+
27
+ # Fetch and display system if found
28
+ if system_layer:
29
+ system_url = f"https://huggingface.co/v2/{model_id}/blobs/{system_layer['digest']}"
30
+ system_response = requests.get(system_url)
31
+ system_response.raise_for_status()
32
+ output += "System message:\n"
33
+ output += system_response.text + "\n\n"
34
+
35
+ # Fetch and display template if found
36
+ if template_layer:
37
+ template_url = f"https://huggingface.co/v2/{model_id}/blobs/{template_layer['digest']}"
38
+ template_response = requests.get(template_url)
39
+ template_response.raise_for_status()
40
+ output += "Template:\n"
41
+ output += template_response.text + "\n\n"
42
+
43
+ # Fetch and display params if found
44
+ if params_layer:
45
+ params_url = f"https://huggingface.co/v2/{model_id}/blobs/{params_layer['digest']}"
46
+ params_response = requests.get(params_url)
47
+ params_response.raise_for_status()
48
+ output += "Parameters:\n"
49
+ output += json.dumps(params_response.text, indent=2) + "\n"
50
+
51
+ return output
52
+
53
+ except requests.exceptions.RequestException as e:
54
+ raise gr.Error(f"Error occurred: {str(e)}")
55
+ except Exception as e:
56
+ raise gr.Error(f"Unexpected error: {str(e)}")
57
+
58
# Build the Gradio interface: two text inputs (model id + tag) feed
# fetch_manifest, and the resulting report is rendered in a tall textbox.
model_id_box = gr.Textbox(
    label="Model ID (e.g., bartowski/Meta-Llama-3.1-8B-Instruct-GGUF)",
    placeholder="Enter model ID",
)
tag_name_box = gr.Textbox(
    "latest",
    label="Tag Name (e.g., latest)",
    placeholder="Enter tag name",
)
results_box = gr.Textbox(label="Results", lines=20)

iface = gr.Interface(
    fn=fetch_manifest,
    inputs=[model_id_box, tag_name_box],
    outputs=results_box,
    title="Hugging Face Model Manifest Viewer",
    description="Enter a Hugging Face model ID and tag name to view its manifest, template, and parameters.",
)

iface.launch()