Update README.md

#3
by boompack - opened
Files changed (1)
  1. README.md +15 -3
README.md CHANGED
@@ -3,11 +3,11 @@ title: Mmmm
  emoji: 🚀
  colorFrom: red
  colorTo: indigo
- sdk: gradio
+ sdk: docker
  sdk_version: 5.4.0
  app_file: app.py
  pinned: false
- license: mit
+ license: bigscience-openrail-m
  duplicated_from: ysharma/ChatGPT4
  disable_embedding: true
  datasets:
@@ -17,6 +17,18 @@ datasets:
  models:
  - allenai/WildLlama-7b-user-assistant
  - allenai/WildLlama-7b-assistant-only
+ short_description: nbb
  ---
  - https://arxiv.org/abs/2405.01470
- - https://arxiv.org/abs/2409.03753
+ - https://arxiv.org/abs/2409.03753
+ -
+ huggingface-cli
+ # Use a pipeline as a high-level helper
+ from transformers import pipeline
+
+ pipe = pipeline("text-generation", model="allenai/WildLlama-7b-assistant-only")
+ # Load model directly
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+
+ tokenizer = AutoTokenizer.from_pretrained("allenai/WildLlama-7b-assistant-only")
+ model = AutoModelForCausalLM.from_pretrained("allenai/WildLlama-7b-assistant-only")
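
The transformers snippet this PR pastes into the README loads the model but never generates anything, and the bare `huggingface-cli` line is left dangling. A minimal end-to-end sketch is below; it assumes the weights are first fetched with something like `huggingface-cli download allenai/WildLlama-7b-assistant-only`, and the prompt string, dtype, and generation settings are illustrative assumptions, not part of the PR or the model card.

```python
# Illustrative sketch only (not part of the PR): load WildLlama and generate one reply.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "allenai/WildLlama-7b-assistant-only"
device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
).to(device)

# A plain-text prompt is an assumption; check the model card for the expected chat format.
prompt = "Give three tips for writing a good README."
inputs = tokenizer(prompt, return_tensors="pt").to(device)

output_ids = model.generate(**inputs, max_new_tokens=128, do_sample=True, temperature=0.7)
reply = tokenizer.decode(output_ids[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True)
print(reply)
```

The same generation could instead go through the `pipe` object from the pipeline example, e.g. `pipe(prompt, max_new_tokens=128)`, which handles tokenization and decoding internally.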