Spaces: Running on Zero
ehristoforu committed: Update app.py

app.py CHANGED
@@ -76,13 +76,15 @@ def generate(
         input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
         gr.Warning(f"Trimmed input from conversation as it was longer than {MAX_INPUT_TOKEN_LENGTH} tokens.")
     input_ids = input_ids.to(model.device)
-
+    attention_mask = input_ids["attention_mask"]
     streamer = TextIteratorStreamer(tokenizer, timeout=20.0, skip_prompt=True, skip_special_tokens=True)
     generate_kwargs = dict(
         {"input_ids": input_ids},
         streamer=streamer,
         max_new_tokens=max_new_tokens,
         eos_token_id=tokenizer.eos_token_id,
+        pad_token_id=tokenizer.eos_token_id,
+        attention_mask=attention_mask,
         do_sample=True,
         top_p=top_p,
         top_k=top_k,
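The commit passes an explicit attention_mask and pad_token_id into generate_kwargs, which is the usual way to silence the transformers warning about a missing attention mask and pad token during open-ended generation. One caveat, though: as committed, attention_mask = input_ids["attention_mask"] looks like it would raise a TypeError at this point, because input_ids is already a plain tensor (it was sliced with [:, -MAX_INPUT_TOKEN_LENGTH:] and moved to model.device above), not the BatchEncoding dict returned by the tokenizer. Below is a minimal sketch of the pattern the change appears to be aiming for; it assumes the tokenizer, model, and MAX_INPUT_TOKEN_LENGTH defined elsewhere in app.py, and generate_sketch is a hypothetical name for illustration.

import torch
from threading import Thread
from transformers import TextIteratorStreamer

# Sketch only: tokenizer, model, and MAX_INPUT_TOKEN_LENGTH are assumed
# to be defined elsewhere in app.py, as in the rest of the diff.
def generate_sketch(conversation, max_new_tokens, top_p, top_k):
    input_ids = tokenizer.apply_chat_template(
        conversation, add_generation_prompt=True, return_tensors="pt"
    )
    if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
        input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
    input_ids = input_ids.to(model.device)

    # input_ids is a tensor here, so build the mask from its shape rather
    # than indexing it like a dict; for a single unpadded sequence the
    # mask is all ones.
    attention_mask = torch.ones_like(input_ids)

    streamer = TextIteratorStreamer(
        tokenizer, timeout=20.0, skip_prompt=True, skip_special_tokens=True
    )
    generate_kwargs = dict(
        input_ids=input_ids,
        attention_mask=attention_mask,
        streamer=streamer,
        max_new_tokens=max_new_tokens,
        eos_token_id=tokenizer.eos_token_id,
        # Reusing eos as pad is the standard workaround for models
        # (e.g. Llama-style) that ship without a dedicated pad token.
        pad_token_id=tokenizer.eos_token_id,
        do_sample=True,
        top_p=top_p,
        top_k=top_k,
    )
    # Run generation on a worker thread and stream decoded text back.
    Thread(target=model.generate, kwargs=generate_kwargs).start()
    for text in streamer:
        yield text

Alternatively, keeping the tokenizer's BatchEncoding intact (rather than extracting the ids tensor first) would make the committed dict-style lookup of "attention_mask" valid; either approach supplies the mask that generate() otherwise has to guess.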