Update README.md
Browse files

Remove `global_attention_mask` from the `forward` call, as `LongT5Model` does not accept any.
README.md
CHANGED
@@ -411,7 +411,7 @@ input_ids = tokenizer(LONG_ARTICLE, return_tensors="pt").input_ids.to("cuda")
 
 model = LongT5ForConditionalGeneration.from_pretrained("Stancld/longt5-tglobal-large-16384-pubmed-3k_steps", return_dict_in_generate=True).to("cuda")
 
-sequences = model.generate(input_ids, global_attention_mask=global_attention_mask).sequences
+sequences = model.generate(input_ids).sequences
 
 summary = tokenizer.batch_decode(sequences)
 ```