Spaces:
Runtime error
Runtime error
abdalrahmanshahrour
committed on
Commit
•
d4ecab0
0
Parent(s):
Duplicate from abdalrahmanshahrour/Summarization
Browse files- .gitattributes +34 -0
- README.md +13 -0
- app.py +83 -0
- download.py +7 -0
- requirements.txt +72 -0
- style.css +35 -0
- summarize.py +177 -0
- text1.txt +2 -0
- text2.txt +1 -0
.gitattributes
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
29 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
30 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
31 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
32 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
33 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
34 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
title: Summarization
|
3 |
+
emoji: ⚡
|
4 |
+
colorFrom: blue
|
5 |
+
colorTo: gray
|
6 |
+
sdk: streamlit
|
7 |
+
sdk_version: 1.15.2
|
8 |
+
app_file: app.py
|
9 |
+
pinned: false
|
10 |
+
duplicated_from: abdalrahmanshahrour/Summarization
|
11 |
+
---
|
12 |
+
|
13 |
+
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
app.py
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from urllib.parse import unquote
import arabic_reshaper
import streamlit as st
from bidi.algorithm import get_display
from summarize import get_results
import time
import requests

from streamlit_lottie import st_lottie  # pip install streamlit-lottie
from streamlit_lottie import st_lottie_spinner


# Must be the first Streamlit call in the script.
st.set_page_config(
    page_title="Arabic Summarization",
    page_icon="🤖",
    layout="wide",
    initial_sidebar_state="expanded",
    menu_items={
        'Get Help': 'https://www.extremelycoolapp.com/help',
        'Report a bug': "https://www.extremelycoolapp.com/bug",
        'About': "# Arabic Text Summarization , abdalrahman shahrour",
    }
)


def load_lottieurl(url: str):
    """Fetch a Lottie animation description from *url*.

    Returns the parsed JSON payload, or None when the HTTP response is
    anything other than 200 (the caller must handle the None case).
    """
    r = requests.get(url)
    if r.status_code != 200:
        return None
    return r.json()


st.header('تلخيص النصوص العربية : ')

# Arabic text must be reshaped and bidi-reordered to display correctly.
rtl = lambda w: get_display(f"{arabic_reshaper.reshape(w)}")

# Inject the custom RTL stylesheet into the page.
with open('style.css', encoding='utf-8') as f:
    st.markdown(f'<style>{f.read()}</style>', unsafe_allow_html=True)


c = load_lottieurl("https://assets7.lottiefiles.com/packages/lf20_ofa3xwo7.json")
with st.sidebar:
    # FIX: load_lottieurl returns None on any non-200 response, and
    # st_lottie(None) raises — skip the animation when the fetch failed.
    if c is not None:
        st_lottie(c)
    st.markdown("")


model = st.sidebar.selectbox('Select one', ['arabartsummarization', 'AraBART', 'auto-arabic-summarization', 'BERT2BERT', 'xlmroberta2xlmroberta', 'nltk_summarizer'], help="Model",)


st.sidebar.write("\n")
num_beams = st.sidebar.slider(
    "Number of beams", min_value=1, max_value=10, value=3, step=1
)

st.sidebar.write("\n")
length_penalty = st.sidebar.slider(
    "Length penalty ", min_value=0.1, max_value=3.0, value=1.0, step=0.1,
)
st.sidebar.write("\n")
number_of_sentence = st.sidebar.slider(
    "Number of sentence", min_value=1, max_value=10, value=3, step=1
)

st.sidebar.write("\n")
height = st.sidebar.slider(
    "height", min_value=200, max_value=1000, value=350, step=20
)


doc = st.text_area("ضع هنا النص المراد تلخيصه : ", height=height, value="""يجري علماء في بريطانيا تجربة لاختبار فعالية عقار إيبوبروفين لمساعدة المصابين بفيروس كورونا. وذكرت هيئة الإذاعة البريطانية "بي بي سي" أن فريق مشترك من أطباء مستشفيات "جاي" و"سانت توماس" و"كينغز كوليدج" في لندن يعتقد أن إيبوبروفين، وهو مضاد للالتهابات ومسكن للألم، يمكن أن يعالج صعوبات التنفس.
ويأمل العلماء أن يساعد هذا العلاج المنخفض التكلفة المرضى في الاستغناء عن أجهزة التنفس الصناعي. وذكرت أنه خلال فترة الاختبار، سيحصل نصف المرضى على إيبوبروفين بالإضافة إلى الرعاية المعتادة، حيث سيتم استخدام تركيبة خاصة من إيبوبروفين بدلا من الأقراص العادية التي قد يشتريها الناس عادة.""")

summarize_button = st.button(label="لخص النص")

if summarize_button:
    # get_results is lru_cached in summarize.py, so repeated requests with
    # identical settings return instantly.
    with st.spinner("جاري التلخيص ..."):
        result = get_results(doc, model, num_beams, length_penalty, number_of_sentence)
    if len(result) > 0:
        st.write(result)
    else:
        st.write("")
|
download.py
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import nltk


def __init__():
    """Download the NLTK data packages this project needs.

    NOTE(review): despite the name, this is a plain module-level function,
    not a constructor — importing this module does NOT run it. Callers must
    invoke download.__init__() explicitly (summarize.py currently calls
    nltk.download itself instead). Name kept for backward compatibility.
    """
    nltk.download('punkt')
    nltk.download('stopwords')
    nltk.download('wordnet')
    nltk.download('omw-1.4')


if __name__ == "__main__":
    # Allow `python download.py` to fetch the data directly; importing the
    # module remains side-effect free, as before.
    __init__()
|
requirements.txt
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
altair==4.2.0
|
2 |
+
arabert==1.0.1
|
3 |
+
arabic-reshaper==2.1.3
|
4 |
+
attrs==22.2.0
|
5 |
+
blinker==1.5
|
6 |
+
cachetools==5.2.0
|
7 |
+
certifi==2022.12.7
|
8 |
+
charset-normalizer==2.1.1
|
9 |
+
click==8.1.3
|
10 |
+
codetiming==1.3.0
|
11 |
+
commonmark==0.9.1
|
12 |
+
decorator==5.1.1
|
13 |
+
emoji==1.4.2
|
14 |
+
entrypoints==0.4
|
15 |
+
farasapy==0.0.14
|
16 |
+
filelock==3.8.2
|
17 |
+
future==0.18.2
|
18 |
+
gitdb==4.0.10
|
19 |
+
GitPython==3.1.29
|
20 |
+
huggingface-hub==0.11.1
|
21 |
+
idna==3.4
|
22 |
+
importlib-metadata==5.2.0
|
23 |
+
Jinja2==3.1.2
|
24 |
+
joblib==1.2.0
|
25 |
+
jsonschema==4.17.3
|
26 |
+
MarkupSafe==2.1.1
|
27 |
+
nltk==3.8
|
28 |
+
numpy==1.24.0
|
29 |
+
nvidia-cublas-cu11==11.10.3.66
|
30 |
+
nvidia-cuda-nvrtc-cu11==11.7.99
|
31 |
+
nvidia-cuda-runtime-cu11==11.7.99
|
32 |
+
nvidia-cudnn-cu11==8.5.0.96
|
33 |
+
packaging==22.0
|
34 |
+
pandas==1.5.2
|
35 |
+
Pillow==9.3.0
|
36 |
+
preprocess==2.0.0
|
37 |
+
protobuf==3.20.2
|
38 |
+
PyArabic==0.6.15
|
39 |
+
pyarrow==10.0.1
|
40 |
+
pydeck==0.8.0
|
41 |
+
Pygments==2.13.0
|
42 |
+
Pympler==1.0.1
|
43 |
+
pyrsistent==0.19.2
|
44 |
+
python-bidi==0.4.2
|
45 |
+
python-dateutil==2.8.2
|
46 |
+
pytz==2022.7
|
47 |
+
pytz-deprecation-shim==0.1.0.post0
|
48 |
+
PyYAML==6.0
|
49 |
+
regex==2022.10.31
|
50 |
+
requests==2.28.1
|
51 |
+
rich==12.6.0
|
52 |
+
semver==2.13.0
|
53 |
+
sentencepiece==0.1.97
|
54 |
+
six==1.16.0
|
55 |
+
smmap==5.0.0
|
56 |
+
streamlit==1.16.0
|
57 |
+
streamlit-lottie==0.0.3
|
58 |
+
streamlit-option-menu==0.3.2
|
59 |
+
tokenizers==0.13.2
|
60 |
+
toml==0.10.2
|
61 |
+
toolz==0.12.0
|
62 |
+
torch==1.13.1
|
63 |
+
tornado==6.2
|
64 |
+
tqdm==4.64.1
|
65 |
+
transformers==4.25.1
|
66 |
+
typing_extensions==4.4.0
|
67 |
+
tzdata==2022.7
|
68 |
+
tzlocal==4.2
|
69 |
+
urllib3==1.26.13
|
70 |
+
validators==0.20.0
|
71 |
+
watchdog==2.2.0
|
72 |
+
zipp==3.11.0
|
style.css
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* Right-align headings and the main element container for RTL layout. */
.stMarkdown h1, .main .element-container.css-o7ulmj.e1tzin5v3 {
    text-align: right;
}
.stMarkdown div.css-nlntq9.e16nr0p33 {
    font-weight: bold;
}
/* Text input area reads right-to-left. */
textarea {
    direction: rtl;
    height: 140px;
}
.stTextArea .css-qrbaxs {
    float: right;
    font-size: 23px;
}
h1 {
    font-family: 'Scheherazade', serif;
}
.main div.css-nlntq9.e16nr0p33 > p {
    direction: rtl;
}
.main .stMarkdown div.css-nlntq9 p {
    font-size: 22px;
}
.main .stMarkdown div.css-nlntq9 {
    direction: rtl;
}
.main p, .main div, .main input, .main label {
    text-align: right;
    direction: rtl;
}

/* FIX: removed a stray unmatched `}` that sat between the previous rule and
   this one — it is a CSS parse error and can cause this rule to be dropped. */
.main button {
    font-size: 22px;
}
|
summarize.py
ADDED
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
import os
import re
from functools import lru_cache
from urllib.parse import unquote

import streamlit as st
from codetiming import Timer
from transformers import pipeline
from arabert.preprocess import ArabertPreprocessor
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM
import tokenizers
import heapq
from string import punctuation
import nltk
from nltk.corpus import stopwords
import download

# Data packages required by the extractive (nltk_summarizer) branch.
nltk.download('punkt')
nltk.download('stopwords')
nltk.download('wordnet')
nltk.download('omw-1.4')


# Treat newline as punctuation too, so it never counts as a word.
punctuation = punctuation + '\n'
logger = logging.getLogger(__name__)
os.environ["TOKENIZERS_PARALLELISM"] = "false"

logger.info("Loading models...")
reader_time = Timer("loading", text="Time: {:.2f}", logger=logging.info)
reader_time.start()


reader_time.stop()


logger.info("Finished loading the models...")
logger.info(f"Time spent loading: {reader_time.last}")

# UI model name -> Hugging Face checkpoint for the abstractive summarizers.
# (All five branches previously duplicated the exact same pipeline code.)
_ABSTRACTIVE_CHECKPOINTS = {
    'arabartsummarization': "abdalrahmanshahrour/arabartsummarization",
    'AraBART': "abdalrahmanshahrour/AraBART-summ",
    'auto-arabic-summarization': "abdalrahmanshahrour/auto-arabic-summarization",
    'BERT2BERT': "malmarjeh/bert2bert",
    'xlmroberta2xlmroberta': "ahmeddbahaa/xlmroberta2xlmroberta-finetune-summarization-ar",
}


def _abstractive_summary(model_name, text, num_beams, length_penalty):
    """Generate an abstractive summary of *text* with the seq2seq
    checkpoint *model_name*, using the same generation settings the
    original per-model branches all shared."""
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
    generator = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
    return generator(text,
                     pad_token_id=tokenizer.eos_token_id,
                     num_beams=num_beams,
                     repetition_penalty=3.0,
                     max_length=200,
                     length_penalty=length_penalty,
                     no_repeat_ngram_size=3)[0]['generated_text']


def _extractive_summary(text, number_of_sentence):
    """Frequency-based extractive summary: score each sentence by the
    normalized frequencies of its (non-stopword, non-punctuation) words
    and return the top *number_of_sentence* sentences joined by spaces."""
    stop_words = set(nltk.corpus.stopwords.words("arabic") + nltk.corpus.stopwords.words("english"))

    # Word frequencies are counted on the original-case text.
    word_frequencies = {}
    for word in nltk.word_tokenize(text):
        if word not in stop_words and word not in punctuation:
            word_frequencies[word] = word_frequencies.get(word, 0) + 1

    # default=3 guards the all-stopword/empty-text edge case.
    maximum_frequncy = max(list(word_frequencies.values()), default=3)
    for word in word_frequencies:
        word_frequencies[word] = word_frequencies[word] / maximum_frequncy

    # Sentences are matched lowercased, as in the original implementation;
    # only sentences shorter than 30 space-separated tokens are scored.
    sentence_scores = {}
    for sent in nltk.sent_tokenize(text):
        for word in nltk.word_tokenize(sent.lower()):
            if word in word_frequencies and len(sent.split(' ')) < 30:
                sentence_scores[sent] = sentence_scores.get(sent, 0) + word_frequencies[word]

    summary_sentences = heapq.nlargest(number_of_sentence, sentence_scores, key=sentence_scores.get)
    return ' '.join(summary_sentences)


@lru_cache(maxsize=200)
def get_results(text, model_selected, num_beams, length_penalty, number_of_sentence):
    """Summarize *text* with the model chosen in the UI.

    Parameters
    ----------
    text : str
        Input document.
    model_selected : str
        One of the keys of _ABSTRACTIVE_CHECKPOINTS, or "nltk_summarizer"
        for the extractive fallback.
    num_beams : int
        Beam-search width for the abstractive models.
    length_penalty : float
        Generation length penalty for the abstractive models.
    number_of_sentence : int
        Number of sentences kept by the extractive summarizer.

    Returns
    -------
    str
        The summary, or an Arabic "please pick a model" message for an
        unknown *model_selected*.

    Results are memoized (lru_cache) on the full argument tuple.
    """
    logger.info("\n=================================================================")
    logger.info(f"Text: {text}")
    logger.info(f"model_selected: {model_selected}")
    logger.info(f"length_penalty: {length_penalty}")
    reader_time = Timer("summarize", text="Time: {:.2f}", logger=logging.info)
    reader_time.start()
    logger.info(f"input length: {len(text.split())}")
    # NOTE(review): the original also computed a `number_of_tokens_limit`
    # for a 'GPT-2' choice, but GPT-2 is not a selectable model and the
    # value was never used — that dead branch has been removed.

    if model_selected in _ABSTRACTIVE_CHECKPOINTS:
        result = _abstractive_summary(_ABSTRACTIVE_CHECKPOINTS[model_selected],
                                      text, num_beams, length_penalty)
        logger.info(model_selected)
    elif model_selected == "nltk_summarizer":
        result = _extractive_summary(text, number_of_sentence)
    else:
        result = "الرجاء اختيار نموذج"

    reader_time.stop()
    logger.info(f"Time spent summarizing: {reader_time.last}")

    return result


if __name__ == "__main__":
    results_dict = ""
|
text1.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
يجري علماء في بريطانيا تجربة لاختبار فعالية عقار إيبوبروفين لمساعدة المصابين بفيروس كورونا. وذكرت هيئة الإذاعة البريطانية "بي بي سي" أن فريق مشترك من أطباء مستشفيات "جاي" و"سانت توماس" و"كينغز كوليدج" في لندن يعتقد أن إيبوبروفين، وهو مضاد للالتهابات ومسكن للألم، يمكن أن يعالج صعوبات التنفس.
|
2 |
+
ويأمل العلماء أن يساعد هذا العلاج المنخفض التكلفة المرضى في الاستغناء عن أجهزة التنفس الصناعي. وذكرت أنه خلال فترة الاختبار، سيحصل نصف المرضى على إيبوبروفين بالإضافة إلى الرعاية المعتادة، حيث سيتم استخدام تركيبة خاصة من إيبوبروفين بدلا من الأقراص العادية التي قد يشتريها الناس عادة.
|
text2.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
شهدت مدينة طرابلس، مساء أمس الأربعاء، احتجاجات شعبية وأعمال شغب لليوم الثالث على التوالي، وذلك بسبب تردي الوضع المعيشي والاقتصادي. واندلعت مواجهات عنيفة وعمليات كر وفر ما بين الجيش اللبناني والمحتجين استمرت لساعات، إثر محاولة فتح الطرقات المقطوعة، ما أدى إلى إصابة العشرات من الطرفين.
|