Spaces:
Runtime error
Runtime error
yuntian-deng
committed on
Commit
•
46c0344
1
Parent(s):
d207e3d
Update app.py
Browse files
app.py
CHANGED
@@ -8,6 +8,9 @@ MODEL = "gpt-3.5-turbo"
|
|
8 |
API_URL = os.getenv("API_URL")
|
9 |
DISABLED = os.getenv("DISABLED") == 'True'
|
10 |
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
|
|
|
|
|
|
|
11 |
|
12 |
def exception_handler(exception_type, exception, traceback):
|
13 |
print("%s: %s" % (exception_type.__name__, exception))
|
@@ -28,7 +31,7 @@ def parse_codeblock(text):
|
|
28 |
lines[i] = "<br/>" + line.replace("<", "&lt;").replace(">", "&gt;")
|
29 |
return "".join(lines)
|
30 |
|
31 |
-
def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
|
32 |
payload = {
|
33 |
"model": MODEL,
|
34 |
"messages": [{"role": "user", "content": f"{inputs}"}],
|
@@ -42,7 +45,8 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
|
|
42 |
|
43 |
headers = {
|
44 |
"Content-Type": "application/json",
|
45 |
-
"Authorization": f"Bearer {OPENAI_API_KEY}"
|
|
|
46 |
}
|
47 |
|
48 |
# print(f"chat_counter - {chat_counter}")
|
@@ -73,7 +77,8 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot=[], history=[]):
|
|
73 |
"frequency_penalty":0,
|
74 |
}
|
75 |
|
76 |
-
chat_counter+=1
|
|
|
77 |
history.append(inputs)
|
78 |
token_counter = 0
|
79 |
partial_words = ""
|
@@ -163,10 +168,10 @@ with gr.Blocks(css = """#col_container { margin-left: auto; margin-right: auto;}
|
|
163 |
<div>
|
164 |
<p>By using our app, which is powered by OpenAI's API, you acknowledge and agree to the following terms regarding the data you provide:</p>
|
165 |
<ol>
|
166 |
-
<li><strong>Collection:</strong> We may collect information, including the inputs you type into our app
|
167 |
-
<li><strong>Use:</strong> We may use the collected data for research purposes, to improve our services, and to develop new products or services, including commercial applications.</li>
|
168 |
-
<li><strong>Sharing and Publication:</strong> Your data may be published, shared with third parties, or used for analysis and reporting purposes.</li>
|
169 |
-
<li><strong>Data Retention:</strong> We may retain your data for as long as necessary.</li>
|
170 |
</ol>
|
171 |
<p>By continuing to use our app, you provide your explicit consent to the collection, use, and potential sharing of your data as described above. If you do not agree with our data collection, use, and sharing practices, please do not use our app.</p>
|
172 |
</div>
|
@@ -183,4 +188,4 @@ with gr.Blocks(css = """#col_container { margin-left: auto; margin-right: auto;}
|
|
183 |
b1.click(reset_textbox, [], [inputs, b1], queue=False)
|
184 |
b1.click(predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code, inputs, b1],) #openai_api_key
|
185 |
|
186 |
-
demo.queue(max_size=20, concurrency_count=
|
|
|
8 |
API_URL = os.getenv("API_URL")
|
9 |
DISABLED = os.getenv("DISABLED") == 'True'
|
10 |
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
|
11 |
+
NUM_THREADS = int(os.getenv("NUM_THREADS"))
|
12 |
+
|
13 |
+
print (NUM_THREADS)
|
14 |
|
15 |
def exception_handler(exception_type, exception, traceback):
|
16 |
print("%s: %s" % (exception_type.__name__, exception))
|
|
|
31 |
lines[i] = "<br/>" + line.replace("<", "&lt;").replace(">", "&gt;")
|
32 |
return "".join(lines)
|
33 |
|
34 |
+
def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request:gr.Request):
|
35 |
payload = {
|
36 |
"model": MODEL,
|
37 |
"messages": [{"role": "user", "content": f"{inputs}"}],
|
|
|
45 |
|
46 |
headers = {
|
47 |
"Content-Type": "application/json",
|
48 |
+
"Authorization": f"Bearer {OPENAI_API_KEY}",
|
49 |
+
"Headers": f"{request.kwargs['headers']}"
|
50 |
}
|
51 |
|
52 |
# print(f"chat_counter - {chat_counter}")
|
|
|
77 |
"frequency_penalty":0,
|
78 |
}
|
79 |
|
80 |
+
chat_counter += 1
|
81 |
+
|
82 |
history.append(inputs)
|
83 |
token_counter = 0
|
84 |
partial_words = ""
|
|
|
168 |
<div>
|
169 |
<p>By using our app, which is powered by OpenAI's API, you acknowledge and agree to the following terms regarding the data you provide:</p>
|
170 |
<ol>
|
171 |
+
<li><strong>Collection:</strong> We may collect information, including the inputs you type into our app, the outputs generated by OpenAI's API, and certain technical details about your device and connection (such as browser type, operating system, and IP address) provided by your device's request headers.</li>
|
172 |
+
<li><strong>Use:</strong> We may use the collected data for research purposes, to improve our services, and to develop new products or services, including commercial applications, and for security purposes, such as protecting against unauthorized access and attacks.</li>
|
173 |
+
<li><strong>Sharing and Publication:</strong> Your data, including the technical details collected from your device's request headers, may be published, shared with third parties, or used for analysis and reporting purposes.</li>
|
174 |
+
<li><strong>Data Retention:</strong> We may retain your data, including the technical details collected from your device's request headers, for as long as necessary.</li>
|
175 |
</ol>
|
176 |
<p>By continuing to use our app, you provide your explicit consent to the collection, use, and potential sharing of your data as described above. If you do not agree with our data collection, use, and sharing practices, please do not use our app.</p>
|
177 |
</div>
|
|
|
188 |
b1.click(reset_textbox, [], [inputs, b1], queue=False)
|
189 |
b1.click(predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code, inputs, b1],) #openai_api_key
|
190 |
|
191 |
+
demo.queue(max_size=20, concurrency_count=NUM_THREADS, api_open=False).launch(share=False)
|