Update actions to include openai and faiss index
actions/actions.py  CHANGED  (+97 -7)
@@ -9,7 +9,67 @@ from rasa_sdk import Action, Tracker
 from rasa_sdk.events import SlotSet, FollowupAction
 from rasa_sdk.executor import CollectingDispatcher
 import random
+import os
+import sys
+import openai
 
+# Add "/app/actions" to the sys.path
+actions_path = os.path.abspath("/app/actions")
+sys.path.insert(0, actions_path)
+
+print("-#-System-path-#-")
+for path in sys.path:
+    print(path)
+print("-#-END-OF-System-path-#-")
+
+# Import search_content.py from the /actions folder
+from search_content import main_search
+
+# Import the API key from secrets and configure the OpenAI client
+secret_value_0 = os.environ.get("openai")
+openai.api_key = secret_value_0
+
+
+def generate_openai_response(query, model_engine="text-davinci-002", max_tokens=124, temperature=0.8):
+    """Generate a response using the OpenAI API."""
+
+    # Run the main search from search_content.py and store the results
+    results = main_search(query)
+
+    # Build the prompt context from the results; trim to 2014 characters - modify as necessary
+    context = "".join([f"#{str(i)}" for i in results])[:2014]
+    prompt_template = f"Relevant context: {context}\n\n Answer the question in detail: {query}"
+
+    # Generate a response using the OpenAI API
+    response = openai.Completion.create(
+        engine=model_engine,
+        prompt=prompt_template,
+        max_tokens=max_tokens,
+        temperature=temperature,
+        n=1,
+        stop=None,
+    )
+
+    return response.choices[0].text.strip()
+
+
+class GetOpenAIResponse(Action):
+
+    def name(self) -> Text:
+        return "action_get_response_openai"
+
+    def run(self,
+            dispatcher: CollectingDispatcher,
+            tracker: Tracker,
+            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
+
+        # Use the OpenAI API to answer the user's latest message
+        query = tracker.latest_message.get('text')
+        response = generate_openai_response(query)
+
+        # Send the generated response back to the user
+        dispatcher.utter_message(text=response)
+
 class GeneralHelp(Action):
     def name(self) -> Text:
         return "action_general_help"
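The hunk above imports main_search from search_content, and the commit title mentions a FAISS index, but search_content.py itself is not part of this diff. Below is a minimal sketch of what such a module might look like, assuming documents are embedded with the pre-1.0 OpenAI Embedding API and stored in a flat L2 FAISS index; the corpus, embedding model, and k value are illustrative assumptions, not taken from the repository.

# Hypothetical sketch of search_content.py (not part of this commit; the real module may differ).
import numpy as np
import faiss
import openai

DOCUMENTS = [
    "Local chapters are volunteer-run groups ...",   # placeholder corpus
    "Participation in a local chapter is free ...",
]

def embed(texts):
    """Embed a list of strings with the (pre-1.0) OpenAI Embedding API."""
    result = openai.Embedding.create(input=texts, model="text-embedding-ada-002")
    return np.array([item["embedding"] for item in result["data"]], dtype="float32")

# Build the index once at import time
doc_vectors = embed(DOCUMENTS)
index = faiss.IndexFlatL2(doc_vectors.shape[1])
index.add(doc_vectors)

def main_search(query, k=3):
    """Return the k documents closest to the query."""
    query_vector = embed([query])
    _, ids = index.search(query_vector, k)
    return [DOCUMENTS[i] for i in ids[0] if i != -1]

With a module along these lines on the path, main_search(query) returns the most similar documents, which generate_openai_response then packs into the prompt context.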
@@ -77,10 +137,11 @@ class ActionJoinClassify(Action):
         last_intent = tracker.slots.get("local_chapter", None)
 
         # Check if the last intent was 'local_chapter'
+        if last_intent == 'local chapter':
             dispatcher.utter_message(template="utter_join_chapter")
         else:
+            return [FollowupAction("action_get_response_openai")]
+
 
 
 class ActionEligibilityClassify(Action):
@@ -97,12 +158,12 @@ class ActionEligibilityClassify(Action):
         last_intent = tracker.slots.get("local_chapter", None)
 
         # Check if the last intent was 'local_chapter'
+        if last_intent == 'local chapter':
             dispatcher.utter_message(template="utter_local_chapter_participation_eligibility")
         else:
+            return [FollowupAction("action_get_response_openai")]
 
+
 class ActionCostClassify(Action):
 
     def name(self) -> Text:
@@ -117,7 +178,36 @@ class ActionCostClassify(Action):
         last_intent = tracker.slots.get("local_chapter", None)
 
         # Check if the last intent was 'local_chapter'
+        if last_intent == 'local chapter':
             dispatcher.utter_message(template="utter_local_chapter_cost")
         else:
+            return [FollowupAction("action_get_response_openai")]
+
+
+class SayHelloWorld(Action):
+
+    def name(self) -> Text:
+        return "action_hello_world"
+
+    def run(self,
+            dispatcher: CollectingDispatcher,
+            tracker: Tracker,
+            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
+
+        # Use the OpenAI API to generate a fixed test response
+        secret_value_0 = os.environ.get("openai")
+        openai.api_key = secret_value_0
+        model_engine = "text-davinci-002"
+        prompt_template = "Say hello world"
+
+        response = openai.Completion.create(
+            engine=model_engine,
+            prompt=prompt_template,
+            max_tokens=124,
+            temperature=0.8,
+            n=1,
+            stop=None,
+        )
+
+        # Output the generated response to the user
+        generated_text = response.choices[0].text
+        dispatcher.utter_message(text=generated_text)