changed model to gpt-4
app.py CHANGED
@@ -68,7 +68,8 @@ class Obnoxious_Agent:
         print ("Checking for Obnoxious Query")
 
         response = self.client.chat.completions.create(
-            model="gpt-3.5-turbo",
+            # model="gpt-3.5-turbo",
+            model="gpt-4",
             messages=[{"role": "user", "content": self.prompt + query}]
         )
 
@@ -159,7 +160,8 @@ class Answering_Agent:
         and the conversational context."\n"""
 
         response = self.openai_client.chat.completions.create(
-            model="gpt-3.5-turbo",
+            # model="gpt-3.5-turbo",
+            model="gpt-4",
             messages=[{"role": "user", "content": prompt+mode_prompt}]
         )
 
@@ -242,7 +244,8 @@ class Relevant_Documents_Agent:
         Navigate the complexities of this impersonation with attentiveness to the conversational flow and historical context.\n"""
 
         response = self.openai_client.chat.completions.create(
-            model="gpt-3.5-turbo",
+            # model="gpt-3.5-turbo",
+            model="gpt-4",
             messages=[{"role": "user", "content": prompt}]
         )
 
@@ -296,7 +299,7 @@ class Head_Agent:
         # Check for existing session state variables
         # if "openai_model" not in st.session_state:
         #     # ... (initialize model)
-        openai_model = getattr(st.session_state, 'openai_model', 'gpt-3.5-turbo')
+        openai_model = getattr(st.session_state, 'openai_model', 'gpt-4')
 
         # if "messages" not in st.session_state:
         #     # ... (initialize messages)
@@ -397,7 +400,7 @@ class Head_Agent:
         """
 
         response = self.client.chat.completions.create(
-            model="gpt-3.5-turbo",
+            model="gpt-4",
             messages=[{"role": "user", "content": APE_prompt}]
         )
 
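A note on the Head_Agent hunk above: getattr(st.session_state, 'openai_model', 'gpt-4') only reads a default and never stores the value, so the chosen model is not persisted in session state across reruns. The commented-out lines in that hunk point at the more usual Streamlit idiom of initializing the key once. A minimal sketch of that pattern, reusing only the key name and default from the diff (everything else is illustrative):

    import streamlit as st

    # Initialize the key once; subsequent reruns reuse the stored value.
    if "openai_model" not in st.session_state:
        st.session_state["openai_model"] = "gpt-4"

    openai_model = st.session_state["openai_model"]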
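Since the same model string is now hard-coded at five call sites, a natural follow-up would be reading it from one place so the next model change touches a single line. A rough sketch, assuming an environment variable named OPENAI_MODEL (the variable name and the ask helper are illustrative, not part of this commit):

    import os
    from openai import OpenAI

    # Single source of truth for the model name; defaults to the value used in this commit.
    OPENAI_MODEL = os.environ.get("OPENAI_MODEL", "gpt-4")

    client = OpenAI()  # reads OPENAI_API_KEY from the environment

    def ask(prompt: str) -> str:
        # Same call shape as the agents in app.py, with the model name injected.
        response = client.chat.completions.create(
            model=OPENAI_MODEL,
            messages=[{"role": "user", "content": prompt}],
        )
        return response.choices[0].message.content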