awacke1 committed on
Commit b3d6356 · 1 Parent(s): b5f843a

Update app.py

Files changed (1)
  1. app.py +8 -6
app.py CHANGED

@@ -20,10 +20,7 @@ import httpx # add 11/13/23
 import asyncio
 from openai import OpenAI
 #from openai import AsyncOpenAI
-client = OpenAI(
-    # defaults to os.environ.get("OPENAI_API_KEY")
-    api_key= os.getenv('OPENAI_API_KEY')
-)
+
 
 
 from datetime import datetime
@@ -163,9 +160,10 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     collected_chunks = []
     collected_messages = []
     key = os.getenv('OPENAI_API_KEY')
-    client.api_key = key
 
-    client = OpenAI()
+    client = OpenAI(
+        api_key= os.getenv('OPENAI_API_KEY')
+    )
     stream = client.chat.completions.create(
         prompt="Say this is a test",
         messages=[{"role": "user", "content": "Say this is a test"}],
@@ -216,6 +214,10 @@ def chat_with_file_contents(prompt, file_content, model_choice='gpt-3.5-turbo'):
     conversation.append({'role': 'user', 'content': prompt})
     if len(file_content)>0:
         conversation.append({'role': 'assistant', 'content': file_content})
+
+    client = OpenAI(
+        api_key= os.getenv('OPENAI_API_KEY')
+    )
     response = client.chat.completions.create(model=model_choice, messages=conversation)
     return response['choices'][0]['message']['content']
 
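
For readers following the change: the commit drops the module-level client and instead constructs an OpenAI client inside each function that calls the API. Below is a minimal sketch of the streaming path in chat_with_model after this change. It assumes openai>=1.0 with OPENAI_API_KEY set in the environment; the message layout is hypothetical, and the prompt= keyword still visible in the diff context is omitted here because chat.completions.create takes messages rather than a prompt.

import os
from openai import OpenAI

def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
    # Per-call client, as added in this commit; key is read from the environment.
    client = OpenAI(api_key=os.getenv('OPENAI_API_KEY'))
    # Hypothetical message layout: user prompt plus the selected document section.
    messages = [{'role': 'user', 'content': prompt + '\n\n' + document_section}]
    collected_messages = []
    stream = client.chat.completions.create(
        model=model_choice,
        messages=messages,
        stream=True,  # stream the reply back as incremental chunks
    )
    for chunk in stream:
        # Each chunk carries a delta; content can be None on role/finish chunks.
        delta = chunk.choices[0].delta.content
        if delta:
            collected_messages.append(delta)
    return ''.join(collected_messages)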
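
The same pattern for the non-streaming chat_with_file_contents path, again only a sketch under the same assumptions. Note that the 1.x client returns a ChatCompletion object, so the dict-style indexing kept in the diff (response['choices'][0]['message']['content']) would fail; attribute access is used here, and the conversation setup is illustrative.

import os
from openai import OpenAI

def chat_with_file_contents(prompt, file_content, model_choice='gpt-3.5-turbo'):
    # Conversation loosely mirrors the diff: user prompt, then file contents if any.
    conversation = [{'role': 'user', 'content': prompt}]
    if len(file_content) > 0:
        conversation.append({'role': 'assistant', 'content': file_content})
    # Per-call client, matching the lines added in this commit.
    client = OpenAI(api_key=os.getenv('OPENAI_API_KEY'))
    response = client.chat.completions.create(model=model_choice, messages=conversation)
    # openai>=1.0 responses use attribute access, not dict indexing.
    return response.choices[0].message.content

Building the client inside each function defers the key lookup to call time, at the cost of constructing a new client per request; a shared module-level client would also work if the key is available at import time.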