inoid committed on
Commit
9f74466
·
1 Parent(s): 2a563da

Update local changes

Browse files
Files changed (1) hide show
  1. seminar_edition_ai.py +29 -8
seminar_edition_ai.py CHANGED
@@ -112,7 +112,6 @@ def predictProclamando(queryKey):
112
  def predictFromInit( sermonTopic, llmModelList):
113
  global HISTORY_ANSWER
114
  keyStr = 'SERMON_TOPIC'
115
-
116
  templates = SermonGeminiPromptTemplate()
117
 
118
  llm = llmModelList[0] if len(llmModelList) > 0 else None
@@ -131,6 +130,21 @@ def predictFromInit( sermonTopic, llmModelList):
131
  keyStr = 'BIBLE_VERSICLE'
132
 
133
  global retriever
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
134
  answer = askQuestionInit(
135
  '',
136
  chain,
@@ -143,13 +157,6 @@ def predictFromInit( sermonTopic, llmModelList):
143
  if answer != '':
144
  doc = Document(page_content="text", metadata = {"source": "local"})
145
 
146
- vectorstore = Chroma.from_documents(
147
- documents = [doc],
148
- embedding = embed_model,
149
- persist_directory="chroma_db_dir_sermon", # Local mode with in-memory storage only
150
- collection_name="sermon_lab_ai"
151
- )
152
-
153
  retriever = vectorstore.as_retriever(
154
  search_kwargs = {"k": 3}
155
  )
@@ -169,6 +176,20 @@ def predictQuestionBuild(sermonTopic):
169
  ['SERMON_IDEA', 'context']
170
  )
171
  global retriever
 
 
 
 
 
 
 
 
 
 
 
 
 
 
172
  answer = askQuestionEx(
173
  '',
174
  chain,
 
112
  def predictFromInit( sermonTopic, llmModelList):
113
  global HISTORY_ANSWER
114
  keyStr = 'SERMON_TOPIC'
 
115
  templates = SermonGeminiPromptTemplate()
116
 
117
  llm = llmModelList[0] if len(llmModelList) > 0 else None
 
130
  keyStr = 'BIBLE_VERSICLE'
131
 
132
  global retriever
133
+
134
+ if retriever == None:
135
+ doc = Document(page_content="text", metadata={"source": "local"})
136
+
137
+ vectorstore = Chroma.from_documents(
138
+ documents=[doc],
139
+ embedding=embed_model,
140
+ persist_directory="chroma_db_dir_sermon", # Local mode with in-memory storage only
141
+ collection_name="sermon_lab_ai"
142
+ )
143
+
144
+ retriever = vectorstore.as_retriever(
145
+ search_kwargs={"k": 3}
146
+ )
147
+
148
  answer = askQuestionInit(
149
  '',
150
  chain,
 
157
  if answer != '':
158
  doc = Document(page_content="text", metadata = {"source": "local"})
159
 
 
 
 
 
 
 
 
160
  retriever = vectorstore.as_retriever(
161
  search_kwargs = {"k": 3}
162
  )
 
176
  ['SERMON_IDEA', 'context']
177
  )
178
  global retriever
179
+
180
+ if retriever == None:
181
+ doc = Document(page_content="text", metadata={"source": "local"})
182
+
183
+ vectorstore = Chroma.from_documents(
184
+ documents=[doc],
185
+ embedding=embed_model,
186
+ persist_directory="chroma_db_dir_sermon", # Local mode with in-memory storage only
187
+ collection_name="sermon_lab_ai"
188
+ )
189
+ retriever = vectorstore.as_retriever(
190
+ search_kwargs={"k": 3}
191
+ )
192
+
193
  answer = askQuestionEx(
194
  '',
195
  chain,