inoid committed on
Commit
960a48f
·
1 Parent(s): fd4d049

Update local changes

Browse files
Files changed (1) hide show
  1. seminar_edition_ai.py +11 -1
seminar_edition_ai.py CHANGED
@@ -134,13 +134,18 @@ def predictFromInit( sermonTopic, llmModelList):
134
  keyStr = 'BIBLE_VERSICLE'
135
 
136
  global retriever
 
 
 
 
 
137
 
138
  if retriever == None:
139
  doc = Document(page_content="text", metadata={"source": "local"})
140
 
141
  vectorstore = Chroma.from_documents(
142
  documents=[doc],
143
- embedding=embed_model,
144
  persist_directory="chroma_db_dir_sermon", # Local mode with in-memory storage only
145
  collection_name="sermon_lab_ai"
146
  )
@@ -180,6 +185,11 @@ def predictQuestionBuild(sermonTopic):
180
  ['SERMON_IDEA', 'context']
181
  )
182
  global retriever
 
 
 
 
 
183
 
184
  if retriever == None:
185
  doc = Document(page_content="text", metadata={"source": "local"})
 
134
  keyStr = 'BIBLE_VERSICLE'
135
 
136
  global retriever
137
+ global embed_model
138
+
139
+ if embed_model == None:
140
+ llmBuilder = GeminiLLM()
141
+ embed_model = llmBuilder.getEmbeddingsModel()
142
 
143
  if retriever == None:
144
  doc = Document(page_content="text", metadata={"source": "local"})
145
 
146
  vectorstore = Chroma.from_documents(
147
  documents=[doc],
148
+ embedding= embed_model,
149
  persist_directory="chroma_db_dir_sermon", # Local mode with in-memory storage only
150
  collection_name="sermon_lab_ai"
151
  )
 
185
  ['SERMON_IDEA', 'context']
186
  )
187
  global retriever
188
+ global embed_model
189
+
190
+ if embed_model == None:
191
+ llmBuilder = GeminiLLM()
192
+ embed_model = llmBuilder.getEmbeddingsModel()
193
 
194
  if retriever == None:
195
  doc = Document(page_content="text", metadata={"source": "local"})