pentarosarium committed on
Commit b552089 · 1 Parent(s): 29f8d5d

3.44 +bloomz

Files changed (1): app.py +8 -3
app.py CHANGED
@@ -9,7 +9,6 @@ import os
 from openpyxl import load_workbook
 from langchain.prompts import PromptTemplate
 from langchain_core.runnables import RunnablePassthrough
-from transformers import pipeline, AutoModelForSeq2SeqGeneration, AutoTokenizer
 from io import StringIO, BytesIO
 import sys
 import contextlib
@@ -21,6 +20,12 @@ from tenacity import retry, stop_after_attempt, wait_exponential
 from typing import Optional
 from deep_translator import GoogleTranslator
 from googletrans import Translator as LegacyTranslator
+from transformers import (
+    pipeline,
+    AutoModelForSeq2SeqLM,
+    AutoTokenizer,
+    AutoModelForCausalLM  # Added as alternative
+)
 
 
 class FallbackLLMSystem:
@@ -30,7 +35,7 @@ class FallbackLLMSystem:
         # Initialize BLOOMZ model for Russian text processing
         self.model_name = "bigscience/bloomz-560m"  # Smaller version for efficiency
         self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
-        self.model = AutoModelForSeq2SeqGeneration.from_pretrained(self.model_name)
+        self.model = AutoModelForSeq2SeqLM.from_pretrained(self.model_name)
 
         # Set device
         self.device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -723,7 +728,7 @@ def create_output_file(df, uploaded_file, llm):
     return output
 def main():
     with st.sidebar:
-        st.title("::: AI-анализ мониторинга новостей (v.3.43 ):::")
+        st.title("::: AI-анализ мониторинга новостей (v.3.44 ):::")
         st.subheader("по материалам СКАН-ИНТЕРФАКС ")
 
 
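Note that bigscience/bloomz-560m is a decoder-only BLOOM checkpoint, so the AutoModelForCausalLM import added here "as alternative" is the auto class whose mapping actually covers it; AutoModelForSeq2SeqLM.from_pretrained will likely reject a Bloom config with an unrecognized-configuration error. A minimal standalone sketch of the causal-LM loading path (the helper generate_response and the sample prompt are illustrative, not taken from app.py):

# Sketch: loading bloomz-560m as a causal LM (illustrative, not the app.py code).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "bigscience/bloomz-560m"  # smaller BLOOMZ checkpoint, as in the commit
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Same device selection as FallbackLLMSystem
device = "cuda" if torch.cuda.is_available() else "cpu"
model = model.to(device)

def generate_response(prompt: str, max_new_tokens: int = 64) -> str:
    """Run a single prompt through the model and return only the completion."""
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    with torch.no_grad():
        output_ids = model.generate(
            **inputs,
            max_new_tokens=max_new_tokens,
            do_sample=False,  # greedy decoding for reproducible output
        )
    # A causal LM echoes the prompt tokens, so slice them off before decoding
    completion = output_ids[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(completion, skip_special_tokens=True)

print(generate_response("Translate to English: мониторинг новостей"))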