gleisonnanet committed
Commit ab29dc4 · 1 Parent(s): 0de232a

removing chat for testing

Files changed (2):
  1. Dockerfile +1 -2
  2. main.py +1 -35
Dockerfile CHANGED
@@ -21,8 +21,7 @@ VOLUME /code/models
 COPY . .
 RUN chmod -R 777 /code
 
-
-RUN transformers-cli download csebuetnlp/mT5_multilingual_XLSum
+
 RUN transformers-cli download facebook/m2m100_1.2B
 
 
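For reference, the transformers-cli download step only pre-populates the local Hugging Face model cache at image build time, so the image still bakes in the M2M100 weights while the XLSum download disappears along with the chat endpoint. Below is a rough sketch of the same pre-caching step done from Python; using huggingface_hub's snapshot_download here is an assumption for illustration, since the Dockerfile itself relies on the CLI.

# Sketch only: warm the Hugging Face cache at build time, mirroring what
# "RUN transformers-cli download facebook/m2m100_1.2B" does in the Dockerfile.
# The choice of snapshot_download is an assumption; the commit uses the CLI.
from huggingface_hub import snapshot_download

# Downloads every file of the model repo into the local cache
# (~/.cache/huggingface by default), so the container never has to
# fetch the 1.2B-parameter checkpoint at request time.
snapshot_download(repo_id="facebook/m2m100_1.2B")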
main.py CHANGED
@@ -6,7 +6,7 @@ from typing import List, Literal
 from fastapi import FastAPI
 from pydantic import BaseModel
 from enum import Enum
-from transformers import M2M100Tokenizer, M2M100ForConditionalGeneration, AutoTokenizer, AutoModelForSeq2SeqLM
+from transformers import M2M100Tokenizer, M2M100ForConditionalGeneration
 import torch
 import uvicorn
 app = FastAPI(docs_url="/")
@@ -59,41 +59,7 @@ async def translate(request: TranslationRequest):
 
 
 
-# chat
-WHITESPACE_HANDLER = lambda k: re.sub('\s+', ' ', re.sub('\n+', ' ', k.strip()))
 
-chat_model_name = "csebuetnlp/mT5_multilingual_XLSum"
-tokenizer = AutoTokenizer.from_pretrained(chat_model_name)
-modelchat = AutoModelForSeq2SeqLM.from_pretrained(chat_model_name)
-
-@app.get("/chat")
-async def read_root(text: str):
-    input_ids = tokenizer(
-        [WHITESPACE_HANDLER(text)],
-        return_tensors="pt",
-        padding="max_length",
-        truncation=True,
-        max_length=512
-    )["input_ids"]
-
-    # max_length=84,
-    output_ids = modelchat.generate(
-        input_ids=input_ids,
-        max_length=500,
-        no_repeat_ngram_size=2,
-        num_beams=4
-    )[0]
-
-    summary = tokenizer.decode(
-        output_ids,
-        skip_special_tokens=True,
-        clean_up_tokenization_spaces=True
-    )
-
-    return {"summary": summary}
-
-
-
 
 
 
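With the chat/summarization route removed, main.py is left with the M2M100-based translation endpoint, whose body lies outside this diff; the hunk header only reveals async def translate(request: TranslationRequest). The following is a minimal sketch of how such an endpoint is commonly wired with the imports this commit keeps. The HTTP method, request fields, and language handling below are assumptions for illustration, not the repository's actual code.

# Sketch of the remaining translation endpoint, assuming the same checkpoint
# that the Dockerfile pre-downloads. Field names and the POST route are guesses.
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import M2M100Tokenizer, M2M100ForConditionalGeneration

app = FastAPI(docs_url="/")

model_name = "facebook/m2m100_1.2B"
tokenizer = M2M100Tokenizer.from_pretrained(model_name)
model = M2M100ForConditionalGeneration.from_pretrained(model_name)

class TranslationRequest(BaseModel):
    text: str
    source_lang: str  # e.g. "en" (assumed field name)
    target_lang: str  # e.g. "pt" (assumed field name)

@app.post("/translate")
async def translate(request: TranslationRequest):
    # M2M100 expects the source language set on the tokenizer and the target
    # language forced as the first generated token.
    tokenizer.src_lang = request.source_lang
    encoded = tokenizer(request.text, return_tensors="pt")
    generated = model.generate(
        **encoded,
        forced_bos_token_id=tokenizer.get_lang_id(request.target_lang),
    )
    return {"translation": tokenizer.batch_decode(generated, skip_special_tokens=True)[0]}

Since main.py also imports Literal and Enum, the real TranslationRequest most likely constrains its language fields to a fixed set of codes rather than accepting free-form strings.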