Update api.py
api.py
CHANGED
@@ -61,10 +61,6 @@ async def summ_inference(txts: str):
 
 async def scrape_urls(urls):
     logging.warning('Entering scrape_urls()')
-    '''
-    Get the data shape by parallely calculating lenght of each chunk and
-    aggregating them to get lenght of complete training dataset
-    '''
     pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
 
     results = []
@@ -92,10 +88,12 @@ async def scrape_urls(urls):
     # scrape_errors.append(err)
     # return scraped_texts, scrape_errors
 
+
 ##### API #####
 app = FastAPI()
 summ_tokenizer, summ_model = load_summarizer_models()
 
+
 class URLList(BaseModel):
     urls: List[str]
     key: str
@@ -109,6 +107,7 @@ def authenticate_key(api_key: str):
     if api_key != os.getenv('API_KEY'):
         raise NewsSummarizerAPIAuthenticationError("Authentication error: Invalid API key.")
 
+
 @app.post("/generate_summary/")
 async def read_items(q: URLList):
     try:
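For reference, a minimal client sketch for the /generate_summary/ endpoint touched by this diff. The host and port are assumptions (a local uvicorn run), not part of this commit; the payload fields urls and key mirror the URLList model above, and key must match the API_KEY environment variable checked by authenticate_key().

# Hypothetical client call -- host/port assumed, not part of this commit.
import os

import requests

payload = {
    "urls": ["https://example.com/some-news-article"],  # articles to scrape and summarize
    "key": os.getenv("API_KEY", ""),                     # must match the server's API_KEY
}

response = requests.post("http://localhost:8000/generate_summary/", json=payload)
response.raise_for_status()
print(response.json())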