|
import logging
import random
import secrets
import string
import threading
import time
from collections import deque
from datetime import datetime, timedelta

from flask import Flask, request, jsonify, Response
from gevent import sleep
|
|
|
# Flask application instance.
# NOTE(review): the app, constants and state are all re-created again in the
# second half of this file — it looks like two versions pasted together;
# confirm which half is canonical.
app = Flask(__name__)


# How long a completed transfer is retained before cleanup removes it.
TRANSFER_LIFETIME = timedelta(hours=6)
# Seconds between cleanup passes.
CLEANUP_INTERVAL = 300
# Maximum accepted size of a single uploaded chunk (100 MiB).
MAX_CHUNK_SIZE = 100 * 1024 * 1024
# Seconds of idle time before the download stream flushes as a keep-alive.
KEEP_ALIVE_INTERVAL = 25
# Overall transfer inactivity timeout, in seconds (3 hours).
TRANSFER_TIMEOUT = 3600 * 3
# Download generator flushes its buffer once it holds this many bytes.
BUFFER_FLUSH_SIZE = 100 * 1024 * 1024


logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger('FileTransfer')


# transfer_id -> metadata dict (filename, completed flag, timestamps, ...).
transfers = {}
# transfer_id -> sequence of uploaded byte chunks.
transfer_data = {}
# Guards all access to the two dicts above across worker threads.
transfer_lock = threading.Lock()
|
|
|
def cleanup_task():
    """Background loop: purge transfers completed more than TRANSFER_LIFETIME ago.

    Runs forever in a daemon thread, sleeping CLEANUP_INTERVAL seconds
    between passes.  Removes both the metadata entry and the buffered chunk
    data of every expired transfer, under transfer_lock.
    """
    while True:
        time.sleep(CLEANUP_INTERVAL)
        try:
            with transfer_lock:
                now = datetime.now()
                to_delete = [
                    tid for tid, info in transfers.items()
                    if info['completed'] and (now - info['completed_time']) > TRANSFER_LIFETIME
                ]
                for tid in to_delete:
                    del transfers[tid]
                    # pop() tolerates a missing data entry instead of raising KeyError.
                    transfer_data.pop(tid, None)
                    logger.info(f"Cleaned transfer: {tid}")
        except Exception:
            # An unexpected error must not kill the daemon thread for good.
            logger.exception("Cleanup pass failed")
|
|
|
# Start the cleanup loop as a daemon so it never blocks interpreter shutdown.
threading.Thread(target=cleanup_task, daemon=True).start()
|
|
|
@app.route('/download/<transfer_id>', methods=['GET'])
def download_file(transfer_id):
    """Stream a transfer's chunks to the client with output buffering.

    Accumulates incoming chunks and flushes to the client either when the
    buffer reaches BUFFER_FLUSH_SIZE bytes or after KEEP_ALIVE_INTERVAL
    seconds, whichever comes first.  Returns 404 for an unknown transfer id.
    """
    # Bug fix: the original indexed transfers[transfer_id] unconditionally
    # when building the headers below, raising KeyError (an HTTP 500) for an
    # unknown id.  Resolve the filename under the lock and 404 cleanly.
    with transfer_lock:
        transfer = transfers.get(transfer_id)
        if transfer is None:
            return jsonify({'error': 'Transfer not found'}), 404
        filename = transfer['filename']

    def generate():
        buffer = bytearray()
        last_flush = time.time()

        try:
            while True:
                with transfer_lock:
                    transfer = transfers.get(transfer_id)
                    # Stop once the transfer is gone, or completed with no
                    # pending data left to drain.
                    if not transfer or (transfer['completed'] and not transfer_data.get(transfer_id)):
                        break

                    chunks = transfer_data.get(transfer_id, [])
                    # Drain every pending chunk into the local buffer.
                    while chunks:
                        buffer.extend(chunks.pop(0))

                # Flush on size, or periodically as a keep-alive so
                # intermediaries don't time the connection out.
                if (len(buffer) >= BUFFER_FLUSH_SIZE or
                        (time.time() - last_flush) > KEEP_ALIVE_INTERVAL):
                    yield bytes(buffer)
                    buffer.clear()
                    last_flush = time.time()

                sleep(0.1)  # gevent-cooperative sleep; lets other greenlets run

            # Emit whatever remains after the producer finished.
            if buffer:
                yield bytes(buffer)

        except Exception as e:
            logger.error(f"Download error: {transfer_id} - {str(e)}")
        finally:
            logger.info(f"Download finished: {transfer_id}")

    return Response(
        generate(),
        mimetype='application/octet-stream',
        headers={
            'Content-Disposition': f'attachment; filename="{filename}"',
            'Cache-Control': 'no-store'
        }
    )
|
|
|
if __name__ == '__main__':
    # NOTE(review): this mid-file entry point starts a BLOCKING gevent server,
    # so everything below this line (the second copy of the module) never
    # executes when the file is run directly.  The file appears to be two
    # versions pasted together — confirm and remove one half.
    from gevent.pywsgi import WSGIServer
    logger.info("Starting optimized server...")
    http_server = WSGIServer(('0.0.0.0', 5000), app)
    http_server.serve_forever()
|
|
|
# NOTE(review): from here on the module re-creates the app, the constants and
# the shared state, shadowing everything defined above — this is the second
# pasted copy of the file (with different constant values); confirm which
# copy is canonical.
app = Flask(__name__)


# Retention/tuning constants — these override the values defined earlier.
TRANSFER_LIFETIME = timedelta(hours=1)
CLEANUP_INTERVAL = 300
MAX_CHUNK_SIZE = 100 * 1024 * 1024
KEEP_ALIVE_INTERVAL = 20
TRANSFER_TIMEOUT = 3600


logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


# transfer_id -> metadata dict; transfer_id -> deque of uploaded chunks.
transfers = {}
transfer_data = {}
# Guards all access to the two dicts above.
transfer_lock = threading.Lock()
|
|
|
def cleanup_task():
    """Background loop: remove completed transfers older than TRANSFER_LIFETIME.

    Runs forever in a daemon thread; each pass scans under transfer_lock and
    deletes both the metadata and chunk data of expired transfers.
    """
    while True:
        time.sleep(CLEANUP_INTERVAL)
        try:
            with transfer_lock:
                now = datetime.now()
                to_delete = []
                for transfer_id, info in transfers.items():
                    if info['completed'] and (now - info['completed_time']) > TRANSFER_LIFETIME:
                        to_delete.append(transfer_id)
                        logger.info(f"Cleaning up transfer: {transfer_id}")

                for transfer_id in to_delete:
                    del transfers[transfer_id]
                    # pop() is safe even if the data entry is already gone.
                    transfer_data.pop(transfer_id, None)
        except Exception:
            # An unexpected error must not kill the daemon thread for good.
            logger.exception("Cleanup pass failed")
|
|
|
|
|
# Run cleanup in a daemon thread so it dies with the process.
threading.Thread(target=cleanup_task, daemon=True).start()
|
|
|
def generate_short_id(length=8):
    """Return a unique alphanumeric transfer id.

    Args:
        length: number of characters in the id (default 8).

    Uses ``secrets`` instead of ``random`` because the id acts as a
    capability token: anyone who knows it can download the file, so it must
    not be predictable from a PRNG state.
    """
    alphabet = string.ascii_letters + string.digits
    while True:
        token = ''.join(secrets.choice(alphabet) for _ in range(length))
        # NOTE(review): this membership test runs without transfer_lock, so
        # two concurrent creators could in principle draw the same id —
        # confirm whether that race matters for this service.
        if token not in transfers:
            return token
|
|
|
@app.route('/create_transfer', methods=['POST'])
def create_transfer():
    """Create a new transfer and return its id.

    Expects a JSON body with required ``filename`` and optional ``filesize``.
    Responds 400 for a missing/invalid body, otherwise ``{"transfer_id": ...}``.
    """
    try:
        # silent=True: a malformed/absent JSON body yields None and reaches
        # our own 400 path instead of Flask raising BadRequest first.
        data = request.get_json(silent=True)
        if not data or 'filename' not in data:
            return jsonify({'error': 'Missing required fields'}), 400

        transfer_id = generate_short_id()

        with transfer_lock:
            transfers[transfer_id] = {
                'filename': data['filename'],
                'filesize': data.get('filesize', 0),
                'completed': False,
                'created_time': datetime.now(),
                'completed_time': None,
                'last_activity': datetime.now()
            }
            # NOTE(review): maxlen=1000 silently discards the OLDEST chunks
            # once the producer runs >1000 chunks ahead of the consumer —
            # confirm this bounded buffer is intentional, since hitting the
            # limit corrupts the transferred file.
            transfer_data[transfer_id] = deque(maxlen=1000)

        logger.info(f"Created new transfer: {transfer_id}")
        return jsonify({'transfer_id': transfer_id})

    except Exception as e:
        # Log the detail server-side; don't leak internals to the client.
        logger.error(f"Error creating transfer: {str(e)}")
        return jsonify({'error': 'Internal server error'}), 500
|
|
|
@app.route('/upload/<transfer_id>', methods=['POST'])
def upload_chunk(transfer_id):
    """Append one raw data chunk to a transfer.

    The request body is the chunk bytes; an ``X-Transfer-Complete: true``
    header on the final chunk marks the transfer finished.  Responds 404 for
    an unknown id, 413 for an oversized chunk, 400 if already completed.
    """
    try:
        chunk = request.data
        if len(chunk) > MAX_CHUNK_SIZE:
            return jsonify({'error': 'Chunk size too large'}), 413

        with transfer_lock:
            # Bug fix: the original tested membership OUTSIDE the lock, so
            # the cleanup thread could delete the transfer between the check
            # and the use, producing a KeyError (HTTP 500).  Check-and-use
            # must happen atomically under the lock.
            transfer = transfers.get(transfer_id)
            if transfer is None:
                return jsonify({'error': 'Invalid transfer ID'}), 404

            if transfer['completed']:
                return jsonify({'error': 'Transfer already completed'}), 400

            transfer_data[transfer_id].append(chunk)
            transfer['last_activity'] = datetime.now()

            if request.headers.get('X-Transfer-Complete') == 'true':
                transfer['completed'] = True
                transfer['completed_time'] = datetime.now()
                logger.info(f"Transfer completed: {transfer_id}")

        return jsonify({'status': 'chunk accepted'})

    except Exception as e:
        logger.error(f"Error uploading chunk: {str(e)}")
        return jsonify({'error': str(e)}), 500
|
|
|
@app.route('/download/<transfer_id>', methods=['GET'])
def download_file(transfer_id):
    """Stream a transfer's chunks to the client as they arrive.

    Polls the shared chunk store every 0.5 s, yielding newly arrived chunks
    by index, and emits a NUL byte as a keep-alive during idle periods.
    Returns 404 for an unknown transfer id.
    """
    def generate():
        try:
            index = 0          # next chunk position to stream
            last_activity = time.time()

            while True:
                with transfer_lock:
                    # NOTE(review): last_activity is also reset by the
                    # keep-alive branch below, so while keep-alives keep
                    # flowing this timeout can never fire — confirm intended.
                    if time.time() - last_activity > TRANSFER_TIMEOUT:
                        logger.warning(f"Transfer timeout: {transfer_id}")
                        break

                    transfer = transfers.get(transfer_id)
                    if not transfer:
                        logger.warning(f"Transfer not found: {transfer_id}")
                        break

                    chunks = transfer_data.get(transfer_id, [])

                # Yield whatever arrived since the previous pass.
                # NOTE(review): chunks is a deque(maxlen=1000); once the
                # producer overflows it, earlier entries shift positions and
                # these index-based reads return wrong data — confirm the
                # limit can't be reached in practice.
                while index < len(chunks):
                    yield chunks[index]
                    last_activity = time.time()
                    index += 1

                if transfer['completed']:
                    logger.info(f"Transfer finished: {transfer_id}")
                    break

                # NOTE(review): this keep-alive NUL byte goes into the
                # response body, i.e. into the downloaded file itself —
                # confirm the client strips it, otherwise idle transfers
                # produce corrupted files.
                if time.time() - last_activity > KEEP_ALIVE_INTERVAL:
                    yield b'\0'
                    last_activity = time.time()

                time.sleep(0.5)

        except Exception as e:
            logger.error(f"Download error: {str(e)}")

    # Reject unknown ids before committing to a streaming response.
    if transfer_id not in transfers:
        return jsonify({'error': 'Transfer not found'}), 404

    transfer = transfers[transfer_id]
    return Response(
        generate(),
        mimetype='application/octet-stream',
        headers={
            'Content-Disposition': f'attachment; filename="{transfer["filename"]}"',
            'Cache-Control': 'no-store',
            'Transfer-Encoding': 'chunked'
        }
    )
|
|
|
if __name__ == '__main__':
    # NOTE(review): unreachable when the file is run directly — the first
    # __main__ block earlier in the file starts a blocking gevent server
    # before execution ever gets here.
    app.run(
        host='0.0.0.0',
        port=5000,
        threaded=True,   # handle concurrent uploads/downloads with threads
        debug=False
    )