| 1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162636465 |
- import os
- import uuid
- import hashlib
- from fastapi import APIRouter, UploadFile, File
- from typing import List
- import db
- import config
- import preview_utils
# Router for all file-handling endpoints; mounted under the /files prefix.
router = APIRouter(prefix="/files", tags=["files"])
@router.post("/upload")
async def upload_files(files: List[UploadFile] = File(...)):
    """Accept one or more uploaded files, store them, and record them in the DB.

    For each file this endpoint:
      * streams it to ``config.UPLOAD_DIR`` under a UUID-based name while
        computing its SHA-256 and actual byte count incrementally,
      * reuses cached slicing results (``filament_g`` / ``print_time``) for a
        previously-seen identical file, matched by content hash,
      * optionally slices new ``.stl`` files synchronously when
        ``config.SYNC_SLICING_ON_UPLOAD`` is set,
      * generates a PNG preview for ``.stl`` files (best effort — a preview
        failure does not abort the upload),
      * inserts an ``order_files`` row (``order_id`` NULL until an order
        claims the file).

    Returns:
        dict: ``{"uploaded": [...]}`` — one entry per stored file with its
        DB id, original filename, size, slicing results and preview path.
    """
    if not files:
        return {"uploaded": []}

    uploaded_data = []
    for file in files:
        # Skip form parts without a filename (e.g. empty <input type=file>).
        if not file.filename:
            continue

        file_ext = os.path.splitext(file.filename)[1]
        unique_filename = f"{uuid.uuid4()}{file_ext}"
        file_path = os.path.join(config.UPLOAD_DIR, unique_filename)
        db_file_path = f"uploads/{unique_filename}"

        # Stream to disk in chunks, hashing as we go. Count the bytes
        # ourselves: UploadFile.size can be None when the client sends no
        # Content-Length for the multipart field.
        sha256_hash = hashlib.sha256()
        file_size = 0
        with open(file_path, "wb") as buffer:
            while chunk := file.file.read(8192):
                sha256_hash.update(chunk)
                buffer.write(chunk)
                file_size += len(chunk)
        file_hash = sha256_hash.hexdigest()

        # --- CACHE CHECK (hash based) ---
        # If an identical file was sliced before, reuse its results instead
        # of re-slicing.
        filament_g = None
        print_time = None
        cached_record = db.execute_query(
            "SELECT filament_g, print_time FROM order_files WHERE file_hash = %s AND print_time IS NOT NULL LIMIT 1",
            (file_hash,)
        )
        if cached_record:
            filament_g = cached_record[0]['filament_g']
            print_time = cached_record[0]['print_time']

        # Only slice when there is no cached result and synchronous slicing
        # is enabled; the lazy import keeps slicer dependencies off the
        # common path.
        if not print_time and config.SYNC_SLICING_ON_UPLOAD and file_ext.lower() == ".stl":
            import slicer_utils
            result = slicer_utils.slice_model(file_path)
            if result and result.get('success'):
                filament_g = result.get('filament_g')
                print_time = result.get('print_time_str')

        # Preview generation is best-effort: a rendering failure must not
        # lose an already-stored upload, so record NULL instead of raising.
        db_preview_path = None
        if file_ext.lower() == ".stl":
            preview_filename = f"{uuid.uuid4()}.png"
            preview_path = os.path.join(config.PREVIEW_DIR, preview_filename)
            try:
                preview_utils.generate_stl_preview(file_path, preview_path)
                db_preview_path = f"uploads/previews/{preview_filename}"
            except Exception:
                db_preview_path = None

        query = "INSERT INTO order_files (order_id, filename, file_path, file_size, quantity, file_hash, print_time, filament_g, preview_path) VALUES (NULL, %s, %s, %s, 1, %s, %s, %s, %s)"
        f_id = db.execute_commit(query, (file.filename, db_file_path, file_size, file_hash, print_time, filament_g, db_preview_path))

        uploaded_data.append({
            "id": f_id, "filename": file.filename, "size": file_size,
            "print_time": print_time, "filament_g": filament_g, "preview_path": db_preview_path
        })
    return {"uploaded": uploaded_data}
|