Initial project commit
This commit is contained in:
@@ -0,0 +1,2 @@
|
||||
from app.tasks.image_tasks import generate_thumbnail
|
||||
from app.tasks.stats_tasks import aggregate_daily_stats
|
||||
@@ -0,0 +1,46 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from app.celery_app import celery_app
|
||||
from app.core.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Thumbnail bounding boxes as (max_width, max_height); PIL's Image.thumbnail
# fits within these bounds while preserving the source aspect ratio.
THUMB_SIZES = [(400, 400), (200, 200)]
|
||||
|
||||
|
||||
@celery_app.task(bind=True, max_retries=3, default_retry_delay=10)
def generate_thumbnail(self, image_path: str):
    """Generate thumbnail variants for an uploaded image.

    One thumbnail is written next to the source file for each size in
    ``THUMB_SIZES``, named ``<stem>_<w>x<h><suffix>``.

    Args:
        image_path: Storage-relative path of the uploaded source image.

    Returns:
        dict: ``{"status": "ok", "thumbnails": [...]}`` on success,
        ``{"status": "skipped", ...}`` for non-local storage, or
        ``{"status": "error", ...}`` when the source file is missing.

    Retries up to 3 times (10 s apart) on any unexpected exception.
    """
    try:
        # Imported lazily so the worker can start even if Pillow is absent.
        from PIL import Image

        # Thumbnails are only generated when files live on the local disk.
        if settings.STORAGE_BACKEND != "local":
            logger.info("Thumbnail generation skipped for non-local storage: %s", image_path)
            return {"status": "skipped", "reason": "non-local storage"}

        full_path = Path(settings.LOCAL_STORAGE_PATH) / image_path.lstrip("/")
        if not full_path.exists():
            logger.warning("Image not found: %s", full_path)
            return {"status": "error", "reason": "file not found"}

        results = []
        # Context manager closes the underlying file handle; the original
        # code leaked it on every invocation.
        with Image.open(full_path) as img:
            # JPEG output cannot carry alpha or palette data; normalize
            # those modes to RGB before saving.
            if img.mode in ("RGBA", "P"):
                img = img.convert("RGB")

            # Loop-invariant: compute the output suffix once.
            suffix = full_path.suffix or ".jpg"
            for w, h in THUMB_SIZES:
                thumb = img.copy()
                thumb.thumbnail((w, h), Image.LANCZOS)
                thumb_name = f"{full_path.stem}_{w}x{h}{suffix}"
                thumb_path = full_path.parent / thumb_name
                thumb.save(str(thumb_path), quality=85, optimize=True)
                results.append(str(thumb_path))
                logger.info("Generated thumbnail: %s", thumb_path)

        return {"status": "ok", "thumbnails": results}

    except Exception as exc:
        logger.exception("Thumbnail generation failed for %s", image_path)
        raise self.retry(exc=exc)
|
||||
@@ -0,0 +1,55 @@
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from app.celery_app import celery_app
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery_app.task
def aggregate_daily_stats():
    """Aggregate daily statistics and store to Redis for dashboard consumption.

    Counts the main entity tables, writes the result to the Redis hash
    ``ciyuan:daily_stats`` (all values stringified), and sets a 2-day TTL.

    Returns:
        dict: The aggregated stats, including a UTC ``generated_at`` stamp.

    Raises:
        Exception: Re-raised after logging if aggregation or storage fails.
    """
    # Imported lazily so module import stays cheap for non-worker processes.
    from sqlalchemy import func, select
    from sqlalchemy.orm import Session

    from app.db.session import sync_engine
    from app.models.comment import Comment
    from app.models.event import Event
    from app.models.favorite import Favorite
    from app.models.rating import Rating
    from app.models.shooting import ShootingRequest
    from app.models.spot import Spot
    from app.models.user import User

    try:
        with Session(sync_engine) as session:

            def _count(column, *conditions) -> int:
                """Return COUNT(column) with optional WHERE conditions, 0 if NULL."""
                stmt = select(func.count(column))
                if conditions:
                    stmt = stmt.where(*conditions)
                return session.execute(stmt).scalar() or 0

            stats = {
                "users": _count(User.id),
                "spots": _count(Spot.id),
                "approved_spots": _count(Spot.id, Spot.audit_status == "approved"),
                "comments": _count(Comment.id),
                "ratings": _count(Rating.id),
                "favorites": _count(Favorite.id),
                "shootings": _count(ShootingRequest.id),
                "events": _count(Event.id),
                "generated_at": datetime.now(timezone.utc).isoformat(),
            }

        import redis

        r = redis.from_url(str(_get_redis_url()))
        try:
            # Redis hashes store flat strings, so every value is stringified.
            r.hset("ciyuan:daily_stats", mapping={k: str(v) for k, v in stats.items()})
            # Two-day TTL so the dashboard survives one missed run.
            r.expire("ciyuan:daily_stats", 86400 * 2)
        finally:
            # Release the client's connection pool; the original code
            # leaked a pool on every task run.
            r.close()

        logger.info("Daily stats aggregated: %s", stats)
        return stats

    except Exception:
        logger.exception("Failed to aggregate daily stats")
        raise
|
||||
|
||||
|
||||
def _get_redis_url():
    """Return the Redis connection URL from application settings.

    NOTE(review): the import is deferred to call time — presumably to
    avoid an import cycle or early-config access; confirm before moving
    it to module level.
    """
    from app.core.config import settings as app_settings

    return app_settings.REDIS_URL
|
||||
Reference in New Issue
Block a user