"""
Enhanced content management routes with async operations and comprehensive validation.

Provides secure upload, download, and metadata management with Redis caching.
"""
|
|
|
|
import asyncio
import hashlib
import json
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from uuid import UUID, uuid4

from sanic import Blueprint, Request, response
from sanic.response import JSONResponse, ResponseStream
from sqlalchemy import select, update, delete, and_, or_, func
from sqlalchemy.orm import selectinload

from app.core.config import get_settings
from app.core.database import get_async_session, get_cache_manager
from app.core.logging import get_logger
from app.core.models.content import Content, ContentMetadata, ContentAccess, License
from app.core.models.user import User
from app.api.middleware import require_auth, validate_request, rate_limit
from app.core.validation import ContentSchema, ContentUpdateSchema, ContentSearchSchema
from app.core.storage import StorageManager
from app.core.security import encrypt_data, decrypt_data, generate_access_token
|
|
# Initialize blueprint
|
|
content_bp = Blueprint("content", url_prefix="/api/v1/content")
|
|
logger = get_logger(__name__)
|
|
settings = get_settings()
|
|
|
|
@content_bp.route("/", methods=["POST"])
@rate_limit(limit=50, window=3600)  # 50 uploads per hour
@require_auth(permissions=["content.create"])
@validate_request(ContentSchema)
async def create_content(request: Request) -> JSONResponse:
    """
    Create new content with metadata and security validation.

    Creates a Content row in "pending" state, records optional custom
    metadata, bumps the caller's daily upload quota, opens a chunked-upload
    session via the storage layer, and primes a small cache entry for the
    new item.

    Args:
        request: Sanic request with validated content data

    Returns:
        JSONResponse: Created content information with upload URLs (201),
        quota error (429), or generic failure (500).
    """
    # Resolve the user before the try block: the except handler below logs
    # user_id, and the original code assigned it inside the try, so an early
    # failure (e.g. in request.json) raised NameError inside the handler.
    # @require_auth guarantees request.ctx.user is populated.
    user_id = request.ctx.user.id
    try:
        data = request.json

        async with get_async_session() as session:
            # Check user upload quota.
            # NOTE: read-then-increment is not atomic; concurrent requests
            # may slightly exceed the quota. Acceptable for a soft limit.
            quota_key = f"user:{user_id}:upload_quota"
            cache_manager = get_cache_manager()

            current_quota = await cache_manager.get(quota_key, default=0)
            if current_quota >= settings.MAX_UPLOADS_PER_DAY:
                return response.json(
                    {"error": "Upload quota exceeded", "code": "QUOTA_EXCEEDED"},
                    status=429
                )

            # Create the content record in "pending" state; the file bytes
            # arrive later through the chunked-upload session created below.
            content = Content(
                id=uuid4(),
                user_id=user_id,
                title=data["title"],
                description=data.get("description"),
                content_type=data["content_type"],
                file_size=data.get("file_size", 0),
                status="pending",
                visibility=data.get("visibility", "private"),
                tags=data.get("tags", []),
                license_id=data.get("license_id")
            )

            session.add(content)

            # Create metadata if provided.
            if data.get("metadata"):
                metadata = ContentMetadata(
                    content_id=content.id,
                    metadata_type="custom",
                    data=data["metadata"]
                )
                session.add(metadata)

            await session.commit()
            # Refresh so server-generated columns (e.g. created_at) are
            # loaded before the session expires them post-commit.
            await session.refresh(content)

            # Update quota counter only after a successful commit, so failed
            # creations do not consume quota.
            await cache_manager.increment(quota_key, ttl=86400)  # 24 hours

            # Generate upload URLs for chunked upload.
            storage_manager = StorageManager()
            upload_info = await storage_manager.create_upload_session(
                content.id, data.get("file_size", 0)
            )

            # Cache a lightweight projection for quick lookups.
            content_cache_key = f"content:{content.id}"
            await cache_manager.set(
                content_cache_key,
                {
                    "id": str(content.id),
                    "title": content.title,
                    "status": content.status,
                    "user_id": str(content.user_id)
                },
                ttl=3600
            )

            await logger.ainfo(
                "Content created successfully",
                content_id=str(content.id),
                user_id=str(user_id),
                title=content.title
            )

            return response.json({
                "content_id": str(content.id),
                "upload_session": upload_info,
                "status": content.status,
                "created_at": content.created_at.isoformat()
            }, status=201)

    except Exception as e:
        await logger.aerror(
            "Failed to create content",
            error=str(e),
            user_id=str(user_id)
        )
        return response.json(
            {"error": "Failed to create content", "code": "CREATION_FAILED"},
            status=500
        )
|
|
|
|
@content_bp.route("/<content_id:uuid>", methods=["GET"])
@rate_limit(limit=200, window=3600)  # 200 requests per hour
@require_auth(permissions=["content.read"])
async def get_content(request: Request, content_id: UUID) -> JSONResponse:
    """
    Retrieve content information with access control and caching.

    Serves from the per-content cache when possible (still enforcing access
    checks), otherwise loads from the database with eager-loaded
    relationships and repopulates the cache.

    Args:
        request: Sanic request object
        content_id: UUID of the content to retrieve

    Returns:
        JSONResponse: Content information (200), or 403/404/500 error.
    """
    # Resolve the user before the try block so the except handler can always
    # log user_id (previously it could be unbound, raising NameError there).
    user_id = request.ctx.user.id
    try:
        cache_manager = get_cache_manager()

        # Try cache first.
        cache_key = f"content:{content_id}:full"
        cached_content = await cache_manager.get(cache_key)

        if cached_content:
            # Access control is enforced on the cached path too.
            if await _check_content_access(content_id, user_id, "read"):
                # Record the view on cache hits as well; the original only
                # counted DB-path views, undercounting cache-resident items.
                await _update_access_stats(content_id, user_id, "view")
                return response.json(cached_content)
            return response.json(
                {"error": "Access denied", "code": "ACCESS_DENIED"},
                status=403
            )

        async with get_async_session() as session:
            # Eager-load relationships so no lazy loads fire after the
            # session closes.
            stmt = (
                select(Content)
                .options(
                    selectinload(Content.metadata),
                    selectinload(Content.access_controls),
                    selectinload(Content.license)
                )
                .where(Content.id == content_id)
            )
            result = await session.execute(stmt)
            content = result.scalar_one_or_none()

            if not content:
                return response.json(
                    {"error": "Content not found", "code": "NOT_FOUND"},
                    status=404
                )

            # Check access permissions.
            if not await _check_content_access_db(session, content, user_id, "read"):
                return response.json(
                    {"error": "Access denied", "code": "ACCESS_DENIED"},
                    status=403
                )

            # Prepare response data.
            content_data = {
                "id": str(content.id),
                "title": content.title,
                "description": content.description,
                "content_type": content.content_type,
                "file_size": content.file_size,
                "status": content.status,
                "visibility": content.visibility,
                "tags": content.tags,
                "created_at": content.created_at.isoformat(),
                "updated_at": content.updated_at.isoformat(),
                "metadata": [
                    {
                        "type": m.metadata_type,
                        "data": m.data
                    } for m in content.metadata
                ],
                "license": {
                    "name": content.license.name,
                    "description": content.license.description
                } if content.license else None
            }

            # Cache the result.
            await cache_manager.set(cache_key, content_data, ttl=1800)  # 30 minutes

            # Update access statistics (best-effort).
            await _update_access_stats(content_id, user_id, "view")

            return response.json(content_data)

    except Exception as e:
        await logger.aerror(
            "Failed to retrieve content",
            content_id=str(content_id),
            user_id=str(user_id),
            error=str(e)
        )
        return response.json(
            {"error": "Failed to retrieve content", "code": "RETRIEVAL_FAILED"},
            status=500
        )
|
|
|
|
@content_bp.route("/<content_id:uuid>", methods=["PUT"])
@rate_limit(limit=100, window=3600)  # 100 updates per hour
@require_auth(permissions=["content.update"])
@validate_request(ContentUpdateSchema)
async def update_content(request: Request, content_id: UUID) -> JSONResponse:
    """
    Update content metadata and settings with validation.

    Applies the validated fields to the row (identity/audit fields are never
    overwritten from client data), commits, and invalidates the item's cache
    entries.

    Args:
        request: Sanic request with update data
        content_id: UUID of content to update

    Returns:
        JSONResponse: Updated content information, or 403/404/500 error.
    """
    # @require_auth guarantees request.ctx.user; bind before try so the
    # error log below can always include it.
    user_id = request.ctx.user.id
    try:
        data = request.json

        async with get_async_session() as session:
            # Load existing content.
            stmt = select(Content).where(Content.id == content_id)
            result = await session.execute(stmt)
            content = result.scalar_one_or_none()

            if not content:
                return response.json(
                    {"error": "Content not found", "code": "NOT_FOUND"},
                    status=404
                )

            # Check update permissions.
            if not await _check_content_access_db(session, content, user_id, "update"):
                return response.json(
                    {"error": "Access denied", "code": "ACCESS_DENIED"},
                    status=403
                )

            # Apply updates; immutable/audit fields are server-controlled.
            # ("updated_at" is in the set because we stamp it ourselves.)
            protected_fields = {"id", "user_id", "created_at", "updated_at"}
            for field, value in data.items():
                if hasattr(content, field) and field not in protected_fields:
                    setattr(content, field, value)

            # Capture the timestamp locally: async sessions expire ORM
            # attributes on commit by default, so reading content.updated_at
            # after commit() (as the original did) can trigger a failing
            # implicit refresh outside greenlet context.
            updated_at = datetime.utcnow()
            content.updated_at = updated_at
            await session.commit()

            # Invalidate caches for this item.
            cache_manager = get_cache_manager()
            await cache_manager.delete(f"content:{content_id}")
            await cache_manager.delete(f"content:{content_id}:full")

            await logger.ainfo(
                "Content updated successfully",
                content_id=str(content_id),
                user_id=str(user_id),
                updated_fields=list(data.keys())
            )

            return response.json({
                "content_id": str(content_id),
                "status": "updated",
                "updated_at": updated_at.isoformat()
            })

    except Exception as e:
        await logger.aerror(
            "Failed to update content",
            content_id=str(content_id),
            error=str(e)
        )
        return response.json(
            {"error": "Failed to update content", "code": "UPDATE_FAILED"},
            status=500
        )
|
|
|
|
@content_bp.route("/search", methods=["POST"])
@rate_limit(limit=100, window=3600)  # 100 searches per hour
@require_auth(permissions=["content.read"])
@validate_request(ContentSearchSchema)
async def search_content(request: Request) -> JSONResponse:
    """
    Search content with filters, pagination and caching.

    Visibility is restricted to public items plus the caller's own items.
    Results are cached per (search parameters, user) for five minutes.

    Args:
        request: Sanic request with search parameters

    Returns:
        JSONResponse: Search results with pagination, or 500 error.
    """
    # Bound before try so the except log can never hit an unbound user_id.
    user_id = request.ctx.user.id
    try:
        data = request.json

        # Build a process-independent cache key. The original used the
        # builtin hash(), which is randomized per interpreter process
        # (PYTHONHASHSEED), so cached results were never shared across
        # workers or restarts. sha256 over canonical JSON is stable.
        fingerprint = hashlib.sha256(
            json.dumps(data, sort_keys=True, default=str).encode("utf-8")
        ).hexdigest()
        search_key = f"search:{fingerprint}:{user_id}"
        cache_manager = get_cache_manager()

        # Try cache first.
        cached_results = await cache_manager.get(search_key)
        if cached_results:
            return response.json(cached_results)

        async with get_async_session() as session:
            # Base visibility filter: public items or the caller's own.
            stmt = select(Content).where(
                or_(
                    Content.visibility == "public",
                    Content.user_id == user_id
                )
            )

            # Free-text filter over title/description.
            if data.get("query"):
                query = f"%{data['query']}%"
                stmt = stmt.where(
                    or_(
                        Content.title.ilike(query),
                        Content.description.ilike(query)
                    )
                )

            if data.get("content_type"):
                stmt = stmt.where(Content.content_type == data["content_type"])

            # AND-semantics: every requested tag must be present.
            if data.get("tags"):
                for tag in data["tags"]:
                    stmt = stmt.where(Content.tags.contains([tag]))

            if data.get("status"):
                stmt = stmt.where(Content.status == data["status"])

            # Apply date filters (ISO-8601 strings from the client).
            if data.get("date_from"):
                stmt = stmt.where(Content.created_at >= datetime.fromisoformat(data["date_from"]))

            if data.get("date_to"):
                stmt = stmt.where(Content.created_at <= datetime.fromisoformat(data["date_to"]))

            # Pagination parameters.
            page = data.get("page", 1)
            per_page = min(data.get("per_page", 20), 100)  # Max 100 items per page
            offset = (page - 1) * per_page

            # Total count over the filtered set. func.count() with no column
            # argument counts rows of the subquery; the original
            # func.count(Content.id) referenced the Content table directly,
            # re-introducing it into FROM alongside the subquery (cartesian
            # product / inflated count).
            count_stmt = select(func.count()).select_from(stmt.subquery())
            total_result = await session.execute(count_stmt)
            total = total_result.scalar()

            # Ordering (default: most recently updated first).
            if data.get("sort_by") == "created_at":
                stmt = stmt.order_by(Content.created_at.desc())
            elif data.get("sort_by") == "title":
                stmt = stmt.order_by(Content.title.asc())
            else:
                stmt = stmt.order_by(Content.updated_at.desc())

            stmt = stmt.offset(offset).limit(per_page)

            # Execute query.
            result = await session.execute(stmt)
            content_list = result.scalars().all()

            # Prepare response.
            search_results = {
                "results": [
                    {
                        "id": str(content.id),
                        "title": content.title,
                        "description": content.description,
                        "content_type": content.content_type,
                        "file_size": content.file_size,
                        "status": content.status,
                        "visibility": content.visibility,
                        "tags": content.tags,
                        "created_at": content.created_at.isoformat()
                    } for content in content_list
                ],
                "pagination": {
                    "page": page,
                    "per_page": per_page,
                    "total": total,
                    "pages": (total + per_page - 1) // per_page
                }
            }

            # Cache results for 5 minutes.
            await cache_manager.set(search_key, search_results, ttl=300)

            return response.json(search_results)

    except Exception as e:
        await logger.aerror(
            "Search failed",
            user_id=str(user_id),
            error=str(e)
        )
        return response.json(
            {"error": "Search failed", "code": "SEARCH_FAILED"},
            status=500
        )
|
|
|
|
@content_bp.route("/<content_id:uuid>/download", methods=["GET"])
@rate_limit(limit=50, window=3600)  # 50 downloads per hour
@require_auth(permissions=["content.download"])
async def download_content(request: Request, content_id: UUID) -> ResponseStream:
    """
    Secure content download with access control and logging.

    Loads the content row, verifies the caller's "download" permission,
    records a download stat, and streams the file from storage with an
    attachment Content-Disposition and a short-lived access token in the
    X-Download-Token header.

    Args:
        request: Sanic request object
        content_id: UUID of content to download

    Returns:
        ResponseStream: File stream, or a JSON error response (403/404/500).
    """
    try:
        # NOTE(review): if this line raises, user_id is unbound when the
        # except handler below logs it — confirm/harden as in the sibling
        # routes that bind user_id before the try.
        user_id = request.ctx.user.id

        async with get_async_session() as session:
            # Load content
            stmt = select(Content).where(Content.id == content_id)
            result = await session.execute(stmt)
            content = result.scalar_one_or_none()

            if not content:
                return response.json(
                    {"error": "Content not found", "code": "NOT_FOUND"},
                    status=404
                )

            # Check download permissions
            # NOTE(review): content.status is not checked here — items still
            # in "pending" upload state appear downloadable; verify intent.
            if not await _check_content_access_db(session, content, user_id, "download"):
                return response.json(
                    {"error": "Access denied", "code": "ACCESS_DENIED"},
                    status=403
                )

            # Generate a short-lived token surfaced to the client via the
            # X-Download-Token response header below.
            download_token = generate_access_token(
                {"content_id": str(content_id), "user_id": str(user_id)},
                expires_in=3600  # 1 hour
            )

            # Log download activity (best-effort counters in cache).
            await _update_access_stats(content_id, user_id, "download")

            # Get storage manager and create download stream.
            # NOTE(review): assumes content.file_path is populated once the
            # upload completed — not visible from this file; confirm.
            storage_manager = StorageManager()
            file_stream = await storage_manager.get_file_stream(content.file_path)

            await logger.ainfo(
                "Content download initiated",
                content_id=str(content_id),
                user_id=str(user_id),
                filename=content.title
            )

            # NOTE(review): `await response.stream(...)` matches older Sanic
            # streaming APIs; newer Sanic versions use a different streaming
            # interface — confirm against the pinned Sanic version.
            return await response.stream(
                file_stream,
                headers={
                    "Content-Type": content.content_type or "application/octet-stream",
                    # Title is used as the download filename; assumes it is
                    # header-safe (no quotes/newlines) — TODO confirm.
                    "Content-Disposition": f'attachment; filename="{content.title}"',
                    "Content-Length": str(content.file_size),
                    "X-Download-Token": download_token
                }
            )

    except Exception as e:
        await logger.aerror(
            "Download failed",
            content_id=str(content_id),
            user_id=str(user_id),
            error=str(e)
        )
        return response.json(
            {"error": "Download failed", "code": "DOWNLOAD_FAILED"},
            status=500
        )
|
|
|
|
async def _check_content_access(content_id: UUID, user_id: UUID, action: str) -> bool:
    """Check user access to content from cache or database.

    Looks up a cached verdict first; on a miss, resolves the content row,
    delegates to :func:`_check_content_access_db`, and caches the verdict
    for five minutes. Missing content is treated as "no access".
    """
    cache_manager = get_cache_manager()
    access_key = f"access:{content_id}:{user_id}:{action}"

    # A cached False is a valid (negative) verdict, so test against None.
    cached_verdict = await cache_manager.get(access_key)
    if cached_verdict is not None:
        return cached_verdict

    async with get_async_session() as session:
        row = (
            await session.execute(select(Content).where(Content.id == content_id))
        ).scalar_one_or_none()

        if row is None:
            return False

        verdict = await _check_content_access_db(session, row, user_id, action)

        # Cache result for 5 minutes.
        await cache_manager.set(access_key, verdict, ttl=300)

        return verdict
|
|
|
|
async def _check_content_access_db(session, content: Content, user_id: UUID, action: str) -> bool:
    """Check user access to content in database.

    Access is granted when the user owns the content, the content is public
    and the action is a read, or an explicit unexpired ContentAccess grant
    exists for (content, user, action).
    """
    # Content owner always has access.
    if content.user_id == user_id:
        return True

    # Public content allows read access.
    if content.visibility == "public" and action in ["read", "view"]:
        return True

    # Check explicit access controls. The original compared
    # `expires_at > now` directly, which (via SQL NULL semantics) silently
    # excluded grants with a NULL expires_at — presumably non-expiring
    # grants; TODO confirm NULL-means-permanent against the schema.
    stmt = (
        select(ContentAccess)
        .where(
            and_(
                ContentAccess.content_id == content.id,
                ContentAccess.user_id == user_id,
                ContentAccess.permission == action,
                or_(
                    ContentAccess.expires_at.is_(None),
                    ContentAccess.expires_at > datetime.utcnow(),
                ),
            )
        )
        .limit(1)
    )
    result = await session.execute(stmt)
    # .first() instead of scalar_one_or_none(): a user holding multiple
    # matching grants must not raise MultipleResultsFound.
    return result.scalars().first() is not None
|
|
|
|
async def _update_access_stats(content_id: UUID, user_id: UUID, action: str) -> None:
    """Update content access statistics.

    Increments one daily per-content counter and one daily per-user
    activity counter for *action*, each expiring after 24 hours.
    Best-effort: any failure is logged as a warning and never propagated.
    """
    try:
        cache_manager = get_cache_manager()
        today = datetime.utcnow().date().isoformat()

        # Both counters share the same day bucket and 24h TTL.
        for counter_key in (
            f"stats:{content_id}:{action}:{today}",
            f"activity:{user_id}:{action}:{today}",
        ):
            await cache_manager.increment(counter_key, ttl=86400)

    except Exception as e:
        await logger.awarning(
            "Failed to update access stats",
            content_id=str(content_id),
            user_id=str(user_id),
            action=action,
            error=str(e)
        )