"""
Collections API Endpoints for WebApp (My Collections)
MVP: ידני בלבד + חוקים בסיסיים (compute_smart_items) ללא שיתוף/ייצוא.
מוגן ע"י require_auth, כולל dynamic_cache ואירועי observability.
"""
from __future__ import annotations
from flask import Blueprint, jsonify, request, session, send_file
from functools import wraps
from typing import Optional, Dict, Any, List, Tuple
from datetime import datetime, timezone
from io import BytesIO
import json
import os
import time
import zipfile
import html
import logging
from cache_manager import dynamic_cache, cache
from webapp.activity_tracker import log_user_event
try:
from config import config as _cfg # type: ignore
except Exception: # pragma: no cover
_cfg = None # type: ignore
logger = logging.getLogger(__name__)
# Observability: structured events and internal alerts (fail-open stubs)
try:  # type: ignore
    from observability import emit_event  # type: ignore
except Exception:  # pragma: no cover
    # Fix: a stray non-comment line ("[תיעוד]") sat inside this except handler;
    # it would raise NameError and break module import when observability is absent.
    def emit_event(event: str, severity: str = "info", **fields):  # type: ignore
        """No-op stand-in used when the observability package is unavailable."""
        return None
# Fail-open stub: internal alerting is optional and must not break the API.
try:  # type: ignore
    from internal_alerts import emit_internal_alert  # type: ignore
except Exception:  # pragma: no cover
    def emit_internal_alert(name: str, severity: str = "info", summary: str = "", **details):  # type: ignore
        """No-op replacement used when the internal_alerts package is unavailable."""
        return None
# Manual tracing decorator (fail-open)
try:  # type: ignore
    from observability_instrumentation import traced  # type: ignore
except Exception:  # pragma: no cover
    def traced(*_a, **_k):  # type: ignore
        """No-op decorator factory used when tracing instrumentation is unavailable."""
        def _inner(f):
            return f
        return _inner
# Blueprint
# Important: do NOT set url_prefix here; the app registers this blueprint
# under '/api/collections'.
collections_bp = Blueprint('collections', __name__)
# Alias for conventional import style in app registration
# Some environments expect `collections_api.bp` by convention.
bp = collections_bp
# --- Activity logging ---
@collections_bp.before_request
def _log_collections_usage() -> None:
    """Record activity for logged-in users only, to enrich web usage statistics."""
    try:
        current_user = session.get('user_id')
    except Exception:
        current_user = None
    if not current_user:
        return
    display_name = None
    try:
        profile = session.get('user_data') or {}
        if isinstance(profile, dict):
            display_name = profile.get('username')
    except Exception:
        display_name = None
    try:
        log_user_event(int(current_user), username=display_name)
    except Exception:
        # Best-effort logging; never block the request over stats.
        pass
# ==================== Helpers ====================
def _get_request_id() -> str:
    """Best-effort request id: the request's `_req_id` attribute, then the
    X-Request-ID header, else an empty string."""
    try:
        for candidate in (
            getattr(request, "_req_id", ""),
            request.headers.get("X-Request-ID", ""),
        ):
            if candidate:
                return candidate
        return ""
    except Exception:
        return ""
def get_db():
    """Lazily fetch the application's DB handle.

    Fix: removed a stray "[תיעוד]" placeholder line that preceded this def;
    it was not a comment and would raise NameError at import time.
    The import is done inside the function to avoid a circular import with webapp.app.
    """
    from webapp.app import get_db as _get_db
    return _get_db()
def get_manager():
    """Build a CollectionsManager bound to the current DB handle.

    Fix: removed a stray "[תיעוד]" placeholder line that preceded this def;
    it was not a comment and would raise NameError at import time.
    """
    from database.collections_manager import CollectionsManager
    return CollectionsManager(get_db())
def require_auth(f):
    """Decorator: reject requests without a logged-in session with a 401 JSON body.

    Fix: removed a stray "[תיעוד]" placeholder line that preceded this def;
    it was not a comment and would raise NameError at import time.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        if 'user_id' not in session:
            return jsonify({'ok': False, 'error': 'Unauthorized'}), 401
        return f(*args, **kwargs)
    return decorated
def _tags_feature_enabled() -> bool:
"""Feature flag for collections item tags."""
try:
env_val = os.getenv("FEATURE_COLLECTIONS_TAGS")
if env_val is not None:
return str(env_val or "").strip().lower() in {"1", "true", "yes", "y", "on"}
except Exception:
pass
try:
if _cfg is None:
return True
return bool(getattr(_cfg, "FEATURE_COLLECTIONS_TAGS", True))
except Exception:
return True
# Simple in-memory rate limiter per user/IP and endpoint key
_TAGS_RATE_LOG: Dict[Tuple[str, str], List[float]] = {}
def _rate_limit_scope_id() -> str:
try:
uid = session.get("user_id")
if uid:
return f"user:{int(uid)}"
except Exception:
pass
try:
ip = request.headers.get("X-Forwarded-For") or request.remote_addr or ""
except Exception:
ip = ""
return f"ip:{ip or 'anonymous'}"
def _tags_rate_limit_check(key: str, max_per_minute: int) -> Tuple[bool, int]:
now = time.time()
window_start = now - 60.0
scope_id = _rate_limit_scope_id()
bucket_key = (str(scope_id), str(key or ""))
try:
entries = _TAGS_RATE_LOG.get(bucket_key, [])
# drop old timestamps
i = 0
for i, ts in enumerate(entries):
if ts > window_start:
break
if entries:
if entries[0] <= window_start:
cutoff = i if entries[i] > window_start else (i + 1)
entries = entries[cutoff:]
allowed = len(entries) < max(1, int(max_per_minute or 1))
if allowed:
entries.append(now)
_TAGS_RATE_LOG[bucket_key] = entries
return True, 0
retry_after = int(max(1.0, 60.0 - (now - (entries[0] if entries else window_start))))
return False, retry_after
except Exception:
return True, 0
def _get_public_base_url() -> str:
    """Public base URL for share links (UI and API).

    Prefers the configured PUBLIC_BASE_URL; otherwise derives it from the
    current request's host URL plus the script root. Empty string on failure.
    """
    try:
        configured = getattr(_cfg, 'PUBLIC_BASE_URL', None) if _cfg is not None else None
        if configured:
            return str(configured).rstrip('/')
        host = str(getattr(request, 'host_url', '') or '').rstrip('/')
        root = str(getattr(request, 'script_root', '') or '').strip()
        if root and root not in ('/', ''):
            root = '/' + root.strip('/')
        else:
            root = ''
        return (host + root).rstrip('/')
    except Exception:
        return ''
def _build_app_url(path: str) -> str:
"""בונה URL פנימי ל-Webapp כולל script_root (למשל /myapp/collections/...)."""
try:
script_root = str(getattr(request, "script_root", "") or "").strip()
except Exception:
script_root = ""
if script_root and script_root not in ("/", ""):
script_root = "/" + script_root.strip("/")
else:
script_root = ""
p = str(path or "").strip()
if not p.startswith("/"):
p = "/" + p
return f"{script_root}{p}"
def _build_public_collection_url(token: str) -> str:
try:
base = _get_public_base_url()
prefix = base or ''
return f"{prefix}/collections/shared/{token}"
except Exception:
return f"/collections/shared/{token}"
def _build_public_collection_api_url(token: str) -> str:
try:
base = _get_public_base_url()
prefix = base or ''
return f"{prefix}/api/collections/shared/{token}"
except Exception:
return f"/api/collections/shared/{token}"
# File extensions treated as binary content: no inline preview, download only.
_BINARY_EXTENSIONS = {
    '.exe', '.dll', '.so', '.dylib', '.bin', '.dat',
    '.pdf', '.doc', '.docx', '.xls', '.xlsx',
    '.jpg', '.jpeg', '.png', '.gif', '.bmp', '.ico',
    '.mp3', '.mp4', '.avi', '.mov', '.wav',
    '.zip', '.rar', '.7z', '.tar', '.gz', '.pyc', '.pyo', '.class', '.o', '.a',
}
def _format_size(size_bytes: Optional[float]) -> Optional[str]:
if size_bytes is None:
return None
try:
size = float(size_bytes)
except Exception:
return None
units = ['B', 'KB', 'MB', 'GB', 'TB']
for unit in units:
if size < 1024.0 or unit == units[-1]:
return f"{size:.1f} {unit}"
size /= 1024.0
return f"{size:.1f} TB"
def _is_binary(content: str, filename: str = "") -> bool:
try:
ext = ""
if filename:
lower = filename.lower()
idx = lower.rfind('.')
if idx >= 0:
ext = lower[idx:]
if ext in _BINARY_EXTENSIONS:
return True
except Exception:
pass
if not content:
return False
try:
content.encode('utf-8')
except UnicodeEncodeError:
return True
if '\0' in content:
return True
return False
def _to_iso(value: Any) -> Any:
if isinstance(value, datetime):
try:
return value.isoformat()
except Exception:
return None
return value
def _sanitize_filename(value: str, fallback: str = "file.txt") -> str:
try:
name = str(value or "").strip()
except Exception:
name = ""
if not name:
name = fallback
name = name.replace("\\", "/")
if "/" in name:
name = name.split("/")[-1]
name = name.replace("..", "_")
return name or fallback
# ==================== Endpoints ====================
@collections_bp.route('', methods=['POST'])
@require_auth
@traced("collections.create")
def create_collection():
    """Create a new collection for the logged-in user.

    Body: name, description, mode ('manual' default), rules (dict), icon,
    color, is_favorite, sort_order. String fields are sanitized before
    persisting. Returns the manager's result dict as JSON; 500 with a
    localized error on unexpected failure.
    """
    try:
        user_id = int(session['user_id'])
        data = request.get_json(silent=True) or {}
        # NOTE(review): sanitize_input is not defined or imported in the part
        # of the module visible here — presumably defined later in this file;
        # verify it exists before this handler is first called.
        name = sanitize_input(data.get('name', ''), 80)
        description = sanitize_input(data.get('description', ''), 500)
        mode = str(data.get('mode', 'manual')).lower()
        rules = data.get('rules') or {}
        icon = data.get('icon')
        color = data.get('color')
        is_favorite = bool(data.get('is_favorite', False))
        sort_order = data.get('sort_order')
        mgr = get_manager()
        result = mgr.create_collection(
            user_id=user_id,
            name=name,
            description=description,
            mode=mode,
            # Non-dict rules are silently replaced with an empty rule set.
            rules=rules if isinstance(rules, dict) else {},
            icon=icon,
            color=color,
            is_favorite=is_favorite,
            # Non-int sort_order values are dropped rather than rejected.
            sort_order=sort_order if isinstance(sort_order, int) else None,
        )
        if result.get('ok'):
            # Invalidate list/detail caches (best-effort; failures are ignored).
            try:
                uid = str(user_id)
                cache.delete_pattern(f"collections_list:{uid}:*")
                cache.delete_pattern(f"collections_list:v2:{uid}:*")
                if result.get('collection') and result['collection'].get('id'):
                    cid = result['collection']['id']
                    cache.delete_pattern(f"collections_detail:{uid}:-api-collections-{cid}*")
            except Exception:
                pass
        return jsonify(result)
    except Exception as e:
        rid = _get_request_id()
        uid = session.get('user_id')
        try:
            emit_event("collections_create_error", severity="anomaly", operation="collections.create", handled=True, request_id=rid, user_id=int(uid) if uid else None, error=str(e))
        except Exception:
            pass
        logger.error("Error creating collection: %s", e, exc_info=True)
        return jsonify({'ok': False, 'error': 'שגיאה ביצירת האוסף'}), 500
@collections_bp.route('', methods=['GET'])
@require_auth
@traced("collections.list")
@dynamic_cache(content_type='collections_list', key_prefix='collections_list:v2')
def list_collections():
    """List the current user's collections, paginated via limit/skip query args.

    Ensures the default workspace collection ('שולחן עבודה') exists, retrying
    once for pre-existing users, and decorates shared collections with their
    public URLs. Returns a plain dict (not jsonify) so the cache decorator
    avoids a serialize/deserialize round-trip.
    """
    try:
        user_id = int(session['user_id'])
        try:
            limit = int(request.args.get('limit') or 100)
            skip = int(request.args.get('skip') or 0)
        except Exception:
            return jsonify({'ok': False, 'error': 'Invalid limit/skip'}), 400
        mgr = get_manager()
        # NOTE(review): created_workspace is computed but never used in the
        # response — confirm whether it should be surfaced or removed.
        created_workspace = False
        try:
            created_workspace = mgr.ensure_default_collections(user_id)
        except Exception:
            created_workspace = False
        result = mgr.list_collections(user_id, limit=limit, skip=skip)
        # If the default workspace collection is still missing, try to create
        # it and re-fetch (covers users created before the default existed).
        try:
            collections = result.get('collections') if isinstance(result, dict) else None
        except Exception:
            collections = None
        has_workspace = False
        if isinstance(collections, list):
            try:
                has_workspace = any((c or {}).get('name') == 'שולחן עבודה' for c in collections)
            except Exception:
                has_workspace = False
        if not has_workspace:
            try:
                if mgr.ensure_default_collections(user_id):
                    created_workspace = True
                    result = mgr.list_collections(user_id, limit=limit, skip=skip)
                    collections = result.get('collections') if isinstance(result, dict) else None
            except Exception:
                pass
        if result.get('ok'):
            collections = result.get('collections') or []
            for col in collections:
                try:
                    share = col.get('share') or {}
                    token = share.get('token')
                    if bool(share.get('enabled')) and token:
                        public_url = _build_public_collection_url(str(token))
                        public_api_url = _build_public_collection_api_url(str(token))
                        col['public_url'] = public_url
                        col['public_api_url'] = public_api_url
                    else:
                        # Sharing disabled: strip any stale public URLs.
                        col.pop('public_url', None)
                        col.pop('public_api_url', None)
                except Exception:
                    continue
        # Return the dict directly to avoid double serialization
        # (jsonify followed by get_json inside the cache decorator).
        return result
    except Exception as e:
        rid = _get_request_id()
        uid = session.get('user_id')
        try:
            emit_event("collections_get_list_error", severity="anomaly", operation="collections.list", handled=True, request_id=rid, user_id=int(uid) if uid else None, error=str(e))
        except Exception:
            pass
        logger.error("Error listing collections: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בשליפת האוספים'}), 500
@collections_bp.route('/<collection_id>', methods=['GET'])
@require_auth
@traced("collections.get")
@dynamic_cache(content_type='collections_detail', key_prefix='collections_detail')
def get_collection(collection_id: str):
    """Fetch a single collection owned by the current user.

    When sharing is enabled, public UI/API URLs are attached both at the top
    level of the result and on the collection dict itself.
    """
    try:
        user_id = int(session['user_id'])
        mgr = get_manager()
        result = mgr.get_collection(user_id, collection_id)
        if result.get('ok'):
            col = result.get('collection') or {}
            share = col.get('share') or {}
            token = share.get('token')
            if bool(share.get('enabled')) and token:
                public_url = _build_public_collection_url(str(token))
                public_api_url = _build_public_collection_api_url(str(token))
                result['public_url'] = public_url
                result['public_api_url'] = public_api_url
                col['public_url'] = public_url
                col['public_api_url'] = public_api_url
        # Return the dict directly to avoid double serialization
        # (jsonify followed by get_json inside the cache decorator).
        return result
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_get_detail_error", severity="anomaly", operation="collections.get", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error getting collection: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בשליפת האוסף'}), 500
@collections_bp.route('/<collection_id>', methods=['PUT'])
@require_auth
@traced("collections.update")
def update_collection(collection_id: str):
    """Update collection fields from the JSON body, then invalidate caches.

    Only string fields are sanitized (name capped at 80 chars, others at 500);
    all body keys are forwarded to the manager as keyword arguments.
    """
    try:
        user_id = int(session['user_id'])
        data = request.get_json(silent=True) or {}
        # sanitize string fields only
        for key in ('name', 'description', 'icon', 'color'):
            if key in data and isinstance(data[key], str):
                data[key] = sanitize_input(data[key], 80 if key == 'name' else 500)
        mgr = get_manager()
        result = mgr.update_collection(user_id, collection_id, **data)
        if result.get('ok'):
            # Best-effort invalidation of list caches and this detail view.
            try:
                uid = str(user_id)
                cache.delete_pattern(f"collections_list:{uid}:*")
                cache.delete_pattern(f"collections_list:v2:{uid}:*")
                cache.delete_pattern(f"collections_detail:{uid}:-api-collections-{collection_id}*")
            except Exception:
                pass
        return jsonify(result)
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_update_error", severity="anomaly", operation="collections.update", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error updating collection: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בעדכון האוסף'}), 500
@collections_bp.route('/<collection_id>', methods=['DELETE'])
@require_auth
@traced("collections.delete")
def delete_collection(collection_id: str):
    """Delete a collection owned by the current user and drop related caches."""
    try:
        owner_id = int(session['user_id'])
        result = get_manager().delete_collection(owner_id, collection_id)
        if result.get('ok'):
            uid = str(owner_id)
            stale_patterns = (
                f"collections_list:{uid}:*",
                f"collections_list:v2:{uid}:*",
                f"collections_detail:{uid}:-api-collections-{collection_id}*",
                f"collections_items:{uid}:-api-collections-{collection_id}-items*",
            )
            try:
                for pattern in stale_patterns:
                    cache.delete_pattern(pattern)
            except Exception:
                # Cache invalidation is best-effort.
                pass
        return jsonify(result)
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_delete_error", severity="anomaly", operation="collections.delete", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error deleting collection: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה במחיקת האוסף'}), 500
@collections_bp.route('/<collection_id>/items', methods=['GET'])
@require_auth
@traced("collections.items")
@dynamic_cache(content_type='collections_items', key_prefix='collections_items')
def get_items(collection_id: str):
    """Paginated item listing for a collection, with slow-request telemetry.

    Query args: page, per_page, include_computed ('true' by default),
    debug_perf (forces the perf measurement even for fast responses).
    Returns a plain dict (not jsonify) for the cache decorator.
    """
    try:
        user_id = int(session['user_id'])
        try:
            page = int(request.args.get('page') or 1)
            per_page = int(request.args.get('per_page') or 20)
        except Exception:
            return jsonify({'ok': False, 'error': 'Invalid page/per_page'}), 400
        include_computed = str(request.args.get('include_computed', 'true')).lower() == 'true'
        mgr = get_manager()
        t0 = time.perf_counter()
        result = mgr.get_collection_items(
            user_id,
            collection_id,
            page=page,
            per_page=per_page,
            include_computed=include_computed,
        )
        elapsed_ms = max(0.0, (time.perf_counter() - t0) * 1000.0)
        # Measure payload size (best-effort) only when slow or explicitly requested.
        debug_perf = str(request.args.get("debug_perf", "")).lower() in {"1", "true", "yes"}
        try:
            # Slow-request threshold in ms, overridable via env; default 500ms.
            slow_ms_env = os.getenv("COLLECTIONS_API_ITEMS_SLOW_MS", "")
            slow_ms = float(slow_ms_env) if slow_ms_env not in (None, "") else 500.0
        except Exception:
            slow_ms = 500.0
        if debug_perf or elapsed_ms >= float(slow_ms or 0.0):
            payload_bytes: Optional[int] = None
            try:
                # Serialize compactly just to estimate the response size.
                payload_bytes = len(
                    json.dumps(result, ensure_ascii=False, separators=(",", ":")).encode("utf-8")
                )
            except Exception:
                payload_bytes = None
            try:
                emit_event(
                    "collections_get_items_http_perf",
                    severity="warn" if elapsed_ms >= float(slow_ms or 0.0) else "info",
                    operation="collections.items",
                    request_id=_get_request_id(),
                    user_id=int(user_id),
                    collection_id=str(collection_id),
                    page=int(page),
                    per_page=int(per_page),
                    include_computed=bool(include_computed),
                    total_ms=round(elapsed_ms, 1),
                    payload_bytes=payload_bytes,
                    items_count=int(len((result or {}).get("items") or [])) if isinstance(result, dict) else None,
                    ok=bool((result or {}).get("ok")) if isinstance(result, dict) else None,
                    handled=True,
                )
            except Exception:
                pass
            try:
                logger.warning(
                    "collections_get_items_http_slow ms=%.1f bytes=%s items=%s",
                    elapsed_ms,
                    payload_bytes,
                    (len((result or {}).get("items") or []) if isinstance(result, dict) else None),
                )
            except Exception:
                pass
        # Return the dict directly to avoid double serialization
        # (jsonify followed by get_json inside the cache decorator).
        return result
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_get_items_error", severity="anomaly", operation="collections.get_items", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error getting items: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בשליפת פריטים'}), 500
@collections_bp.route('/<collection_id>/items', methods=['POST'])
@require_auth
@traced("collections.items_add")
def add_items(collection_id: str):
    """Add items to a collection.

    Body: {"items": [...]}; per-item 'note' strings are sanitized (500 chars).
    Manager ValueError is mapped to a 400 with a localized message; other
    failures fall through to the generic 500 handler.
    """
    try:
        user_id = int(session['user_id'])
        data = request.get_json(silent=True) or {}
        items = data.get('items') or []
        if not isinstance(items, list):
            return jsonify({'ok': False, 'error': 'items must be a list'}), 400
        # sanitize notes
        for it in items:
            if isinstance(it, dict) and 'note' in it and isinstance(it['note'], str):
                it['note'] = sanitize_input(it['note'], 500)
        mgr = get_manager()
        try:
            result = mgr.add_items(user_id, collection_id, items)
        except ValueError as e:
            # Validation failure from the manager: log context, return 400.
            rid = _get_request_id()
            try:
                logger.error(
                    "Validation error adding items (request_id=%s, user_id=%s, collection_id=%s): %s",
                    rid,
                    user_id,
                    collection_id,
                    e,
                )
            except Exception:
                pass
            return jsonify({"ok": False, "error": "קלט לא חוקי"}), 400
        if result.get('ok'):
            # Best-effort cache invalidation: items, detail, and list views.
            try:
                uid = str(user_id)
                cache.delete_pattern(f"collections_items:{uid}:-api-collections-{collection_id}-items*")
                cache.delete_pattern(f"collections_detail:{uid}:-api-collections-{collection_id}*")
                cache.delete_pattern(f"collections_list:{uid}:*")
            except Exception:
                pass
        return jsonify(result)
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_items_add_error", severity="anomaly", operation="collections.items_add", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error adding items: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בהוספת פריטים'}), 500
@collections_bp.route('/<collection_id>/items', methods=['DELETE'])
@require_auth
@traced("collections.items_remove")
def remove_items(collection_id: str):
    """Remove the given items from a collection and drop affected cache entries."""
    try:
        owner_id = int(session['user_id'])
        payload = request.get_json(silent=True) or {}
        items = payload.get('items') or []
        if not isinstance(items, list):
            return jsonify({'ok': False, 'error': 'items must be a list'}), 400
        result = get_manager().remove_items(owner_id, collection_id, items)
        if result.get('ok'):
            try:
                uid = str(owner_id)
                for pattern in (
                    f"collections_items:{uid}:-api-collections-{collection_id}-items*",
                    f"collections_detail:{uid}:-api-collections-{collection_id}*",
                    f"collections_list:{uid}:*",
                ):
                    cache.delete_pattern(pattern)
            except Exception:
                # Cache invalidation is best-effort.
                pass
        return jsonify(result)
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_items_remove_error", severity="anomaly", operation="collections.items_remove", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error removing items: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בהסרת פריטים'}), 500
@collections_bp.route('/<collection_id>/reorder', methods=['PUT'])
@require_auth
@traced("collections.reorder")
def reorder_items(collection_id: str):
    """Persist a new manual ordering for a collection's items."""
    try:
        owner_id = int(session['user_id'])
        payload = request.get_json(silent=True) or {}
        order = payload.get('order') or []
        if not isinstance(order, list):
            return jsonify({'ok': False, 'error': 'order must be a list'}), 400
        result = get_manager().reorder_items(owner_id, collection_id, order)
        if result.get('ok'):
            try:
                uid = str(owner_id)
                for pattern in (
                    f"collections_items:{uid}:-api-collections-{collection_id}-items*",
                    f"collections_detail:{uid}:-api-collections-{collection_id}*",
                ):
                    cache.delete_pattern(pattern)
            except Exception:
                # Cache invalidation is best-effort.
                pass
        return jsonify(result)
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_reorder_error", severity="anomaly", operation="collections.reorder", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error reordering items: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בסידור פריטים'}), 500
@collections_bp.route('/items/<item_id>/tags', methods=['PATCH'])
@require_auth
@traced("collections.items_tags_update")
@tags_rate_limit("update_tags", 120)
def update_item_tags(item_id: str):
    """Update a collection item's tags.

    Body: {"tags": ["🔥", "🐛"]}
    404 when the feature flag is off or the item is missing; 400 on manager
    validation errors; 500 otherwise.
    NOTE(review): no cache invalidation here, unlike the other mutating
    endpoints — cached item listings may show stale tags; confirm intent.
    """
    if not _tags_feature_enabled():
        return jsonify({"ok": False, "error": "feature_disabled"}), 404
    user_id = int(session["user_id"])
    data = request.get_json(silent=True) or {}
    tags = data.get("tags", [])
    mgr = get_manager()
    try:
        updated_item = mgr.update_item_tags(user_id, item_id, tags)
        if not updated_item:
            return jsonify({"ok": False, "error": "Item not found"}), 404
        return jsonify({"ok": True, "item": updated_item})
    except ValueError as e:
        rid = _get_request_id()
        try:
            logger.error(
                "Validation error updating item tags (request_id=%s, user_id=%s, item_id=%s): %s",
                rid,
                user_id,
                item_id,
                e,
            )
        except Exception:
            pass
        return jsonify({"ok": False, "error": "קלט לא חוקי"}), 400
    except Exception as e:
        logger.exception("Error updating item tags: %s", e)
        # emit_event is fail-open by design, so no surrounding try here.
        emit_event(
            "collections_item_tags_update_error",
            severity="anomaly",
            operation="collections.items_tags_update",
            handled=True,
            request_id=_get_request_id(),
            user_id=int(user_id),
            item_id=str(item_id),
            error=str(e),
        )
        return jsonify({"ok": False, "error": "Internal error"}), 500
@collections_bp.route('/tags/metadata', methods=['GET'])
@traced("collections.tags_metadata")
@tags_rate_limit("tags_metadata", 300)
def get_tags_metadata():
    """Return metadata for the available item tags.

    NOTE(review): this endpoint has no @require_auth — it appears intentionally
    public (read-only metadata), but confirm. Exceptions here are not caught
    and will surface as the framework's generic 500.
    """
    if not _tags_feature_enabled():
        return jsonify({"ok": False, "error": "feature_disabled"}), 404
    mgr = get_manager()
    metadata = mgr.get_tags_metadata()
    return jsonify({"ok": True, **metadata})
@collections_bp.route('/tags/filtered', methods=['POST'])
@require_auth
@traced("collections.tags_filtered")
@tags_rate_limit("tags_filtered", 300)
def log_tags_filtered():
    """Log tag-filtering usage for analytics/observability (no data mutation)."""
    if not _tags_feature_enabled():
        return jsonify({"ok": False, "error": "feature_disabled"}), 404
    user_id = session["user_id"]
    data = request.get_json(silent=True) or {}
    # Defensive extraction: tolerate non-dict bodies and non-list tag values.
    raw_tags = data.get("tags") if isinstance(data, dict) else None
    tags = raw_tags if isinstance(raw_tags, list) else []
    collection_id = data.get("collection_id") if isinstance(data, dict) else None
    emit_event(
        "collections_tags_filtered",
        user_id=int(user_id),
        collection_id=str(collection_id) if collection_id else None,
        tags=list(tags),
        count=int(len(tags or [])),
    )
    return jsonify({"ok": True})
# --- Phase 2: Share ---
@collections_bp.route('/<collection_id>/share', methods=['POST'])
@require_auth
@traced("collections.share")
def update_share(collection_id: str):
    """Enable/disable public sharing for a collection. Body: {enabled: bool}.

    On success, invalidates caches and attaches (or strips) the public
    UI/API URLs depending on the resulting share state.
    """
    try:
        user_id = int(session['user_id'])
        data = request.get_json(silent=True) or {}
        if 'enabled' not in data:
            return jsonify({'ok': False, 'error': 'enabled חסר'}), 400
        enabled = bool(data.get('enabled'))
        mgr = get_manager()
        result = mgr.set_share(user_id, collection_id, enabled=enabled)
        if result.get('ok'):
            # Best-effort cache invalidation for detail and list views.
            try:
                uid = str(user_id)
                cache.delete_pattern(f"collections_detail:{uid}:-api-collections-{collection_id}*")
                cache.delete_pattern(f"collections_list:{uid}:*")
            except Exception:
                pass
            # Attach the public URLs if sharing is now enabled.
            try:
                col = result.get('collection') or {}
                share = col.get('share') or {}
                token = share.get('token')
                if bool(share.get('enabled')) and token:
                    public_url = _build_public_collection_url(str(token))
                    public_api_url = _build_public_collection_api_url(str(token))
                    result['public_url'] = public_url
                    result['public_api_url'] = public_api_url
                    col['public_url'] = public_url
                    col['public_api_url'] = public_api_url
                else:
                    # Sharing disabled: strip any stale public URLs.
                    result.pop('public_url', None)
                    result.pop('public_api_url', None)
                    if isinstance(col, dict):
                        col.pop('public_url', None)
                        col.pop('public_api_url', None)
            except Exception:
                pass
        return jsonify(result)
    except Exception as e:
        rid = _get_request_id()
        try:
            emit_event("collections_share_error", severity="anomaly", operation="collections.share", handled=True, request_id=rid, collection_id=str(collection_id), error=str(e))
        except Exception:
            pass
        logger.error("Error updating share: %s", e)
        return jsonify({'ok': False, 'error': 'שגיאה בעדכון שיתוף'}), 500
@collections_bp.route('/shared/<token>', methods=['GET'])
@traced("collections.shared_get")
def get_shared_collection(token: str):
    """Public (unauthenticated) JSON view of a shared collection.

    Enriches each item that maps to a shareable document with a 'share'
    payload (file id, view/download URLs, language, timestamps, sizes).
    404 when the token resolves to 'לא נמצא', 500 on other manager errors.
    """
    try:
        mgr = get_manager()
        ctx = mgr.get_share_context(token)
        if not ctx.get('ok'):
            error_message = ctx.get('error', 'שגיאה בשליפת שיתוף')
            status = 404 if error_message == 'לא נמצא' else 500
            return jsonify({'ok': False, 'error': error_message}), status
        collection = ctx.get('collection') or {}
        items = ctx.get('items') or []
        # Lookup of document references keyed by (source, file_name).
        doc_refs_by_key: Dict[Tuple[str, str], Dict[str, Any]] = ctx.get('doc_refs_by_key', {})  # type: ignore[assignment]
        api_base = _build_public_collection_api_url(str(token))
        for item in items:
            try:
                source = str((item.get('source') or 'regular')).lower()
                file_name = str(item.get('file_name') or '').strip()
                share_meta = doc_refs_by_key.get((source, file_name))
                if not share_meta:
                    continue
                file_id = share_meta.get('doc_id')
                if not file_id:
                    continue
                share_payload: Dict[str, Any] = {
                    'file_id': file_id,
                    'view_url': f"{api_base}/files/{file_id}",
                    'download_url': f"{api_base}/files/{file_id}/download",
                    'language': share_meta.get('language'),
                    'updated_at': _to_iso(share_meta.get('updated_at')),
                    'created_at': _to_iso(share_meta.get('created_at')),
                    'size_bytes': share_meta.get('file_size'),
                    'lines_count': share_meta.get('lines_count'),
                }
                size_label = _format_size(share_meta.get('file_size'))
                if size_label:
                    share_payload['size_label'] = size_label
                item['share'] = share_payload
            except Exception:
                # One bad item must not break the whole listing.
                continue
        export_url = f"{api_base}/export"
        total_items = ctx.get('items_result', {}).get('total_items')
        response_body: Dict[str, Any] = {
            'ok': True,
            'collection': collection,
            'items': items,
            'export_url': export_url,
        }
        if total_items is not None:
            response_body['items_total'] = total_items
        return jsonify(response_body)
    except Exception as e:
        try:
            emit_event("collections_shared_get_error", severity="anomaly", operation="collections.shared_get", handled=True, token=str(token), error=str(e))
        except Exception:
            pass
        return jsonify({'ok': False, 'error': 'שגיאה בשליפת שיתוף'}), 500
@collections_bp.route('/shared/<token>/files/<file_id>', methods=['GET'])
@traced("collections.shared_file")
def get_shared_file(token: str, file_id: str):
    """Public preview of a single shared file (JSON).

    Binary files (by extension/content heuristic) are returned without the
    inline 'content' field — only metadata and a download URL. Each view is
    recorded via log_share_activity.
    """
    try:
        mgr = get_manager()
        details = mgr.get_shared_file_details(token, file_id)
        if not details.get('ok'):
            error_message = details.get('error', 'שגיאה בשליפת קובץ')
            status = 404 if error_message == 'לא נמצא' else 500
            return jsonify({'ok': False, 'error': error_message}), status
        file_payload = details.get('file') or {}
        ctx = details.get('context') or {}
        collection = details.get('collection') or {}
        content = str(file_payload.get('content') or '')
        file_name = str(file_payload.get('file_name') or '')
        is_binary = _is_binary(content, file_name)
        preview_payload: Dict[str, Any] = {
            'id': file_payload.get('id'),
            'file_name': file_name,
            'language': file_payload.get('language'),
            'description': file_payload.get('description') or '',
            'tags': file_payload.get('tags') or [],
            'size_bytes': file_payload.get('size_bytes'),
            'size_label': _format_size(file_payload.get('size_bytes')),
            'lines_count': file_payload.get('lines_count'),
            'created_at': _to_iso(file_payload.get('created_at')),
            'updated_at': _to_iso(file_payload.get('updated_at')),
            'source': file_payload.get('source'),
            'is_binary': bool(is_binary),
            'download_url': f"{_build_public_collection_api_url(str(token))}/files/{file_payload.get('id')}/download",
        }
        if not is_binary:
            # Inline content only for text files.
            preview_payload['content'] = content
        mgr.log_share_activity(
            token,
            collection_id=ctx.get('collection_id'),
            file_id=file_payload.get('id'),
            event='view',
            ip=request.remote_addr,
            user_agent=request.headers.get('User-Agent'),
        )
        return jsonify({'ok': True, 'collection': collection, 'file': preview_payload})
    except Exception as e:
        try:
            emit_event("collections_shared_file_error", severity="anomaly", operation="collections.shared_file", handled=True, token=str(token), file_id=str(file_id), error=str(e))
        except Exception:
            pass
        return jsonify({'ok': False, 'error': 'שגיאה בשליפת קובץ'}), 500
@collections_bp.route('/shared/<token>/files/<file_id>/download', methods=['GET'])
@traced("collections.shared_file_download")
def download_shared_file(token: str, file_id: str):
    """Download a shared file as a UTF-8 text attachment.

    The filename is sanitized to a safe basename; Cache-Control: no-store
    prevents intermediaries from caching shared content. Each download is
    recorded via log_share_activity.
    """
    try:
        mgr = get_manager()
        details = mgr.get_shared_file_details(token, file_id)
        if not details.get('ok'):
            error_message = details.get('error', 'שגיאה בשליפת קובץ')
            status = 404 if error_message == 'לא נמצא' else 500
            return jsonify({'ok': False, 'error': error_message}), status
        file_payload = details.get('file') or {}
        ctx = details.get('context') or {}
        content = str(file_payload.get('content') or '')
        file_name = _sanitize_filename(file_payload.get('file_name') or '', fallback='shared.txt')
        buffer = BytesIO()
        buffer.write(content.encode('utf-8'))
        buffer.seek(0)
        response = send_file(buffer, mimetype='text/plain; charset=utf-8', as_attachment=True, download_name=file_name)
        response.headers['Cache-Control'] = 'no-store'
        mgr.log_share_activity(
            token,
            collection_id=ctx.get('collection_id'),
            file_id=file_payload.get('id'),
            event='download',
            ip=request.remote_addr,
            user_agent=request.headers.get('User-Agent'),
        )
        return response
    except Exception as e:
        try:
            emit_event("collections_shared_file_download_error", severity="anomaly", operation="collections.shared_file_download", handled=True, token=str(token), file_id=str(file_id), error=str(e))
        except Exception:
            pass
        return jsonify({'ok': False, 'error': 'שגיאה בהורדת קובץ'}), 500
@collections_bp.route('/shared/<token>/export', methods=['GET'])
@traced("collections.shared_export")
def export_shared_collection(token: str):
    """Export all shareable documents of a shared collection as a ZIP.

    Duplicate filenames are disambiguated with a numeric suffix before the
    extension (a.txt, a_1.txt, ...). The export event is recorded via
    log_share_activity; response is marked Cache-Control: no-store.
    """
    try:
        mgr = get_manager()
        docs_res = mgr.collect_shared_documents(token)
        if not docs_res.get('ok'):
            error_message = docs_res.get('error', 'שגיאה ביצוא קבצים')
            status = 404 if error_message == 'לא נמצא' else 500
            return jsonify({'ok': False, 'error': error_message}), status
        documents = docs_res.get('documents') or []
        if not documents:
            return jsonify({'ok': False, 'error': 'אין קבצים זמינים לשיתוף'}), 404
        buffer = BytesIO()
        # Per-basename collision counters plus the set of names already used.
        name_counters: Dict[str, int] = {}
        used_names: set[str] = set()
        with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
            for doc in documents:
                base_name = _sanitize_filename(doc.get('file_name') or '', fallback='file.txt')
                counter = name_counters.get(base_name, 0)
                name_candidate = base_name
                while name_candidate in used_names:
                    counter += 1
                    if '.' in base_name:
                        stem, ext = base_name.rsplit('.', 1)
                        name_candidate = f"{stem}_{counter}.{ext}"
                    else:
                        name_candidate = f"{base_name}_{counter}"
                name_counters[base_name] = counter
                used_names.add(name_candidate)
                content = doc.get('content')
                if not isinstance(content, str):
                    content = str(content or '')
                zf.writestr(name_candidate, content)
        buffer.seek(0)
        collection = docs_res.get('collection') or {}
        ctx = docs_res.get('context') or {}
        slug = collection.get('slug') or collection.get('name') or 'collection'
        zip_name = _sanitize_filename(str(slug), fallback='collection') + '_shared.zip'
        response = send_file(buffer, mimetype='application/zip', as_attachment=True, download_name=zip_name)
        response.headers['Cache-Control'] = 'no-store'
        mgr.log_share_activity(
            token,
            collection_id=ctx.get('collection_id'),
            event='export',
            ip=request.remote_addr,
            user_agent=request.headers.get('User-Agent'),
        )
        return response
    except Exception as e:
        try:
            emit_event("collections_shared_export_error", severity="anomaly", operation="collections.shared_export", handled=True, token=str(token), error=str(e))
        except Exception:
            pass
        return jsonify({'ok': False, 'error': 'שגיאה בהפקת קובץ ZIP'}), 500
# --- Shared -> Save (requires auth) ---
def _safe_tags(tags: Any, *, limit: int = 50) -> List[str]:
if not isinstance(tags, list):
return []
out: List[str] = []
for t in tags:
try:
s = str(t or "").strip()
except Exception:
s = ""
if not s:
continue
out.append(s[:80])
if len(out) >= limit:
break
# הסרת כפילויות תוך שמירה על סדר
seen: set[str] = set()
return [x for x in out if not (x in seen or seen.add(x))]
def _compute_size_and_lines(content: str) -> Tuple[int, int]:
if not isinstance(content, str) or not content:
return 0, 0
try:
size = len(content.encode("utf-8", errors="ignore"))
except Exception:
size = len(content)
try:
lines = len(content.splitlines())
except Exception:
lines = 0
return int(size), int(lines)
def _save_shared_document_to_user(db_ref, *, user_id: int, doc: Dict[str, Any]) -> Dict[str, Any]:
    """Copy one document from a shared collection into the current user's storage.

    Normalizes the incoming document, then routes "large" documents to
    ``db_ref.large_files`` and everything else (including the fallback when
    ``large_files`` is unavailable in this environment) to
    ``db_ref.code_snippets``. Identical active content is skipped.

    Returns:
        {ok, action: inserted|skipped, source, file_name, inserted_id?} on
        success, or {ok: False, error} when the insert fails.
    """
    try:
        src = str((doc.get("source") or "regular")).lower()
    except Exception:
        src = "regular"
    file_name = _sanitize_filename(str(doc.get("file_name") or ""), fallback="shared.txt")
    content = doc.get("content")
    if not isinstance(content, str):
        content = str(content or "")
    language = str(doc.get("language") or "text").strip() or "text"
    description = str(doc.get("description") or "").strip()
    tags = _safe_tags(doc.get("tags"))
    now = datetime.now(timezone.utc)
    file_size, lines_count = _compute_size_and_lines(content)
    # Fields shared by both destination payloads.
    common: Dict[str, Any] = {
        "user_id": int(user_id),
        "file_name": file_name,
        "programming_language": language,
        "description": description,
        "tags": tags,
        "file_size": file_size,
        "lines_count": lines_count,
        "is_active": True,
        "created_at": now,
        "updated_at": now,
    }
    # If large_files is missing in this environment, fall back to code_snippets.
    large_coll = getattr(db_ref, "large_files", None)
    if src == "large" and large_coll is not None:
        return _insert_shared_large_file(
            large_coll, common=common, content=content,
            user_id=int(user_id), file_name=file_name,
        )
    return _insert_shared_snippet(
        db_ref, common=common, content=content,
        user_id=int(user_id), file_name=file_name,
    )


def _insert_shared_large_file(large_coll, *, common: Dict[str, Any], content: str, user_id: int, file_name: str) -> Dict[str, Any]:
    """Insert *content* into large_files unless an identical active copy exists."""
    # Duplicate prevention: if an identical active document exists, skip.
    try:
        prev = large_coll.find_one(
            {"user_id": user_id, "file_name": file_name, "is_active": True},
            sort=[("updated_at", -1), ("_id", -1)],
        )
    except Exception:
        prev = None
    try:
        prev_content = (prev or {}).get("content")
    except Exception:
        prev_content = None
    if isinstance(prev_content, str) and prev_content == content:
        return {"ok": True, "action": "skipped", "source": "large", "file_name": file_name}
    payload = dict(common)
    payload["content"] = content
    try:
        res = large_coll.insert_one(payload)
        inserted_id = str(getattr(res, "inserted_id", "") or "")
    except Exception:
        return {"ok": False, "error": "שמירת קובץ גדול נכשלה"}
    return {"ok": True, "action": "inserted", "source": "large", "file_name": file_name, "inserted_id": inserted_id}


def _insert_shared_snippet(db_ref, *, common: Dict[str, Any], content: str, user_id: int, file_name: str) -> Dict[str, Any]:
    """Insert *content* as a new code_snippets version unless identical active code exists."""
    try:
        prev = db_ref.code_snippets.find_one(
            {"user_id": user_id, "file_name": file_name, "is_active": True},
            sort=[("version", -1), ("updated_at", -1), ("_id", -1)],
        )
    except Exception:
        prev = None
    try:
        prev_code = (prev or {}).get("code")
    except Exception:
        prev_code = None
    if isinstance(prev_code, str) and prev_code == content:
        return {"ok": True, "action": "skipped", "source": "regular", "file_name": file_name}
    # New version number: one past the latest active version (0 when none).
    try:
        prev_version = int((prev or {}).get("version", 0) or 0)
    except Exception:
        prev_version = 0
    payload = dict(common)
    payload["code"] = content
    payload["version"] = int(prev_version + 1)
    try:
        res = db_ref.code_snippets.insert_one(payload)
        inserted_id = str(getattr(res, "inserted_id", "") or "")
    except Exception:
        return {"ok": False, "error": "שמירת קובץ נכשלה"}
    return {"ok": True, "action": "inserted", "source": "regular", "file_name": file_name, "inserted_id": inserted_id}
def _invalidate_user_collections_cache(user_id: int, *, collection_id: Optional[str] = None) -> None:
    """Drop every cached collections view for *user_id* (best-effort, never raises).

    When *collection_id* is given, the detail/items caches for that specific
    collection are purged as well.
    """
    try:
        uid = str(int(user_id))
    except Exception:
        return
    try:
        invalidate = getattr(cache, "invalidate_user_cache", None)
        if callable(invalidate):
            invalidate(int(user_id))
    except Exception:
        pass
    broad_patterns = (
        f"collections_list:{uid}:*",
        f"collections_list:v2:{uid}:*",
        f"collections_detail:{uid}:*",
        f"collections_items:{uid}:*",
    )
    try:
        for pattern in broad_patterns:
            cache.delete_pattern(pattern)
    except Exception:
        pass
    if collection_id:
        try:
            cache.delete_pattern(f"collections_detail:{uid}:-api-collections-{collection_id}*")
            cache.delete_pattern(f"collections_items:{uid}:-api-collections-{collection_id}-items*")
        except Exception:
            pass
def _create_saved_collection_for_user(mgr, *, user_id: int, base_name: str, base_description: str = "", icon: Optional[str] = None, color: Optional[str] = None) -> Dict[str, Any]:
    """Create a manual collection for *user_id*, derived from a shared one.

    Retries up to 20 name variants with numbered suffixes to dodge slug
    collisions; returns the manager's result dict, or {ok: False, error}
    when every attempt fails.
    """
    safe_base = sanitize_input(base_name or "אוסף משותף", 60).strip() or "אוסף משותף"
    desc = (sanitize_input(base_description or "", 500) or "").strip()
    desc = f"{desc}\n\nנשמר מאוסף משותף" if desc else "נשמר מאוסף משותף"
    # Try suffixed names to avoid slug collisions.
    for attempt in range(20):
        suffix = " (שמור)" if attempt == 0 else f" (שמור {attempt + 1})"
        candidate = (safe_base + suffix)[:80]
        res = mgr.create_collection(
            user_id=int(user_id),
            name=candidate,
            description=desc[:500],
            mode="manual",
            icon=icon,
            color=color,
        )
        if res.get("ok"):
            return res
    return {"ok": False, "error": "לא ניתן ליצור אוסף חדש לשמירה"}
def _resolve_workspace_collection_id(mgr, *, user_id: int) -> Optional[str]:
    """Locate (or create) the user's 'Workspace' collection and return its id.

    Resolution order:
      1. An active collection named "שולחן עבודה" (Workspace).
      2. An inactive one with that name — reactivated in place, so items are
         not appended to a "deleted"/hidden collection.
      3. A freshly created "שולחן עבודה" collection.
      4. Fallback: a newly created "שמורים" (Saved) collection.

    Returns:
        The collection id as a string, or None when every step fails.
    """
    try:
        mgr.ensure_default_collections(int(user_id))
    except Exception:
        pass
    # First, look for an active collection named "שולחן עבודה".
    try:
        doc = mgr.collections.find_one({"user_id": int(user_id), "name": "שולחן עבודה", "is_active": True})
    except Exception:
        doc = None
    # If there is no active collection, try to find an existing one and
    # reactivate it, so we don't add items to a "deleted"/hidden collection.
    if not doc:
        try:
            doc_any = mgr.collections.find_one({"user_id": int(user_id), "name": "שולחן עבודה"})
        except Exception:
            doc_any = None
        if isinstance(doc_any, dict):
            try:
                cid_any = str(doc_any.get("_id") or "").strip()
            except Exception:
                cid_any = ""
            if cid_any:
                try:
                    mgr.collections.update_one(
                        {"_id": doc_any.get("_id"), "user_id": int(user_id)},
                        {"$set": {"is_active": True, "updated_at": datetime.now(timezone.utc)}},
                    )
                except Exception:
                    # Reactivation is best-effort; the id is returned regardless.
                    pass
                return cid_any
    try:
        cid = str((doc or {}).get("_id") or "").strip()
    except Exception:
        cid = ""
    if cid:
        return cid
    # Still not found: try creating a new "שולחן עבודה" collection
    # (may fail on slug collisions or quota limits).
    try:
        created_ws = mgr.create_collection(
            user_id=int(user_id),
            name="שולחן עבודה",
            description="קבצים שאני עובד עליהם כרגע",
            mode="manual",
            icon="🖥️",
            color="purple",
            is_favorite=True,
            sort_order=-1,
        )
    except Exception:
        created_ws = {"ok": False}
    if isinstance(created_ws, dict) and created_ws.get("ok"):
        try:
            return str((created_ws.get("collection") or {}).get("id") or "").strip() or None
        except Exception:
            return None
    # Fallback: create a "שמורים" (Saved) collection instead.
    created = _create_saved_collection_for_user(mgr, user_id=int(user_id), base_name="שמורים", base_description="")
    if created.get("ok"):
        try:
            return str(created.get("collection", {}).get("id") or "").strip()
        except Exception:
            return None
    return None
@collections_bp.route('/shared/<token>/save', methods=['POST'])
@require_auth
@traced("collections.shared_save_all")
def save_shared_collection_to_webapp(token: str):
    """Save an entire shared collection into the current user's webapp account.

    Creates a fresh collection for the user (name derived from the shared
    collection, suffixed to avoid slug collisions), copies every shared
    document into the user's storage, links the copies to the new
    collection, and invalidates the user's collections cache.

    Responses:
        200: {ok, saved, skipped, collection_id, collection_url, message}
        404: unknown share token or no documents available
        400: new collection could not be created
        500: unexpected failure
    """
    try:
        user_id = int(session["user_id"])
        db_ref = get_db()
        mgr = get_manager()
        docs_res = mgr.collect_shared_documents(token)
        if not docs_res.get("ok"):
            error_message = docs_res.get("error", "שגיאה בשמירה")
            # The manager signals "not found" with this exact message.
            status = 404 if error_message == "לא נמצא" else 500
            return jsonify({"ok": False, "error": error_message}), status
        shared_collection = docs_res.get("collection") or {}
        documents: List[Dict[str, Any]] = list(docs_res.get("documents") or [])
        if not documents:
            return jsonify({"ok": False, "error": "אין קבצים זמינים לשמירה"}), 404
        created = _create_saved_collection_for_user(
            mgr,
            user_id=user_id,
            base_name=str(shared_collection.get("name") or "אוסף משותף"),
            base_description=str(shared_collection.get("description") or ""),
            icon=shared_collection.get("icon"),
            color=shared_collection.get("color"),
        )
        if not created.get("ok"):
            return jsonify({"ok": False, "error": created.get("error") or "שגיאה ביצירת אוסף"}), 400
        dest_collection = created.get("collection") or {}
        dest_collection_id = str(dest_collection.get("id") or "").strip()
        if not dest_collection_id:
            return jsonify({"ok": False, "error": "שגיאה ביצירת אוסף"}), 500
        inserted = 0
        skipped = 0  # documents whose identical content the user already has
        items_to_add: List[Dict[str, Any]] = []
        for doc in documents:
            if not isinstance(doc, dict):
                continue
            saved = _save_shared_document_to_user(db_ref, user_id=user_id, doc=doc)
            if not saved.get("ok"):
                # Per-document save failures are silently dropped from the result.
                continue
            action = saved.get("action")
            if action == "inserted":
                inserted += 1
            else:
                skipped += 1
            # Skipped duplicates are still linked into the new collection.
            items_to_add.append({"source": saved.get("source") or "regular", "file_name": saved.get("file_name") or ""})
        if items_to_add:
            try:
                mgr.add_items(user_id, dest_collection_id, items_to_add)
            except Exception:
                pass
        _invalidate_user_collections_cache(user_id, collection_id=dest_collection_id)
        return jsonify({
            "ok": True,
            "saved": int(inserted),
            "skipped": int(skipped),
            "collection_id": dest_collection_id,
            "collection_url": _build_app_url(f"/collections/{dest_collection_id}"),
            "message": f"נשמר בהצלחה ({int(inserted) + int(skipped)} קבצים)",
        })
    except Exception as e:
        rid = _get_request_id()
        uid = session.get("user_id")
        try:
            emit_event(
                "collections_shared_save_all_error",
                severity="anomaly",
                operation="collections.shared_save_all",
                handled=True,
                request_id=rid,
                user_id=int(uid) if uid else None,
                token=str(token),
                error=str(e),
            )
        except Exception:
            pass
        logger.error("Error saving shared collection: %s", e, exc_info=True)
        return jsonify({"ok": False, "error": "שגיאה בשמירת האוסף"}), 500
@collections_bp.route('/shared/<token>/files/<file_id>/save', methods=['POST'])
@require_auth
@traced("collections.shared_save_file")
def save_shared_file_to_webapp(token: str, file_id: str):
    """Save a single file from a shared collection into the current user's
    account, linking it to the 'Workspace' collection when one is available.
    """
    try:
        current_user = int(session["user_id"])
        database = get_db()
        manager = get_manager()
        details = manager.get_shared_file_details(token, file_id)
        if not details.get("ok"):
            err = details.get("error", "שגיאה בשמירה")
            return jsonify({"ok": False, "error": err}), (404 if err == "לא נמצא" else 500)
        file_payload = details.get("file") or {}
        if not isinstance(file_payload, dict):
            return jsonify({"ok": False, "error": "שגיאה בשמירה"}), 500
        doc = {
            "file_name": file_payload.get("file_name"),
            "content": file_payload.get("content") or "",
            "language": file_payload.get("language") or "text",
            "description": file_payload.get("description") or "",
            "tags": file_payload.get("tags") or [],
            "source": file_payload.get("source") or "regular",
        }
        saved = _save_shared_document_to_user(database, user_id=current_user, doc=doc)
        if not saved.get("ok"):
            return jsonify({"ok": False, "error": saved.get("error") or "שמירת קובץ נכשלה"}), 500
        workspace_id = _resolve_workspace_collection_id(manager, user_id=current_user)
        if workspace_id:
            item = {"source": saved.get("source") or "regular", "file_name": saved.get("file_name") or ""}
            try:
                manager.add_items(current_user, workspace_id, [item])
            except Exception:
                # Linking is best-effort; the file itself is already saved.
                pass
            _invalidate_user_collections_cache(current_user, collection_id=workspace_id)
            collection_url = _build_app_url(f"/collections/{workspace_id}")
            msg_suffix = "לאוסף שולחן עבודה"
        else:
            _invalidate_user_collections_cache(current_user)
            collection_url = _build_app_url("/collections")
            msg_suffix = "לחשבון"
        return jsonify({
            "ok": True,
            "file_name": saved.get("file_name"),
            "action": saved.get("action"),
            "collection_url": collection_url,
            "message": f"נשמר בהצלחה {msg_suffix}",
        })
    except Exception as e:
        rid = _get_request_id()
        uid = session.get("user_id")
        try:
            emit_event(
                "collections_shared_save_file_error",
                severity="anomaly",
                operation="collections.shared_save_file",
                handled=True,
                request_id=rid,
                user_id=int(uid) if uid else None,
                token=str(token),
                file_id=str(file_id),
                error=str(e),
            )
        except Exception:
            pass
        logger.error("Error saving shared file: %s", e, exc_info=True)
        return jsonify({"ok": False, "error": "שגיאה בשמירת הקובץ"}), 500
# ==================== Error Handlers ====================
@collections_bp.errorhandler(404)
def not_found(error):
    """Blueprint-level 404 handler: return a JSON error envelope."""
    payload = {'ok': False, 'error': 'Endpoint not found'}
    return jsonify(payload), 404
@collections_bp.errorhandler(500)
def internal_error(error):
    """Blueprint-level 500 handler: log the error and return a JSON envelope."""
    # Lazy %-style logging args (consistent with the other handlers in this
    # module) instead of an eagerly-formatted f-string.
    logger.error("Internal error: %s", error)
    return jsonify({'ok': False, 'error': 'Internal server error'}), 500