diff --git a/backend/app/api/routes/admin.py b/backend/app/api/routes/admin.py
index 056a5d5..0a8380a 100644
--- a/backend/app/api/routes/admin.py
+++ b/backend/app/api/routes/admin.py
@@ -5,7 +5,7 @@ from app.repositories.scheme_artifacts import artifact_exists, list_scheme_artif
from app.repositories.scheme_versions import get_current_scheme_version
from app.repositories.schemes import get_scheme_record_by_scheme_id, list_scheme_records
from app.repositories.uploads import get_upload_record_by_upload_id
-from app.security.auth import require_api_key
+from app.security.auth import require_admin_api_key
from app.services.artifact_maintenance import (
cleanup_publish_preview_storage,
inspect_publish_preview_storage,
@@ -19,7 +19,7 @@ router = APIRouter()
@router.get(f"{settings.api_v1_prefix}/admin/schemes/{{scheme_id}}/current/artifacts")
async def list_current_scheme_artifacts(
scheme_id: str,
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
scheme = await get_scheme_record_by_scheme_id(scheme_id)
version = await get_current_scheme_version(
@@ -50,7 +50,7 @@ async def list_current_scheme_artifacts(
@router.get(f"{settings.api_v1_prefix}/admin/schemes/{{scheme_id}}/current/validation")
async def validate_current_scheme(
scheme_id: str,
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
scheme = await get_scheme_record_by_scheme_id(scheme_id)
version = await get_current_scheme_version(
@@ -74,7 +74,7 @@ async def validate_current_scheme(
async def regenerate_current_display(
scheme_id: str,
mode: str = Query(default="passthrough"),
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
scheme = await get_scheme_record_by_scheme_id(scheme_id)
version = await get_current_scheme_version(
@@ -98,7 +98,7 @@ async def bulk_backfill_display_artifacts(
mode: str = Query(default="passthrough"),
limit: int = Query(default=100, ge=1, le=1000),
only_missing: bool = Query(default=True),
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
schemes = await list_scheme_records(limit=limit, offset=0)
@@ -168,7 +168,7 @@ async def bulk_backfill_display_artifacts(
@router.get(f"{settings.api_v1_prefix}/admin/artifacts/publish-preview/audit")
async def audit_publish_preview_storage(
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
return await inspect_publish_preview_storage()
@@ -176,6 +176,6 @@ async def audit_publish_preview_storage(
@router.post(f"{settings.api_v1_prefix}/admin/artifacts/publish-preview/cleanup")
async def cleanup_publish_preview_artifacts_endpoint(
dry_run: bool = Query(default=True),
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
return await cleanup_publish_preview_storage(dry_run=dry_run)
diff --git a/backend/app/api/routes/admin_cleanup.py b/backend/app/api/routes/admin_cleanup.py
index b6fa2de..d93554c 100644
--- a/backend/app/api/routes/admin_cleanup.py
+++ b/backend/app/api/routes/admin_cleanup.py
@@ -6,7 +6,7 @@ from app.schemas.admin_cleanup import (
PricingCleanupExecuteResponse,
PricingCleanupPreviewResponse,
)
-from app.security.auth import require_api_key
+from app.security.auth import require_admin_api_key
from app.services.pricing_cleanup import (
build_pricing_cleanup_preview,
execute_pricing_cleanup,
@@ -25,7 +25,7 @@ async def get_pricing_cleanup_preview(
name_prefix: list[str] = Query(default_factory=list),
pricing_category_id: list[str] = Query(default_factory=list),
delete_only_without_rules: bool = Query(default=True),
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
return await build_pricing_cleanup_preview(
scheme_id=scheme_id,
@@ -43,7 +43,7 @@ async def get_pricing_cleanup_preview(
async def post_pricing_cleanup(
scheme_id: str,
payload: PricingCleanupExecuteRequest,
- role: str = Depends(require_api_key),
+ role: str = Depends(require_admin_api_key),
):
return await execute_pricing_cleanup(
scheme_id=scheme_id,
diff --git a/backend/app/security/auth.py b/backend/app/security/auth.py
index 917567f..c0f333a 100644
--- a/backend/app/security/auth.py
+++ b/backend/app/security/auth.py
@@ -1,4 +1,4 @@
-from fastapi import Header, HTTPException, status
+from fastapi import Depends, Header, HTTPException, status
from app.core.config import settings
from app.domain.roles import UserRole
@@ -31,3 +31,12 @@ async def require_api_key(
)
return role
+
+
+async def require_admin_api_key(role: str = Depends(require_api_key)) -> str:
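+    """Admin-gated dependency: reuses require_api_key to resolve the caller role, then rejects non-admin roles with 403."""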
+ if role != UserRole.ADMIN.value:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Admin role required",
+ )
+ return role
diff --git a/backend/docs/smoke-regression.md b/backend/docs/smoke-regression.md
index 1d83f51..88b014c 100644
--- a/backend/docs/smoke-regression.md
+++ b/backend/docs/smoke-regression.md
@@ -23,6 +23,10 @@ Primary operator regressions:
- `backend/scripts/smoke_core.sh`
- `backend/scripts/smoke_pricing_publish.sh`
+- `backend/scripts/smoke_version_lifecycle.sh`
+- `backend/scripts/smoke_admin_ops.sh`
+- `backend/scripts/smoke_authz_admin_ops.sh`
+- `backend/scripts/smoke_upload_negative.sh`
- `backend/scripts/smoke_regression.sh`
- `backend/scripts/editor_mutation_regression.sh`
@@ -32,7 +36,11 @@ The scripts are expected to fail fast on any contract break or unexpected 5xx.
- first runs `smoke_core.sh`
- then runs `smoke_pricing_publish.sh`
-- returns non-zero if either scenario fails
+- then runs `smoke_version_lifecycle.sh`
+- then runs `smoke_admin_ops.sh`
+- then runs `smoke_authz_admin_ops.sh`
+- then runs `smoke_upload_negative.sh`
+- returns non-zero if any scenario fails
## Scenario split
@@ -68,6 +76,70 @@ Important:
- it intentionally checks both a priced seat and an unpriced seat on the same fresh scheme
- it does not rely on historical pricing IDs, rules, or old schemes
+### Version lifecycle smoke
+
+Use:
+
+- `backend/scripts/smoke_version_lifecycle.sh`
+
+This scenario uploads a fresh SVG, publishes version 1, creates version 2 from the published current version, mutates the new draft, publishes version 2, rolls back to version 1, and finally unpublishes the current scheme.
+
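+A minimal sketch of the rollback assertion, using the `request`/`assert_json_int_eq` helpers from `smoke_common.sh` (the rollback route shape and query parameter are illustrative, not taken from this diff):
+
+```bash
+# Roll back to version 1 (hypothetical route shape), then verify the current
+# pointer follows the requested target version.
+request "rollback_to_v1" "POST" \
+  "${API_URL}/api/v1/schemes/${SCHEME_ID}/rollback?target_version_number=1" \
+  "200"
+request "scheme_detail_after_rollback" "GET" "${API_URL}/api/v1/schemes/${SCHEME_ID}" "200"
+assert_json_int_eq "${TMP_DIR}/scheme_detail_after_rollback.body" "current_version_number" "1"
+```
+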
+Important:
+
+- it validates multi-version lifecycle beyond fresh upload
+- it checks that `draft/ensure` creates a new draft only after current becomes published
+- it verifies rollback switches `current_version_number` to the requested target version
+- it verifies the rolled-back current structure matches the target version semantics, not the later mutated draft
+- it checks audit trail for `scheme.published`, `scheme.version.created`, `scheme.rolled_back`, and `scheme.unpublished`
+
+### Admin/ops smoke
+
+Use:
+
+- `backend/scripts/smoke_admin_ops.sh`
+
+This scenario uploads a fresh SVG and prepares its own admin-cleanup fixtures before checking current-artifact inspection, validation, publish-preview audit/cleanup, and pricing-category cleanup preview/dry-run.
+
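+A minimal sketch of the strict post-cleanup audit check, copied in spirit from the smoke script:
+
+```bash
+# After cleanup execute, the audit must report no orphan files and no missing files.
+request "publish_preview_audit_after_cleanup" "GET" \
+  "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" \
+  "200"
+assert_json_int_eq "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "orphan_files_count" "0"
+assert_json_int_eq "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "missing_files_for_db_rows_count" "0"
+```
+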
+Important:
+
+- it creates its own pricing categories for cleanup preview
+- it creates its own protected pricing rule so cleanup preview has both deletable and skipped categories
+- it does not rely on historical orphan artifacts, old schemes, or dirty pricing state
+- it checks publish-preview cleanup in both dry-run and execute modes
+- it requires the final publish-preview audit state to be healthy: `orphan_files_count=0` and `missing_files_for_db_rows_count=0`
+- it executes destructive pricing cleanup only for self-created safe fixture data
+
+### Admin authz smoke
+
+Use:
+
+- `backend/scripts/smoke_authz_admin_ops.sh`
+
+This scenario uploads a fresh SVG, prepares its own cleanup fixture data, and then checks permission boundaries for admin/operator/viewer on admin/ops endpoints.
+
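+A minimal sketch of the role matrix check, assuming `ADMIN_API_KEY`/`OPERATOR_API_KEY`/`VIEWER_API_KEY` are set and `request_with_api_key` from `smoke_common.sh` is sourced:
+
+```bash
+# Admin must pass; operator and viewer must receive controlled 403 responses.
+request_with_api_key "${ADMIN_API_KEY}" "admin_audit" "GET" \
+  "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "200"
+request_with_api_key "${OPERATOR_API_KEY}" "operator_audit" "GET" \
+  "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "403"
+request_with_api_key "${VIEWER_API_KEY}" "viewer_audit" "GET" \
+  "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "403"
+assert_file_contains "${TMP_DIR}/operator_audit.body" "Admin role required"
+```
+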
+Important:
+
+- admin must be allowed on tested admin endpoints
+- operator and viewer must be denied with controlled 403 responses
+- the scenario does not rely on historical scheme ids or dirty pricing state
+- destructive pricing cleanup execution is validated with fresh self-created fixture categories only
+
+### Negative upload smoke
+
+Use:
+
+- `backend/scripts/smoke_upload_negative.sh`
+
+This scenario checks controlled upload failures for invalid inputs.
+
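+A minimal sketch of one negative case, using `upload_file_expect_status` from `smoke_common.sh`:
+
+```bash
+# An empty body must be rejected with a controlled 400, never a 500.
+: > "${TMP_DIR}/empty.svg"
+upload_file_expect_status "upload_empty_file" "${TMP_DIR}/empty.svg" "empty.svg" "image/svg+xml" "400"
+assert_file_contains "${TMP_DIR}/upload_empty_file.body" "Uploaded file is empty"
+```
+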
+Important:
+
+- empty upload must fail with a controlled 4xx
+- non-SVG uploads must fail with a controlled 4xx
+- invalid extension/content-type combinations must fail with a controlled 4xx
+- oversize upload must fail with a controlled 413 when the configured size limit is exceeded
+- no negative case is allowed to return 500
+
## 1. Health / system
- GET /healthz -> 200 (smoke uses a bounded retry/wait loop and fails explicitly if the API never becomes ready)
@@ -140,7 +212,110 @@ Validate:
- priced-seat checks happen only after explicit pricing fixture creation
- publish flow is validated on a fresh scheme, not on historical DB data
-## 4. Legacy endpoint families
+## 4. Version lifecycle smoke coverage
+
+`smoke_version_lifecycle.sh` checks:
+
+- POST /api/v1/schemes/upload -> 200
+- GET scheme detail/current immediately after upload -> version 1 draft
+- POST draft ensure on version 1 -> 200 and remains same draft
+- POST pricing category/rule fixture -> 200
+- POST draft/pricing/snapshot on version 1 -> 200
+- POST publish on version 1 -> 200
+- POST draft ensure from published current -> 200 and creates version 2
+- PATCH one draft seat field on version 2 -> 200
+- GET draft compare-preview on version 2 -> 200 and shows changed state
+- POST draft/pricing/snapshot on version 2 -> 200
+- POST publish on version 2 -> 200
+- POST rollback to version 1 -> 200
+- POST unpublish current -> 200
+- GET audit -> 200 with lifecycle events present
+
+Validate:
+
+- version numbering advances from 1 to 2 only when current was published
+- current pointer tracks the published version before rollback
+- rollback switches current pointer back to the requested target version
+- rolled-back current structure matches version 1 semantics after version 2 mutation
+- lifecycle audit events are present and JSON-serializable
+
+## 5. Admin/ops smoke coverage
+
+`smoke_admin_ops.sh` checks:
+
+- POST /api/v1/schemes/upload -> 200
+- POST draft ensure on the fresh scheme -> 200
+- POST pricing category fixture for cleanup preview -> 200
+- POST protected pricing rule fixture -> 200
+- POST draft/pricing/snapshot -> 200
+- GET draft/publish-preview?refresh=true -> 200
+- GET draft/publish-preview -> 200
+- GET /api/v1/admin/schemes/{scheme_id}/current/artifacts -> 200
+- GET /api/v1/admin/schemes/{scheme_id}/current/validation -> 200
+- GET /api/v1/admin/artifacts/publish-preview/audit -> 200
+- POST /api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true -> 200
+- POST /api/v1/admin/artifacts/publish-preview/cleanup?dry_run=false -> 200
+- GET /api/v1/admin/artifacts/publish-preview/audit after cleanup -> 200
+- GET /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup-preview -> 200
+- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=true -> 200
+- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=false -> 200
+- GET /api/v1/schemes/{scheme_id}/pricing after destructive cleanup -> 200
+- repeated cleanup preview/dry-run after destructive cleanup -> 200
+
+Validate:
+
+- admin artifact listing stays readable for current draft version
+- admin validation stays readable for current draft version
+- publish-preview cleanup dry-run stays non-destructive and mirrors pre-cleanup audit counts
+- publish-preview cleanup execute removes all orphan preview files and missing DB rows
+- final publish-preview audit is strict healthy state: `orphan_files_count=0`, `missing_files_for_db_rows_count=0`, and `db_rows_count == disk_files_count`
+- pricing cleanup preview identifies both deletable and protected categories created inside the scenario
+- pricing cleanup dry-run never mutates fixture data
+- destructive pricing cleanup deletes only the safe category without rules
+- protected pricing category and its rule remain after destructive cleanup
+- repeated cleanup state remains stable after destructive cleanup
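+
+A minimal sketch of the cleanup dry-run call validated above (the request body mirrors the smoke script):
+
+```bash
+# Dry-run must report matched candidates without deleting anything.
+request "pricing_cleanup_dry_run" "POST" \
+  "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" \
+  "200" \
+  "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":true}"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run.body" "deleted_count" "0"
+```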
+
+## 6. Admin authz smoke coverage
+
+`smoke_authz_admin_ops.sh` checks:
+
+- POST /api/v1/schemes/upload -> 200
+- POST draft ensure on the fresh scheme -> 200
+- POST pricing fixture categories/rule for cleanup authz checks -> 200
+- POST draft/publish-preview refresh fixture -> 200
+- GET /api/v1/admin/artifacts/publish-preview/audit as admin -> 200
+- GET /api/v1/admin/artifacts/publish-preview/audit as operator/viewer -> 403
+- POST /api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true as admin -> 200
+- POST /api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true as operator/viewer -> 403
+- GET /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup-preview as admin -> 200
+- GET /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup-preview as operator/viewer -> 403
+- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=true as admin -> 200
+- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=true as operator/viewer -> 403
+- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=false as operator/viewer -> 403
+- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=false as admin -> 200
+
+Validate:
+
+- expected role matrix is explicit and enforced
+- admin endpoints stay available to admin
+- operator and viewer are denied without 500
+- destructive cleanup execution remains constrained to self-created safe fixture data
+
+## 7. Negative upload smoke coverage
+
+`smoke_upload_negative.sh` checks:
+
+- POST /api/v1/schemes/upload with empty SVG body -> controlled 400
+- POST /api/v1/schemes/upload with non-SVG text/plain body -> controlled 400
+- POST /api/v1/schemes/upload with SVG body but invalid extension/content-type pair -> controlled 400
+- POST /api/v1/schemes/upload with body larger than manifest max_file_size_bytes -> controlled 413
+
+Validate:
+
+- upload validation rejects bad inputs with explicit 4xx contracts
+- configured max file size is read from manifest, not hardcoded in the script
+- no negative upload case returns 500
+
+## 8. Legacy endpoint families
The sections below remain the API baseline by area, but regression execution is now split between clean-DB core smoke and pricing/publish smoke.
@@ -302,14 +477,20 @@ Validate:
- GET /api/v1/admin/schemes/{scheme_id}/current/validation -> 200
- GET /api/v1/admin/artifacts/publish-preview/audit -> 200
- POST /api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true -> 200
+- POST /api/v1/admin/artifacts/publish-preview/cleanup?dry_run=false -> 200
- GET /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup-preview -> 200
- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=true -> 200
+- POST /api/v1/admin/schemes/{scheme_id}/pricing/categories/cleanup with dry_run=false -> 200
Validate:
- artifact audit does not report orphan files or missing files for DB rows in normal state
+- healthy publish-preview audit is strict: `orphan_files_count=0` and `missing_files_for_db_rows_count=0`
- validation report is readable and deterministic
- pricing cleanup preview returns matched candidates and safe_to_delete_count
- pricing cleanup dry-run returns deleted_count=0
+- destructive pricing cleanup deletes only safe fixture categories without rules
+- admin role is allowed on admin endpoints
+- operator/viewer are denied with controlled 403 on admin endpoints
- idempotent cleanup is valid in both states: `matched_total=0` with `would_delete_count=0`, or `matched_total>0` with `would_delete_count>0`
- smoke does not require cleanup dry-run to always find something to delete
- admin routes do not produce 500 for healthy scheme state
diff --git a/backend/scripts/smoke_admin_ops.sh b/backend/scripts/smoke_admin_ops.sh
new file mode 100644
index 0000000..d1036b6
--- /dev/null
+++ b/backend/scripts/smoke_admin_ops.sh
@@ -0,0 +1,319 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+TMP_DIR="$(mktemp -d)"
+trap 'rm -rf "${TMP_DIR}"' EXIT
+
+# shellcheck source=backend/scripts/smoke_common.sh
+source "${SCRIPT_DIR}/smoke_common.sh"
+
+wait_for_health
+
+create_fresh_scheme_from_upload "smoke-admin-ops"
+
+request "scheme_current" "GET" "${API_URL}/api/v1/schemes/${SCHEME_ID}/current" "200"
+CURRENT_VERSION_ID="$(json_get "${TMP_DIR}/scheme_current.body" "scheme_version_id")"
+echo "CURRENT_VERSION_ID=${CURRENT_VERSION_ID}"
+
+request "ensure_draft" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/ensure?expected_current_scheme_version_id=${CURRENT_VERSION_ID}" \
+ "200"
+DRAFT_VERSION_ID="$(json_get "${TMP_DIR}/ensure_draft.body" "scheme_version_id")"
+echo "DRAFT_VERSION_ID=${DRAFT_VERSION_ID}"
+
+request "draft_structure" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/structure?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200"
+
+TARGET_SEAT_ID="$(python3 - "${TMP_DIR}/draft_structure.body" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+seat = next((item for item in payload.get("seats", []) if item.get("seat_id")), None)
+if seat is None:
+ raise SystemExit("No seat with seat_id found for admin ops smoke")
+print(seat["seat_id"])
+PY
+)"
+echo "TARGET_SEAT_ID=${TARGET_SEAT_ID}"
+
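+# Unique per-run prefix so cleanup preview/execute only ever matches fixtures created by this scenario.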
+STAMP="$(date +%s)-$$"
+CLEANUP_PREFIX="ADMINOPS_CLEAN_${STAMP}_"
+DELETE_CATEGORY_NAME="adminops-clean-delete-${STAMP}"
+DELETE_CATEGORY_CODE="${CLEANUP_PREFIX}DELETE"
+KEEP_CATEGORY_NAME="adminops-clean-keep-${STAMP}"
+KEEP_CATEGORY_CODE="${CLEANUP_PREFIX}KEEP"
+
+request "create_delete_category" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing/categories?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200" \
+ "{\"name\":\"${DELETE_CATEGORY_NAME}\",\"code\":\"${DELETE_CATEGORY_CODE}\"}"
+DELETE_CATEGORY_ID="$(json_get "${TMP_DIR}/create_delete_category.body" "pricing_category_id")"
+echo "DELETE_CATEGORY_ID=${DELETE_CATEGORY_ID}"
+
+request "create_keep_category" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing/categories?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200" \
+ "{\"name\":\"${KEEP_CATEGORY_NAME}\",\"code\":\"${KEEP_CATEGORY_CODE}\"}"
+KEEP_CATEGORY_ID="$(json_get "${TMP_DIR}/create_keep_category.body" "pricing_category_id")"
+echo "KEEP_CATEGORY_ID=${KEEP_CATEGORY_ID}"
+
+request "create_keep_category_rule" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing/rules?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200" \
+ "{\"pricing_category_id\":\"${KEEP_CATEGORY_ID}\",\"target_type\":\"seat\",\"target_ref\":\"${TARGET_SEAT_ID}\",\"amount\":\"555.00\",\"currency\":\"RUB\"}"
+KEEP_RULE_ID="$(json_get "${TMP_DIR}/create_keep_category_rule.body" "price_rule_id")"
+echo "KEEP_RULE_ID=${KEEP_RULE_ID}"
+
+request "draft_pricing_snapshot" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/pricing/snapshot?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200"
+
+request "publish_preview_refresh" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/publish-preview?refresh=true&expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200"
+request "publish_preview_cached" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/publish-preview?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200"
+
+request "admin_current_artifacts" "GET" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/current/artifacts" \
+ "200"
+assert_json_eq "${TMP_DIR}/admin_current_artifacts.body" "scheme_version_id" "${DRAFT_VERSION_ID}"
+assert_json_int_ge "${TMP_DIR}/admin_current_artifacts.body" "total" "4"
+assert_file_contains "${TMP_DIR}/admin_current_artifacts.body" "\"artifact_type\":\"publish_preview\""
+
+request "admin_current_validation" "GET" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/current/validation" \
+ "200"
+assert_json_eq "${TMP_DIR}/admin_current_validation.body" "scheme_version_id" "${DRAFT_VERSION_ID}"
+assert_file_contains "${TMP_DIR}/admin_current_validation.body" "\"report\":"
+
+request "publish_preview_audit" "GET" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" \
+ "200"
+assert_json_eq "${TMP_DIR}/publish_preview_audit.body" "artifact_type" "publish_preview"
+assert_json_int_ge "${TMP_DIR}/publish_preview_audit.body" "db_rows_count" "1"
+assert_json_int_ge "${TMP_DIR}/publish_preview_audit.body" "disk_files_count" "1"
+PRE_CLEANUP_DB_ROWS_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit.body" "db_rows_count")"
+PRE_CLEANUP_DISK_FILES_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit.body" "disk_files_count")"
+PRE_CLEANUP_ORPHAN_FILES_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit.body" "orphan_files_count")"
+PRE_CLEANUP_MISSING_FILES_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit.body" "missing_files_for_db_rows_count")"
+echo "PRE_CLEANUP_DB_ROWS_COUNT=${PRE_CLEANUP_DB_ROWS_COUNT}"
+echo "PRE_CLEANUP_DISK_FILES_COUNT=${PRE_CLEANUP_DISK_FILES_COUNT}"
+echo "PRE_CLEANUP_ORPHAN_FILES_COUNT=${PRE_CLEANUP_ORPHAN_FILES_COUNT}"
+echo "PRE_CLEANUP_MISSING_FILES_COUNT=${PRE_CLEANUP_MISSING_FILES_COUNT}"
+
+request "publish_preview_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true" \
+ "200"
+assert_json_eq "${TMP_DIR}/publish_preview_cleanup_dry_run.body" "artifact_type" "publish_preview"
+assert_json_eq "${TMP_DIR}/publish_preview_cleanup_dry_run.body" "dry_run" "true"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_dry_run.body" "deleted_files_count" "0"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_dry_run.body" "deleted_db_rows_count" "0"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_dry_run.body" "orphan_files_count" "${PRE_CLEANUP_ORPHAN_FILES_COUNT}"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_dry_run.body" "missing_files_for_db_rows_count" "${PRE_CLEANUP_MISSING_FILES_COUNT}"
+
+request "publish_preview_cleanup_execute" "POST" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=false" \
+ "200"
+assert_json_eq "${TMP_DIR}/publish_preview_cleanup_execute.body" "artifact_type" "publish_preview"
+assert_json_eq "${TMP_DIR}/publish_preview_cleanup_execute.body" "dry_run" "false"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_execute.body" "orphan_files_count" "${PRE_CLEANUP_ORPHAN_FILES_COUNT}"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_execute.body" "missing_files_for_db_rows_count" "${PRE_CLEANUP_MISSING_FILES_COUNT}"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_execute.body" "deleted_files_count" "${PRE_CLEANUP_ORPHAN_FILES_COUNT}"
+assert_json_int_eq "${TMP_DIR}/publish_preview_cleanup_execute.body" "deleted_db_rows_count" "${PRE_CLEANUP_MISSING_FILES_COUNT}"
+
+request "publish_preview_audit_after_cleanup" "GET" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" \
+ "200"
+assert_json_eq "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "artifact_type" "publish_preview"
+assert_json_int_eq "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "orphan_files_count" "0"
+assert_json_int_eq "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "missing_files_for_db_rows_count" "0"
+
+POST_CLEANUP_DB_ROWS_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "db_rows_count")"
+POST_CLEANUP_DISK_FILES_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "disk_files_count")"
+POST_CLEANUP_ORPHAN_FILES_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "orphan_files_count")"
+POST_CLEANUP_MISSING_FILES_COUNT="$(json_get "${TMP_DIR}/publish_preview_audit_after_cleanup.body" "missing_files_for_db_rows_count")"
+echo "POST_CLEANUP_DB_ROWS_COUNT=${POST_CLEANUP_DB_ROWS_COUNT}"
+echo "POST_CLEANUP_DISK_FILES_COUNT=${POST_CLEANUP_DISK_FILES_COUNT}"
+echo "POST_CLEANUP_ORPHAN_FILES_COUNT=${POST_CLEANUP_ORPHAN_FILES_COUNT}"
+echo "POST_CLEANUP_MISSING_FILES_COUNT=${POST_CLEANUP_MISSING_FILES_COUNT}"
+
+if [[ "${POST_CLEANUP_DB_ROWS_COUNT}" != "${POST_CLEANUP_DISK_FILES_COUNT}" ]]; then
+ fail "publish-preview audit mismatch after cleanup: db_rows_count=${POST_CLEANUP_DB_ROWS_COUNT}, disk_files_count=${POST_CLEANUP_DISK_FILES_COUNT}"
+fi
+echo "[OK] publish-preview audit is fully consistent after cleanup"
+
+request "pricing_cleanup_preview" "GET" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup-preview?code_prefix=${CLEANUP_PREFIX}" \
+ "200"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_preview.body" "scheme_id" "${SCHEME_ID}"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_preview.body" "total_candidates" "2"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_preview.body" "safe_to_delete_count" "1"
+
+python3 - "${TMP_DIR}/pricing_cleanup_preview.body" "${DELETE_CATEGORY_ID}" "${KEEP_CATEGORY_ID}" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+delete_category_id = sys.argv[2]
+keep_category_id = sys.argv[3]
+
+items = {item["pricing_category_id"]: item for item in payload.get("items", [])}
+if delete_category_id not in items:
+ raise SystemExit(f"Delete candidate {delete_category_id} missing from cleanup preview")
+if keep_category_id not in items:
+ raise SystemExit(f"Protected category {keep_category_id} missing from cleanup preview")
+if not items[delete_category_id]["deletable"]:
+ raise SystemExit("Delete candidate is expected to be deletable")
+if items[keep_category_id]["deletable"]:
+ raise SystemExit("Protected category is expected to be skipped because it has rules")
+PY
+echo "[OK] pricing cleanup preview matched expected deletable/skipped categories"
+
+request "pricing_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" \
+ "200" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":true}"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_dry_run.body" "scheme_id" "${SCHEME_ID}"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_dry_run.body" "dry_run" "true"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run.body" "matched_total" "2"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run.body" "would_delete_count" "1"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run.body" "deleted_count" "0"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run.body" "skipped_count" "1"
+
+python3 - "${TMP_DIR}/pricing_cleanup_dry_run.body" "${DELETE_CATEGORY_ID}" "${KEEP_CATEGORY_ID}" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+delete_category_id = sys.argv[2]
+keep_category_id = sys.argv[3]
+
+would_delete_ids = set(payload.get("would_delete_category_ids", []))
+if would_delete_ids != {delete_category_id}:
+ raise SystemExit(
+ f"Dry run expected would_delete_category_ids={[delete_category_id]}, got={sorted(would_delete_ids)}"
+ )
+
+skipped_ids = {item["pricing_category_id"] for item in payload.get("skipped", [])}
+if skipped_ids != {keep_category_id}:
+ raise SystemExit(
+ f"Dry run expected skipped={[keep_category_id]}, got={sorted(skipped_ids)}"
+ )
+PY
+echo "[OK] pricing cleanup dry-run kept protected category and selected only empty fixture category"
+
+request "pricing_cleanup_execute" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" \
+ "200" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":false}"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_execute.body" "scheme_id" "${SCHEME_ID}"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_execute.body" "dry_run" "false"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_execute.body" "matched_total" "2"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_execute.body" "would_delete_count" "1"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_execute.body" "deleted_count" "1"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_execute.body" "skipped_count" "1"
+
+python3 - "${TMP_DIR}/pricing_cleanup_execute.body" "${DELETE_CATEGORY_ID}" "${KEEP_CATEGORY_ID}" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+delete_category_id = sys.argv[2]
+keep_category_id = sys.argv[3]
+
+deleted_ids = set(payload.get("deleted_category_ids", []))
+if deleted_ids != {delete_category_id}:
+ raise SystemExit(
+ f"Cleanup execute expected deleted_category_ids={[delete_category_id]}, got={sorted(deleted_ids)}"
+ )
+
+skipped_ids = {item["pricing_category_id"] for item in payload.get("skipped", [])}
+if skipped_ids != {keep_category_id}:
+ raise SystemExit(
+ f"Cleanup execute expected skipped={[keep_category_id]}, got={sorted(skipped_ids)}"
+ )
+PY
+echo "[OK] pricing cleanup execute deleted only safe fixture category"
+
+request "pricing_bundle_after_cleanup" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing" \
+ "200"
+assert_json_len_eq "${TMP_DIR}/pricing_bundle_after_cleanup.body" "categories" "1"
+assert_json_len_eq "${TMP_DIR}/pricing_bundle_after_cleanup.body" "rules" "1"
+
+python3 - "${TMP_DIR}/pricing_bundle_after_cleanup.body" "${DELETE_CATEGORY_ID}" "${KEEP_CATEGORY_ID}" "${KEEP_RULE_ID}" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+delete_category_id = sys.argv[2]
+keep_category_id = sys.argv[3]
+keep_rule_id = sys.argv[4]
+
+category_ids = {item["pricing_category_id"] for item in payload.get("categories", [])}
+rule_ids = {item["price_rule_id"] for item in payload.get("rules", [])}
+
+if delete_category_id in category_ids:
+ raise SystemExit("Deleted cleanup category still present in pricing bundle")
+if keep_category_id not in category_ids:
+ raise SystemExit("Protected cleanup category missing after execute cleanup")
+if keep_rule_id not in rule_ids:
+ raise SystemExit("Protected pricing rule missing after execute cleanup")
+PY
+echo "[OK] pricing bundle reflects destructive cleanup result"
+
+request "pricing_cleanup_preview_after_cleanup" "GET" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup-preview?code_prefix=${CLEANUP_PREFIX}" \
+ "200"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_preview_after_cleanup.body" "scheme_id" "${SCHEME_ID}"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_preview_after_cleanup.body" "total_candidates" "1"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_preview_after_cleanup.body" "safe_to_delete_count" "0"
+
+request "pricing_cleanup_dry_run_after_cleanup" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" \
+ "200" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":true}"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_dry_run_after_cleanup.body" "scheme_id" "${SCHEME_ID}"
+assert_json_eq "${TMP_DIR}/pricing_cleanup_dry_run_after_cleanup.body" "dry_run" "true"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run_after_cleanup.body" "matched_total" "1"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run_after_cleanup.body" "would_delete_count" "0"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run_after_cleanup.body" "deleted_count" "0"
+assert_json_int_eq "${TMP_DIR}/pricing_cleanup_dry_run_after_cleanup.body" "skipped_count" "1"
+
+python3 - "${TMP_DIR}/pricing_cleanup_dry_run_after_cleanup.body" "${KEEP_CATEGORY_ID}" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+keep_category_id = sys.argv[2]
+
+would_delete_ids = payload.get("would_delete_category_ids", [])
+if would_delete_ids:
+ raise SystemExit(f"Expected no deletable categories after cleanup, got={would_delete_ids}")
+
+skipped_ids = {item["pricing_category_id"] for item in payload.get("skipped", [])}
+if skipped_ids != {keep_category_id}:
+ raise SystemExit(
+ f"Post-cleanup dry run expected skipped={[keep_category_id]}, got={sorted(skipped_ids)}"
+ )
+PY
+echo "[OK] repeated cleanup state is stable after destructive cleanup"
+
+echo
+echo "===== done ====="
+echo "[OK] smoke admin ops completed successfully"
+echo "FRESH_SCHEME_ID=${SCHEME_ID}"
+echo "DELETE_CATEGORY_ID=${DELETE_CATEGORY_ID}"
+echo "KEEP_CATEGORY_ID=${KEEP_CATEGORY_ID}"
+echo "KEEP_RULE_ID=${KEEP_RULE_ID}"
diff --git a/backend/scripts/smoke_authz_admin_ops.sh b/backend/scripts/smoke_authz_admin_ops.sh
new file mode 100644
index 0000000..5faa29b
--- /dev/null
+++ b/backend/scripts/smoke_authz_admin_ops.sh
@@ -0,0 +1,166 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+TMP_DIR="$(mktemp -d)"
+trap 'rm -rf "${TMP_DIR}"' EXIT
+
+# shellcheck source=backend/scripts/smoke_common.sh
+source "${SCRIPT_DIR}/smoke_common.sh"
+
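+# Role-specific API keys for the authz matrix; override via environment when the defaults do not match the deployment under test.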
+ADMIN_API_KEY="${ADMIN_API_KEY:-admin-local-dev-key}"
+OPERATOR_API_KEY="${OPERATOR_API_KEY:-operator-local-dev-key}"
+VIEWER_API_KEY="${VIEWER_API_KEY:-viewer-local-dev-key}"
+
+wait_for_health
+
+create_fresh_scheme_from_upload "smoke-authz-admin-ops"
+
+request "scheme_current" "GET" "${API_URL}/api/v1/schemes/${SCHEME_ID}/current" "200"
+CURRENT_VERSION_ID="$(json_get "${TMP_DIR}/scheme_current.body" "scheme_version_id")"
+echo "CURRENT_VERSION_ID=${CURRENT_VERSION_ID}"
+
+request "ensure_draft" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/ensure?expected_current_scheme_version_id=${CURRENT_VERSION_ID}" \
+ "200"
+DRAFT_VERSION_ID="$(json_get "${TMP_DIR}/ensure_draft.body" "scheme_version_id")"
+echo "DRAFT_VERSION_ID=${DRAFT_VERSION_ID}"
+
+request "draft_structure" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/structure?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200"
+
+TARGET_SEAT_ID="$(python3 - "${TMP_DIR}/draft_structure.body" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+seat = next((item for item in payload.get("seats", []) if item.get("seat_id")), None)
+if seat is None:
+ raise SystemExit("No seat with seat_id found for authz admin ops smoke")
+print(seat["seat_id"])
+PY
+)"
+echo "TARGET_SEAT_ID=${TARGET_SEAT_ID}"
+
+STAMP="$(date +%s)-$$"
+CLEANUP_PREFIX="AUTHZ_ADMINOPS_${STAMP}_"
+DELETE_CATEGORY_NAME="authz-adminops-delete-${STAMP}"
+DELETE_CATEGORY_CODE="${CLEANUP_PREFIX}DELETE"
+KEEP_CATEGORY_NAME="authz-adminops-keep-${STAMP}"
+KEEP_CATEGORY_CODE="${CLEANUP_PREFIX}KEEP"
+
+request "create_delete_category" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing/categories?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200" \
+ "{\"name\":\"${DELETE_CATEGORY_NAME}\",\"code\":\"${DELETE_CATEGORY_CODE}\"}"
+DELETE_CATEGORY_ID="$(json_get "${TMP_DIR}/create_delete_category.body" "pricing_category_id")"
+echo "DELETE_CATEGORY_ID=${DELETE_CATEGORY_ID}"
+
+request "create_keep_category" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing/categories?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200" \
+ "{\"name\":\"${KEEP_CATEGORY_NAME}\",\"code\":\"${KEEP_CATEGORY_CODE}\"}"
+KEEP_CATEGORY_ID="$(json_get "${TMP_DIR}/create_keep_category.body" "pricing_category_id")"
+echo "KEEP_CATEGORY_ID=${KEEP_CATEGORY_ID}"
+
+request "create_keep_category_rule" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing/rules?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200" \
+ "{\"pricing_category_id\":\"${KEEP_CATEGORY_ID}\",\"target_type\":\"seat\",\"target_ref\":\"${TARGET_SEAT_ID}\",\"amount\":\"666.00\",\"currency\":\"RUB\"}"
+KEEP_RULE_ID="$(json_get "${TMP_DIR}/create_keep_category_rule.body" "price_rule_id")"
+echo "KEEP_RULE_ID=${KEEP_RULE_ID}"
+
+request "draft_pricing_snapshot" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/pricing/snapshot?expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200"
+request "publish_preview_refresh" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/publish-preview?refresh=true&expected_scheme_version_id=${DRAFT_VERSION_ID}" \
+ "200"
+
+request_with_api_key "${ADMIN_API_KEY}" "admin_publish_preview_audit" "GET" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "200"
+request_with_api_key "${OPERATOR_API_KEY}" "operator_publish_preview_audit" "GET" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "403"
+request_with_api_key "${VIEWER_API_KEY}" "viewer_publish_preview_audit" "GET" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "403"
+assert_file_contains "${TMP_DIR}/operator_publish_preview_audit.body" "Admin role required"
+assert_file_contains "${TMP_DIR}/viewer_publish_preview_audit.body" "Admin role required"
+
+request_with_api_key "${ADMIN_API_KEY}" "admin_publish_preview_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true" "200"
+request_with_api_key "${OPERATOR_API_KEY}" "operator_publish_preview_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true" "403"
+request_with_api_key "${VIEWER_API_KEY}" "viewer_publish_preview_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true" "403"
+assert_file_contains "${TMP_DIR}/operator_publish_preview_cleanup_dry_run.body" "Admin role required"
+assert_file_contains "${TMP_DIR}/viewer_publish_preview_cleanup_dry_run.body" "Admin role required"
+
+request_with_api_key "${ADMIN_API_KEY}" "admin_pricing_cleanup_preview" "GET" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup-preview?code_prefix=${CLEANUP_PREFIX}" "200"
+request_with_api_key "${OPERATOR_API_KEY}" "operator_pricing_cleanup_preview" "GET" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup-preview?code_prefix=${CLEANUP_PREFIX}" "403"
+request_with_api_key "${VIEWER_API_KEY}" "viewer_pricing_cleanup_preview" "GET" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup-preview?code_prefix=${CLEANUP_PREFIX}" "403"
+assert_file_contains "${TMP_DIR}/operator_pricing_cleanup_preview.body" "Admin role required"
+assert_file_contains "${TMP_DIR}/viewer_pricing_cleanup_preview.body" "Admin role required"
+
+request_with_api_key "${ADMIN_API_KEY}" "admin_pricing_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" "200" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":true}"
+request_with_api_key "${OPERATOR_API_KEY}" "operator_pricing_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" "403" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":true}"
+request_with_api_key "${VIEWER_API_KEY}" "viewer_pricing_cleanup_dry_run" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" "403" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":true}"
+assert_file_contains "${TMP_DIR}/operator_pricing_cleanup_dry_run.body" "Admin role required"
+assert_file_contains "${TMP_DIR}/viewer_pricing_cleanup_dry_run.body" "Admin role required"
+
+request_with_api_key "${OPERATOR_API_KEY}" "operator_pricing_cleanup_execute" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" "403" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":false}"
+request_with_api_key "${VIEWER_API_KEY}" "viewer_pricing_cleanup_execute" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" "403" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":false}"
+assert_file_contains "${TMP_DIR}/operator_pricing_cleanup_execute.body" "Admin role required"
+assert_file_contains "${TMP_DIR}/viewer_pricing_cleanup_execute.body" "Admin role required"
+
+request_with_api_key "${ADMIN_API_KEY}" "admin_pricing_cleanup_execute" "POST" \
+ "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/pricing/categories/cleanup" "200" \
+ "{\"code_prefixes\":[\"${CLEANUP_PREFIX}\"],\"name_prefixes\":[],\"pricing_category_ids\":[],\"delete_only_without_rules\":true,\"dry_run\":false}"
+assert_json_int_eq "${TMP_DIR}/admin_pricing_cleanup_execute.body" "deleted_count" "1"
+assert_json_int_eq "${TMP_DIR}/admin_pricing_cleanup_execute.body" "skipped_count" "1"
+
+request "pricing_bundle_after_admin_cleanup_execute" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/pricing" "200"
+assert_json_len_eq "${TMP_DIR}/pricing_bundle_after_admin_cleanup_execute.body" "categories" "1"
+assert_json_len_eq "${TMP_DIR}/pricing_bundle_after_admin_cleanup_execute.body" "rules" "1"
+
+python3 - "${TMP_DIR}/pricing_bundle_after_admin_cleanup_execute.body" "${DELETE_CATEGORY_ID}" "${KEEP_CATEGORY_ID}" "${KEEP_RULE_ID}" <<'PY'
+import json
+import sys
+from pathlib import Path
+
+payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
+delete_category_id = sys.argv[2]
+keep_category_id = sys.argv[3]
+keep_rule_id = sys.argv[4]
+
+category_ids = {item["pricing_category_id"] for item in payload.get("categories", [])}
+rule_ids = {item["price_rule_id"] for item in payload.get("rules", [])}
+
+if delete_category_id in category_ids:
+ raise SystemExit("Authz cleanup execute left deletable category behind")
+if keep_category_id not in category_ids:
+ raise SystemExit("Authz cleanup execute removed protected category")
+if keep_rule_id not in rule_ids:
+ raise SystemExit("Authz cleanup execute removed protected rule")
+PY
+echo "[OK] admin cleanup execute remained destructive only for safe fixture category"
+
+echo
+echo "===== done ====="
+echo "[OK] smoke authz admin ops completed successfully"
+echo "FRESH_SCHEME_ID=${SCHEME_ID}"
diff --git a/backend/scripts/smoke_common.sh b/backend/scripts/smoke_common.sh
index aaebbab..ef260df 100644
--- a/backend/scripts/smoke_common.sh
+++ b/backend/scripts/smoke_common.sh
@@ -101,6 +101,58 @@ PY
fi
}
+request_with_api_key() {
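+ # Like request(), but sends an explicit X-API-Key so per-role authorization
+ # (admin vs operator vs viewer) can be exercised and the expected status asserted.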
+ local api_key="$1"
+ local name="$2"
+ local method="$3"
+ local url="$4"
+ local expected_status="$5"
+ local body="${6:-}"
+ local out_file="${TMP_DIR}/${name}.body"
+ local status_file="${TMP_DIR}/${name}.status"
+
+ echo
+ echo "===== ${name} ====="
+
+ if [[ -n "${body}" ]]; then
+ curl -sS \
+ -X "${method}" \
+ -H "X-API-Key: ${api_key}" \
+ -H "Content-Type: application/json" \
+ -o "${out_file}" \
+ -w "%{http_code}" \
+ "${url}" \
+ --data "${body}" > "${status_file}"
+ else
+ curl -sS \
+ -X "${method}" \
+ -H "X-API-Key: ${api_key}" \
+ -o "${out_file}" \
+ -w "%{http_code}" \
+ "${url}" > "${status_file}"
+ fi
+
+ local actual_status
+ actual_status="$(python3 - "$status_file" <<'PY'
+from pathlib import Path
+import sys
+print(Path(sys.argv[1]).read_text(encoding="utf-8").strip())
+PY
+)"
+
+ echo "[${method}] ${url} -> ${actual_status}"
+ python3 - "$out_file" <<'PY'
+from pathlib import Path
+import sys
+print(Path(sys.argv[1]).read_text(encoding="utf-8"))
+PY
+ echo
+
+ if [[ "${actual_status}" != "${expected_status}" ]]; then
+ fail "Unexpected HTTP status for ${name}: expected ${expected_status}, got ${actual_status}"
+ fi
+}
+
upload_svg() {
local name="$1"
local upload_filename="$2"
@@ -141,6 +193,51 @@ PY
fi
}
+upload_file_expect_status() {
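+ # Upload an arbitrary fixture file with an explicit target filename and content type,
+ # asserting the expected HTTP status; used by the negative upload smoke cases.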
+ local name="$1"
+ local file_path="$2"
+ local upload_filename="$3"
+ local content_type="$4"
+ local expected_status="$5"
+ local out_file="${TMP_DIR}/${name}.body"
+ local status_file="${TMP_DIR}/${name}.status"
+
+ if [[ ! -f "${file_path}" ]]; then
+ fail "Upload fixture file not found: ${file_path}"
+ fi
+
+ echo
+ echo "===== ${name} ====="
+
+ curl -sS \
+ -X POST \
+ -H "X-API-Key: ${API_KEY}" \
+ -o "${out_file}" \
+ -w "%{http_code}" \
+ -F "file=@${file_path};filename=${upload_filename};type=${content_type}" \
+ "${API_URL}/api/v1/schemes/upload" > "${status_file}"
+
+ local actual_status
+ actual_status="$(python3 - "$status_file" <<'PY'
+from pathlib import Path
+import sys
+print(Path(sys.argv[1]).read_text(encoding="utf-8").strip())
+PY
+)"
+
+ echo "[POST] ${API_URL}/api/v1/schemes/upload -> ${actual_status}"
+ python3 - "$out_file" <<'PY'
+from pathlib import Path
+import sys
+print(Path(sys.argv[1]).read_text(encoding="utf-8"))
+PY
+ echo
+
+ if [[ "${actual_status}" != "${expected_status}" ]]; then
+ fail "Upload failed for ${upload_filename}: expected ${expected_status}, got ${actual_status}"
+ fi
+}
+
json_get() {
local file="$1"
local expr="$2"
diff --git a/backend/scripts/smoke_regression.sh b/backend/scripts/smoke_regression.sh
index e6f2367..97c9c22 100755
--- a/backend/scripts/smoke_regression.sh
+++ b/backend/scripts/smoke_regression.sh
@@ -10,6 +10,22 @@ echo
echo "===== smoke pricing/publish ====="
bash "${SCRIPT_DIR}/smoke_pricing_publish.sh"
+echo
+echo "===== smoke version lifecycle ====="
+bash "${SCRIPT_DIR}/smoke_version_lifecycle.sh"
+
+echo
+echo "===== smoke admin ops ====="
+bash "${SCRIPT_DIR}/smoke_admin_ops.sh"
+
+echo
+echo "===== smoke authz admin ops ====="
+bash "${SCRIPT_DIR}/smoke_authz_admin_ops.sh"
+
+echo
+echo "===== smoke upload negative ====="
+bash "${SCRIPT_DIR}/smoke_upload_negative.sh"
+
echo
echo "===== done ====="
echo "[OK] smoke regression orchestration completed successfully"
diff --git a/backend/scripts/smoke_upload_negative.sh b/backend/scripts/smoke_upload_negative.sh
new file mode 100644
index 0000000..b29f446
--- /dev/null
+++ b/backend/scripts/smoke_upload_negative.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+TMP_DIR="$(mktemp -d)"
+trap 'rm -rf "${TMP_DIR}"' EXIT
+
+# shellcheck source=backend/scripts/smoke_common.sh
+source "${SCRIPT_DIR}/smoke_common.sh"
+
+wait_for_health
+require_fixture_svg
+
+request "manifest" "GET" "${API_URL}/api/v1/manifest" "200"
+MAX_FILE_SIZE_BYTES="$(json_get "${TMP_DIR}/manifest.body" "svg_limits.max_file_size_bytes")"
+echo "MAX_FILE_SIZE_BYTES=${MAX_FILE_SIZE_BYTES}"
+
+EMPTY_SVG_PATH="${TMP_DIR}/empty.svg"
+NON_SVG_PATH="${TMP_DIR}/not-svg.txt"
+SVG_BODY_WRONG_EXTENSION_PATH="${TMP_DIR}/svg-body.txt"
+OVERSIZE_SVG_PATH="${TMP_DIR}/oversize.svg"
+
+: > "${EMPTY_SVG_PATH}"
+printf 'plain text payload\n' > "${NON_SVG_PATH}"
+cp "${FIXTURE_SVG_PATH}" "${SVG_BODY_WRONG_EXTENSION_PATH}"
+
+python3 - "${OVERSIZE_SVG_PATH}" "${MAX_FILE_SIZE_BYTES}" <<'PY'
+import sys
+from pathlib import Path
+
+output_path = Path(sys.argv[1])
+max_file_size_bytes = int(sys.argv[2])
+payload = ""
+output_path.write_text(payload, encoding="utf-8")
+if output_path.stat().st_size <= max_file_size_bytes:
+ raise SystemExit("Generated oversize SVG is not larger than configured limit")
+PY
+
+upload_file_expect_status "upload_empty_file" "${EMPTY_SVG_PATH}" "empty.svg" "image/svg+xml" "400"
+assert_file_contains "${TMP_DIR}/upload_empty_file.body" "Uploaded file is empty"
+
+upload_file_expect_status "upload_non_svg_text_plain" "${NON_SVG_PATH}" "not-svg.txt" "text/plain" "400"
+assert_file_contains "${TMP_DIR}/upload_non_svg_text_plain.body" "Only SVG files are allowed"
+
+upload_file_expect_status "upload_svg_body_wrong_extension" "${SVG_BODY_WRONG_EXTENSION_PATH}" "valid-svg-body.txt" "text/plain" "400"
+assert_file_contains "${TMP_DIR}/upload_svg_body_wrong_extension.body" "Only SVG files are allowed"
+
+upload_file_expect_status "upload_oversize_svg" "${OVERSIZE_SVG_PATH}" "oversize.svg" "image/svg+xml" "413"
+assert_file_contains "${TMP_DIR}/upload_oversize_svg.body" "SVG file exceeds configured size limit"
+
+echo
+echo "===== done ====="
+echo "[OK] smoke upload negative completed successfully"
diff --git a/backend/scripts/smoke_version_lifecycle.sh b/backend/scripts/smoke_version_lifecycle.sh
new file mode 100644
index 0000000..ac68f83
--- /dev/null
+++ b/backend/scripts/smoke_version_lifecycle.sh
@@ -0,0 +1,236 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+TMP_DIR="$(mktemp -d)"
+trap 'rm -rf "${TMP_DIR}"' EXIT
+
+# shellcheck source=backend/scripts/smoke_common.sh
+source "${SCRIPT_DIR}/smoke_common.sh"
+
+wait_for_health
+
+create_fresh_scheme_from_upload "smoke-version-lifecycle"
+
+request "scheme_detail_initial" "GET" "${API_URL}/api/v1/schemes/${SCHEME_ID}" "200"
+assert_json_eq "${TMP_DIR}/scheme_detail_initial.body" "status" "draft"
+assert_json_int_eq "${TMP_DIR}/scheme_detail_initial.body" "current_version_number" "1"
+
+request "scheme_current_initial" "GET" "${API_URL}/api/v1/schemes/${SCHEME_ID}/current" "200"
+VERSION1_ID="$(json_get "${TMP_DIR}/scheme_current_initial.body" "scheme_version_id")"
+assert_json_int_eq "${TMP_DIR}/scheme_current_initial.body" "version_number" "1"
+assert_json_eq "${TMP_DIR}/scheme_current_initial.body" "status" "draft"
+echo "VERSION1_ID=${VERSION1_ID}"
+
+request "ensure_draft_v1" "POST" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/ensure?expected_current_scheme_version_id=${VERSION1_ID}" \
+ "200"
+assert_json_eq "${TMP_DIR}/ensure_draft_v1.body" "scheme_version_id" "${VERSION1_ID}"
+assert_json_eq "${TMP_DIR}/ensure_draft_v1.body" "created" "false"
+
+request "draft_structure_v1" "GET" \
+ "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/structure?expected_scheme_version_id=${VERSION1_ID}" \
+ "200"
+
+read -r VERSION1_SEAT_RECORD_ID VERSION1_SEAT_ID ORIGINAL_ROW_LABEL ORIGINAL_SEAT_NUMBER <