chore(backend): finalize backend baseline and frontend handoff contract
Freeze the current backend contract for frontend integration. Document the stabilized backend surface and the handoff expectations, and mark the current state as the baseline for further frontend work.
This commit is contained in:
173
backend/scripts/smoke_artifact_corruption.sh
Normal file
173
backend/scripts/smoke_artifact_corruption.sh
Normal file
@@ -0,0 +1,173 @@
|
||||
#!/usr/bin/env bash
# Smoke test: publish-preview artifact corruption detection and remediation.
# Exercises the admin audit/cleanup endpoints against two failure modes:
#   case A - preview file deleted on disk while its DB row remains
#   case B - DB row deleted while the preview file remains on disk
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
TMP_DIR="$(mktemp -d)"
trap 'rm -rf "${TMP_DIR}"' EXIT

# Shared smoke helpers: request, json_get, assert_*, wait_for_health,
# create_fresh_scheme_from_upload, fail. NOTE(review): REPO_ROOT is
# presumably exported by smoke_common.sh — confirm.
# shellcheck source=backend/scripts/smoke_common.sh
source "${SCRIPT_DIR}/smoke_common.sh"

# Export everything defined in .env (API_URL, POSTGRES_*, ...) for this run.
# Fail with a clear message if the file is absent instead of dying later on
# an unset variable.
if [[ ! -f "${REPO_ROOT}/.env" ]]; then
  echo "Missing ${REPO_ROOT}/.env — cannot load environment" >&2
  exit 1
fi
set -a
source "${REPO_ROOT}/.env"
set +a
# Bring up a fresh scheme with an ensured draft version, and confirm the
# publish-preview artifact store starts out healthy (no drift on disk or in DB).
wait_for_health

create_fresh_scheme_from_upload "smoke-artifact-corruption"

request "scheme_current" "GET" "${API_URL}/api/v1/schemes/${SCHEME_ID}/current" "200"
CURRENT_VERSION_ID="$(json_get "${TMP_DIR}/scheme_current.body" "scheme_version_id")"
printf 'CURRENT_VERSION_ID=%s\n' "${CURRENT_VERSION_ID}"

# Ensure a draft exists on top of the current version (optimistic-concurrency
# check via expected_current_scheme_version_id).
request "ensure_draft" "POST" "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/ensure?expected_current_scheme_version_id=${CURRENT_VERSION_ID}" "200"
DRAFT_VERSION_ID="$(json_get "${TMP_DIR}/ensure_draft.body" "scheme_version_id")"
printf 'DRAFT_VERSION_ID=%s\n' "${DRAFT_VERSION_ID}"

# Baseline audit must report zero orphan files and zero missing files.
request "initial_publish_preview_audit" "GET" "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "200"
assert_json_int_eq "${TMP_DIR}/initial_publish_preview_audit.body" "orphan_files_count" "0"
assert_json_int_eq "${TMP_DIR}/initial_publish_preview_audit.body" "missing_files_for_db_rows_count" "0"
# Case A setup: generate a fresh publish-preview artifact, then locate its
# DB row (artifact_id) and its on-disk file (storage_path).
request "publish_preview_refresh_case_a" "GET" \
  "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/publish-preview?refresh=true&expected_scheme_version_id=${DRAFT_VERSION_ID}" \
  "200"
request "admin_current_artifacts_case_a" "GET" \
  "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/current/artifacts" \
  "200"

# Capture the parser output in a plain command substitution first so a python
# failure aborts the script under `set -e`. The previous `read <<EOF $(...)`
# form discarded the substitution's exit status, leaving both variables
# silently empty on parser failure.
CASE_A_INFO="$(python3 - "${TMP_DIR}/admin_current_artifacts_case_a.body" <<'PY'
import json
import sys
from pathlib import Path

payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
items = [item for item in payload.get("items", []) if item.get("artifact_type") == "publish_preview"]
if not items:
    raise SystemExit("No publish_preview artifact found for case A")
item = items[-1]
print(item["artifact_id"], item["storage_path"])
PY
)"
read -r CASE_A_ARTIFACT_ID CASE_A_STORAGE_PATH <<<"${CASE_A_INFO}"
[[ -n "${CASE_A_ARTIFACT_ID}" && -n "${CASE_A_STORAGE_PATH}" ]] \
  || fail "Case A: could not extract artifact_id/storage_path from artifacts response"
echo "CASE_A_ARTIFACT_ID=${CASE_A_ARTIFACT_ID}"
echo "CASE_A_STORAGE_PATH=${CASE_A_STORAGE_PATH}"
# Case A corruption: delete the preview file inside the svg-service container
# while its DB row stays behind.
docker compose exec -T svg-service python - "${CASE_A_STORAGE_PATH}" <<'PY'
from pathlib import Path
import sys

path = Path(sys.argv[1])
if not path.exists():
    raise SystemExit(f"Case A preview file missing before manual removal: {path}")
path.unlink()
PY
echo "[OK] case A manually removed preview file while DB row remains"

# The audit must now flag exactly one DB row whose file is gone, and must
# name the affected artifact in its payload.
request "audit_case_a_broken" "GET" "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "200"
assert_json_int_eq "${TMP_DIR}/audit_case_a_broken.body" "orphan_files_count" "0"
assert_json_int_eq "${TMP_DIR}/audit_case_a_broken.body" "missing_files_for_db_rows_count" "1"
assert_file_contains "${TMP_DIR}/audit_case_a_broken.body" "\"artifact_id\":\"${CASE_A_ARTIFACT_ID}\""

# Dry-run cleanup: reports the drift but deletes nothing.
request "cleanup_case_a_dry_run" "POST" "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true" "200"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_dry_run.body" "orphan_files_count" "0"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_dry_run.body" "missing_files_for_db_rows_count" "1"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_dry_run.body" "deleted_files_count" "0"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_dry_run.body" "deleted_db_rows_count" "0"

# Real cleanup: removes the stale DB row (no file to delete) and reports it.
request "cleanup_case_a_execute" "POST" "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=false" "200"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_execute.body" "orphan_files_count" "0"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_execute.body" "missing_files_for_db_rows_count" "1"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_execute.body" "deleted_files_count" "0"
assert_json_int_eq "${TMP_DIR}/cleanup_case_a_execute.body" "deleted_db_rows_count" "1"
assert_file_contains "${TMP_DIR}/cleanup_case_a_execute.body" "\"${CASE_A_ARTIFACT_ID}\""

# Store must be healthy again after remediation.
request "audit_case_a_healthy" "GET" "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "200"
assert_json_int_eq "${TMP_DIR}/audit_case_a_healthy.body" "orphan_files_count" "0"
assert_json_int_eq "${TMP_DIR}/audit_case_a_healthy.body" "missing_files_for_db_rows_count" "0"
# Case B setup: generate another publish-preview artifact and locate its
# DB row (artifact_id) and on-disk file (storage_path).
request "publish_preview_refresh_case_b" "GET" \
  "${API_URL}/api/v1/schemes/${SCHEME_ID}/draft/publish-preview?refresh=true&expected_scheme_version_id=${DRAFT_VERSION_ID}" \
  "200"
request "admin_current_artifacts_case_b" "GET" \
  "${API_URL}/api/v1/admin/schemes/${SCHEME_ID}/current/artifacts" \
  "200"

# As in case A: plain command substitution so a python failure aborts under
# `set -e` instead of being swallowed by a `read <<EOF $(...)` heredoc.
CASE_B_INFO="$(python3 - "${TMP_DIR}/admin_current_artifacts_case_b.body" <<'PY'
import json
import sys
from pathlib import Path

payload = json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
items = [item for item in payload.get("items", []) if item.get("artifact_type") == "publish_preview"]
if not items:
    raise SystemExit("No publish_preview artifact found for case B")
item = items[-1]
print(item["artifact_id"], item["storage_path"])
PY
)"
read -r CASE_B_ARTIFACT_ID CASE_B_STORAGE_PATH <<<"${CASE_B_INFO}"
[[ -n "${CASE_B_ARTIFACT_ID}" && -n "${CASE_B_STORAGE_PATH}" ]] \
  || fail "Case B: could not extract artifact_id/storage_path from artifacts response"
echo "CASE_B_ARTIFACT_ID=${CASE_B_ARTIFACT_ID}"
echo "CASE_B_STORAGE_PATH=${CASE_B_STORAGE_PATH}"
# Case B corruption: delete the artifact's DB row directly in postgres while
# the file stays on disk. NOTE(review): the ids are interpolated into the SQL
# string; this is safe only because they are server-generated values from our
# own API, not user input — confirm if that assumption ever changes.
CASE_B_DELETE_COUNT="$(docker compose exec -T postgres \
  psql -U "${POSTGRES_USER}" -d "${POSTGRES_DB}" \
  -Atc "with deleted as (delete from scheme_artifacts where artifact_id='${CASE_B_ARTIFACT_ID}' and artifact_type='publish_preview' and scheme_id='${SCHEME_ID}' returning 1) select count(*) from deleted;")"
[[ "${CASE_B_DELETE_COUNT}" == "1" ]] \
  || fail "Case B expected to delete exactly one publish_preview DB row, got ${CASE_B_DELETE_COUNT}"
echo "[OK] case B manually removed publish_preview DB row while file remains"

# The audit must now flag exactly one orphan file and name its storage path.
request "audit_case_b_broken" "GET" "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "200"
assert_json_int_eq "${TMP_DIR}/audit_case_b_broken.body" "orphan_files_count" "1"
assert_json_int_eq "${TMP_DIR}/audit_case_b_broken.body" "missing_files_for_db_rows_count" "0"
assert_file_contains "${TMP_DIR}/audit_case_b_broken.body" "\"${CASE_B_STORAGE_PATH}\""

# Dry-run cleanup: reports the orphan but deletes nothing.
request "cleanup_case_b_dry_run" "POST" "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=true" "200"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_dry_run.body" "orphan_files_count" "1"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_dry_run.body" "missing_files_for_db_rows_count" "0"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_dry_run.body" "deleted_files_count" "0"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_dry_run.body" "deleted_db_rows_count" "0"

# Real cleanup: removes the orphan file (no DB row to delete) and reports it.
request "cleanup_case_b_execute" "POST" "${API_URL}/api/v1/admin/artifacts/publish-preview/cleanup?dry_run=false" "200"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_execute.body" "orphan_files_count" "1"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_execute.body" "missing_files_for_db_rows_count" "0"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_execute.body" "deleted_files_count" "1"
assert_json_int_eq "${TMP_DIR}/cleanup_case_b_execute.body" "deleted_db_rows_count" "0"
assert_file_contains "${TMP_DIR}/cleanup_case_b_execute.body" "\"${CASE_B_STORAGE_PATH}\""
# Final verification: both corruption cases remediated, audit fully clean,
# and the DB row count matches the on-disk file count exactly.
request "final_publish_preview_audit" "GET" "${API_URL}/api/v1/admin/artifacts/publish-preview/audit" "200"
assert_json_int_eq "${TMP_DIR}/final_publish_preview_audit.body" "orphan_files_count" "0"
assert_json_int_eq "${TMP_DIR}/final_publish_preview_audit.body" "missing_files_for_db_rows_count" "0"

FINAL_DB_ROWS_COUNT="$(json_get "${TMP_DIR}/final_publish_preview_audit.body" "db_rows_count")"
FINAL_DISK_FILES_COUNT="$(json_get "${TMP_DIR}/final_publish_preview_audit.body" "disk_files_count")"
[[ "${FINAL_DB_ROWS_COUNT}" == "${FINAL_DISK_FILES_COUNT}" ]] \
  || fail "Final publish-preview audit mismatch after remediation: db_rows_count=${FINAL_DB_ROWS_COUNT}, disk_files_count=${FINAL_DISK_FILES_COUNT}"

printf '\n'
printf '%s\n' "===== done ====="
printf '%s\n' "[OK] smoke artifact corruption completed successfully"
printf '%s\n' "FRESH_SCHEME_ID=${SCHEME_ID}"
printf '%s\n' "CASE_A_ARTIFACT_ID=${CASE_A_ARTIFACT_ID}"
printf '%s\n' "CASE_B_ARTIFACT_ID=${CASE_B_ARTIFACT_ID}"
Reference in New Issue
Block a user