Complete publish preview Phase 2A with retention, refresh and cache consistency

This commit is contained in:
greebo
2026-03-19 18:20:21 +03:00
parent c91c5abf15
commit 4c2b910765
5 changed files with 157 additions and 29 deletions

View File

@@ -35,6 +35,7 @@ class Settings(BaseSettings):
svg_display_technical_text_patterns: str = "debug,tech,helper,tmp,service" svg_display_technical_text_patterns: str = "debug,tech,helper,tmp,service"
storage_root_dir: str = "/data" storage_root_dir: str = "/data"
publish_preview_retention_per_variant: int = 2
model_config = SettingsConfigDict( model_config = SettingsConfigDict(
env_file=".env", env_file=".env",

View File

@@ -1,6 +1,6 @@
from uuid import uuid4 from uuid import uuid4
from sqlalchemy import asc, desc, select from sqlalchemy import asc, delete, desc, select
from app.db.session import AsyncSessionLocal from app.db.session import AsyncSessionLocal
from app.models.scheme_artifact import SchemeArtifactRecord from app.models.scheme_artifact import SchemeArtifactRecord
@@ -99,3 +99,16 @@ async def get_latest_scheme_artifact(
result = await session.execute(stmt) result = await session.execute(stmt)
return result.scalar_one_or_none() return result.scalar_one_or_none()
async def delete_scheme_artifacts_by_artifact_ids(artifact_ids: list[str]) -> int:
    """Bulk-delete artifact records whose ``artifact_id`` is in *artifact_ids*.

    An empty id list is a no-op that returns 0 without opening a session.
    Returns the number of rows actually removed (0 when the driver does
    not report a rowcount).
    """
    if not artifact_ids:
        return 0
    stmt = delete(SchemeArtifactRecord).where(
        SchemeArtifactRecord.artifact_id.in_(artifact_ids)
    )
    async with AsyncSessionLocal() as session:
        result = await session.execute(stmt)
        await session.commit()
    return int(result.rowcount or 0)

View File

@@ -18,6 +18,24 @@ from app.services.scheme_validation import build_scheme_validation_report
from app.services.structure_diff import build_structure_diff from app.services.structure_diff import build_structure_diff
def _serialize_artifacts(artifacts_rows: list) -> dict:
return {
"total": len(artifacts_rows),
"items": [
{
"artifact_id": row.artifact_id,
"artifact_type": row.artifact_type,
"artifact_variant": row.artifact_variant,
"status": row.status,
"storage_path": row.storage_path,
"meta_json": row.meta_json,
"created_at": row.created_at.isoformat(),
}
for row in artifacts_rows
],
}
async def build_publish_preview_bundle( async def build_publish_preview_bundle(
*, *,
scheme_id: str, scheme_id: str,
@@ -62,21 +80,7 @@ async def build_publish_preview_bundle(
except Exception: except Exception:
unpriced += 1 unpriced += 1
artifacts = { artifacts = _serialize_artifacts(artifacts_rows)
"total": len(artifacts_rows),
"items": [
{
"artifact_id": row.artifact_id,
"artifact_type": row.artifact_type,
"artifact_variant": row.artifact_variant,
"status": row.status,
"storage_path": row.storage_path,
"meta_json": row.meta_json,
"created_at": row.created_at.isoformat(),
}
for row in artifacts_rows
],
}
pricing_coverage = { pricing_coverage = {
"snapshot_available": snapshot_available, "snapshot_available": snapshot_available,
@@ -126,10 +130,21 @@ async def get_or_build_publish_preview_bundle(
scheme_version_id=scheme_version_id, scheme_version_id=scheme_version_id,
baseline_override_scheme_version_id=baseline_override_scheme_version_id, baseline_override_scheme_version_id=baseline_override_scheme_version_id,
) )
await save_publish_preview_artifact(
save_result = await save_publish_preview_artifact(
scheme_id=scheme_id, scheme_id=scheme_id,
scheme_version_id=scheme_version_id, scheme_version_id=scheme_version_id,
payload=payload, payload=payload,
baseline_scheme_version_id=payload["structure_diff"]["baseline_scheme_version_id"], baseline_scheme_version_id=payload["structure_diff"]["baseline_scheme_version_id"],
) )
artifacts_rows = await list_scheme_artifacts(scheme_version_id=scheme_version_id)
payload["artifacts"] = _serialize_artifacts(artifacts_rows)
payload["summary"]["has_artifacts"] = payload["artifacts"]["total"] > 0
payload["summary"]["preview_cache_cleanup"] = save_result["cleanup"]
artifact = save_result["artifact"]
path = Path(artifact.storage_path)
path.write_text(json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8")
return payload return payload

View File

@@ -5,7 +5,11 @@ from pathlib import Path
from uuid import uuid4 from uuid import uuid4
from app.core.config import settings from app.core.config import settings
from app.repositories.scheme_artifacts import create_scheme_artifact, list_scheme_artifacts from app.repositories.scheme_artifacts import (
create_scheme_artifact,
delete_scheme_artifacts_by_artifact_ids,
list_scheme_artifacts,
)
def _preview_storage_dir() -> Path: def _preview_storage_dir() -> Path:
@@ -14,6 +18,64 @@ def _preview_storage_dir() -> Path:
return path return path
def _cleanup_preview_file(storage_path: str) -> None:
    """Best-effort removal of a preview JSON file and its now-empty parent dir.

    Never raises: a missing file, a non-empty directory, or a permission
    error must not break artifact cleanup for the caller.
    """
    path = Path(storage_path)
    # Remove the file. The original only caught FileNotFoundError (which
    # the preceding exists() check made near-unreachable) while letting
    # PermissionError and other OSErrors escape a helper that is meant to
    # be best-effort — swallow any OSError here instead.
    try:
        if path.is_file():
            path.unlink()
    except OSError:
        pass
    # Remove the parent only when it is a per-preview subdirectory, never
    # the preview root itself. rmdir() refuses to delete a non-empty or
    # missing directory with an OSError, which we deliberately ignore.
    parent = path.parent
    preview_root = Path(settings.storage_preview_dir)
    try:
        if parent != preview_root and parent.exists():
            parent.rmdir()
    except OSError:
        pass
async def cleanup_publish_preview_artifacts(
    *,
    scheme_version_id: str,
    baseline_scheme_version_id: str | None,
) -> dict:
    """Enforce the per-variant retention policy for publish-preview artifacts.

    Keeps the newest ``publish_preview_retention_per_variant`` rows
    (clamped to at least 1) for the given scheme version and baseline
    variant, removes the rest — stored JSON files first, then the DB
    records — and reports what was deleted.
    """
    keep = max(1, settings.publish_preview_retention_per_variant)
    rows = await list_scheme_artifacts(
        scheme_version_id=scheme_version_id,
        artifact_type="publish_preview",
        artifact_variant=baseline_scheme_version_id or "default",
    )
    if len(rows) <= keep:
        # Nothing over quota — report the policy with an empty deletion set.
        return {
            "retention": keep,
            "deleted_count": 0,
            "deleted_artifact_ids": [],
        }
    # Newest first; tie-break on the primary key for a deterministic order.
    ordered = sorted(rows, key=lambda row: (row.created_at, row.id), reverse=True)
    stale = ordered[keep:]
    stale_ids = [row.artifact_id for row in stale]
    for row in stale:
        _cleanup_preview_file(row.storage_path)
    deleted = await delete_scheme_artifacts_by_artifact_ids(stale_ids)
    return {
        "retention": keep,
        "deleted_count": deleted,
        "deleted_artifact_ids": stale_ids,
    }
async def save_publish_preview_artifact( async def save_publish_preview_artifact(
*, *,
scheme_id: str, scheme_id: str,
@@ -37,7 +99,16 @@ async def save_publish_preview_artifact(
"summary": payload.get("summary"), "summary": payload.get("summary"),
}, },
) )
return artifact
cleanup = await cleanup_publish_preview_artifacts(
scheme_version_id=scheme_version_id,
baseline_scheme_version_id=baseline_scheme_version_id,
)
return {
"artifact": artifact,
"cleanup": cleanup,
}
async def get_latest_publish_preview_artifact( async def get_latest_publish_preview_artifact(
@@ -45,15 +116,13 @@ async def get_latest_publish_preview_artifact(
scheme_version_id: str, scheme_version_id: str,
baseline_scheme_version_id: str | None, baseline_scheme_version_id: str | None,
): ):
rows = await list_scheme_artifacts(scheme_version_id=scheme_version_id) rows = await list_scheme_artifacts(
variant = baseline_scheme_version_id or "default" scheme_version_id=scheme_version_id,
artifact_type="publish_preview",
matching = [ artifact_variant=baseline_scheme_version_id or "default",
row for row in rows )
if row.artifact_type == "publish_preview" and row.artifact_variant == variant if not rows:
]
if not matching:
return None return None
matching.sort(key=lambda row: (row.created_at, row.id), reverse=True) rows.sort(key=lambda row: (row.created_at, row.id), reverse=True)
return matching[0] return rows[0]

View File

@@ -0,0 +1,30 @@
# Publish Preview — Phase 2A Status
## Status
Phase 2A is essentially complete: retention, refresh, and cache consistency are all verified; only minor follow-up polish remains.
## Confirmed
- retention policy works
- `refresh=true` works
- cached read works
- response contract is more consistent
- cleanup actually removes old records from the database
## Retention result
For the current draft version, exactly 2 preview artifacts remain in the database, matching the configured retention policy.
## Response contract
The response now explicitly returns:
- `preview_cache_cleanup.retention` (equal to the configured limit, currently 2)
- `preview_cache_cleanup.deleted_count`
- `preview_cache_cleanup.deleted_artifact_ids`
## Runtime state
- logs are clean
- no new exceptions were observed