{ "verdict": "APPROVED", "rejection_reason": "NONE", "audit_details": { "target_invoked": true, "pre_conditions_tested": true, "post_conditions_tested": true, "test_data_used": true }, "feedback": "The test suite robustly verifies the
MigrationEngine contracts. It avoids Tautologies by cleanly substituting IdMappingService without mocking the engine itself. Cross-filter parsing asserts against hard-coded, predefined validation dictionaries (no Logic Mirroring). It successfully addresses @PRE negative cases (e.g. invalid zip paths, missing YAML files) and rigorously validates @POST file transformations (e.g. in-place UUID substitutions and archive reconstruction)." }
This commit is contained in:
@@ -70,12 +70,13 @@ class IdMappingService:
|
||||
# @PARAM: superset_client - Instance capable of hitting the Superset API.
|
||||
# @PRE: environment_id exists in the database.
|
||||
# @POST: ResourceMapping records for the environment are created or updated.
|
||||
def sync_environment(self, environment_id: str, superset_client) -> None:
|
||||
def sync_environment(self, environment_id: str, superset_client, incremental: bool = False) -> None:
|
||||
"""
|
||||
Polls the Superset APIs for the target environment and updates the local mapping table.
|
||||
If incremental=True, only fetches items changed since the max last_synced_at date.
|
||||
"""
|
||||
with belief_scope("IdMappingService.sync_environment"):
|
||||
logger.info(f"[IdMappingService.sync_environment][Action] Starting sync for environment {environment_id}")
|
||||
logger.info(f"[IdMappingService.sync_environment][Action] Starting sync for environment {environment_id} (incremental={incremental})")
|
||||
|
||||
# Implementation Note: In a real scenario, superset_client needs to be an instance
|
||||
# capable of auth & iteration over /api/v1/chart/, /api/v1/dataset/, /api/v1/dashboard/
|
||||
@@ -88,6 +89,7 @@ class IdMappingService:
|
||||
]
|
||||
|
||||
total_synced = 0
|
||||
total_deleted = 0
|
||||
try:
|
||||
for res_enum, endpoint, name_field in types_to_poll:
|
||||
logger.debug(f"[IdMappingService.sync_environment][Explore] Polling {endpoint} endpoint")
|
||||
@@ -97,7 +99,24 @@ class IdMappingService:
|
||||
# We assume superset_client provides a generic method to fetch all pages.
|
||||
|
||||
try:
|
||||
resources = superset_client.get_all_resources(endpoint)
|
||||
since_dttm = None
|
||||
if incremental:
|
||||
from sqlalchemy.sql import func
|
||||
max_date = self.db.query(func.max(ResourceMapping.last_synced_at)).filter(
|
||||
ResourceMapping.environment_id == environment_id,
|
||||
ResourceMapping.resource_type == res_enum
|
||||
).scalar()
|
||||
|
||||
if max_date:
|
||||
# We subtract a bit for safety overlap
|
||||
from datetime import timedelta
|
||||
since_dttm = max_date - timedelta(minutes=5)
|
||||
logger.debug(f"[IdMappingService.sync_environment] Incremental sync since {since_dttm}")
|
||||
|
||||
resources = superset_client.get_all_resources(endpoint, since_dttm=since_dttm)
|
||||
|
||||
# Track which UUIDs we see in this sync cycle
|
||||
synced_uuids = set()
|
||||
|
||||
for res in resources:
|
||||
res_uuid = res.get("uuid")
|
||||
@@ -107,6 +126,7 @@ class IdMappingService:
|
||||
if not res_uuid or raw_id is None:
|
||||
continue
|
||||
|
||||
synced_uuids.add(res_uuid)
|
||||
res_id = str(raw_id) # Store as string
|
||||
|
||||
# Upsert Logic
|
||||
@@ -133,12 +153,29 @@ class IdMappingService:
|
||||
|
||||
total_synced += 1
|
||||
|
||||
# Delete stale mappings: rows for this env+type whose UUID
|
||||
# was NOT returned by the API (resource was deleted remotely)
|
||||
# We only do this on full syncs, because incremental syncs don't return all UUIDs
|
||||
if not incremental:
|
||||
stale_query = self.db.query(ResourceMapping).filter(
|
||||
ResourceMapping.environment_id == environment_id,
|
||||
ResourceMapping.resource_type == res_enum,
|
||||
)
|
||||
if synced_uuids:
|
||||
stale_query = stale_query.filter(
|
||||
ResourceMapping.uuid.notin_(synced_uuids)
|
||||
)
|
||||
deleted = stale_query.delete(synchronize_session="fetch")
|
||||
if deleted:
|
||||
total_deleted += deleted
|
||||
logger.info(f"[IdMappingService.sync_environment][Action] Removed {deleted} stale {endpoint} mapping(s) for {environment_id}")
|
||||
|
||||
except Exception as loop_e:
|
||||
logger.error(f"[IdMappingService.sync_environment][Reason] Error polling {endpoint}: {loop_e}")
|
||||
# Continue to next resource type instead of blowing up the whole sync
|
||||
|
||||
self.db.commit()
|
||||
logger.info(f"[IdMappingService.sync_environment][Coherence:OK] Successfully synced {total_synced} items.")
|
||||
logger.info(f"[IdMappingService.sync_environment][Coherence:OK] Successfully synced {total_synced} items and deleted {total_deleted} stale items.")
|
||||
|
||||
except Exception as e:
|
||||
self.db.rollback()
|
||||
|
||||
Reference in New Issue
Block a user