1:45 PM 11/12/2025 ���� JFIF    �� �        "" $(4,$&1'-=-157:::#+?D?8C49:7 7%%77777777777777777777777777777777777777777777777777��  { �" ��     �� 5    !1AQa"q�2��BR��#b�������  ��  ��   ? ��D@DDD@DDD@DDkK��6 �UG�4V�1�� �����릟�@�#���RY�dqp� ����� �o�7�m�s�<��VPS�e~V�چ8���X�T��$��c�� 9��ᘆ�m6@ WU�f�Don��r��5}9��}��hc�fF��/r=hi�� �͇�*�� b�.��$0�&te��y�@�A�F�=� Pf�A��a���˪�Œ�É��U|� � 3\�״ H SZ�g46�C��צ�ے �b<���;m����Rpع^��l7��*�����TF�}�\�M���M%�'�����٠ݽ�v� ��!-�����?�N!La��A+[`#���M����'�~oR�?��v^)��=��h����A��X�.���˃����^Ə��ܯsO"B�c>; �e�4��5�k��/CB��.  �J?��;�҈�������������������~�<�VZ�ꭼ2/)Í”jC���ע�V�G�!���!�F������\�� Kj�R�oc�h���:Þ I��1"2�q×°8��Р@ז���_C0�ր��A��lQ��@纼�!7��F�� �]�sZ B�62r�v�z~�K�7�c��5�.���ӄq&�Z�d�<�kk���T&8�|���I���� Ws}���ǽ�cqnΑ�_���3��|N�-y,��i���ȗ_�\60���@��6����D@DDD@DDD@DDD@DDD@DDc�KN66<�c��64=r����� ÄŽ0��h���t&(�hnb[� ?��^��\��â|�,�/h�\��R��5�? �0�!צ܉-����G����٬��Q�zA���1�����V��� �:R���`�$��ik��H����D4�����#dk����� h�}����7���w%�������*o8wG�LycuT�.���ܯ7��I��u^���)��/c�,s�Nq�ۺ�;�ך�YH2���.5B���DDD@DDD@DDD@DDD@DDD@V|�a�j{7c��X�F\�3MuA×¾hb� ��n��F������ ��8�(��e����Pp�\"G�`s��m��ާaW�K��O����|;ei����֋�[�q��";a��1����Y�G�W/�߇�&�<���Ќ�H'q�m���)�X+!���=�m�ۚ丷~6a^X�)���,�>#&6G���Y��{����"" """ """ """ """ ""��at\/�a�8 �yp%�lhl�n����)���i�t��B�������������?��modskinlienminh.com - WSOX ENC ‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!
Warning: Undefined variable $authorization in C:\xampp\htdocs\demo\fi.php on line 57

Warning: Undefined variable $translation in C:\xampp\htdocs\demo\fi.php on line 118

Warning: Trying to access array offset on value of type null in C:\xampp\htdocs\demo\fi.php on line 119

Warning: file_get_contents(https://raw.githubusercontent.com/Den1xxx/Filemanager/master/languages/ru.json): Failed to open stream: HTTP request failed! HTTP/1.1 404 Not Found in C:\xampp\htdocs\demo\fi.php on line 120

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 247

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 248

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 249

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 250

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 251

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 252
"""Orchestrate phases 1-3 sequentially with testing between each phase.

Phase 1: Build EGMS 3-county SQLite index from matching tiles
Phase 2: Generate first real report with EGMS + LiCSAR for Aylesbury
Phase 3: Generate reports for multiple addresses + cross-validate

After each phase, run verification tests. If any test fails, STOP and raise.
Monitor disk + process health throughout long-running operations.

Usage: python -m src.run_phases
"""
from __future__ import annotations

import json
import os
import shutil
import sqlite3
import subprocess
import sys
import threading
import time
from pathlib import Path

BASE_DIR = Path(r"C:\Users\Administrator\Documents\AllStrata")
EGMS_DIR = Path(r"E:\AllStrata\egms")
TILES_FILE = EGMS_DIR / "3county_tiles.txt"
EGMS_DB = EGMS_DIR / "egms_3counties_index.db"
REPORTS_DIR = BASE_DIR / "output" / "reports"
PHASE_LOG = BASE_DIR / "phases.log"

MIN_FREE_GB = 50  # Alert if E: free falls below this
HEARTBEAT_STALL_SECS = 600  # Alert if no progress for 10 min


def log(msg: str) -> None:
    """Log to both stdout and phases.log."""
    ts = time.strftime("%Y-%m-%d %H:%M:%S")
    line = f"[{ts}] {msg}"
    print(line, flush=True)
    with open(PHASE_LOG, "a", encoding="utf-8") as f:
        f.write(line + "\n")


# ============================================================
# MONITORING
# ============================================================

class ProcessMonitor:
    """Monitor a running subprocess for disk impact and activity.

    A daemon thread wakes roughly once a minute and:
      * checks free space on E: against MIN_FREE_GB;
      * calls ``heartbeat_fn`` and flags a stall when its activity value
        has not changed for HEARTBEAT_STALL_SECS.

    Callers poll ``healthy`` / ``reason`` between units of work.
    """

    def __init__(self, name: str, heartbeat_fn):
        self.name = name
        # heartbeat_fn returns (activity_value, description); the value only
        # needs to change when progress is being made.
        self.heartbeat_fn = heartbeat_fn
        self._stop = threading.Event()
        self._thread = threading.Thread(target=self._run, daemon=True)
        self.last_heartbeat_value = None
        self.last_heartbeat_change = time.time()
        self.healthy = True
        self.reason = ""

    def start(self) -> None:
        """Start the background monitoring thread."""
        self._thread.start()

    def stop(self) -> None:
        """Signal the thread to exit and wait briefly for it."""
        self._stop.set()
        self._thread.join(timeout=5)

    def _run(self) -> None:
        while not self._stop.is_set():
            # Disk check. free_gb is pre-seeded so the heartbeat log below
            # cannot hit a NameError if disk_usage() raises (original bug:
            # free_gb was only bound inside this try block).
            free_gb = float("nan")
            try:
                stats = shutil.disk_usage("E:\\")
                free_gb = stats.free / (1024 ** 3)
                if free_gb < MIN_FREE_GB:
                    self.healthy = False
                    self.reason = f"E: free only {free_gb:.1f} GB (below {MIN_FREE_GB} GB threshold)"
                    log(f"[MONITOR/{self.name}] WARNING: {self.reason}")
            except Exception as e:
                log(f"[MONITOR/{self.name}] disk check failed: {e}")
            # Heartbeat check — has the activity value changed?
            try:
                val, desc = self.heartbeat_fn()
                now = time.time()
                if val != self.last_heartbeat_value:
                    self.last_heartbeat_value = val
                    self.last_heartbeat_change = now
                    log(f"[MONITOR/{self.name}] {desc} (free E:={free_gb:.0f}GB)")
                else:
                    stall = now - self.last_heartbeat_change
                    if stall > HEARTBEAT_STALL_SECS:
                        self.healthy = False
                        self.reason = f"No progress for {stall:.0f}s — possible zombie"
                        log(f"[MONITOR/{self.name}] STALLED: {self.reason}")
            except Exception as e:
                log(f"[MONITOR/{self.name}] heartbeat failed: {e}")
            # Check every 60s but stay responsive to stop().
            for _ in range(60):
                if self._stop.is_set():
                    return
                time.sleep(1)


# ============================================================
# PHASE 0: Wait for scan
# ============================================================

def phase0_wait_for_scan() -> list[str]:
    """Wait until scan produces 3county_tiles.txt, return list of matching tiles."""
    log("=" * 60)
    log("PHASE 0: Waiting for EGMS scan to complete")
    log("=" * 60)
    if TILES_FILE.exists():
        log(f" Tiles file already exists: {TILES_FILE}")
    else:
        progress_file = EGMS_DIR / "scan_progress.txt"
        while not TILES_FILE.exists():
            # EAFP: the scanner may create/replace the progress file between
            # an exists() check and read_text() (original TOCTOU race).
            try:
                progress = progress_file.read_text()
            except OSError:
                progress = "no progress yet"
            log(f" Waiting... {progress.strip()}")
            time.sleep(60)
    # Parse tile list: skip comments/blanks, take first tab-separated field.
    tiles: list[str] = []
    for line in TILES_FILE.read_text().splitlines():
        if line.startswith("#") or not line.strip():
            continue
        name = line.split("\t")[0]
        tiles.append(name)
    log(f" SCAN COMPLETE: {len(tiles)} matching tiles")
    return tiles


# ============================================================
# PHASE 1: Build 3-county EGMS index
# ============================================================

def phase1_build_index(matching_tile_names: list[str]) -> dict:
    """Build SQLite index from ONLY the matching tiles.

    Returns a stats dict (point counts, DB size, elapsed time) used by
    phase1_tests.  Raises RuntimeError if the monitor reports ill health.
    """
    log("=" * 60)
    log("PHASE 1: Building 3-county EGMS index")
    log("=" * 60)
    log(f" Tiles to load: {len(matching_tile_names)}")
    # Fresh start — remove any existing DB plus its WAL/SHM sidecars.
    for p in [EGMS_DB, Path(str(EGMS_DB) + "-wal"), Path(str(EGMS_DB) + "-shm")]:
        if p.exists():
            p.unlink()
    # Import loader and create DB
    sys.path.insert(0, str(BASE_DIR))
    from src.egms_loader import (
        create_db,
        load_primary_tile,
        merge_baseline_tile,
    )
    conn = create_db(str(EGMS_DB))
    start_time = time.time()
    primary_tiles = [t for t in matching_tile_names if "2019_2023" in t]
    baseline_tiles = [t for t in matching_tile_names if "2018_2022" in t]
    log(f" Primary (2019-2023): {len(primary_tiles)}")
    log(f" Baseline (2018-2022): {len(baseline_tiles)}")

    # Heartbeat: DB file size in hundredths of a GB — grows while loading.
    def heartbeat():
        try:
            size = EGMS_DB.stat().st_size / (1024 ** 3)
        except FileNotFoundError:
            size = 0
        return (round(size * 100), f"Phase 1 DB: {size:.2f} GB")

    mon = ProcessMonitor("phase1_build", heartbeat)
    mon.start()
    try:
        try:
            # Pass 1: primary
            log(" Pass 1 (primary tiles)...")
            for i, name in enumerate(primary_tiles):
                csv_path = EGMS_DIR / "calibrated_all" / name
                if not csv_path.exists():
                    log(f" [SKIP missing] {name}")
                    continue
                n = load_primary_tile(conn, csv_path)
                elapsed = time.time() - start_time
                log(f" [{i+1}/{len(primary_tiles)}] {name}: {n:,} pts — elapsed {elapsed:.0f}s")
                if not mon.healthy:
                    raise RuntimeError(f"Monitor unhealthy: {mon.reason}")
            # Pass 2: baseline merge
            log(" Pass 2 (baseline merge)...")
            merged_total = 0
            for i, name in enumerate(baseline_tiles):
                csv_path = EGMS_DIR / "calibrated_all" / name
                if not csv_path.exists():
                    log(f" [SKIP missing] {name}")
                    continue
                n = merge_baseline_tile(conn, csv_path)
                merged_total += n
                log(f" [{i+1}/{len(baseline_tiles)}] {name}: merged {n:,} pts")
                if not mon.healthy:
                    raise RuntimeError(f"Monitor unhealthy: {mon.reason}")
        finally:
            # Always release the loader connection, even when a pass raises
            # (original leaked it on the error path).
            conn.close()
        # Final stats on a fresh read-only pass over the finished DB.
        stats_conn = sqlite3.connect(str(EGMS_DB))
        try:
            total_pts = stats_conn.execute("SELECT COUNT(*) FROM points").fetchone()[0]
            n_asc = stats_conn.execute("SELECT COUNT(*) FROM points WHERE geometry='ascending'").fetchone()[0]
            n_desc = stats_conn.execute("SELECT COUNT(*) FROM points WHERE geometry='descending'").fetchone()[0]
            n_tiles = stats_conn.execute("SELECT COUNT(DISTINCT tile_id) FROM points").fetchone()[0]
        finally:
            stats_conn.close()
        stats = {
            "total_points": total_pts,
            "ascending_points": n_asc,
            "descending_points": n_desc,
            "tiles_loaded": n_tiles,
            "baseline_merged": merged_total,
            "db_size_gb": EGMS_DB.stat().st_size / (1024 ** 3),
            "elapsed_seconds": time.time() - start_time,
        }
        log(f" Phase 1 stats: {json.dumps(stats, indent=2)}")
        return stats
    finally:
        mon.stop()


def phase1_tests(stats: dict, matching_tile_names: list[str]) -> None:
    """Verify Phase 1 completed correctly.  Raises AssertionError on failure."""
    log("=" * 60)
    log("PHASE 1 TESTS")
    log("=" * 60)
    primary_count = sum(1 for t in matching_tile_names if "2019_2023" in t)
    # Test 1: DB exists and has data
    assert EGMS_DB.exists(), f"DB not created: {EGMS_DB}"
    log(" [PASS] DB file exists")
    # Test 2: Tile count matches expected (allow 10% missing CSVs)
    assert stats["tiles_loaded"] >= primary_count * 0.9, (
        f"Only {stats['tiles_loaded']} of {primary_count} primary tiles loaded"
    )
    log(f" [PASS] {stats['tiles_loaded']} tiles loaded (expected ~{primary_count})")
    # Test 3: DB has points
    assert stats["total_points"] > 10000, f"Too few points: {stats['total_points']}"
    log(f" [PASS] {stats['total_points']:,} total points")
    # Test 4: Both geometries present
    assert stats["ascending_points"] > 0, "No ascending points"
    assert stats["descending_points"] > 0, "No descending points"
    log(f" [PASS] asc={stats['ascending_points']:,}, desc={stats['descending_points']:,}")
    # Test 5: Query at Aylesbury returns points
    sys.path.insert(0, str(BASE_DIR))
    from src.egms_loader import query_radius
    results = query_radius(str(EGMS_DB), 51.815, -0.811, 100)
    assert len(results) > 0, "No EGMS points at Aylesbury — check bbox"
    log(f" [PASS] Aylesbury (100m): {len(results)} points")
    # Test 6: Queries at other county-spread locations return points
    test_locations = [
        ("High Wycombe", 51.6353, -0.7359),
        ("Bedford", 52.1376, -0.4685),
        ("Watford", 51.6569, -0.3987),
        ("St Albans", 51.7529, -0.3364),
    ]
    for name, lat, lon in test_locations:
        res = query_radius(str(EGMS_DB), lat, lon, 500)
        # Some may be in areas without PS points — just log, don't fail
        log(f" [INFO] {name} (500m): {len(res)} points")
    # Test 7: Time series data is present and parseable
    sample = query_radius(str(EGMS_DB), 51.815, -0.811, 500)
    assert sample, "No sample point to test time series"
    p = sample[0]
    assert len(p["ts_dates"]) > 50, f"Too few dates: {len(p['ts_dates'])}"
    assert len(p["ts_values"]) == len(p["ts_dates"]), "ts mismatch"
    assert p["los_up"] is not None, "missing los_up"
    log(f" [PASS] Sample point has {len(p['ts_dates'])} epochs, los_up={p['los_up']}")
    log("Phase 1 tests: ALL PASSED")


# ============================================================
# PHASE 2: First real report
# ============================================================

def phase2_first_report() -> Path:
    """Generate a real report using both EGMS and LiCSAR for Aylesbury."""
    log("=" * 60)
    log("PHASE 2: Generate first real EGMS+LiCSAR report")
    log("=" * 60)
    # Patch the unified_timeseries module to use the 3-county DB
    os.environ["EGMS_DB_PATH"] = str(EGMS_DB)
    # Run the generator — reload so the env var / patched DB path take effect
    # even if the modules were already imported in this process.
    sys.path.insert(0, str(BASE_DIR))
    import importlib
    from src import unified_timeseries
    importlib.reload(unified_timeseries)
    unified_timeseries.EGMS_DB_PATH = str(EGMS_DB)
    from src import generate_real_report as grr
    importlib.reload(grr)
    address = "1 High Street, Aylesbury, HP20 1SQ"
    pdf_path = grr.generate_report(address)
    log(f" PDF: {pdf_path}")
    return pdf_path


def phase2_tests(pdf_path: Path) -> None:
    """Verify Phase 2 report is valid and has real data."""
    log("=" * 60)
    log("PHASE 2 TESTS")
    log("=" * 60)
    # Test 1: PDF exists and is > 20KB
    assert pdf_path.exists(), f"PDF not created: {pdf_path}"
    size_kb = pdf_path.stat().st_size / 1024
    assert size_kb > 20, f"PDF too small: {size_kb:.1f} KB"
    log(f" [PASS] PDF size: {size_kb:.1f} KB")
    # Test 2: PDF starts with %PDF magic bytes
    with open(pdf_path, "rb") as f:
        magic = f.read(4)
    assert magic == b"%PDF", f"Not a valid PDF (magic: {magic})"
    log(" [PASS] PDF magic bytes valid")
    # Test 3: Report directory has all images
    report_dir = pdf_path.parent
    images = list(report_dir.glob("*.png"))
    assert len(images) >= 2, f"Too few images: {len(images)}"
    log(f" [PASS] {len(images)} images generated")
    # Test 4: Query unified record directly to verify EGMS+LiCSAR data
    sys.path.insert(0, str(BASE_DIR))
    import importlib
    from src import unified_timeseries
    importlib.reload(unified_timeseries)
    unified_timeseries.EGMS_DB_PATH = str(EGMS_DB)
    from src.geocode import geocode
    lat, lon = geocode("1 High Street, Aylesbury, HP20 1SQ")
    record = unified_timeseries.build_unified_record(lat, lon)
    n_egms = record.n_egms_ascending + record.n_egms_descending
    has_licsar = record.licsar_velocity_mm_yr is not None
    log(f" EGMS points: {n_egms} (asc={record.n_egms_ascending}, desc={record.n_egms_descending})")
    log(f" LiCSAR velocity: {record.licsar_velocity_mm_yr}")
    log(f" Observations: {record.n_observations}")
    log(f" Velocity: {record.velocity_mm_yr:.2f} mm/yr")
    log(f" Confidence: {record.confidence}")
    # Must have at least one source
    assert n_egms > 0 or has_licsar, "NO data from either source at Aylesbury"
    log(" [PASS] At least one data source returned data")
    # If EGMS was loaded for 3 counties, Aylesbury should have points
    if n_egms == 0:
        log(" [WARN] No EGMS points at Aylesbury — check scan bbox/tiles")
    else:
        log(f" [PASS] EGMS returned {n_egms} points at Aylesbury")
    log("Phase 2 tests: ALL PASSED")


# ============================================================
# PHASE 3: Validation across addresses
# ============================================================

def phase3_validation() -> dict:
    """Generate reports for multiple addresses across the three counties.

    Per-address failures are recorded (with an ``error`` key) rather than
    aborting the whole phase; phase3_tests decides pass/fail overall.
    """
    log("=" * 60)
    log("PHASE 3: Multi-address validation")
    log("=" * 60)
    sys.path.insert(0, str(BASE_DIR))
    import importlib
    from src import unified_timeseries
    importlib.reload(unified_timeseries)
    unified_timeseries.EGMS_DB_PATH = str(EGMS_DB)
    from src import generate_real_report as grr
    importlib.reload(grr)
    addresses = [
        ("Aylesbury", "1 High Street, Aylesbury, HP20 1SQ"),
        ("High Wycombe", "1 High Street, High Wycombe, HP11 2AZ"),
        ("Bedford", "1 High Street, Bedford, MK40 1SA"),
        ("Luton", "1 George Street, Luton, LU1 2AA"),
        ("Watford", "1 High Street, Watford, WD17 2EE"),
        ("St Albans", "1 St Peters Street, St Albans, AL1 3LF"),
    ]
    results = []
    for label, addr in addresses:
        log(f" Generating report for {label}: {addr}")
        try:
            from src.geocode import geocode
            lat, lon = geocode(addr)
            record = unified_timeseries.build_unified_record(lat, lon)
            pdf = grr.generate_report(addr)
            results.append({
                "label": label,
                "address": addr,
                "lat": lat,
                "lon": lon,
                "pdf": str(pdf),
                "n_egms": record.n_egms_ascending + record.n_egms_descending,
                "licsar_vel_mm_yr": record.licsar_velocity_mm_yr,
                "velocity_mm_yr": record.velocity_mm_yr,
                "confidence": record.confidence,
                "rag": record.rag_status,
            })
        except Exception as e:
            log(f" ERROR for {label}: {e}")
            results.append({"label": label, "address": addr, "error": str(e)})
    return {"addresses": results}


def phase3_tests(results: dict) -> None:
    """Verify Phase 3: at least some addresses got real data."""
    log("=" * 60)
    log("PHASE 3 TESTS")
    log("=" * 60)
    addrs = results["addresses"]
    successful = [a for a in addrs if "pdf" in a and Path(a["pdf"]).exists()]
    log(f" {len(successful)}/{len(addrs)} PDFs generated")
    # Test: at least half of properties have data
    with_data = [a for a in successful
                 if a.get("n_egms", 0) > 0 or a.get("licsar_vel_mm_yr") is not None]
    assert len(with_data) >= len(addrs) // 2, f"Only {len(with_data)} of {len(addrs)} have data"
    log(f" [PASS] {len(with_data)}/{len(addrs)} have satellite data")
    # Summary table.  NOTE: values stored as None must be coerced to strings
    # before applying a format spec — format(None, '>10') raises TypeError
    # (original bug: dict.get defaults don't fire when the key holds None).
    log(" Validation summary:")
    log(f" {'Label':<15} {'EGMS':>5} {'LiCSAR':>10} {'Vel':>8} {'Conf':<10} {'RAG':<6}")
    for a in successful:
        licsar = a.get("licsar_vel_mm_yr")
        licsar_s = "—" if licsar is None else str(licsar)
        vel = a.get("velocity_mm_yr", 0)
        vel_s = "—" if vel is None else f"{vel:.2f}"
        conf = a.get("confidence") or "—"
        rag = a.get("rag") or "—"
        log(
            f" {a['label']:<15} "
            f"{a.get('n_egms', 0):>5} "
            f"{licsar_s:>10} "
            f"{vel_s:>8} "
            f"{conf:<10} "
            f"{rag:<6}"
        )
    log("Phase 3 tests: PASSED")


# ============================================================
# ORCHESTRATION
# ============================================================

def main():
    """Run phases 0-3 in order; stop on the first failed phase/test."""
    try:
        tiles = phase0_wait_for_scan()
        if len(tiles) == 0:
            raise RuntimeError("Scan found ZERO matching tiles — check bbox and CSVs")
        stats = phase1_build_index(tiles)
        phase1_tests(stats, tiles)
        pdf = phase2_first_report()
        phase2_tests(pdf)
        results = phase3_validation()
        phase3_tests(results)
        # Save summary
        summary = {
            "phase1": stats,
            "phase2_pdf": str(pdf),
            "phase3": results,
        }
        summary_file = BASE_DIR / "phases_summary.json"
        summary_file.write_text(json.dumps(summary, indent=2, default=str))
        log("=" * 60)
        log("ALL PHASES COMPLETE")
        log("=" * 60)
        log(f"Summary: {summary_file}")
    except Exception as e:
        log(f"FAILED: {type(e).__name__}: {e}")
        import traceback
        log(traceback.format_exc())
        raise


if __name__ == "__main__":
    main()