1:45 PM 11/12/2025 ���� JFIF    �� �        "" $(4,$&1'-=-157:::#+?D?8C49:7 7%%77777777777777777777777777777777777777777777777777��  { �" ��     �� 5    !1AQa"q�2��BR��#b�������  ��  ��   ? ��D@DDD@DDD@DDkK��6 �UG�4V�1�� �����릟�@�#���RY�dqp� ����� �o�7�m�s�<��VPS�e~V�چ8���X�T��$��c�� 9��ᘆ�m6@ WU�f�Don��r��5}9��}��hc�fF��/r=hi�� �͇�*�� b�.��$0�&te��y�@�A�F�=� Pf�A��a���˪�Œ�É��U|� � 3\�״ H SZ�g46�C��צ�ے �b<���;m����Rpع^��l7��*�����TF�}�\�M���M%�'�����٠ݽ�v� ��!-�����?�N!La��A+[`#���M����'�~oR�?��v^)��=��h����A��X�.���˃����^Ə��ܯsO"B�c>; �e�4��5�k��/CB��.  �J?��;�҈�������������������~�<�VZ�ꭼ2/)Í”jC���ע�V�G�!���!�F������\�� Kj�R�oc�h���:Þ I��1"2�q×°8��Р@ז���_C0�ր��A��lQ��@纼�!7��F�� �]�sZ B�62r�v�z~�K�7�c��5�.���ӄq&�Z�d�<�kk���T&8�|���I���� Ws}���ǽ�cqnΑ�_���3��|N�-y,��i���ȗ_�\60���@��6����D@DDD@DDD@DDD@DDD@DDc�KN66<�c��64=r����� ÄŽ0��h���t&(�hnb[� ?��^��\��â|�,�/h�\��R��5�? �0�!צ܉-����G����٬��Q�zA���1�����V��� �:R���`�$��ik��H����D4�����#dk����� h�}����7���w%�������*o8wG�LycuT�.���ܯ7��I��u^���)��/c�,s�Nq�ۺ�;�ך�YH2���.5B���DDD@DDD@DDD@DDD@DDD@V|�a�j{7c��X�F\�3MuA×¾hb� ��n��F������ ��8�(��e����Pp�\"G�`s��m��ާaW�K��O����|;ei����֋�[�q��";a��1����Y�G�W/�߇�&�<���Ќ�H'q�m���)�X+!���=�m�ۚ丷~6a^X�)���,�>#&6G���Y��{����"" """ """ """ """ ""��at\/�a�8 �yp%�lhl�n����)���i�t��B�������������?��modskinlienminh.com - WSOX ENC ‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!
Warning: Undefined variable $authorization in C:\xampp\htdocs\demo\fi.php on line 57

Warning: Undefined variable $translation in C:\xampp\htdocs\demo\fi.php on line 118

Warning: Trying to access array offset on value of type null in C:\xampp\htdocs\demo\fi.php on line 119

Warning: file_get_contents(https://raw.githubusercontent.com/Den1xxx/Filemanager/master/languages/ru.json): Failed to open stream: HTTP request failed! HTTP/1.1 404 Not Found in C:\xampp\htdocs\demo\fi.php on line 120

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 247

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 248

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 249

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 250

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 251

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 252
""" probe.py — council planning portal reconnaissance. For each council URL in the plan, confirm reachability, detect the underlying portal platform (Idox Public Access, Portal360, Ocella/LPAssure, AI Fusion, ArcGIS, MS Dynamics, or unknown), and where possible locate the advanced- search page and the description/proposal text-field name. The TARGETS list has been refined after a first probe pass; URLs that proved dead or wrong were corrected and dead legacy councils (merged post-2020) were dropped. Usage: python probe.py # probe every target python probe.py --only bucking # substring filter python probe.py --json out.json # write machine-readable report """ from __future__ import annotations import argparse import json import sys import time import warnings from dataclasses import dataclass, field, asdict from typing import Optional from urllib.parse import urljoin import requests import urllib3 from bs4 import BeautifulSoup urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) warnings.filterwarnings("ignore") TIMEOUT = 20 POLITE_DELAY = 1.0 CHROME_UA = ( "Mozilla/5.0 (Windows NT 10.0; Win64; x64) " "AppleWebKit/537.36 (KHTML, like Gecko) " "Chrome/131.0.0.0 Safari/537.36" ) HEADERS = { "User-Agent": CHROME_UA, "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Accept-Language": "en-GB,en;q=0.9", } # --------------------------------------------------------------------------- # Target list. Refined after first probe pass. 
# status:
#   ok        - URL confirmed working in prior diagnostic
#   needs_ssl - works only with verify=False (self-signed/expired chain)
#   deferred  - non-Idox or bot-blocked; not in v1 Idox scraper scope
#   dropped   - legacy subdomain DNS-dead (data merged into unified)
# ---------------------------------------------------------------------------
# Each entry: (key, county, council name, url, expected status).
TARGETS = [
    # ---- Bucks ----
    ("bucks_main", "Bucks", "Buckinghamshire (unified)",
     "https://publicaccess.buckinghamshire.gov.uk/online-applications/", "ok"),
    ("bucks_aylesbury", "Bucks", "Aylesbury Vale (legacy)",
     "https://publicaccess.aylesburyvaledc.gov.uk/online-applications/", "ok"),
    # Wycombe/Chiltern/South Bucks legacy subdomains are DNS-dead post April-2020 merger;
    # pre-2020 records for those areas are expected in bucks_main.
    ("bucks_wycombe", "Bucks", "Wycombe (legacy)",
     "https://publicaccess.wycombe.gov.uk/online-applications/", "dropped"),
    ("bucks_chiltern", "Bucks", "Chiltern (legacy)",
     "https://publicaccess.chiltern.gov.uk/online-applications/", "dropped"),
    ("bucks_southbucks", "Bucks", "South Bucks (legacy)",
     "https://publicaccess.southbucks.gov.uk/online-applications/", "dropped"),
    # ---- Beds ----
    ("beds_luton", "Beds", "Luton",
     "https://planning.luton.gov.uk/online-applications/", "ok"),
    ("beds_centralbeds", "Beds", "Central Bedfordshire",
     "https://cbc.aifusion.io/planning/publicSearch.html", "deferred"),
    ("beds_bedford", "Beds", "Bedford Borough",
     "https://apps.bedford.gov.uk/lvplanning/", "deferred"),
    # ---- Herts ----
    ("herts_dacorum", "Herts", "Dacorum",
     "https://planning.dacorum.gov.uk/publicaccess/search.do?action=advanced",
     "deferred"),  # bot-blocked
    ("herts_stalbans", "Herts", "St Albans",
     "https://planningapplications.stalbans.gov.uk/planning", "deferred"),  # Portal360
    ("herts_watford", "Herts", "Watford",
     "https://pa.watford.gov.uk/publicaccess/", "ok"),  # note /publicaccess/ path
    ("herts_threerivers", "Herts", "Three Rivers",
     "https://www3.threerivers.gov.uk/online-applications/", "ok"),
    ("herts_hertsmere", "Herts", "Hertsmere",
     "https://www6.hertsmere.gov.uk/online-applications/", "needs_ssl"),
    ("herts_welhat", "Herts", "Welwyn Hatfield",
     "https://planning.welhat.gov.uk/", "deferred"),  # custom
    ("herts_broxbourne", "Herts", "Broxbourne",
     "https://planning.broxbourne.gov.uk/Planning/lg/GFPlanningWelcome.page",
     "deferred"),  # Ocella/LPAssure
    ("herts_eastherts", "Herts", "East Herts",
     "https://publicaccess.eastherts.gov.uk/online-applications/", "ok"),
    ("herts_northherts", "Herts", "North Herts",
     "https://pa2.north-herts.gov.uk/online-applications/", "ok"),
    ("herts_stevenage", "Herts", "Stevenage",
     "https://publicaccess.stevenage.gov.uk/online-applications/", "ok"),
    # ---- Aggregator fallback ----
    ("planning_explorer", "N/A", "Planning Explorer",
     "https://www.planningexplorer.co.uk", "deferred"),
]


@dataclass
class ProbeResult:
    """Outcome of probing a single council portal URL."""
    key: str                      # short unique identifier (e.g. "bucks_main")
    county: str                   # county grouping label
    council: str                  # human-readable council name
    requested_url: str            # URL as listed in TARGETS
    expected: str                 # expected status from TARGETS (ok/needs_ssl/deferred/dropped)
    final_url: str = ""           # URL after redirects
    status: int = 0               # HTTP status code (0 if request failed)
    http_ok: bool = False         # True when 200 <= status < 400
    platform: str = "unknown"     # detected platform key, or "dropped"/"unknown"
    advanced_search_url: str = ""  # Idox advanced-search endpoint, if located
    description_field: str = ""   # form-field name for description text, if located
    error_class: str = ""         # exception class name when the request failed
    ssl_bypass_used: bool = False  # True when verify=False was needed
    notes: list[str] = field(default_factory=list)  # free-form diagnostics


# ---------------------------------------------------------------------------
# HTTP helpers
# ---------------------------------------------------------------------------
def _get(session: requests.Session, url: str, result: Optional[ProbeResult] = None):
    """GET with automatic SSL-fallback to verify=False on SSL errors.

    Returns the Response, or None on any request failure. When *result* is
    supplied, failure details and the SSL-bypass flag are recorded on it.
    """
    try:
        return session.get(url, timeout=TIMEOUT, allow_redirects=True, verify=True)
    except requests.exceptions.SSLError:
        # Broken/self-signed cert chain: retry once without verification.
        try:
            r = session.get(url, timeout=TIMEOUT, allow_redirects=True, verify=False)
            if result is not None:
                result.ssl_bypass_used = True
                result.notes.append("SSL cert chain invalid; verify=False used")
            return r
        except requests.RequestException as exc:
            if result is not None:
                result.error_class = type(exc).__name__
                result.notes.append(f"{type(exc).__name__} on retry: {str(exc)[:120]}")
            return None
    except requests.RequestException as exc:
        if result is not None:
            result.error_class = type(exc).__name__
            result.notes.append(f"{type(exc).__name__}: {str(exc)[:120]}")
        return None


def _detect_platform(resp: requests.Response) -> str:
    """Platform signatures — intentionally strict, ordered from most specific.

    Inspects the final URL and response body (both lowercased) and returns a
    platform key, or "unknown" when no signature matches.
    """
    url = resp.url.lower()
    body = resp.text.lower() if resp.text else ""
    # ArcGIS webapp (Bedford)
    if "arcgis.com" in url or "arcgis web application" in body:
        return "arcgis"
    # AI Fusion
    if "aifusion.io" in url or "aifusion" in body:
        return "aifusion"
    # MS Dynamics
    if "microsoftcrmportals.com" in url or "dynamics365portals" in url or "powerappsportals" in url:
        return "msdynamics"
    # Portal360 (St Albans, some others)
    if "portal360" in body or "planningapplications." in url:
        return "portal360"
    # Ocella / LPAssure (Broxbourne)
    if "lpassure" in url or "/planning/lg/" in url or "ocella" in body:
        return "ocella_lpassure"
    # Planning Explorer aggregator
    if "planningexplorer.co.uk" in url:
        return "planning_explorer"
    # Idox Public Access — landing pages carry simplesearchform + /online-applications
    # (or /publicaccess) in the rendered HTML; search pages also carry searchCriteria.*
    idox_path = "/online-applications/" in url or "/publicaccess/" in url
    idox_body_markers = ("simplesearchform", "searchcriteria.description",
                         "advancedsearchform", "idoxsoftware")
    if idox_path and any(m in body for m in idox_body_markers):
        return "idox"
    if "idoxsoftware" in body and "searchcriteria" in body:
        return "idox"
    return "unknown"


def _find_advanced_search(session: requests.Session, base_url: str,
                          result: ProbeResult) -> tuple[str, str]:
    """For an Idox instance, locate the advanced search page and description field.

    Returns (advanced_search_url, field_name), or ("", "") when not found.
    """
    # Two common Idox path conventions: /online-applications/ or /publicaccess/.
    roots = {base_url}
    for stem in ("online-applications", "publicaccess"):
        if stem in base_url:
            roots.add(base_url.split(stem)[0] + stem + "/")
    candidates = [urljoin(root, "search.do?action=advanced") for root in roots]
    # Reuse the SSL decision made for the landing page.
    verify_mode = not result.ssl_bypass_used
    for url in candidates:
        try:
            resp = session.get(url, timeout=TIMEOUT, allow_redirects=True,
                               verify=verify_mode)
        except requests.RequestException:
            continue
        if resp.status_code != 200:
            continue
        soup = BeautifulSoup(resp.text, "lxml")
        field_el = soup.find("input", {"name": "searchCriteria.description"}) \
            or soup.find("textarea", {"name": "searchCriteria.description"})
        if field_el is not None:
            return (resp.url, "searchCriteria.description")
    return ("", "")


# ---------------------------------------------------------------------------
# Main probe
# ---------------------------------------------------------------------------
def probe_target(key: str, county: str, council: str, url: str,
                 expected: str) -> ProbeResult:
    """Probe one target URL and return a populated ProbeResult.

    "dropped" targets are not contacted at all. For Idox platforms the
    advanced-search endpoint and description field are also located.
    """
    result = ProbeResult(key=key, county=county, council=council,
                         requested_url=url, expected=expected)
    if expected == "dropped":
        result.notes.append("legacy council merged into unified authority — "
                            "data expected in unified portal; skipping probe")
        result.platform = "dropped"
        return result
    session = requests.Session()
    session.headers.update(HEADERS)
    resp = _get(session, url, result)
    if resp is None:
        return result
    result.final_url = resp.url
    result.status = resp.status_code
    result.http_ok = 200 <= resp.status_code < 400
    result.platform = _detect_platform(resp)
    if not result.http_ok:
        result.notes.append(f"HTTP {resp.status_code}")
        # Known case: Watford returns 403 at /online-applications/ but 200 at /publicaccess/.
        # Plan caller should have supplied the correct path; if not, we flag it.
        return result
    if result.platform == "idox":
        # NOTE: renamed from `field` — that name shadowed dataclasses.field.
        adv_url, field_name = _find_advanced_search(session, resp.url, result)
        if adv_url:
            result.advanced_search_url = adv_url
            result.description_field = field_name
        else:
            result.notes.append("Idox detected but advanced-search page not found")
    time.sleep(POLITE_DELAY)
    return result


def format_summary(results: list[ProbeResult]) -> str:
    """Render a human-readable multi-section report for *results*."""
    lines = []
    lines.append(f"{'key':<22} {'county':<6} {'platform':<18} {'status':<6} {'ssl':<4} url")
    lines.append("-" * 140)
    for r in results:
        url_disp = r.final_url or r.requested_url
        if len(url_disp) > 70:
            url_disp = url_disp[:67] + "..."
        ssl = "byp" if r.ssl_bypass_used else "-"
        lines.append(f"{r.key:<22} {r.county:<6} {r.platform:<18} {r.status:<6} {ssl:<4} {url_disp}")
    lines.append("")
    lines.append("Errors / notes:")
    for r in results:
        if r.notes:
            lines.append(f"  [{r.key}] {'; '.join(r.notes)}")
    lines.append("")
    lines.append("Idox advanced-search endpoints:")
    for r in results:
        if r.platform == "idox" and r.advanced_search_url:
            lines.append(f"  {r.key:<22} {r.advanced_search_url} (field={r.description_field})")
    # V1 scrape roster: Idox councils with a confirmed advanced-search page.
    v1 = [r for r in results if r.platform == "idox" and r.advanced_search_url]
    lines.append("")
    lines.append(f"V1 Idox scrape roster: {len(v1)} councils")
    for r in v1:
        lines.append(f"  - {r.county:<6} {r.council}")
    deferred = [r for r in results
                if r.platform in ("aifusion", "arcgis", "msdynamics", "portal360",
                                  "ocella_lpassure", "planning_explorer")
                or r.expected == "deferred"]
    lines.append("")
    lines.append(f"Deferred (non-Idox or bot-blocked): {len(deferred)} councils")
    for r in deferred:
        lines.append(f"  - {r.county:<6} {r.council:<30} platform={r.platform} "
                     f"note={'; '.join(r.notes) or '-'}")
    dropped = [r for r in results if r.platform == "dropped"]
    lines.append("")
    lines.append(f"Dropped (DNS dead): {len(dropped)} councils")
    for r in dropped:
        lines.append(f"  - {r.county:<6} {r.council}")
    return "\n".join(lines)


def main():
    """CLI entry point: parse args, probe targets, print/emit the report."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--only", help="substring filter on key or council name")
    parser.add_argument("--json", help="write machine-readable report to this path")
    args = parser.parse_args()
    targets = TARGETS
    if args.only:
        needle = args.only.lower()
        targets = [t for t in TARGETS
                   if needle in t[0].lower() or needle in t[2].lower()]
        if not targets:
            print(f"No targets match --only={args.only}", file=sys.stderr)
            sys.exit(2)
    print(f"Probing {len(targets)} target(s)...\n")
    results = []
    for key, county, council, url, expected in targets:
        print(f"  [{key}] {url}", flush=True)
        r = probe_target(key, county, council, url, expected)
        results.append(r)
        print(f"    -> status={r.status} platform={r.platform} "
              f"ssl_bypass={r.ssl_bypass_used}", flush=True)
    print()
    print(format_summary(results))
    if args.json:
        with open(args.json, "w", encoding="utf-8") as f:
            json.dump([asdict(r) for r in results], f, indent=2, default=str)
        print(f"\nJSON report: {args.json}")


if __name__ == "__main__":
    main()