sirbastiano94 committed
Commit 54783a8 · verified · 1 Parent(s): 74d82ea

Create app.py

Files changed (1)
  1. app.py +617 -0
app.py ADDED
@@ -0,0 +1,617 @@
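"""Gradio app: interactive AIS map for ESA Φ-lab.

Daily AIS snapshots ("{date}_ais.parquet") are downloaded from the Hugging Face
dataset repo "Lore0123/AISPortal", optionally filtered by a UTC time window and
an AOI polygon (WKT), rendered as clustered markers on a Folium map, and made
exportable as CSV, JSON, or XML.

Dependencies: gradio, folium, pandas, huggingface_hub, and a parquet engine
(pyarrow or fastparquet); shapely is optional and only needed for AOI filtering.
"""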
from datetime import date, datetime, time, timedelta
import json
from pathlib import Path
import ssl
import tempfile
import xml.etree.ElementTree as ET
from typing import List, Optional, Tuple

import gradio as gr
import folium
from folium.plugins import MarkerCluster
import pandas as pd
from huggingface_hub import hf_hub_download

try:
    from gradio.components import Date as GrDateComponent
except (ImportError, AttributeError):
    GrDateComponent = getattr(gr, "Date", None) or getattr(gr, "DatePicker", None)

try:
    from shapely import wkt as shapely_wkt
    from shapely.geometry import Point

    SHAPELY_AVAILABLE = True
except Exception:  # ImportError or attribute issues
    shapely_wkt = None
    Point = None
    SHAPELY_AVAILABLE = False

DEFAULT_CENTER = "41.9028,12.4964"
DEFAULT_ZOOM = 12
DEFAULT_TILES = "CartoDB positron"
DEFAULT_DATE_PROMPT = "Select the date to pull AIS data."
DEFAULT_TIME_PROMPT = "Set start and end times to describe the daily window."
DEFAULT_DATE = "2025-08-25"
DEFAULT_START_TIME = "10:00:00"
DEFAULT_END_TIME = "12:00:00"
DEFAULT_AOI_WKT = """POLYGON((4.2100 51.3700,4.4800 51.3700,4.5100 51.2900,4.4650 51.1700,4.2500 51.1700,4.1900 51.2500,4.2100 51.3700))"""
HF_REPO_ID = "Lore0123/AISPortal"
HF_FILE_TEMPLATE = "{date}_ais.parquet"
DATE_FMT = "%Y-%m-%d"
DEFAULT_DATE_OBJ = datetime.strptime(DEFAULT_DATE, DATE_FMT).date()
MAX_POINTS = 10_000
BANNER_PATH = (Path(__file__).resolve().parent / "src" / "banner.png")
TILE_OPTIONS = {
    "OpenStreetMap": {
        "tiles": "OpenStreetMap",
        "attr": "© OpenStreetMap contributors",
    },
    "Stamen Terrain": {
        "tiles": "Stamen Terrain",
        "attr": "Map tiles by Stamen Design, CC BY 3.0 — Data © OpenStreetMap contributors",
    },
    "CartoDB positron": {
        "tiles": "https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}{r}.png",
        "attr": "© OpenStreetMap contributors © CARTO",
    },
    "CartoDB dark_matter": {
        "tiles": "https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png",
        "attr": "© OpenStreetMap contributors © CARTO",
    },
}

def _parse_center(center: str) -> Tuple[float, float]:
    """
    Parse "lat,lon" into (lat, lon).
    """
    try:
        lat_str, lon_str = [x.strip() for x in center.split(",")]
        lat, lon = float(lat_str), float(lon_str)
        if not (-90 <= lat <= 90 and -180 <= lon <= 180):
            raise ValueError
        return lat, lon
    except Exception:
        # Default: Rome
        return 41.9028, 12.4964


def _parse_date(value) -> Optional[date]:
    if not value:
        return None
    if isinstance(value, date):
        return value
    if isinstance(value, str):
        raw = value.strip()
        if not raw:
            return None
        try:
            return datetime.strptime(raw, DATE_FMT).date()
        except ValueError:
            return None
    return None

def _iterate_dates(start: Optional[date], end: Optional[date]) -> List[date]:
    if start and end:
        if end < start:
            start, end = end, start
    elif start:
        end = start
    elif end:
        start = end
    else:
        return []
    current = start
    dates: List[date] = []
    while current <= end:
        dates.append(current)
        current += timedelta(days=1)
    return dates


def _normalize_column_key(value: str) -> str:
    return "".join(ch for ch in value.lower() if ch.isalnum())


def _find_column(df: pd.DataFrame, candidates: List[str]) -> Optional[str]:
    normalized_map = {}
    for col in df.columns:
        normalized_map.setdefault(_normalize_column_key(col), col)

    for candidate in candidates:
        key = _normalize_column_key(candidate)
        if key in normalized_map:
            return normalized_map[key]

    return None

def _parse_time(value: Optional[str]) -> Optional[time]:
    if not value:
        return None
    if isinstance(value, str):
        raw = value.strip()
        if not raw:
            return None
        for fmt in ("%H:%M:%S", "%H:%M"):
            try:
                parsed = datetime.strptime(raw, fmt)
                return parsed.time()
            except ValueError:
                continue
        return None
    return None


def _build_time_mask(datetimes: pd.Series,
                     start_time_obj: Optional[time],
                     end_time_obj: Optional[time]) -> Optional[pd.Series]:
    if start_time_obj is None and end_time_obj is None:
        return None
    dt_series = pd.to_datetime(datetimes, errors="coerce", utc=False)
    valid = dt_series.notna()
    times = dt_series.dt.time
    cond = pd.Series(True, index=dt_series.index)
    if start_time_obj and end_time_obj:
        if start_time_obj <= end_time_obj:
            cond &= (times >= start_time_obj) & (times <= end_time_obj)
        else:
            # Window wraps past midnight (e.g. 22:00 -> 02:00).
            cond &= (times >= start_time_obj) | (times <= end_time_obj)
    elif start_time_obj:
        cond &= times >= start_time_obj
    else:
        cond &= times <= end_time_obj
    return cond & valid

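# ---------------------------------------------------------------------------
# Data access: download the per-day parquet snapshot(s) from the Hugging Face
# dataset repo and normalise them into name/lat/lon/source_date/timestamp/mmsi.
# ---------------------------------------------------------------------------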
def _load_ais_points(start_date: Optional[str],
                     end_date: Optional[str],
                     start_time: Optional[str],
                     end_time: Optional[str]) -> Tuple[pd.DataFrame, List[str]]:
    """Download AIS parquet files, filter them, and return the full filtered rows."""
    start = _parse_date(start_date)
    end = _parse_date(end_date)
    dates = _iterate_dates(start, end)
    if not dates:
        return pd.DataFrame(columns=["name", "lat", "lon", "source_date", "timestamp", "mmsi"]), []

    frames: List[pd.DataFrame] = []
    errors: List[str] = []
    start_time_obj = _parse_time(start_time)
    end_time_obj = _parse_time(end_time)

    for day in dates:
        filename = HF_FILE_TEMPLATE.format(date=day.isoformat())
        try:
            local_path = hf_hub_download(
                repo_id=HF_REPO_ID,
                filename=filename,
                repo_type="dataset"
            )
        except Exception as exc:  # pragma: no cover - network dependent
            errors.append(f"{day}: download failed ({exc})")
            continue

        try:
            df = pd.read_parquet(local_path)
        except Exception as exc:  # pragma: no cover - file dependent
            errors.append(f"{day}: failed to read parquet ({exc})")
            continue

        lat_col = _find_column(df, ["lat", "latitude"])
        lon_col = _find_column(df, ["lon", "longitude", "long", "lng"])
        if lat_col is None or lon_col is None:
            errors.append(f"{day}: missing latitude/longitude columns")
            continue

        time_col = _find_column(df, [
            "tstamp",
            "timestamp",
            "time",
            "datetime",
            "basedatetime",
            "baseDateTime",
            "received_time",
            "receivedtime"
        ])

        if time_col is not None:
            mask = _build_time_mask(df[time_col], start_time_obj, end_time_obj)
            if mask is not None:
                df = df[mask.fillna(False)]
        elif start_time_obj or end_time_obj:
            errors.append(f"{day}: no timestamp column for time filtering")

        if df.empty:
            continue
        lat_series = pd.to_numeric(df[lat_col], errors="coerce")
        lon_series = pd.to_numeric(df[lon_col], errors="coerce")
        valid_mask = lat_series.notna() & lon_series.notna()
        if not valid_mask.any():
            continue

        subset = df.loc[valid_mask].copy()
        subset["lat"] = lat_series.loc[valid_mask].astype(float)
        subset["lon"] = lon_series.loc[valid_mask].astype(float)

        name_col = _find_column(df, ["name", "shipname", "vessel", "imo", "callsign", "vesselname"])
        if name_col is not None:
            subset_names = subset[name_col].fillna("").astype(str)
        else:
            subset_names = pd.Series("", index=subset.index)
        subset["name"] = subset_names.replace({"nan": "", "None": ""})

        subset["source_date"] = day.isoformat()

        mmsi_col = _find_column(df, ["mmsi", "mmsi_id"])
        if mmsi_col is not None:
            subset_mmsi = subset[mmsi_col].fillna("").astype(str)
            subset_mmsi = subset_mmsi.replace({"nan": "", "None": ""})
            subset["mmsi"] = subset_mmsi
        else:
            subset["mmsi"] = ""

        if time_col is not None:
            ts_series = pd.to_datetime(subset[time_col], errors="coerce", utc=True)
            try:
                ts_local = ts_series.dt.tz_convert(None)
            except TypeError:  # already naive
                ts_local = ts_series
            subset["timestamp"] = ts_local.dt.strftime("%Y-%m-%d %H:%M:%S").fillna("")
        else:
            subset["timestamp"] = ""

        frames.append(subset.reset_index(drop=True))

    if not frames:
        return pd.DataFrame(columns=[
            "name",
            "lat",
            "lon",
            "source_date",
            "timestamp",
            "mmsi"
        ]), errors

    result = pd.concat(frames, ignore_index=True)
    return result, errors

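# ---------------------------------------------------------------------------
# Map rendering: build the Folium map, apply the AOI filter, and return the
# map HTML, a Markdown summary, and the filtered rows as JSON for export.
# ---------------------------------------------------------------------------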
def render_map(selected_date,
               start_time: Optional[str],
               end_time: Optional[str],
               aoi_wkt: Optional[str]) -> Tuple[str, str, str]:
    """
    Build a Leaflet map and return full HTML (rendered by Gradio HTML component).
    """
    lat, lon = _parse_center(DEFAULT_CENTER)
    tile_cfg = TILE_OPTIONS[DEFAULT_TILES]
    map_kwargs = {
        "location": [lat, lon],
        "zoom_start": DEFAULT_ZOOM,
        "tiles": tile_cfg.get("tiles", DEFAULT_TILES),
        "control_scale": True,
        "width": "100%",
        "height": "600px",
    }
    attr = tile_cfg.get("attr")
    if attr:
        map_kwargs["attr"] = attr
    m = folium.Map(**map_kwargs)

    # Points
    bounds: List[Tuple[float, float]] = []
    point_count = 0
    error_message: Optional[str] = None
    error_marker_added = False
    selected_date_str = _coerce_date_string(selected_date)
    export_df = pd.DataFrame()
    try:
        export_df, errors = _load_ais_points(selected_date_str, selected_date_str, start_time, end_time)

        if not export_df.empty:
            export_df, aoi_error = _filter_by_aoi(export_df, aoi_wkt)
            if aoi_error:
                errors.append(aoi_error)

        map_df = pd.DataFrame()
        if not export_df.empty:
            map_df = export_df[["name", "lat", "lon", "source_date", "timestamp", "mmsi"]].copy()
            if len(map_df) > MAX_POINTS:
                sampled_idx = map_df.sample(MAX_POINTS, random_state=0).index
                map_df = map_df.loc[sampled_idx]
            map_df = map_df.reset_index(drop=True)

        if not map_df.empty:
            cluster = MarkerCluster(name="AIS Points").add_to(m)
            for _, r in map_df.iterrows():
                name_raw = r.get("name")
                name = str(name_raw).strip() if name_raw is not None else ""
                if name.lower() == "nan":
                    name = ""
                source_date = r.get("source_date", "?")
                timestamp = r.get("timestamp")
                mmsi = str(r.get("mmsi") or "").strip()

                details = []
                if name:
                    details.append(f"Name: {name}")
                if mmsi:
                    details.append(f"MMSI: {mmsi}")
                details.append(f"Date: {source_date}")
                if isinstance(timestamp, str) and timestamp:
                    details.append(f"Timestamp: {timestamp}")
                details.append(f"Lat: {r['lat']:.6f}")
                details.append(f"Lon: {r['lon']:.6f}")

                popup = "<br>".join(details)
                folium.Marker([r["lat"], r["lon"]], popup=popup).add_to(cluster)
                bounds.append((r["lat"], r["lon"]))
            point_count = len(map_df)
        error_message = _summarize_errors(errors)
    except Exception as e:
        error_message = f"AIS data error: {e}"
        _add_error_marker(m, lat, lon, error_message)
        error_marker_added = True

    if error_message and not error_marker_added:
        _add_error_marker(m, lat, lon, error_message)

    # Fit to data if any bounds collected
    if bounds:
        m.fit_bounds(bounds, padding=(20, 20))
    html = m._repr_html_()

    date_range = _format_date_display(selected_date_str, default_prompt=DEFAULT_DATE_PROMPT)
    time_range = _format_range(start_time, end_time, default_prompt=DEFAULT_TIME_PROMPT)
    info_lines = [
        "### Selected Period",
        f"- Date: {date_range}",
        f"- Times: {time_range}",
        f"- Points on map: {point_count}"
    ]
    if error_message:
        info_lines.append(f"- Error: {error_message}")
    ssl_msg = _ssl_warning()
    if ssl_msg:
        info_lines.append(f"- SSL: {ssl_msg}")
    export_payload = export_df.reset_index(drop=True)
    data_json = export_payload.to_json(orient="records") if not export_payload.empty else "[]"
    return html, "\n".join(info_lines), data_json

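# ---------------------------------------------------------------------------
# Helpers: input cleaning, AOI filtering, error reporting, and export.
# ---------------------------------------------------------------------------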
def _format_range(start: Optional[str], end: Optional[str], default_prompt: str) -> str:
    start_clean = _clean_input(start)
    end_clean = _clean_input(end)
    if not start_clean and not end_clean:
        return default_prompt
    return f"{start_clean or '—'} → {end_clean or '—'}"


def _clean_input(value: Optional[str]) -> Optional[str]:
    if value is None:
        return None
    if isinstance(value, str):
        cleaned = value.strip()
        return cleaned or None
    return str(value)


def _filter_by_aoi(df: pd.DataFrame, wkt_text: Optional[str]) -> Tuple[pd.DataFrame, Optional[str]]:
    wkt_clean = _clean_input(wkt_text)
    if not wkt_clean:
        return df, None
    if not SHAPELY_AVAILABLE or shapely_wkt is None or Point is None:
        return df, "AOI filter unavailable: install shapely."
    try:
        geom = shapely_wkt.loads(wkt_clean)
    except Exception as exc:
        return df, f"AOI parse error: {exc}"
    if geom.is_empty:
        return df, "AOI geometry is empty."

    def contains_point(row) -> bool:
        try:
            pt = Point(float(row["lon"]), float(row["lat"]))
        except Exception:
            return False
        return geom.contains(pt) or geom.touches(pt)

    mask = df.apply(contains_point, axis=1)
    if mask.sum() == 0:
        return df.iloc[0:0].copy(), "AOI filter removed all points."
    return df[mask].reset_index(drop=True), None


def _summarize_errors(errors: List[str]) -> Optional[str]:
    if not errors:
        return None
    unique: List[str] = []
    for err in errors:
        if err not in unique:
            unique.append(err)
        if len(unique) == 3:
            break
    extra = len(errors) - len(unique)
    message = "; ".join(unique)
    if extra > 0:
        message += f"; (+{extra} more)"
    return message

def _add_error_marker(map_obj: folium.Map, lat: float, lon: float, message: str) -> None:
    folium.Marker(
        [lat, lon],
        icon=folium.DivIcon(html=f"<div style='color:red;font-weight:bold;'>{message}</div>")
    ).add_to(map_obj)


def _ssl_warning() -> Optional[str]:
    backend = getattr(ssl, "OPENSSL_VERSION", "")
    if "LibreSSL" in backend:
        return "Detected LibreSSL; Hugging Face downloads need OpenSSL 1.1.1+. Use Python from python.org or upgrade SSL."
    return None


def export_data(fmt: str, data_json: Optional[str]) -> str:
    fmt_clean = (fmt or "").strip().upper()
    if not data_json or not data_json.strip():
        raise gr.Error("No AIS data available to export.")
    try:
        records = json.loads(data_json)
    except json.JSONDecodeError as exc:
        raise gr.Error(f"Export failed: invalid data ({exc}).")
    if not records:
        raise gr.Error("No AIS data available to export.")

    df = pd.DataFrame(records)
    if df.empty:
        raise gr.Error("No AIS data available to export.")

    suffix = {
        "CSV": ".csv",
        "JSON": ".json",
        "XML": ".xml",
    }.get(fmt_clean)
    if suffix is None:
        raise gr.Error(f"Unsupported format: {fmt}.")

    with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
        path = tmp.name

    if fmt_clean == "CSV":
        df.to_csv(path, index=False)
    elif fmt_clean == "JSON":
        df.to_json(path, orient="records", indent=2)
    else:  # XML
        root = ET.Element("AISData")
        for record in records:
            entry = ET.SubElement(root, "Record")
            for key, value in record.items():
                child = ET.SubElement(entry, key)
                child.text = "" if value is None else str(value)
        tree = ET.ElementTree(root)
        tree.write(path, encoding="utf-8", xml_declaration=True)

    return path


def _coerce_date_string(value) -> Optional[str]:
    parsed = _parse_date(value)
    if parsed is not None:
        return parsed.isoformat()
    cleaned = _clean_input(value)
    return cleaned


def _format_date_display(value: Optional[str], default_prompt: str) -> str:
    parsed = _parse_date(value)
    if parsed is not None:
        return parsed.isoformat()
    cleaned = _clean_input(value)
    return cleaned or default_prompt

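# ---------------------------------------------------------------------------
# Gradio UI: filters (date, UTC time window, AOI WKT), map view, and export.
# ---------------------------------------------------------------------------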
with gr.Blocks(title="AIS MAP - ESA") as demo:
    if BANNER_PATH.exists():
        gr.Image(
            value=str(BANNER_PATH),
            show_label=False,
            interactive=False,
            elem_id="banner",
        )
    gr.Markdown(
        """
        #### This data access provides globally collected Automatic Identification System (AIS) data, structured and organized on a daily basis for consistent access and analysis. Lightweight utilities fetch and normalize the data from the AIS Hub webservice.
        """
    )
    gr.Markdown(
        """
        *Developed by ESA Φ-lab, accelerating the future of Earth Observation (EO) through disruptive and transformational innovations and commercialisation.*
        """
    )

    gr.Markdown("## Φ-lab Interactive AIS Map")
    gr.Markdown(
        """
        ### Quick guide
        Select the **date** to retrieve AIS snapshots, optionally narrow the **UTC time window**, and focus on your study area by pasting an **AOI polygon** in WKT form. Hit **Apply Filters** to refresh the map; use **Export** to download the full table of filtered messages.
        """
    )

    initial_date_value = DEFAULT_DATE_OBJ if GrDateComponent is not None else DEFAULT_DATE

    with gr.Row():
        if GrDateComponent is not None:
            selected_date = GrDateComponent(
                label="Date",
                value=initial_date_value,
            )
        else:
            selected_date = gr.Textbox(
                label="Date (YYYY-MM-DD)",
                value=initial_date_value,
                placeholder="YYYY-MM-DD",
                scale=1,
                max_lines=1,
                min_width=160,
            )
        start_time = gr.Textbox(
            label="Start time",
            placeholder="HH:MM:SS",
            value=DEFAULT_START_TIME,
            scale=1,
            max_lines=1,
            min_width=120,
        )
        end_time = gr.Textbox(
            label="End time",
            placeholder="HH:MM:SS",
            value=DEFAULT_END_TIME,
            scale=1,
            max_lines=1,
            min_width=120,
        )

    with gr.Row():
        aoi_wkt = gr.Textbox(
            label="AOI (Polygon WKT)",
            placeholder="POLYGON((lon lat, ...))",
            value=DEFAULT_AOI_WKT,
            lines=3,
            max_lines=6,
        )

    btn = gr.Button("Apply Filters", variant="primary")

    initial_map, initial_info, initial_data = render_map(
        initial_date_value,
        DEFAULT_START_TIME,
        DEFAULT_END_TIME,
        DEFAULT_AOI_WKT
    )
    out = gr.HTML(label="Map", value=initial_map, elem_id="map-view")
    period = gr.Markdown(value=initial_info, elem_id="period-info")
    data_state = gr.State(initial_data)

    input_components = [selected_date, start_time, end_time, aoi_wkt]

    with gr.Row():
        export_format = gr.Dropdown(
            ["CSV", "JSON", "XML"],
            value="CSV",
            label="Export format",
            scale=1,
        )
        export_btn = gr.Button("Export", variant="secondary")
        download = gr.File(label="Download", file_count="single")

    demo.load(render_map, inputs=input_components, outputs=[out, period, data_state])
    btn.click(render_map, inputs=input_components, outputs=[out, period, data_state])
    export_btn.click(export_data, inputs=[export_format, data_state], outputs=download)

if __name__ == "__main__":
    # share=True also serves a temporary public Gradio link alongside the local URL.
    demo.launch(share=True)