Urban Centers at Flood Risk Analysis

Urban Centers at Flood Risk Analysis

This notebook guides you through:

  1. Selecting an Area of Interest (AOI) interactively on a map.

  2. Fetching demographic and flood risk data.

  3. Identifying urban centers within the AOI that are at the highest risk due to flooding, using GHS settlement population metrics.

  4. Visualizing and ranking these areas.

Open In Colab

# !pip install geopandas ipyleaflet
import json
import requests
import pandas as pd
import geopandas as gpd
from shapely.geometry import shape
from ipyleaflet import Map, DrawControl
from space2stats_client import Space2StatsClient
from space2stats_client.widgets import AOISelector
# Launch an interactive map widget; draw a polygon/rectangle on it to define
# the Area of Interest (AOI). Initially centered near (27.0N, 29.7E) — pan/zoom as needed.
aoi_selector = AOISelector(center=(27.0, 29.7), zoom=6)
aoi_selector.display()
# Capture the drawn AOI; its .gdf attribute (used below) holds the selection
# as a GeoDataFrame with `geometry` and `name` columns.
aoi = aoi_selector.aoi
aoi
                                            geometry        name
0  POLYGON ((23.14275 -18.94162, 23.14275 -16.892...  User AOI 1
# Instantiate the Space2Stats API client.
client = Space2StatsClient()
# NOTE: behind a corporate proxy with TLS interception you may hit certificate
# errors. As a temporary troubleshooting measure only (insecure — never in
# production), the client can be created with verification disabled:
# client = Space2StatsClient(verify_ssl=False)

# Fields to request: 2025 population totals (overall / female / male),
# flood-exposed population (count and share), and per-class GHS settlement
# populations.
fields = [
    "sum_pop_2025",
    "sum_f_2025",
    "sum_m_2025",
    "pop_flood",
    "pop_flood_pct",
    "ghs_11_pop",
    "ghs_12_pop",
    "ghs_13_pop",
    "ghs_21_pop",
    "ghs_22_pop",
    "ghs_23_pop",
    "ghs_30_pop",
]

# List every field the API exposes (useful for discovering other variables).
client.get_fields()
c:\WBG\Anaconda3\envs\s2s_ingest\lib\site-packages\urllib3\connectionpool.py:1099: InsecureRequestWarning: Unverified HTTPS request is being made to host 'space2stats.ds.io'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings
  warnings.warn(
['fires_density_mean',
 'pop',
 'pop_flood',
 'pop_flood_pct',
 'ghs_11_count',
 'ghs_12_count',
 'ghs_13_count',
 'ghs_21_count',
 'ghs_22_count',
 'ghs_23_count',
 'ghs_30_count',
 'ghs_total_count',
 'ghs_11_pop',
 'ghs_12_pop',
 'ghs_13_pop',
 'ghs_21_pop',
 'ghs_22_pop',
 'ghs_23_pop',
 'ghs_30_pop',
 'ghs_total_pop',
 'sum_viirs_ntl_2012',
 'sum_viirs_ntl_2013',
 'sum_viirs_ntl_2014',
 'sum_viirs_ntl_2015',
 'sum_viirs_ntl_2016',
 'sum_viirs_ntl_2017',
 'sum_viirs_ntl_2018',
 'sum_viirs_ntl_2019',
 'sum_viirs_ntl_2020',
 'sum_viirs_ntl_2021',
 'sum_viirs_ntl_2022',
 'sum_viirs_ntl_2023',
 'sum_viirs_ntl_2024',
 'sum_built_area_m_1975',
 'sum_built_area_m_1980',
 'sum_built_area_m_1985',
 'sum_built_area_m_1990',
 'sum_built_area_m_1995',
 'sum_built_area_m_2000',
 'sum_built_area_m_2005',
 'sum_built_area_m_2010',
 'sum_built_area_m_2015',
 'sum_built_area_m_2020',
 'sum_built_area_m_2025',
 'sum_built_area_m_2030',
 'sum_f_00_2025',
 'sum_f_01_2025',
 'sum_f_05_2025',
 'sum_f_10_2025',
 'sum_f_15_2025',
 'sum_f_20_2025',
 'sum_f_25_2025',
 'sum_f_30_2025',
 'sum_f_35_2025',
 'sum_f_40_2025',
 'sum_f_45_2025',
 'sum_f_50_2025',
 'sum_f_55_2025',
 'sum_f_60_2025',
 'sum_f_65_2025',
 'sum_f_70_2025',
 'sum_f_75_2025',
 'sum_f_80_2025',
 'sum_f_85_2025',
 'sum_f_90_2025',
 'sum_m_00_2025',
 'sum_m_01_2025',
 'sum_m_05_2025',
 'sum_m_10_2025',
 'sum_m_15_2025',
 'sum_m_20_2025',
 'sum_m_25_2025',
 'sum_m_30_2025',
 'sum_m_35_2025',
 'sum_m_40_2025',
 'sum_m_45_2025',
 'sum_m_50_2025',
 'sum_m_55_2025',
 'sum_m_60_2025',
 'sum_m_65_2025',
 'sum_m_70_2025',
 'sum_m_75_2025',
 'sum_m_80_2025',
 'sum_m_85_2025',
 'sum_m_90_2025',
 'sum_pop_2015',
 'sum_pop_2016',
 'sum_pop_2017',
 'sum_pop_2018',
 'sum_pop_2019',
 'sum_pop_2021',
 'sum_pop_2022',
 'sum_pop_2023',
 'sum_pop_2024',
 'sum_pop_2025',
 'sum_pop_2026',
 'sum_pop_2027',
 'sum_pop_2028',
 'sum_pop_2029',
 'sum_pop_2030',
 'sum_pop_2020',
 'sum_f_2025',
 'sum_m_2025',
 'cy_frequency_mean',
 'landslide_susceptibility_mean_2023',
 'drought_spei_1_5_rp100_mean']
# Fetch the requested fields for hexagons whose centroid falls inside the AOI,
# returning polygon geometries so the results can be mapped.
df = client.get_summary(
    gdf=aoi.gdf,
    spatial_join_method="centroid",
    fields=fields,
    geometry="polygon"
)
Fetching data for boundary 1 of 1...
c:\WBG\Anaconda3\envs\s2s_ingest\lib\site-packages\urllib3\connectionpool.py:1099: InsecureRequestWarning: Unverified HTTPS request is being made to host 'space2stats.ds.io'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings
  warnings.warn(
# Turn the API response into a GeoDataFrame in WGS84 (EPSG:4326).
if isinstance(df.geometry.iloc[0], str):
    # Geometries came back as GeoJSON strings — parse them into dicts first.
    df["geometry"] = df.geometry.map(json.loads)
# Build shapely geometry objects from the GeoJSON-like mappings.
df["geometry"] = df.geometry.map(shape)
gdf = gpd.GeoDataFrame(df, geometry="geometry", crs="EPSG:4326")
# Urban population per hexagon, summed over the GHS settlement classes
# 21 (suburban/peri-urban), 22 (semi-dense urban cluster),
# 23 (dense urban cluster) and 30 (urban centre).
gdf["urban_pop"] = (
    gdf["ghs_21_pop"] + gdf["ghs_22_pop"] + gdf["ghs_23_pop"] + gdf["ghs_30_pop"]
)

# Risk score: share of population exposed to flooding, weighted by urban population.
gdf["risk_score"] = gdf["pop_flood_pct"] * gdf["urban_pop"]

# Keep only cells with a non-trivial urban population (> 10 people).
# .copy() so the later column assignments don't raise SettingWithCopyWarning.
urban_gdf = gdf[gdf["urban_pop"] > 10].copy()

# Normalize scores to a 0-100 scale for ranking and mapping.
max_score = urban_gdf["risk_score"].max()
if pd.notna(max_score) and max_score > 0:
    urban_gdf["risk_score_norm"] = (urban_gdf["risk_score"] / max_score) * 100
else:
    # Degenerate case (no rows left, or all scores zero): avoid a
    # divide-by-zero / all-NaN column so the map step below still works.
    urban_gdf["risk_score_norm"] = 0.0

# Rank highest-risk urban areas first.
urban_gdf = urban_gdf.sort_values("risk_score_norm", ascending=False)
urban_gdf.head()
name index_gdf index_h3 hex_id geometry sum_pop_2025 sum_f_2025 sum_m_2025 pop_flood pop_flood_pct ghs_11_pop ghs_12_pop ghs_13_pop ghs_21_pop ghs_22_pop ghs_23_pop ghs_30_pop urban_pop risk_score risk_score_norm
1565 User AOI 1 0 1565 86975c6e7ffffff POLYGON ((24.32095 -17.56453, 24.336 -17.53104... 17680.664062 9280.703758 8400.042776 20431.485318 0.776742 473.656001 1322.125516 898.069742 771.438040 0.000000 22345.831186 0.000000 23117.269227 17956.151296 100.000000
1561 User AOI 1 0 1561 86975c6c7ffffff POLYGON ((24.31304 -17.50169, 24.32809 -17.468... 44916.179688 23155.416574 21760.899724 5703.574008 0.371334 71.713740 1442.819373 0.000000 5825.892044 0.000000 32459.696646 0.000000 38285.588691 14216.731279 79.174713
569 User AOI 1 0 569 869751447ffffff POLYGON ((25.87178 -17.90025, 25.88668 -17.866... 67300.851562 33801.402960 33499.064533 9551.015205 0.140600 49.595044 824.989772 0.000000 6481.926981 0.000000 0.000000 81314.995703 87796.922685 12344.234663 68.746551
571 User AOI 1 0 571 869751457ffffff POLYGON ((25.92485 -17.86218, 25.93974 -17.828... 50264.140625 25244.575717 25019.274290 3817.371648 0.103021 91.652360 1439.760560 0.000000 1724.191948 0.000000 0.000000 51737.034581 53461.226529 5507.628617 30.672657
528 User AOI 1 0 528 86975128fffffff POLYGON ((25.28298 -17.84014, 25.29795 -17.806... 10345.385742 5195.925647 5149.443467 2167.917276 0.379247 93.356993 302.018942 0.000000 2430.788262 5397.047936 5111.016867 0.000000 12938.853066 4907.021088 27.327800
# Interactive choropleth of the normalized risk score, classed by quantiles.
polygon_style = {"weight": 0.5, "fillOpacity": 0.8}
legend_opts = {"colorbar": True, "caption": "Urban Flood Score", "interval": False}
m_risk = urban_gdf.explore(
    column="risk_score_norm",
    tooltip=["sum_pop_2025", "pop_flood", "pop_flood_pct", "urban_pop", "risk_score"],
    cmap="OrRd",
    legend=True,
    scheme="quantiles",
    legend_kwds=legend_opts,
    style_kwds=polygon_style,
    name="Urban Flood Risk",
)

# Overlay the AOI outline in red on the same map.
aoi.gdf.explore(
    m=m_risk,
    color='red',
    weight=3,
    fill=False,
    name="AOI Boundary",
)

# Persist the interactive map as a standalone HTML file.
m_risk.save("urban_flood_risk_map.html")