Merge branch 'master' into aa-staging
wadhwamatic authored Jan 16, 2025
2 parents 8271424 + 3ad6e08 commit e235d18
Showing 50 changed files with 2,831 additions and 1,553 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/api.yml
@@ -18,6 +18,10 @@ jobs:

steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install Poetry
uses: snok/install-poetry@v1
with:
2 changes: 1 addition & 1 deletion README.md
@@ -24,7 +24,7 @@ The configuration is split into three files that you can find in `src/config`:
- 2. `layers.json`
- 3. `tables.json`

Since many layers are common across multiple countries, we created shared configuration files that any deployment can access. These layers are generated by WFP globally and made available through the Humanitarian Data Cube. You can find the layers and their associated styles / legends in `src/config/shared`:
Since many layers are common across multiple countries, we created shared configuration files that any deployment can access. Many of these layers are generated by WFP globally and made available through the Humanitarian Data Cube. You can find the layers and their associated styles / legends in `src/config/shared`:
- 1. `legends.json`
- 2. `layers.json`

195 changes: 195 additions & 0 deletions api/app/googleflood.py
@@ -0,0 +1,195 @@
"""Get data from Google Floods API"""

import logging
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime, timezone
from os import getenv
from urllib.parse import urlencode

from fastapi import HTTPException

from .utils import make_request_with_retries

logger = logging.getLogger(__name__)

GOOGLE_FLOODS_API_KEY = getenv("GOOGLE_FLOODS_API_KEY", "")
if GOOGLE_FLOODS_API_KEY == "":
logger.warning("Missing backend parameter: GOOGLE_FLOODS_API_KEY")


def format_gauge_to_geojson(data):
"""Format Gauge data to GeoJSON"""
geojson = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
data["gaugeLocation"]["longitude"],
data["gaugeLocation"]["latitude"],
],
},
"properties": {
"gaugeId": data["gaugeId"],
"issuedTime": data["issuedTime"],
"siteName": data["siteName"],
"riverName": (
data["river"] if "river" in data and len(data["river"]) > 1 else None
),
"severity": data.get("severity", None),
"source": data.get("source", None),
"qualityVerified": data.get("qualityVerified", None),
"thresholds": data.get("thresholds", None),
"gaugeValueUnit": data.get("gaugeValueUnit", None),
},
}
if "inundationMapSet" in data:
geojson["properties"]["inundationMapSet"] = data["inundationMapSet"]
return geojson


def fetch_flood_status(region_code):
"""Fetch flood status for a region code"""
flood_status_url = f"https://floodforecasting.googleapis.com/v1/floodStatus:searchLatestFloodStatusByArea?key={GOOGLE_FLOODS_API_KEY}"
status_response = make_request_with_retries(
flood_status_url, method="post", data={"regionCode": region_code}, retries=3
)
return status_response


def fetch_flood_statuses_concurrently(region_codes: list[str]) -> list[dict]:
"""Fetch flood statuses concurrently for a list of region codes."""
flood_statuses = []
with ThreadPoolExecutor() as executor:
future_to_region = {
executor.submit(fetch_flood_status, code): code for code in region_codes
}
for future in as_completed(future_to_region):
status_response = future.result()
if "error" in status_response:
logger.error("Error in response: %s", status_response["error"])
raise HTTPException(
status_code=500,
detail="Error fetching flood status data from Google API",
)
flood_statuses.extend(status_response.get("floodStatuses", []))
return flood_statuses


def get_google_flood_dates(region_codes: list[str]):
"""Fetch dates from the Google Floods API."""
flood_statuses = fetch_flood_statuses_concurrently(region_codes)

parsed_issued_times = [
datetime.strptime(status["issuedTime"], "%Y-%m-%dT%H:%M:%S.%fZ")
for status in flood_statuses
if "issuedTime" in status
]
parsed_issued_times.sort(reverse=True) # Sort in descending order

# Format only the most recent date
most_recent_date = (
{
"date": parsed_issued_times[0]
.replace(tzinfo=timezone.utc)
.strftime("%Y-%m-%d")
}
if parsed_issued_times
else {}
)

return [most_recent_date] if most_recent_date else []


def get_google_floods_gauges(
region_codes: list[str],
as_geojson: bool = True,
):
"""Get statistical charts data"""
initial_gauges = fetch_flood_statuses_concurrently(region_codes)

gauge_details_params = urlencode(
{"names": [f"gauges/{gauge['gaugeId']}" for gauge in initial_gauges]},
doseq=True,
)
gauges_details_url = f"https://floodforecasting.googleapis.com/v1/gauges:batchGet?key={GOOGLE_FLOODS_API_KEY}&{gauge_details_params}"

gauge_models_params = urlencode(
{"names": [f"gaugeModels/{gauge['gaugeId']}" for gauge in initial_gauges]},
doseq=True,
)
gauges_models_url = f"https://floodforecasting.googleapis.com/v1/gaugeModels:batchGet?key={GOOGLE_FLOODS_API_KEY}&{gauge_models_params}"

# Run both requests
details_response = make_request_with_retries(gauges_details_url)
models_response = make_request_with_retries(gauges_models_url)

# Create maps for quick lookup
gauge_details_map = {
item["gaugeId"]: item for item in details_response.get("gauges", [])
}
gauge_models_map = {
item["gaugeId"]: item for item in models_response.get("gaugeModels", [])
}

gauges_details = []
for gauge in initial_gauges:
gauge_id = gauge["gaugeId"]
detail = gauge_details_map.get(gauge_id, {})
model = gauge_models_map.get(gauge_id, {})
merged_gauge = {**gauge, **detail, **model}
gauges_details.append(merged_gauge)

if as_geojson:
features = []
for gauge in gauges_details:
try:
feature = format_gauge_to_geojson(gauge)
features.append(feature)
except Exception as e:
logger.error(
"Failed to format gauge %s: %s", gauge.get("gaugeId"), str(e)
)
continue

geojson_feature_collection = {
"type": "FeatureCollection",
"features": features,
}
return geojson_feature_collection
return gauges_details


def get_google_floods_gauge_forecast(gauge_ids: list[str]):
"""Get forecast data for a gauge"""

gauge_params = urlencode(
{"gaugeIds": [gauge_id for gauge_id in gauge_ids]},
doseq=True,
)
forecast_url = f"https://floodforecasting.googleapis.com/v1/gauges:queryGaugeForecasts?key={GOOGLE_FLOODS_API_KEY}&{gauge_params}"
forecast_response = make_request_with_retries(forecast_url)

forecasts = forecast_response.get("forecasts", {})

forecast_data = {}
for gauge_id in gauge_ids:
forecast_map = {}
for forecast in forecasts.get(gauge_id, {}).get("forecasts", []):
issued_time = forecast.get("issuedTime")
for forecast_range in forecast.get("forecastRanges", []):
start_time = forecast_range.get("forecastStartTime")
value = round(forecast_range.get("value"), 2)

# Deduplicate by forecastStartTime, keeping the most recent issuedTime
if (
start_time not in forecast_map
or issued_time > forecast_map[start_time]["issuedTime"]
):
forecast_map[start_time] = {
"issuedTime": issued_time,
"value": [start_time, value],
}

forecast_data[gauge_id] = list(forecast_map.values())

return forecast_data
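
For reviewers who want to exercise the new module locally, here is a minimal usage sketch (not part of the commit). It assumes GOOGLE_FLOODS_API_KEY is set in the environment and that the api/app package is importable as `app`; the region code "BD" is purely illustrative.

# Minimal usage sketch; "BD" is an illustrative iso2 region code.
from app.googleflood import (
    get_google_flood_dates,
    get_google_floods_gauge_forecast,
    get_google_floods_gauges,
)

# GeoJSON FeatureCollection of gauges for the region
gauges = get_google_floods_gauges(["BD"])
gauge_ids = [f["properties"]["gaugeId"] for f in gauges["features"]]

# Most recent issued date, e.g. [{"date": "YYYY-MM-DD"}]
print(get_google_flood_dates(["BD"]))

# Forecast values keyed by gauge id
print(get_google_floods_gauge_forecast(gauge_ids))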
62 changes: 62 additions & 0 deletions api/app/main.py
@@ -14,6 +14,11 @@
from app.database.alert_model import AlchemyEncoder, AlertModel
from app.database.database import AlertsDataBase
from app.database.user_info_model import UserInfoModel
from app.googleflood import (
get_google_flood_dates,
get_google_floods_gauge_forecast,
get_google_floods_gauges,
)
from app.hdc import get_hdc_stats
from app.kobo import get_form_dates, get_form_responses, parse_datetime_params
from app.models import AcledRequest, RasterGeotiffModel
@@ -410,3 +415,60 @@ def post_raster_geotiff(raster_geotiff: RasterGeotiffModel):
return JSONResponse(
content={"download_url": presigned_download_url}, status_code=200
)


@app.get("/google-floods/gauges/")
def get_google_floods_gauges_api(region_codes: list[str] = Query(...)):
"""
Get the Google Floods gauges for a list of regions.
"""
if not region_codes:
raise HTTPException(
status_code=400,
detail="At least one region code must be provided.",
)
for region_code in region_codes:
if len(region_code) != 2:
raise HTTPException(
status_code=400,
detail=f"Region code '{region_code}' must be exactly two characters (iso2).",
)

iso2_codes = [region_code.upper() for region_code in region_codes]
return get_google_floods_gauges(iso2_codes)


@app.get("/google-floods/dates/")
def get_google_floods_dates_api(region_codes: list[str] = Query(...)):
"""
Get the Google Floods dates for a list of regions.
"""
if not region_codes:
raise HTTPException(
status_code=400,
detail="At least one region code must be provided.",
)

for region_code in region_codes:
if len(region_code) != 2:
raise HTTPException(
status_code=400,
detail=f"Region code '{region_code}' must be exactly two characters (iso2).",
)

iso2_codes = [region_code.upper() for region_code in region_codes]
return get_google_flood_dates(iso2_codes)


@app.get("/google-floods/gauges/forecasts")
def get_google_floods_gauge_forecast_api(
gauge_ids: str = Query(..., description="Comma-separated list of gauge IDs")
):
"""Get forecast data for a gauge or multiple gauges"""
    gauge_id_list = [gid.strip() for gid in gauge_ids.split(",") if gid.strip()]
if not gauge_id_list:
raise HTTPException(
status_code=400,
detail="gauge_ids must be provided and contain at least one value.",
)
return get_google_floods_gauge_forecast(gauge_id_list)
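
The three endpoints above can be smoke-tested with plain HTTP requests. Below is a sketch using the requests library; the base URL is an assumption about a local run of the API, the iso2 codes are illustrative, and the gauge id is the one recorded in the test cassette further down.

import requests

BASE = "http://localhost:80"  # hypothetical local deployment

# region_codes is a repeated query parameter (FastAPI Query(...) list handling)
gauges = requests.get(
    f"{BASE}/google-floods/gauges/", params={"region_codes": ["BD", "MZ"]}
).json()

dates = requests.get(
    f"{BASE}/google-floods/dates/", params={"region_codes": ["BD"]}
).json()

# gauge_ids is a single comma-separated string
forecasts = requests.get(
    f"{BASE}/google-floods/gauges/forecasts",
    params={"gauge_ids": "hybas_1121465590"},
).json()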
3 changes: 3 additions & 0 deletions api/app/pytest.ini
@@ -0,0 +1,3 @@
[pytest]
log_cli = true
log_cli_level = INFO
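
With log_cli enabled at INFO, pytest streams log records to the terminal while tests run, so messages such as the missing GOOGLE_FLOODS_API_KEY warning are visible during the run. A hypothetical test illustrating the effect:

import logging

logger = logging.getLogger(__name__)


def test_log_output_is_streamed():
    # Shown live in the console because of log_cli = true / log_cli_level = INFO
    logger.info("visible in the pytest console output")
    assert True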
@@ -0,0 +1,67 @@
interactions:
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
User-Agent:
- python-requests/2.32.0
method: GET
uri: https://floodforecasting.googleapis.com/v1/gauges:queryGaugeForecasts?gaugeIds=hybas_1121465590
response:
body:
string: !!binary |
H4sIAAAAAAAC/9Vay24bRxC86ysEnaVBvx+8+wcSnRIEgZIojoHEBiw5QGDo39OkY4GkL9zWEhMB
PJDLnd3isruqq3s+X1xeXv3+4eP9r3cPjw9Xm8vPdaAO/fHPL3cPPyMSiqkmPH9zdPqP/x28fP56
d8q7h4dP97/dvvvrvs65IiC5gbghuUXeKG00hlEE2A9X1/vLvl75u7v3b+8PL398i935f9/9+Wl7
B40cyS4caRLCrtfHp3699PePdx8fvwHGtwCb3esQ0cHSN++//UX7Cw/WPV2fCD1xACKIszEF0lLo
0oUua0APYjYh9QZy7SLXFyOveBHX1ArvTrhYF7m9HLkNURcNSYNcDt270P3l0HmkkVuoisVi5NFF
Hi9HDqNiXNzAhXA59OxCzxdDdx3uFeTIELo8Xhia0A8WHkLf+/TT8/u9H3SKkuhWSYQ2IJUOmbme
kvgg8Ajt/dMT2bg4zRQsK1rhtdFx1QMVpZSY2XnqM/lYRgZVwLTSayod07AIr2gHXh4tE9nYS7wF
I0UbuOdSMSRVMc1MjUd+Dio+GXoJoBiIdNKTsQscz6ohtnMjudOQKgU5VtOQqtEEvJg4O9XlTCK2
QQ4oaIBqi5NrLg+X9klX/GbyMFZx6RgK2pKQmUy89fFVzSejCSyPl5lkXLqNLE0HOJWLeSj2jMg5
mHgB7MLMDgbL7V/eQA/60cL1RcR3IsIbqOobQsRXExGt3CqrT66cjdyaycVaHio1xckaKjK3RREu
xuzReeYzWxQ4oNsTmknDMbAAI/VsyFQe3nYKpCSkBXwmE1d5ysHaqebPQ8QLkCNzIuLyVnkBoC5y
OquExK6XpdupiJdr4PUkJAcRGoaVu2kYkZlEnIOZm3w2k4Z9WOk1g722grgMlKNls68yk4ir3BCm
svCI/rpq4ooWrYTvSMhcIg4bllRVfEb8T5h4QdszVaJkZLHvq/v3BsVHC9eXkLxF2QBtkAYZg+Rq
EgIjJcopN+chc4d29f9Gq8yZXhRrAjlkS/+mFsU5AJ072TWZimEEUKjJcvmbzMUwzJw620fmMnEZ
qCrkIQN7z/wMXHwydBruTkHeaB8Wgt6Y+Gjh6jJS+f9lfxb7SNzueFhLRpzLiQjJK+wT0xDndsN1
JhVXjFZsFh13dmdNbVAU8vL56OG4WP8mdyh4VIinl5b4Yss9mY5haBkohWg0P+fSsdkgFG7sKzsP
F5+MWwYLRaouH1oWgN6g+Gjh+iryJYtwIzyUSuVX62eZV6XT7f/NJWOLEYbUGn7NJWPLwVuvT5av
rDB2HGAogGyL+0JzqbiiBcpEARMtn6BNpmIdgk7Z2s4+mY25ivoSvmZP6wxsfDJyHKUl6oqtHO2N
io8WnqIjF/tHtp+eLp4u/gULG2d/gzIAAA==
headers:
Alt-Svc:
- h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
Cache-Control:
- private
Content-Encoding:
- gzip
Content-Type:
- application/json; charset=UTF-8
Date:
- Sat, 31 Aug 2024 00:23:43 GMT
Server:
- ESF
Server-Timing:
- gfet4t7; dur=397
Transfer-Encoding:
- chunked
Vary:
- Origin
- X-Origin
- Referer
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- '0'
status:
code: 200
message: OK
version: 1
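
The recorded interaction above is a VCR-style cassette, so a test can replay the Google Floods forecast response without network access or an API key. A minimal sketch of such a test, assuming the vcrpy library and a hypothetical cassette path; filtering the key query parameter keeps request matching consistent with the recorded URI, which contains no key.

import vcr

from app.googleflood import get_google_floods_gauge_forecast


# Cassette path is hypothetical; filter_query_parameters strips the API key
# before matching, consistent with the recorded URI above.
@vcr.use_cassette(
    "api/app/tests/cassettes/gauge_forecast.yaml", filter_query_parameters=["key"]
)
def test_gauge_forecast_uses_recorded_response():
    data = get_google_floods_gauge_forecast(["hybas_1121465590"])
    assert "hybas_1121465590" in data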