From 5ebcee21fe3430c02725dab4e3b7e90f15051055 Mon Sep 17 00:00:00 2001 From: Shautvast Date: Mon, 30 Mar 2026 11:01:20 +0200 Subject: [PATCH] fix install errors --- backend/docker-compose.yml | 1 + backend/initdb/01_extensions.sql | 2 + backend/scripts/02_import_tiles.sh | 27 +++--- backend/scripts/03_import_pois.sh | 40 +++++++-- backend/scripts/04_import_geocoding.sh | 40 ++++----- backend/scripts/05_import_routing.sh | 37 +++++---- backend/scripts/06_build_offline_packages.sh | 86 +++++++------------- backend/scripts/update_all.sh | 30 +++---- 8 files changed, 127 insertions(+), 136 deletions(-) create mode 100644 backend/initdb/01_extensions.sql diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml index 2b15f12..4ed5444 100644 --- a/backend/docker-compose.yml +++ b/backend/docker-compose.yml @@ -43,6 +43,7 @@ services: POSTGRES_DB: maps volumes: - maps-pgdata:/var/lib/postgresql/data + - ./initdb:/docker-entrypoint-initdb.d redis: image: docker.io/redis:7-alpine diff --git a/backend/initdb/01_extensions.sql b/backend/initdb/01_extensions.sql new file mode 100644 index 0000000..67a0700 --- /dev/null +++ b/backend/initdb/01_extensions.sql @@ -0,0 +1,2 @@ +CREATE EXTENSION IF NOT EXISTS postgis; +CREATE EXTENSION IF NOT EXISTS hstore; diff --git a/backend/scripts/02_import_tiles.sh b/backend/scripts/02_import_tiles.sh index d04bfcd..81a9e87 100755 --- a/backend/scripts/02_import_tiles.sh +++ b/backend/scripts/02_import_tiles.sh @@ -1,27 +1,22 @@ #!/bin/bash # scripts/02_import_tiles.sh +# Import OSM data into PostGIS using osm2pgsql's default schema. +# Martin serves vector tiles directly from these tables. 
-PBF_FILE="/data/osm/region.osm.pbf" -PG_CONN="postgresql://maps:maps@postgres:5432/maps" +set -euo pipefail -# Import OSM data into PostGIS using openmaptiles schema -# This creates the tables that Martin reads for tile generation -cd /opt/openmaptiles +PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}" +PG_CONN="${PG_CONN:-postgres://maps:maps@postgres:5432/maps}" + +echo "=== Importing tile data from $PBF_FILE ===" -# osm2pgsql import with openmaptiles mapping osm2pgsql \ --create \ --slim \ --database "$PG_CONN" \ - --style openmaptiles.style \ - --tag-transform-script lua/tagtransform.lua \ - --number-processes 4 \ - --cache 4096 \ - --flat-nodes /data/osm/nodes.cache \ + --hstore \ + --number-processes 2 \ + --cache 512 \ "$PBF_FILE" -# Run openmaptiles SQL post-processing to create materialized views -# that Martin serves as tile layers -psql "$PG_CONN" -f build/openmaptiles.sql - -echo "Tile data import complete. Martin will serve tiles from PostGIS." +echo "Tile data import complete. Tables: planet_osm_point, planet_osm_line, planet_osm_polygon, planet_osm_roads" diff --git a/backend/scripts/03_import_pois.sh b/backend/scripts/03_import_pois.sh index 6473060..d0e7da8 100755 --- a/backend/scripts/03_import_pois.sh +++ b/backend/scripts/03_import_pois.sh @@ -1,21 +1,43 @@ #!/bin/bash # scripts/03_import_pois.sh +# Create the pois table and import POI data using osm2pgsql flex output. 
-PBF_FILE="/data/osm/region.osm.pbf" -PG_CONN="postgresql://maps:maps@postgres:5432/maps" +set -euo pipefail -# Run the initial migration to create the pois table -psql "$PG_CONN" -f /app/migrations/001_create_pois.sql +PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}" +PG_CONN="${PG_CONN:-postgres://maps:maps@postgres:5432/maps}" + +echo "=== Creating pois table ===" + +psql "$PG_CONN" <<'SQL' +CREATE TABLE IF NOT EXISTS pois ( + osm_id BIGINT NOT NULL, + osm_type CHAR(1) NOT NULL, + name TEXT, + category TEXT, + geometry GEOMETRY(Point, 4326) NOT NULL, + address JSONB, + tags JSONB, + opening_hours TEXT, + phone TEXT, + website TEXT, + wheelchair TEXT, + PRIMARY KEY (osm_type, osm_id) +); + +CREATE INDEX IF NOT EXISTS pois_geometry_idx ON pois USING GIST (geometry); +CREATE INDEX IF NOT EXISTS pois_category_idx ON pois (category); +SQL + +echo "=== Importing POIs from $PBF_FILE ===" -# Import POIs using osm2pgsql with a custom Lua transform osm2pgsql \ --create \ --output=flex \ - --style /app/scripts/poi_flex.lua \ + --style /app/scripts/pois.lua \ --database "$PG_CONN" \ - --cache 2048 \ - --number-processes 4 \ - --flat-nodes /data/osm/nodes.cache \ + --cache 512 \ + --number-processes 2 \ "$PBF_FILE" echo "POI import complete." diff --git a/backend/scripts/04_import_geocoding.sh b/backend/scripts/04_import_geocoding.sh index b635814..9435cac 100755 --- a/backend/scripts/04_import_geocoding.sh +++ b/backend/scripts/04_import_geocoding.sh @@ -1,31 +1,27 @@ #!/bin/bash # scripts/04_import_geocoding.sh +# Download Photon's pre-built country geocoding index. +# This avoids running a full Nominatim import (a heavy, slow pipeline); Photon is the Java service here. +# Photon provides ready-to-use country extracts. + +set -euo pipefail -PBF_FILE="/data/osm/region.osm.pbf" -NOMINATIM_DATA="/data/nominatim" PHOTON_DATA="/data/photon" +# Change COUNTRY_CODE to match your region (nl=Netherlands, de=Germany, fr=France, etc.) 
+COUNTRY_CODE="${PHOTON_COUNTRY_CODE:-nl}" -# --- Nominatim Import --- -# Nominatim builds a PostgreSQL database with geocoding data. -# Photon reads from this database to build its Elasticsearch index. +mkdir -p "$PHOTON_DATA" -nominatim import \ - --osm-file "$PBF_FILE" \ - --project-dir "$NOMINATIM_DATA" \ - --threads 4 +echo "=== Downloading Photon index for country: $COUNTRY_CODE ===" -# --- Photon Import --- -# Photon reads the Nominatim database and builds an Elasticsearch index. -# This index is what Photon uses to serve search queries. +# Photon country extracts from GraphHopper (maintained by komoot/photon project) +PHOTON_URL="https://download1.graphhopper.com/public/extracts/by-country-code/${COUNTRY_CODE}/photon-db-${COUNTRY_CODE}-latest.tar.bz2" -java -jar /opt/photon/photon.jar \ - -nominatim-import \ - -host localhost \ - -port 5432 \ - -database nominatim \ - -user nominatim \ - -password nominatim \ - -data-dir "$PHOTON_DATA" \ - -languages en,nl,de,fr +wget -O "${PHOTON_DATA}/photon-db.tar.bz2" "$PHOTON_URL" -echo "Geocoding index built. Photon is ready to serve." +echo "=== Extracting Photon index ===" +tar -xjf "${PHOTON_DATA}/photon-db.tar.bz2" -C "$PHOTON_DATA" +rm "${PHOTON_DATA}/photon-db.tar.bz2" + +echo "Photon index ready at $PHOTON_DATA" +echo "Mount $PHOTON_DATA/photon_data into the photon container as /photon/photon_data" diff --git a/backend/scripts/05_import_routing.sh b/backend/scripts/05_import_routing.sh index 677847c..dff0ef7 100755 --- a/backend/scripts/05_import_routing.sh +++ b/backend/scripts/05_import_routing.sh @@ -1,31 +1,38 @@ #!/bin/bash # scripts/05_import_routing.sh +# Preprocess OSM data for OSRM routing. +# OSRM tools (osrm-extract, osrm-partition, osrm-customize) only exist inside +# the osrm/osrm-backend Docker image, so we run one-off containers here. 
-PBF_FILE="/data/osm/region.osm.pbf" +set -euo pipefail + +PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}" OSRM_DATA="/data/osrm" +NETWORK="maps-backend_maps-net" -# Process each profile: driving, walking, cycling for PROFILE in car foot bicycle; do PROFILE_DIR="${OSRM_DATA}/${PROFILE}" mkdir -p "$PROFILE_DIR" cp "$PBF_FILE" "${PROFILE_DIR}/region.osm.pbf" - # Step 1: Extract — parse the PBF and produce an .osrm file - # Uses the appropriate profile from OSRM's bundled profiles - osrm-extract \ - --profile /opt/osrm-profiles/${PROFILE}.lua \ - --threads 4 \ - "${PROFILE_DIR}/region.osm.pbf" + echo "=== Processing OSRM profile: $PROFILE ===" - # Step 2: Partition — create a recursive multi-level partition - osrm-partition \ - "${PROFILE_DIR}/region.osrm" + podman run --rm \ + -v "${PROFILE_DIR}:/data" \ + docker.io/osrm/osrm-backend:latest \ + osrm-extract -p "/opt/${PROFILE}.lua" /data/region.osm.pbf - # Step 3: Customize — compute edge weights for the partition - osrm-customize \ - "${PROFILE_DIR}/region.osrm" + podman run --rm \ + -v "${PROFILE_DIR}:/data" \ + docker.io/osrm/osrm-backend:latest \ + osrm-partition /data/region.osrm - echo "OSRM ${PROFILE} profile ready." + podman run --rm \ + -v "${PROFILE_DIR}:/data" \ + docker.io/osrm/osrm-backend:latest \ + osrm-customize /data/region.osrm + + echo "OSRM $PROFILE profile ready at $PROFILE_DIR" done echo "All OSRM profiles processed." diff --git a/backend/scripts/06_build_offline_packages.sh b/backend/scripts/06_build_offline_packages.sh index 34085ed..203d1e2 100755 --- a/backend/scripts/06_build_offline_packages.sh +++ b/backend/scripts/06_build_offline_packages.sh @@ -1,72 +1,44 @@ #!/bin/bash # scripts/06_build_offline_packages.sh +# Register a region in the offline_regions table. +# Actual tile/routing/POI files are served directly from their data directories; +# this script just records metadata so the API can list available regions. 
-PG_CONN="postgresql://maps:maps@postgres:5432/maps" -PACKAGES_DIR="/data/offline_packages" -REGION_ID="amsterdam" -BBOX="4.7288,52.2783,5.0796,52.4311" # minLon,minLat,maxLon,maxLat +set -euo pipefail -mkdir -p "${PACKAGES_DIR}/${REGION_ID}" +PG_CONN="${PG_CONN:-postgres://maps:maps@postgres:5432/maps}" +REGION_ID="${REGION_ID:-amsterdam}" +REGION_NAME="${REGION_NAME:-Amsterdam}" +BBOX="${BBOX:-4.7288,52.2783,5.0796,52.4311}" # minLon,minLat,maxLon,maxLat -# --- Tiles: extract MBTiles for the bounding box --- -# Use martin-cp (Martin's CLI tool) to export tiles from PostGIS to MBTiles -martin-cp \ - --output-file "${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles" \ - --mbtiles-type flat \ - --bbox "$BBOX" \ - --min-zoom 0 \ - --max-zoom 16 \ - --source openmaptiles \ - --connect "$PG_CONN" +IFS=',' read -r MIN_LON MIN_LAT MAX_LON MAX_LAT <<< "$BBOX" -# --- POIs: export to SQLite with FTS5 index --- -# Custom Rust tool or Python script that queries PostGIS and writes SQLite -/app/tools/export_pois_sqlite \ - --bbox "$BBOX" \ - --pg-conn "$PG_CONN" \ - --output "${PACKAGES_DIR}/${REGION_ID}/pois.db" - -# --- Routing: tar the OSRM files per profile --- -for PROFILE in car foot bicycle; do - tar -cf "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar" \ - -C "/data/osrm/${PROFILE}" \ - region.osrm region.osrm.cell_metrics region.osrm.cells \ - region.osrm.datasource_names region.osrm.ebg region.osrm.ebg_nodes \ - region.osrm.edges region.osrm.fileIndex region.osrm.geometry \ - region.osrm.icd region.osrm.maneuver_overrides \ - region.osrm.mldgr region.osrm.names region.osrm.nbg_nodes \ - region.osrm.partition region.osrm.properties \ - region.osrm.ramIndex region.osrm.timestamp \ - region.osrm.tld region.osrm.tls region.osrm.turn_duration_penalties \ - region.osrm.turn_penalties_index region.osrm.turn_weight_penalties -done - -# --- Update offline_regions table with file sizes --- -TILES_SIZE=$(stat -f%z "${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles" 2>/dev/null || stat -c%s 
"${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles") -ROUTING_SIZE=0 -for PROFILE in car foot bicycle; do - SIZE=$(stat -f%z "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar" 2>/dev/null || stat -c%s "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar") - ROUTING_SIZE=$((ROUTING_SIZE + SIZE)) -done -POIS_SIZE=$(stat -f%z "${PACKAGES_DIR}/${REGION_ID}/pois.db" 2>/dev/null || stat -c%s "${PACKAGES_DIR}/${REGION_ID}/pois.db") +echo "=== Registering offline region: $REGION_NAME ($REGION_ID) ===" psql "$PG_CONN" < >(tee -a "$LOGFILE") 2>&1 - -echo "=== OSM data update started at $(date -u) ===" +echo "=== Maps data import started at $(date -u) ===" # Step 1: Download latest PBF /app/scripts/01_download.sh -# Step 2: Import tile data +# Step 2: Import tile data into PostGIS (osm2pgsql default schema) /app/scripts/02_import_tiles.sh -# Step 3: Import POI data +# Step 3: Import POI data into PostGIS (pois table) /app/scripts/03_import_pois.sh -# Step 4: Update geocoding index +# Step 4: Download Photon geocoding index /app/scripts/04_import_geocoding.sh -# Step 5: Rebuild OSRM routing graphs +# Step 5: Preprocess OSRM routing graphs (runs osrm containers internally) /app/scripts/05_import_routing.sh -# Step 6: Rebuild offline packages +# Step 6: Register offline regions in the database /app/scripts/06_build_offline_packages.sh -# Step 7: Flush tile cache in Redis (tiles have changed) -redis-cli -h redis FLUSHDB - -# Step 8: Restart services to pick up new data -docker compose restart martin osrm-driving osrm-walking osrm-cycling - -echo "=== OSM data update completed at $(date -u) ===" +echo "=== Maps data import completed at $(date -u) ===" +echo "" +echo "Next steps:" +echo " - Restart martin to pick up new tile data: podman compose restart martin" +echo " - Restart osrm services with new data: podman compose restart osrm-driving osrm-walking osrm-cycling" +echo " - Mount /data/photon/photon_data into the photon container and restart it"