fix install errors

This commit is contained in:
Shautvast 2026-03-30 11:01:20 +02:00
parent e2a8a00b1d
commit 5ebcee21fe
8 changed files with 127 additions and 136 deletions

View file

@@ -43,6 +43,7 @@ services:
POSTGRES_DB: maps
volumes:
- maps-pgdata:/var/lib/postgresql/data
- ./initdb:/docker-entrypoint-initdb.d
redis:
image: docker.io/redis:7-alpine

View file

@@ -0,0 +1,2 @@
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS hstore;

View file

@@ -1,27 +1,22 @@
#!/bin/bash
# scripts/02_import_tiles.sh
# Import OSM data into PostGIS using osm2pgsql's default schema.
# Martin serves vector tiles directly from these tables.
PBF_FILE="/data/osm/region.osm.pbf"
PG_CONN="postgresql://maps:maps@postgres:5432/maps"
set -euo pipefail
# Import OSM data into PostGIS using openmaptiles schema
# This creates the tables that Martin reads for tile generation
cd /opt/openmaptiles
PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}"
PG_CONN="${PG_CONN:-postgres://maps:maps@postgres:5432/maps}"
echo "=== Importing tile data from $PBF_FILE ==="
# osm2pgsql import with openmaptiles mapping
osm2pgsql \
--create \
--slim \
--database "$PG_CONN" \
--style openmaptiles.style \
--tag-transform-script lua/tagtransform.lua \
--number-processes 4 \
--cache 4096 \
--flat-nodes /data/osm/nodes.cache \
--hstore \
--number-processes 2 \
--cache 512 \
"$PBF_FILE"
# Run openmaptiles SQL post-processing to create materialized views
# that Martin serves as tile layers
psql "$PG_CONN" -f build/openmaptiles.sql
echo "Tile data import complete. Martin will serve tiles from PostGIS."
echo "Tile data import complete. Tables: planet_osm_point, planet_osm_line, planet_osm_polygon, planet_osm_roads"

View file

@@ -1,21 +1,43 @@
#!/bin/bash
# scripts/03_import_pois.sh
# Create the pois table and import POI data using osm2pgsql flex output.
PBF_FILE="/data/osm/region.osm.pbf"
PG_CONN="postgresql://maps:maps@postgres:5432/maps"
set -euo pipefail
# Run the initial migration to create the pois table
psql "$PG_CONN" -f /app/migrations/001_create_pois.sql
PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}"
PG_CONN="${PG_CONN:-postgres://maps:maps@postgres:5432/maps}"
echo "=== Creating pois table ==="
psql "$PG_CONN" <<'SQL'
CREATE TABLE IF NOT EXISTS pois (
osm_id BIGINT NOT NULL,
osm_type CHAR(1) NOT NULL,
name TEXT,
category TEXT,
geometry GEOMETRY(Point, 4326) NOT NULL,
address JSONB,
tags JSONB,
opening_hours TEXT,
phone TEXT,
website TEXT,
wheelchair TEXT,
PRIMARY KEY (osm_type, osm_id)
);
CREATE INDEX IF NOT EXISTS pois_geometry_idx ON pois USING GIST (geometry);
CREATE INDEX IF NOT EXISTS pois_category_idx ON pois (category);
SQL
echo "=== Importing POIs from $PBF_FILE ==="
# Import POIs using osm2pgsql with a custom Lua transform
osm2pgsql \
--create \
--output=flex \
--style /app/scripts/poi_flex.lua \
--style /app/scripts/pois.lua \
--database "$PG_CONN" \
--cache 2048 \
--number-processes 4 \
--flat-nodes /data/osm/nodes.cache \
--cache 512 \
--number-processes 2 \
"$PBF_FILE"
echo "POI import complete."

View file

@@ -1,31 +1,27 @@
#!/bin/bash
# scripts/04_import_geocoding.sh
# Download Photon's pre-built country geocoding index.
# This avoids needing to run Nominatim (heavy, needs Java).
# Photon provides ready-to-use country extracts.
set -euo pipefail
PBF_FILE="/data/osm/region.osm.pbf"
NOMINATIM_DATA="/data/nominatim"
PHOTON_DATA="/data/photon"
# Change COUNTRY_CODE to match your region (nl=Netherlands, de=Germany, fr=France, etc.)
COUNTRY_CODE="${PHOTON_COUNTRY_CODE:-nl}"
# --- Nominatim Import ---
# Nominatim builds a PostgreSQL database with geocoding data.
# Photon reads from this database to build its Elasticsearch index.
mkdir -p "$PHOTON_DATA"
nominatim import \
--osm-file "$PBF_FILE" \
--project-dir "$NOMINATIM_DATA" \
--threads 4
echo "=== Downloading Photon index for country: $COUNTRY_CODE ==="
# --- Photon Import ---
# Photon reads the Nominatim database and builds an Elasticsearch index.
# This index is what Photon uses to serve search queries.
# Photon country extracts from GraphHopper (maintained by komoot/photon project)
PHOTON_URL="https://download1.graphhopper.com/public/extracts/by-country-code/${COUNTRY_CODE}/photon-db-${COUNTRY_CODE}-latest.tar.bz2"
java -jar /opt/photon/photon.jar \
-nominatim-import \
-host localhost \
-port 5432 \
-database nominatim \
-user nominatim \
-password nominatim \
-data-dir "$PHOTON_DATA" \
-languages en,nl,de,fr
wget -O "${PHOTON_DATA}/photon-db.tar.bz2" "$PHOTON_URL"
echo "Geocoding index built. Photon is ready to serve."
echo "=== Extracting Photon index ==="
tar -xjf "${PHOTON_DATA}/photon-db.tar.bz2" -C "$PHOTON_DATA"
rm "${PHOTON_DATA}/photon-db.tar.bz2"
echo "Photon index ready at $PHOTON_DATA"
echo "Mount $PHOTON_DATA/photon_data into the photon container as /photon/photon_data"

View file

@@ -1,31 +1,38 @@
#!/bin/bash
# scripts/05_import_routing.sh
# Preprocess OSM data for OSRM routing.
# OSRM tools (osrm-extract, osrm-partition, osrm-customize) only exist inside
# the osrm/osrm-backend Docker image, so we run one-off containers here.
PBF_FILE="/data/osm/region.osm.pbf"
set -euo pipefail
PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}"
OSRM_DATA="/data/osrm"
NETWORK="maps-backend_maps-net"
# Process each profile: driving, walking, cycling
for PROFILE in car foot bicycle; do
PROFILE_DIR="${OSRM_DATA}/${PROFILE}"
mkdir -p "$PROFILE_DIR"
cp "$PBF_FILE" "${PROFILE_DIR}/region.osm.pbf"
# Step 1: Extract — parse the PBF and produce an .osrm file
# Uses the appropriate profile from OSRM's bundled profiles
osrm-extract \
--profile /opt/osrm-profiles/${PROFILE}.lua \
--threads 4 \
"${PROFILE_DIR}/region.osm.pbf"
echo "=== Processing OSRM profile: $PROFILE ==="
# Step 2: Partition — create a recursive multi-level partition
osrm-partition \
"${PROFILE_DIR}/region.osrm"
podman run --rm \
-v "${PROFILE_DIR}:/data" \
docker.io/osrm/osrm-backend:latest \
osrm-extract -p "/opt/${PROFILE}.lua" /data/region.osm.pbf
# Step 3: Customize — compute edge weights for the partition
osrm-customize \
"${PROFILE_DIR}/region.osrm"
podman run --rm \
-v "${PROFILE_DIR}:/data" \
docker.io/osrm/osrm-backend:latest \
osrm-partition /data/region.osrm
echo "OSRM ${PROFILE} profile ready."
podman run --rm \
-v "${PROFILE_DIR}:/data" \
docker.io/osrm/osrm-backend:latest \
osrm-customize /data/region.osrm
echo "OSRM $PROFILE profile ready at $PROFILE_DIR"
done
echo "All OSRM profiles processed."

View file

@@ -1,72 +1,44 @@
#!/bin/bash
# scripts/06_build_offline_packages.sh
# Register a region in the offline_regions table.
# Actual tile/routing/POI files are served directly from their data directories;
# this script just records metadata so the API can list available regions.
PG_CONN="postgresql://maps:maps@postgres:5432/maps"
PACKAGES_DIR="/data/offline_packages"
REGION_ID="amsterdam"
BBOX="4.7288,52.2783,5.0796,52.4311" # minLon,minLat,maxLon,maxLat
set -euo pipefail
mkdir -p "${PACKAGES_DIR}/${REGION_ID}"
PG_CONN="${PG_CONN:-postgres://maps:maps@postgres:5432/maps}"
REGION_ID="${REGION_ID:-amsterdam}"
REGION_NAME="${REGION_NAME:-Amsterdam}"
BBOX="${BBOX:-4.7288,52.2783,5.0796,52.4311}" # minLon,minLat,maxLon,maxLat
# --- Tiles: extract MBTiles for the bounding box ---
# Use martin-cp (Martin's CLI tool) to export tiles from PostGIS to MBTiles
martin-cp \
--output-file "${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles" \
--mbtiles-type flat \
--bbox "$BBOX" \
--min-zoom 0 \
--max-zoom 16 \
--source openmaptiles \
--connect "$PG_CONN"
IFS=',' read -r MIN_LON MIN_LAT MAX_LON MAX_LAT <<< "$BBOX"
# --- POIs: export to SQLite with FTS5 index ---
# Custom Rust tool or Python script that queries PostGIS and writes SQLite
/app/tools/export_pois_sqlite \
--bbox "$BBOX" \
--pg-conn "$PG_CONN" \
--output "${PACKAGES_DIR}/${REGION_ID}/pois.db"
# --- Routing: tar the OSRM files per profile ---
for PROFILE in car foot bicycle; do
tar -cf "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar" \
-C "/data/osrm/${PROFILE}" \
region.osrm region.osrm.cell_metrics region.osrm.cells \
region.osrm.datasource_names region.osrm.ebg region.osrm.ebg_nodes \
region.osrm.edges region.osrm.fileIndex region.osrm.geometry \
region.osrm.icd region.osrm.maneuver_overrides \
region.osrm.mldgr region.osrm.names region.osrm.nbg_nodes \
region.osrm.partition region.osrm.properties \
region.osrm.ramIndex region.osrm.timestamp \
region.osrm.tld region.osrm.tls region.osrm.turn_duration_penalties \
region.osrm.turn_penalties_index region.osrm.turn_weight_penalties
done
# --- Update offline_regions table with file sizes ---
TILES_SIZE=$(stat -f%z "${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles" 2>/dev/null || stat -c%s "${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles")
ROUTING_SIZE=0
for PROFILE in car foot bicycle; do
SIZE=$(stat -f%z "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar" 2>/dev/null || stat -c%s "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar")
ROUTING_SIZE=$((ROUTING_SIZE + SIZE))
done
POIS_SIZE=$(stat -f%z "${PACKAGES_DIR}/${REGION_ID}/pois.db" 2>/dev/null || stat -c%s "${PACKAGES_DIR}/${REGION_ID}/pois.db")
echo "=== Registering offline region: $REGION_NAME ($REGION_ID) ==="
psql "$PG_CONN" <<SQL
INSERT INTO offline_regions (id, name, description, bbox, tiles_size_bytes, routing_size_bytes, pois_size_bytes, last_updated)
CREATE TABLE IF NOT EXISTS offline_regions (
id TEXT PRIMARY KEY,
name TEXT NOT NULL,
description TEXT,
bbox GEOMETRY(Polygon, 4326),
tiles_size_bytes BIGINT NOT NULL DEFAULT 0,
routing_size_bytes BIGINT NOT NULL DEFAULT 0,
pois_size_bytes BIGINT NOT NULL DEFAULT 0,
last_updated TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
INSERT INTO offline_regions (id, name, bbox, tiles_size_bytes, routing_size_bytes, pois_size_bytes, last_updated)
VALUES (
'${REGION_ID}',
'Amsterdam',
'Amsterdam metropolitan area',
ST_MakeEnvelope(4.7288, 52.2783, 5.0796, 52.4311, 4326),
${TILES_SIZE},
${ROUTING_SIZE},
${POIS_SIZE},
'${REGION_NAME}',
ST_MakeEnvelope(${MIN_LON}, ${MIN_LAT}, ${MAX_LON}, ${MAX_LAT}, 4326),
0,
0,
0,
NOW()
)
ON CONFLICT (id) DO UPDATE SET
tiles_size_bytes = EXCLUDED.tiles_size_bytes,
routing_size_bytes = EXCLUDED.routing_size_bytes,
pois_size_bytes = EXCLUDED.pois_size_bytes,
last_updated = EXCLUDED.last_updated;
last_updated = NOW();
SQL
echo "Offline package for ${REGION_ID} built."
echo "Region $REGION_ID registered."

View file

@@ -1,36 +1,32 @@
#!/bin/bash
# scripts/update_all.sh
# Full weekly data update pipeline
# Full data import pipeline. Run steps individually if you only need to refresh one.
set -euo pipefail
LOGFILE="/var/log/maps-update.log"
exec > >(tee -a "$LOGFILE") 2>&1
echo "=== OSM data update started at $(date -u) ==="
echo "=== Maps data import started at $(date -u) ==="
# Step 1: Download latest PBF
/app/scripts/01_download.sh
# Step 2: Import tile data
# Step 2: Import tile data into PostGIS (osm2pgsql default schema)
/app/scripts/02_import_tiles.sh
# Step 3: Import POI data
# Step 3: Import POI data into PostGIS (pois table)
/app/scripts/03_import_pois.sh
# Step 4: Update geocoding index
# Step 4: Download Photon geocoding index
/app/scripts/04_import_geocoding.sh
# Step 5: Rebuild OSRM routing graphs
# Step 5: Preprocess OSRM routing graphs (runs osrm containers internally)
/app/scripts/05_import_routing.sh
# Step 6: Rebuild offline packages
# Step 6: Register offline regions in the database
/app/scripts/06_build_offline_packages.sh
# Step 7: Flush tile cache in Redis (tiles have changed)
redis-cli -h redis FLUSHDB
# Step 8: Restart services to pick up new data
docker compose restart martin osrm-driving osrm-walking osrm-cycling
echo "=== OSM data update completed at $(date -u) ==="
echo "=== Maps data import completed at $(date -u) ==="
echo ""
echo "Next steps:"
echo " - Restart martin to pick up new tile data: podman compose restart martin"
echo " - Restart osrm services with new data: podman compose restart osrm-driving osrm-walking osrm-cycling"
echo " - Mount /data/photon/photon_data into the photon container and restart it"