install scripts
This commit is contained in:
parent
b76708385b
commit
e8e72ba075
10 changed files with 421 additions and 18 deletions
|
|
@ -91,24 +91,22 @@ services:
|
|||
ports:
|
||||
- "5002:5000"
|
||||
|
||||
importer:
|
||||
image: docker.io/iboates/osm2pgsql:latest
|
||||
networks:
|
||||
- maps-net
|
||||
volumes:
|
||||
- ../data:/data
|
||||
environment:
|
||||
DATABASE_URL: "postgres://maps:maps@postgres:5432/maps"
|
||||
command: >
|
||||
osm2pgsql
|
||||
--database postgres://maps:maps@postgres:5432/maps
|
||||
--create --slim -G --hstore
|
||||
/data/region.osm.pbf
|
||||
depends_on:
|
||||
- postgres
|
||||
profiles:
|
||||
- import
|
||||
restart: "no"
|
||||
importer:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: importer.Dockerfile
|
||||
networks:
|
||||
- maps-net
|
||||
volumes:
|
||||
- ../data:/data
|
||||
environment:
|
||||
PG_CONN: "postgres://maps:maps@postgres:5432/maps"
|
||||
PBF_FILE: "/data/region.osm.pbf"
|
||||
depends_on:
|
||||
- postgres
|
||||
profiles:
|
||||
- import
|
||||
restart: "no"
|
||||
|
||||
volumes:
|
||||
maps-pgdata:
|
||||
|
|
|
|||
18
backend/importer.Dockerfile
Normal file
18
backend/importer.Dockerfile
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# Importer toolchain image: osm2pgsql + psql client + download/cache
# helpers used by the staged import scripts in /app/scripts.
# FIX: use the multi-arch `debian` manifest instead of `arm64v8/debian`,
# which pinned the image to a single platform and broke amd64 builds;
# the multi-arch manifest resolves to arm64 on arm64 hosts anyway.
FROM docker.io/debian:bookworm-slim

RUN apt-get update && apt-get install -y --no-install-recommends \
    osm2pgsql \
    postgresql-client \
    wget \
    git \
    lua5.3 \
    curl \
    ca-certificates \
    redis-tools \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app
COPY scripts/ /app/scripts/
RUN chmod +x /app/scripts/*.sh

# Scripts are invoked explicitly (e.g. `docker compose run importer
# /app/scripts/update_all.sh`), so the entrypoint is just a shell.
ENTRYPOINT ["/bin/bash"]
|
||||
17
backend/scripts/01_download.sh
Executable file
17
backend/scripts/01_download.sh
Executable file
|
|
@ -0,0 +1,17 @@
|
|||
#!/bin/bash
# scripts/01_download.sh
# Download (or refresh) the Geofabrik PBF extract plus its replication
# state file, and expose the extract under the stable name
# region.osm.pbf that the downstream import scripts expect.
set -euo pipefail

REGION="europe/netherlands"
DATA_DIR="/data/osm"
GEOFABRIK_BASE="https://download.geofabrik.de"

mkdir -p "$DATA_DIR"

# BUG FIX: `wget -N` (timestamping) cannot be combined with `-O`; wget
# aborts with "Can't timestamp and not clobber old files at the same
# time." Download with -N into DATA_DIR under the upstream filename so
# timestamping works, then copy to the stable local name.
UPSTREAM_PBF="$(basename "$REGION")-latest.osm.pbf"
wget -N -P "$DATA_DIR" "${GEOFABRIK_BASE}/${REGION}-latest.osm.pbf"
cp -f "${DATA_DIR}/${UPSTREAM_PBF}" "${DATA_DIR}/region.osm.pbf"

# State file for future diff updates; tiny, so always re-download it
# (plain -O, without the conflicting -N).
wget "${GEOFABRIK_BASE}/${REGION}-updates/state.txt" \
    -O "${DATA_DIR}/state.txt"
|
||||
32
backend/scripts/02_import_tiles.sh
Executable file
32
backend/scripts/02_import_tiles.sh
Executable file
|
|
@ -0,0 +1,32 @@
|
|||
#!/bin/bash
# scripts/02_import_tiles.sh
# Import OSM data into PostGIS using the openmaptiles schema; Martin
# serves vector tiles from the resulting tables.
# FIX: added `set -euo pipefail` so a failed osm2pgsql run does not fall
# through to the SQL post-processing step.
set -euo pipefail

# Paths/credentials can be overridden by the container environment
# (docker-compose sets PG_CONN/PBF_FILE); defaults match the original
# hard-coded values.
PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}"
PG_CONN="${PG_CONN:-postgresql://maps:maps@postgres:5432/maps}"

# Clone the openmaptiles toolchain (once).
if [ ! -d "/opt/openmaptiles" ]; then
    git clone https://github.com/openmaptiles/openmaptiles.git /opt/openmaptiles
fi

cd /opt/openmaptiles

# osm2pgsql import with the openmaptiles mapping; this creates the
# tables that Martin reads for tile generation.
osm2pgsql \
    --create \
    --slim \
    --database "$PG_CONN" \
    --style openmaptiles.style \
    --tag-transform-script lua/tagtransform.lua \
    --number-processes 4 \
    --cache 4096 \
    --flat-nodes /data/osm/nodes.cache \
    "$PBF_FILE"

# openmaptiles SQL post-processing: materialized views that Martin
# serves as tile layers.
# NOTE(review): build/openmaptiles.sql only exists after the toolchain's
# build step has been run — confirm the repo ships or generates it here.
psql "$PG_CONN" -f build/openmaptiles.sql

echo "Tile data import complete. Martin will serve tiles from PostGIS."
|
||||
21
backend/scripts/03_import_pois.sh
Executable file
21
backend/scripts/03_import_pois.sh
Executable file
|
|
@ -0,0 +1,21 @@
|
|||
#!/bin/bash
# scripts/03_import_pois.sh
# Create the pois table (migration) and populate it with osm2pgsql's
# flex output using the custom Lua transform.
# FIX: added `set -euo pipefail`; a failed migration previously did not
# stop the import.
set -euo pipefail

# Overridable via the container environment; defaults match the
# original hard-coded values.
PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}"
PG_CONN="${PG_CONN:-postgresql://maps:maps@postgres:5432/maps}"

# Run the initial migration to create the pois table.
psql "$PG_CONN" -f /app/migrations/001_create_pois.sql

# Import POIs using the flex output.
# BUG FIX: the committed transform is scripts/pois.lua (copied into the
# image as /app/scripts/pois.lua); the old path /app/scripts/poi_flex.lua
# does not exist and osm2pgsql would abort.
osm2pgsql \
    --create \
    --output=flex \
    --style /app/scripts/pois.lua \
    --database "$PG_CONN" \
    --cache 2048 \
    --number-processes 4 \
    --flat-nodes /data/osm/nodes.cache \
    "$PBF_FILE"

echo "POI import complete."
|
||||
31
backend/scripts/04_import_geocoding.sh
Executable file
31
backend/scripts/04_import_geocoding.sh
Executable file
|
|
@ -0,0 +1,31 @@
|
|||
#!/bin/bash
# scripts/04_import_geocoding.sh
# Build the geocoding stack: Nominatim imports the PBF into PostgreSQL,
# then Photon indexes the Nominatim database for search.
# FIX: added `set -euo pipefail` — previously a failed Nominatim import
# still let Photon (re)index stale or missing data.
set -euo pipefail

PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}"
NOMINATIM_DATA="/data/nominatim"
PHOTON_DATA="/data/photon"

# --- Nominatim Import ---
# Nominatim builds a PostgreSQL database with geocoding data; Photon
# reads that database to build its search index.
nominatim import \
    --osm-file "$PBF_FILE" \
    --project-dir "$NOMINATIM_DATA" \
    --threads 4

# --- Photon Import ---
# Photon reads the Nominatim database and builds the index it serves
# search queries from.
# NOTE(review): `-host localhost` assumes the Nominatim DB runs in this
# container; the other scripts talk to host "postgres" — confirm which
# database Photon should read.
java -jar /opt/photon/photon.jar \
    -nominatim-import \
    -host localhost \
    -port 5432 \
    -database nominatim \
    -user nominatim \
    -password nominatim \
    -data-dir "$PHOTON_DATA" \
    -languages en,nl,de,fr

echo "Geocoding index built. Photon is ready to serve."
|
||||
31
backend/scripts/05_import_routing.sh
Executable file
31
backend/scripts/05_import_routing.sh
Executable file
|
|
@ -0,0 +1,31 @@
|
|||
#!/bin/bash
# scripts/05_import_routing.sh
# Build OSRM routing graphs (extract -> partition -> customize, the MLD
# pipeline) for each travel profile.
# FIX: added `set -euo pipefail` so a failed extract does not cascade
# into partition/customize on stale files.
set -euo pipefail

PBF_FILE="${PBF_FILE:-/data/osm/region.osm.pbf}"
OSRM_DATA="/data/osrm"
THREADS="${OSRM_THREADS:-4}"   # overridable; default matches original

# Process each profile: driving, walking, cycling.
for PROFILE in car foot bicycle; do
    PROFILE_DIR="${OSRM_DATA}/${PROFILE}"
    mkdir -p "$PROFILE_DIR"
    # osrm-extract writes its outputs next to the input file, so each
    # profile works on its own copy of the PBF.
    cp "$PBF_FILE" "${PROFILE_DIR}/region.osm.pbf"

    # Step 1: Extract — parse the PBF and produce .osrm files using the
    # matching profile from OSRM's bundled profiles.
    osrm-extract \
        --profile "/opt/osrm-profiles/${PROFILE}.lua" \
        --threads "$THREADS" \
        "${PROFILE_DIR}/region.osm.pbf"

    # Step 2: Partition — create a recursive multi-level partition.
    osrm-partition "${PROFILE_DIR}/region.osrm"

    # Step 3: Customize — compute edge weights for the partition.
    osrm-customize "${PROFILE_DIR}/region.osrm"

    echo "OSRM ${PROFILE} profile ready."
done

echo "All OSRM profiles processed."
|
||||
72
backend/scripts/06_build_offline_packages.sh
Executable file
72
backend/scripts/06_build_offline_packages.sh
Executable file
|
|
@ -0,0 +1,72 @@
|
|||
#!/bin/bash
# scripts/06_build_offline_packages.sh
# Assemble the offline package for one region — MBTiles extract, POI
# SQLite database, per-profile OSRM tarballs — then record the package
# sizes in the offline_regions table.
# FIX: added `set -euo pipefail`; extracted the thrice-duplicated
# BSD/GNU `stat` fallback into a file_size() helper.
set -euo pipefail

PG_CONN="postgresql://maps:maps@postgres:5432/maps"
PACKAGES_DIR="/data/offline_packages"
REGION_ID="amsterdam"
BBOX="4.7288,52.2783,5.0796,52.4311" # minLon,minLat,maxLon,maxLat

# Portable file size in bytes: BSD `stat -f%z` first, GNU `stat -c%s`
# as fallback.
file_size() {
    stat -f%z "$1" 2>/dev/null || stat -c%s "$1"
}

mkdir -p "${PACKAGES_DIR}/${REGION_ID}"

# --- Tiles: extract MBTiles for the bounding box ---
# martin-cp (Martin's CLI tool) exports tiles from PostGIS to MBTiles.
martin-cp \
    --output-file "${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles" \
    --mbtiles-type flat \
    --bbox "$BBOX" \
    --min-zoom 0 \
    --max-zoom 16 \
    --source openmaptiles \
    --connect "$PG_CONN"

# --- POIs: export to SQLite with FTS5 index ---
# Custom tool that queries PostGIS and writes SQLite.
/app/tools/export_pois_sqlite \
    --bbox "$BBOX" \
    --pg-conn "$PG_CONN" \
    --output "${PACKAGES_DIR}/${REGION_ID}/pois.db"

# --- Routing: tar the OSRM files per profile ---
for PROFILE in car foot bicycle; do
    tar -cf "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar" \
        -C "/data/osrm/${PROFILE}" \
        region.osrm region.osrm.cell_metrics region.osrm.cells \
        region.osrm.datasource_names region.osrm.ebg region.osrm.ebg_nodes \
        region.osrm.edges region.osrm.fileIndex region.osrm.geometry \
        region.osrm.icd region.osrm.maneuver_overrides \
        region.osrm.mldgr region.osrm.names region.osrm.nbg_nodes \
        region.osrm.partition region.osrm.properties \
        region.osrm.ramIndex region.osrm.timestamp \
        region.osrm.tld region.osrm.tls region.osrm.turn_duration_penalties \
        region.osrm.turn_penalties_index region.osrm.turn_weight_penalties
done

# --- Update offline_regions table with file sizes ---
TILES_SIZE=$(file_size "${PACKAGES_DIR}/${REGION_ID}/tiles.mbtiles")
ROUTING_SIZE=0
for PROFILE in car foot bicycle; do
    SIZE=$(file_size "${PACKAGES_DIR}/${REGION_ID}/routing-${PROFILE}.tar")
    ROUTING_SIZE=$((ROUTING_SIZE + SIZE))
done
POIS_SIZE=$(file_size "${PACKAGES_DIR}/${REGION_ID}/pois.db")

psql "$PG_CONN" <<SQL
INSERT INTO offline_regions (id, name, description, bbox, tiles_size_bytes, routing_size_bytes, pois_size_bytes, last_updated)
VALUES (
    '${REGION_ID}',
    'Amsterdam',
    'Amsterdam metropolitan area',
    ST_MakeEnvelope(4.7288, 52.2783, 5.0796, 52.4311, 4326),
    ${TILES_SIZE},
    ${ROUTING_SIZE},
    ${POIS_SIZE},
    NOW()
)
ON CONFLICT (id) DO UPDATE SET
    tiles_size_bytes = EXCLUDED.tiles_size_bytes,
    routing_size_bytes = EXCLUDED.routing_size_bytes,
    pois_size_bytes = EXCLUDED.pois_size_bytes,
    last_updated = EXCLUDED.last_updated;
SQL

echo "Offline package for ${REGION_ID} built."
|
||||
147
backend/scripts/pois.lua
Normal file
147
backend/scripts/pois.lua
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
-- scripts/poi_flex.lua
-- osm2pgsql flex-output configuration: extracts named points of
-- interest into a `pois` table with one point geometry per row.

-- Destination table. Ways and relations are collapsed to centroids by
-- the processing callbacks, so the geometry column is always a point.
local pois = osm2pgsql.define_table({
    name = 'pois',
    ids = { type = 'any', type_column = 'osm_type', id_column = 'osm_id' },
    columns = {
        { column = 'name', type = 'text', not_null = true },
        { column = 'category', type = 'text', not_null = true },
        { column = 'geometry', type = 'point', projection = 4326, not_null = true },
        { column = 'address', type = 'jsonb' },
        { column = 'tags', type = 'jsonb' },
        { column = 'opening_hours', type = 'text' },
        { column = 'phone', type = 'text' },
        { column = 'website', type = 'text' },
        { column = 'wheelchair', type = 'text' },
    },
})

-- Normalized category for each recognized OSM tag value; values are
-- looked up across the amenity/shop/tourism/leisure keys (see
-- get_category), so the groupings below are documentation only.
local category_map = {
    -- amenity values
    restaurant = 'restaurant',
    fast_food = 'restaurant',
    cafe = 'cafe',
    pharmacy = 'pharmacy',
    hospital = 'hospital',
    clinic = 'hospital',
    fuel = 'fuel',
    parking = 'parking',
    atm = 'atm',
    bank = 'atm',
    bus_station = 'public_transport',
    -- NOTE(review): hotel is conventionally tourism=hotel, not an
    -- amenity value — the lookup works either way, but confirm intent.
    hotel = 'hotel',
    -- shop values
    supermarket = 'supermarket',
    convenience = 'shop',
    clothes = 'shop',
    hairdresser = 'shop',
    bakery = 'shop',
    -- tourism values
    attraction = 'tourist_attraction',
    museum = 'tourist_attraction',
    viewpoint = 'tourist_attraction',
    -- leisure values
    park = 'park',
    garden = 'park',
    playground = 'park',
}
|
||||
|
||||
-- Resolve a normalized POI category from an OSM tag set.
-- Checks the amenity, shop, tourism and leisure keys (in that order)
-- against category_map and returns the first mapped category, or nil
-- when none of the values are recognized.
local function get_category(tags)
    local lookup_keys = { 'amenity', 'shop', 'tourism', 'leisure' }
    for i = 1, #lookup_keys do
        local value = tags[lookup_keys[i]]
        if value ~= nil then
            local category = category_map[value]
            if category ~= nil then
                return category
            end
        end
    end
    return nil
end
|
||||
|
||||
-- Collect the structured addr:* tags into a plain table for the jsonb
-- `address` column. Returns nil when no address tags are present so
-- the column stays NULL instead of holding an empty object.
local function build_address(tags)
    local field_for_tag = {
        street = 'addr:street',
        housenumber = 'addr:housenumber',
        postcode = 'addr:postcode',
        city = 'addr:city',
    }
    local addr = {}
    local found = false
    for field, tag in pairs(field_for_tag) do
        local value = tags[tag]
        if value ~= nil then
            addr[field] = value
            found = true
        end
    end
    if found then
        return addr
    end
    return nil
end
|
||||
|
||||
-- Leftover tags for the jsonb `tags` column: everything except tags
-- that already feed a dedicated column and all addr:* tags. Returns
-- nil when nothing remains so the column stays NULL.
local function build_extra_tags(tags)
    -- Keys already promoted to their own columns (or consumed by
    -- build_address / the fallback contact:* lookups).
    local promoted = {
        ['name'] = true, ['amenity'] = true, ['shop'] = true,
        ['tourism'] = true, ['leisure'] = true,
        ['addr:street'] = true, ['addr:housenumber'] = true,
        ['addr:postcode'] = true, ['addr:city'] = true,
        ['opening_hours'] = true, ['phone'] = true,
        ['contact:phone'] = true, ['website'] = true,
        ['contact:website'] = true, ['wheelchair'] = true,
    }
    local extra = {}
    for key, value in pairs(tags) do
        if not promoted[key] and key:match('^addr:') == nil then
            extra[key] = value
        end
    end
    if next(extra) == nil then
        return nil
    end
    return extra
end
|
||||
|
||||
-- Node callback: insert any named node whose tags map to a known
-- category; unnamed or uncategorized nodes are skipped.
function osm2pgsql.process_node(object)
    local tags = object.tags
    if tags.name == nil then
        return
    end
    local category = get_category(tags)
    if category == nil then
        return
    end

    pois:insert({
        name = tags.name,
        category = category,
        geometry = object:as_point(),
        address = build_address(tags),
        tags = build_extra_tags(tags),
        opening_hours = tags.opening_hours,
        phone = tags.phone or tags['contact:phone'],
        website = tags.website or tags['contact:website'],
        wheelchair = tags.wheelchair,
    })
end
|
||||
|
||||
-- Way callback: only closed (area-like) ways with a name and a known
-- category are kept; the polygon is collapsed to its centroid so the
-- point-typed geometry column is satisfied.
function osm2pgsql.process_way(object)
    local tags = object.tags
    if tags.name == nil then
        return
    end
    local category = get_category(tags)
    if category == nil then
        return
    end
    if not object.is_closed then
        return
    end

    pois:insert({
        name = tags.name,
        category = category,
        geometry = object:as_polygon():centroid(),
        address = build_address(tags),
        tags = build_extra_tags(tags),
        opening_hours = tags.opening_hours,
        phone = tags.phone or tags['contact:phone'],
        website = tags.website or tags['contact:website'],
        wheelchair = tags.wheelchair,
    })
end
|
||||
|
||||
-- Relation callback: only multipolygon relations with a name and a
-- known category are kept, collapsed to the multipolygon's centroid.
function osm2pgsql.process_relation(object)
    local tags = object.tags
    if tags.name == nil then
        return
    end
    local category = get_category(tags)
    if category == nil then
        return
    end
    if tags.type ~= 'multipolygon' then
        return
    end

    pois:insert({
        name = tags.name,
        category = category,
        geometry = object:as_multipolygon():centroid(),
        address = build_address(tags),
        tags = build_extra_tags(tags),
        opening_hours = tags.opening_hours,
        phone = tags.phone or tags['contact:phone'],
        website = tags.website or tags['contact:website'],
        wheelchair = tags.wheelchair,
    })
end
|
||||
36
backend/scripts/update_all.sh
Executable file
36
backend/scripts/update_all.sh
Executable file
|
|
@ -0,0 +1,36 @@
|
|||
#!/bin/bash
# scripts/update_all.sh
# Full weekly data update pipeline: refresh the PBF, rebuild every
# derived dataset, then flush the tile cache and restart the serving
# containers.

set -euo pipefail

LOGFILE="/var/log/maps-update.log"
exec > >(tee -a "$LOGFILE") 2>&1

echo "=== OSM data update started at $(date -u) ==="

# Steps 1-6: run the staged import scripts in pipeline order
# (download -> tiles -> POIs -> geocoding -> routing -> offline packages).
for STEP in \
    01_download \
    02_import_tiles \
    03_import_pois \
    04_import_geocoding \
    05_import_routing \
    06_build_offline_packages
do
    "/app/scripts/${STEP}.sh"
done

# Step 7: Flush tile cache in Redis (tiles have changed)
redis-cli -h redis FLUSHDB

# Step 8: Restart services to pick up new data
docker compose restart martin osrm-driving osrm-walking osrm-cycling

echo "=== OSM data update completed at $(date -u) ==="
|
||||
Loading…
Add table
Reference in a new issue