Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add optional port parameter to Elasticsearch management functions #360

Closed
wants to merge 14 commits into from
16 changes: 9 additions & 7 deletions cmd/elastic.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ function elastic_schema_drop(){ compose_run 'schema' node scripts/drop_index "$@
function elastic_schema_create(){ compose_run 'schema' ./bin/create_index; }
function elastic_start(){
  # Ensure the data directory exists before the container mounts it.
  mkdir -p "$DATA_DIR/elasticsearch"
  # Attempt to set proper permissions if running as root; harmless
  # no-op otherwise (errors deliberately suppressed).
  chown "$DOCKER_USER" "$DATA_DIR/elasticsearch" 2>/dev/null || true
  compose_exec up -d elasticsearch
}
Expand All @@ -24,11 +24,11 @@ function elastic_status(){
--output /dev/null \
--silent \
--write-out "%{http_code}" \
"http://${ELASTIC_HOST:-localhost:9200}/_cluster/health?wait_for_status=yellow&timeout=1s" \
"http://${ELASTIC_HOST:-localhost}:${ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=1s" \
|| true;
}

# Same as elastic_status but with a trailing newline; the command
# substitution is quoted so the status code is never word-split (SC2046).
function elastic_status_newline(){ echo "$(elastic_status)"; }
register 'elastic' 'status' 'HTTP status code of the elasticsearch service' elastic_status_newline

Expand All @@ -39,10 +39,10 @@ function elastic_wait(){
i=1
while [[ "$i" -le "$retry_count" ]]; do
if [[ $(elastic_status) -eq 200 ]]; then
echo "Elasticsearch up!"
echo "Elasticsearch up on port ${ELASTIC_PORT:-9200}!"
exit 0
elif [[ $(elastic_status) -eq 408 ]]; then
# 408 indicates the server is up but not yet yellow status
# 408 indicates the server is up but has not reached yellow status yet
printf ":"
else
printf "."
Expand All @@ -58,11 +58,13 @@ function elastic_wait(){

register 'elastic' 'wait' 'wait for elasticsearch to start up' elastic_wait

function elastic_info(){ curl -s "http://${ELASTIC_HOST:-localhost:9200}/"; }
# Function to get Elasticsearch version and build info with an optional port argument
function elastic_info(){ curl -s "http://${ELASTIC_HOST:-localhost}:${ELASTIC_PORT:-9200}/"; }
register 'elastic' 'info' 'display elasticsearch version and build info' elastic_info

# Function to display a summary of document counts per source/layer with optional port
function elastic_stats(){
curl -s "http://${ELASTIC_HOST:-localhost:9200}/pelias/_search?request_cache=true&timeout=10s&pretty=true" \
curl -s "http://${ELASTIC_HOST:-localhost}:${ELASTIC_PORT:-9200}/pelias/_search?request_cache=true&timeout=10s&pretty=true" \
-H 'Content-Type: application/json' \
-d '{
"aggs": {
Expand Down
2 changes: 1 addition & 1 deletion projects/germany/.env
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
COMPOSE_PROJECT_NAME=pelias
COMPOSE_PROJECT_NAME=pelias_germany
DATA_DIR=./data
15 changes: 4 additions & 11 deletions projects/germany/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,10 @@ Please ensure that's all working fine before continuing.

# Run a Build

To run a complete build, execute the following commands:

```bash
pelias compose pull
pelias elastic start
pelias elastic wait
pelias elastic create
pelias download all
pelias prepare all
pelias import all
pelias compose up
To run a complete build, execute:
```
chmod +x generate.sh
./generate.sh
```

# Make an Example Query
Expand Down
39 changes: 38 additions & 1 deletion projects/germany/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,20 +1,27 @@
version: '3'
networks:
default:
driver: bridge
shared-net:
external: true # indicates that this is an externally managed Docker network
services:
libpostal:
image: pelias/libpostal-service
container_name: pelias_libpostal
user: "${DOCKER_USER}"
restart: always
ports: [ "127.0.0.1:4400:4400" ]
networks:
- default
- shared-net
schema:
image: pelias/schema:master
container_name: pelias_schema
user: "${DOCKER_USER}"
volumes:
- "./pelias.json:/code/pelias.json"
networks:
- default
- shared-net
api:
image: pelias/api:master
container_name: pelias_api
Expand All @@ -24,6 +31,9 @@ services:
ports: [ "0.0.0.0:4000:4000" ]
volumes:
- "./pelias.json:/code/pelias.json"
networks:
- default
- shared-net
placeholder:
image: pelias/placeholder:master
container_name: pelias_placeholder
Expand All @@ -35,27 +45,39 @@ services:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
- "./blacklist/:/data/blacklist"
networks:
- default
- shared-net
whosonfirst:
image: pelias/whosonfirst:master
container_name: pelias_whosonfirst
user: "${DOCKER_USER}"
volumes:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
networks:
- default
- shared-net
openstreetmap:
image: pelias/openstreetmap:master
container_name: pelias_openstreetmap
user: "${DOCKER_USER}"
volumes:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
networks:
- default
- shared-net
openaddresses:
image: pelias/openaddresses:master
container_name: pelias_openaddresses
user: "${DOCKER_USER}"
volumes:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
networks:
- default
- shared-net
csv-importer:
image: pelias/csv-importer:master
container_name: pelias_csv_importer
Expand All @@ -64,13 +86,19 @@ services:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
- "./blacklist/:/data/blacklist"
networks:
- default
- shared-net
polylines:
image: pelias/polylines:master
container_name: pelias_polylines
user: "${DOCKER_USER}"
volumes:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
networks:
- default
- shared-net
interpolation:
image: pelias/interpolation:master
container_name: pelias_interpolation
Expand All @@ -81,6 +109,9 @@ services:
volumes:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
networks:
- default
- shared-net
pip:
image: pelias/pip-service:master
container_name: pelias_pip-service
Expand All @@ -91,6 +122,9 @@ services:
volumes:
- "./pelias.json:/code/pelias.json"
- "${DATA_DIR}:/data"
networks:
- default
- shared-net
elasticsearch:
image: pelias/elasticsearch:7.16.1
container_name: pelias_elasticsearch
Expand All @@ -110,3 +144,6 @@ services:
cap_add: [ "IPC_LOCK" ]
security_opt:
- seccomp=unconfined
networks:
- default
- shared-net
161 changes: 161 additions & 0 deletions projects/germany/generate.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,161 @@
#!/bin/bash

# Pelias build orchestration for the Germany project: validates config
# files, (re)starts Elasticsearch, then downloads, prepares and imports
# data. On any fatal error a report is mailed via msmtp.

# Log file; the whole terminal session is appended to it.
readonly LOGFILE="logfile.txt"

# Mirror stdout and stderr of this session into the log file
# (path quoted to survive spaces — SC2086).
exec > >(tee -a "$LOGFILE") 2>&1

# Maximum number of attempts for retryable commands.
readonly MAX_RETRIES=3

# All steps run by default; flip a flag to "true" to skip that phase.
SKIP_DOWNLOAD=false
SKIP_PREPARE=false
SKIP_IMPORT=false

# Fehlerbehandlungsfunktion
# Error handler: report the failed step, mail the tail of the log via
# msmtp (when installed) and abort the script.
# Globals:   LOGFILE (read)
# Arguments: $1 - description of the failed step
# Exits:     always exits with status 1
handle_error() {
    local error_message="Fehler: $1"
    # Diagnostics belong on stderr (still captured by the exec/tee log).
    printf '%s\n' "$error_message" >&2

    # Send a notification mail, but only if msmtp is actually available;
    # otherwise the pipe itself would fail and obscure the real error.
    if command -v msmtp >/dev/null 2>&1; then
        {
            echo "From: [email protected]"
            echo "To: [email protected]"
            echo "Subject: Fehler im Pelias Setup"
            echo
            echo "$error_message"
            echo
            echo "Logfile-Inhalt:"
            tail -n 50 "$LOGFILE"
        } | msmtp [email protected]
    fi

    exit 1
}

# Sicheres Ausführen von Befehlen mit Wiederholungslogik
# Run a command, retrying up to MAX_RETRIES times before giving up.
# A command whose output contains "no such service" is treated as a
# harmless no-op (the compose service simply does not exist).
# Globals:   MAX_RETRIES (read)
# Arguments: $1 - shell command line (executed via eval)
# Returns:   0 on success or skip; calls handle_error (exits 1) otherwise
safe_execute() {
    local command=$1
    local retries=0
    local output
    local status

    echo "Ausführen: $command"

    until [ "$retries" -ge "$MAX_RETRIES" ]; do
        # Capture output AND status in one run. The original executed the
        # command a second time on failure just to grep its output, which
        # doubled every side effect of a failing attempt.
        output=$(eval "$command" 2>&1)
        status=$?
        # Re-emit the captured output so it still reaches the session log.
        printf '%s\n' "$output"

        if [ "$status" -eq 0 ]; then
            echo "$command abgeschlossen"
            return 0
        fi

        if printf '%s' "$output" | grep -q "no such service"; then
            echo "Dienst existiert nicht, überspringe: $command"
            return 0
        fi

        retries=$((retries+1))
        echo "Fehler beim Ausführen von: $command. Versuch $retries von $MAX_RETRIES."
    done

    handle_error "$command nach $MAX_RETRIES Versuchen fehlgeschlagen"
}

# ----------------------------------------------------------------
# Syntax checks: validate .env, docker-compose.yml and pelias.json
# before touching any running services. Guard-clause style: a missing
# or broken file aborts immediately via handle_error (which exits).
# ----------------------------------------------------------------
echo "Starte Syntaxprüfung..."

# .env must exist and contain at least one non-comment KEY=VALUE line.
if [ ! -f ".env" ]; then
    handle_error ".env Datei existiert nicht."
fi
if ! grep -Eq '^[^#]*=.*$' .env; then
    handle_error ".env hat einen Syntaxfehler."
fi
echo ".env Syntax ist korrekt."

# docker-compose.yml must exist and pass `docker-compose config -q`.
if [ ! -f "docker-compose.yml" ]; then
    handle_error "docker-compose.yml Datei existiert nicht."
fi
docker-compose config -q || handle_error "docker-compose.yml hat einen Syntaxfehler."
echo "docker-compose.yml Syntax ist korrekt."

# pelias.json must exist and be valid JSON according to jq.
if [ ! -f "pelias.json" ]; then
    handle_error "pelias.json Datei existiert nicht."
fi
jq empty pelias.json || handle_error "pelias.json hat einen Syntaxfehler."
echo "pelias.json Syntax ist korrekt."

# If a Pelias stack is already running, shut it down first so the build
# starts from a clean state. NOTE(review): `grep -q "pelias"` matches ANY
# `docker ps` line containing "pelias" — confirm this cannot collide with
# unrelated containers.
if docker ps | grep -q "pelias"; then
echo "Pelias Docker läuft, beende ihn..."
pelias compose down || handle_error "pelias compose down"
else
echo "Pelias Docker läuft nicht."
fi

# Core bring-up: pull images, start Elasticsearch, then wait for it to
# become reachable (each step retried via safe_execute).
safe_execute "pelias compose pull"
safe_execute "pelias elastic start"
safe_execute "pelias elastic wait"

# Ask whether Elasticsearch should be reset; `read -t 10` times out after
# 10 seconds, after which the default "nein" (no) applies.
read -t 10 -p "Möchten Sie Elasticsearch zurücksetzen? (Standard: Nein) [ja/Nein]: " reset_choice
reset_choice=${reset_choice:-nein}
# Only the literal answers "ja" / "Ja" trigger a reset; anything else is a no.
if [[ "$reset_choice" =~ ^[Jj]a$ ]]; then
echo "Zurücksetzen von Elasticsearch..."
pelias elastic drop || handle_error "pelias elastic drop"
else
echo "Kein Zurücksetzen von Elasticsearch durchgeführt."
fi

# Create the Elasticsearch index unless it already exists: both the
# "resource_already_exists_exception" error and an "acknowledged" creation
# count as success; any other output aborts the build.
create_output=$(pelias elastic create 2>&1)
if echo "$create_output" | grep -q "resource_already_exists_exception"; then
echo "Index existiert bereits, weiter mit dem nächsten Schritt."
elif echo "$create_output" | grep -q "acknowledged"; then
echo "Index erfolgreich erstellt, weiter mit dem nächsten Schritt."
else
handle_error "pelias elastic create: $create_output"
fi

# Download step (skipped when SKIP_DOWNLOAD=true).
if [ "$SKIP_DOWNLOAD" = "false" ]; then
safe_execute "pelias download all"
else
echo "Pelias Download-Schritt wird übersprungen."
fi

# Preparation steps and Valhalla tile build (skipped when SKIP_PREPARE=true).
# NOTE(review): the docker run below hardcodes absolute /nfs-data/... host
# paths while the rest of the project uses DATA_DIR — confirm these match
# the deployment host, otherwise the tile build mounts empty directories.
if [ "$SKIP_PREPARE" = "false" ]; then
safe_execute "mkdir -p data/valhalla"
safe_execute "docker run --rm -it \
--network shared-net \
--user 1000:1000 \
-v '/nfs-data/docker/appdata/pelias/germany/data/valhalla:/data/valhalla' \
-v '/nfs-data/docker/appdata/pelias/germany/data/openstreetmap:/data/openstreetmap' \
-v '/nfs-data/docker/appdata/pelias/germany/data/polylines:/data/polylines' \
pelias/valhalla_baseimage \
/bin/bash -c \"./scripts/build_tiles.sh && \
find /data/valhalla/valhalla_tiles | sort -n | tar cf /data/valhalla/valhalla_tiles.tar --no-recursion -T - && \
./scripts/export_edges.sh\""
safe_execute "pelias prepare interpolation"
safe_execute "pelias prepare placeholder"
else
echo "Vorbereitungsschritte und Valhalla-Tiles-Erstellung werden übersprungen."
fi

# Data import (skipped when SKIP_IMPORT=true).
if [ "$SKIP_IMPORT" = "false" ]; then
safe_execute "pelias import all"
else
echo "Datenimport wird übersprungen."
fi

# Finally bring up the Pelias services.
safe_execute "pelias compose up"
Loading