-
Notifications
You must be signed in to change notification settings - Fork 1
/
start
executable file
·61 lines (55 loc) · 1.8 KB
/
start
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
#!/bin/bash
# Dev launcher for the refinery tokenizer container.
# Usage: ./start [-s]    -s: suppress docker stop/build output.

# Whatever way this script terminates, stop the container it started.
trap "echo -ne '\nstopping container...' && docker stop refinery-tokenizer > /dev/null 2>&1 && echo -ne '\t\t [done]\n'" EXIT

# Parse flags: only -s is recognized; anything else is silently ignored
# (leading ':' in the optstring suppresses getopts' own error messages).
HIDE_BUILD=0
while getopts :s opt; do
  case "$opt" in
    s) HIDE_BUILD=1 ;;
  esac
done
HOST_IP=$(docker network inspect bridge --format='{{json .IPAM.Config}}' | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' | tail -1)
# Stop any previously running tokenizer container. With -s, docker's own
# output is suppressed; a failure (e.g. no old container exists) is harmless
# and intentionally not treated as an error.
echo -ne 'stopping old container...'
if [ "$HIDE_BUILD" -eq 1 ]
then
  docker stop refinery-tokenizer > /dev/null 2>&1
else
  docker stop refinery-tokenizer
fi
echo -ne '\t\t [done]\n'
# Locate the shared dev-setup inference directory (mounted into the container
# as /inference). Try one directory level above $PWD, then two; abort if
# neither exists, since the container would start without its model data.
INFERENCE_DIR=${PWD%/*}/dev-setup/inference/
if [ ! -d "$INFERENCE_DIR" ]
then
  INFERENCE_DIR=${PWD%/*/*}/dev-setup/inference/
  if [ ! -d "$INFERENCE_DIR" ]
  then
    # to include volume for local development, use the dev-setup inference folder:
    # alternative use manual logic with
    # -v /path/to/dev-setup/inference:/models \
    echo "Can't find model data directory: $INFERENCE_DIR -> stopping" >&2
    exit 1
  fi
fi
# Build the dev image; with -s the (noisy) build output is hidden.
echo -ne 'building container...'
if [ "$HIDE_BUILD" -eq 1 ]
then
  docker build -t refinery-tokenizer-dev -f dev.Dockerfile . > /dev/null 2>&1
else
  docker build -t refinery-tokenizer-dev -f dev.Dockerfile .
fi
echo -ne '\t\t [done]\n'
echo -ne 'starting...'
# Start the tokenizer detached; --rm removes the container once it stops
# (the EXIT trap at the top of this script stops it when the script ends).
# NOTE(review): S3/Postgres credentials are hardcoded — acceptable only for a
# local dev setup; do not reuse these values anywhere non-local.
# S3_SECRET_KEY is quoted so the '*' in the value is never subject to
# pathname expansion by the shell.
docker run -d --rm \
--name refinery-tokenizer \
-p 7061:80 \
-e S3_ENDPOINT_LOCAL=object-storage:9000 \
-e S3_ACCESS_KEY=kern \
-e 'S3_SECRET_KEY=r6ywtR33!DMlaL*SUUdy' \
-e WS_NOTIFY_ENDPOINT=http://refinery-websocket:8080 \
-e POSTGRES=postgresql://postgres:kern@graphql-postgres:5432 \
-v "$INFERENCE_DIR":/inference \
--mount type=bind,source="$(pwd)"/,target=/app \
-v /var/run/docker.sock:/var/run/docker.sock \
--network dev-setup_default \
refinery-tokenizer-dev > /dev/null 2>&1
echo -ne '\t\t\t [done]\n'
# Stream container logs until interrupted (Ctrl-C fires the EXIT trap).
docker logs -f refinery-tokenizer