docker-run-log-pipeline-event.sh
#!/bin/bash
set -e
PROJECT_NAME="$(<configuration/docker_image_project_name.txt)"
IMAGE_NAME=$PROJECT_NAME-log-pipeline-event
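
# Parse optional command-line flags. --profile-cpu enables CPU profiling of the python
# process with pyflame, and takes the host path to write the recorded profile to.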
while [[ $# -gt 0 ]]; do
    case "$1" in
        --profile-cpu)
            PROFILE_CPU=true
            CPU_PROFILE_OUTPUT_PATH="$2"
            shift 2;;
        --)
            shift
            break;;
        *)
            break;;
    esac
done

# Check that the correct number of arguments were provided.
if [[ $# -ne 5 ]]; then
    echo "Usage: ./docker-run-log-pipeline-event.sh
    [--profile-cpu <cpu-profile-output-path>] <user> <google-cloud-credentials-file-path>\
    <pipeline-configuration-file-path> <run-id> <event-key>"
    echo "Updates the pipeline event/status in a Firebase table to aid in monitoring"
    exit
fi

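# Example invocation (all argument values below are illustrative placeholders, not real
# project values):
#   ./docker-run-log-pipeline-event.sh --profile-cpu ./cpu-profiles/log-event.prof \
#       test_user ./google-cloud-credentials.json ./pipeline-configuration.json \
#       test-run-id test-event-key
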
# Assign the program arguments to bash variables.
USER=$1
INPUT_GOOGLE_CLOUD_CREDENTIALS=$2
INPUT_PIPELINE_CONFIGURATION=$3
RUN_ID=$4
EVENT_KEY=$5
# Build an image for this pipeline stage.
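# The INSTALL_CPU_PROFILER build arg is expected to tell the Dockerfile whether to
# install pyflame; it is only set to true when --profile-cpu was given.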
docker build --build-arg INSTALL_CPU_PROFILER="$PROFILE_CPU" -t "$IMAGE_NAME" .
# Create a container from the image that was just built.
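# If profiling was requested, the python command is run under pyflame, which needs the
# SYS_PTRACE capability to attach to the process inside the container. The container is
# created here without being started, so the input files can be copied in with
# `docker cp` before the command runs.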
if [[ "$PROFILE_CPU" = true ]]; then
    PROFILE_CPU_CMD="pyflame -o /data/cpu.prof -t"
    SYS_PTRACE_CAPABILITY="--cap-add SYS_PTRACE"
fi

CMD="pipenv run $PROFILE_CPU_CMD python -u log_pipeline_event.py \
    \"$USER\" /credentials/google-cloud-credentials.json \
    /data/pipeline-configuration.json \"$RUN_ID\" \"$EVENT_KEY\"
"

container="$(docker container create ${SYS_PTRACE_CAPABILITY} -w /app "$IMAGE_NAME" /bin/bash -c "$CMD")"
echo "Created container $container"
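# Use a short form of the container id to keep the log messages below readable.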
container_short_id=${container:0:7}
# Copy input data into the container
echo "Copying $INPUT_GOOGLE_CLOUD_CREDENTIALS -> $container_short_id:/credentials/google-cloud-credentials.json"
docker cp "$INPUT_GOOGLE_CLOUD_CREDENTIALS" "$container:/credentials/google-cloud-credentials.json"
echo "Copying $INPUT_PIPELINE_CONFIGURATION -> $container_short_id:/data/pipeline-configuration.json"
docker cp "$INPUT_PIPELINE_CONFIGURATION" "$container:/data/pipeline-configuration.json"
# Run the container
echo "Starting container $container_short_id"
docker start -a -i "$container"

if [[ "$PROFILE_CPU" = true ]]; then
    echo "Copying $container_short_id:/data/cpu.prof -> $CPU_PROFILE_OUTPUT_PATH"
    mkdir -p "$(dirname "$CPU_PROFILE_OUTPUT_PATH")"
    docker cp "$container:/data/cpu.prof" "$CPU_PROFILE_OUTPUT_PATH"
fi

# Tear down the container, now that all expected output files have been copied out successfully
docker container rm "$container" >/dev/null