# Makefile for the DSS data store (forked from DataBiosphere/data-store).
# Shared configuration (deployment stage, environment checks, etc.) comes
# from common.mk at the repository root.
include common.mk

# Python packages covered by lint/mypy and by "git clean" in the clean target.
MODULES=dss tests

# Default goal: run the standalone test suite.
all: test
# Static checks: flake8 style lint over the packages, the chalice app and
# every daemon entry point.
lint:
	flake8 $(MODULES) chalice/*.py daemons/*/*.py

# Type-check with mypy.
# TODO: remove --no-strict-optional when the codebase is ready for it.
mypy:
	mypy --ignore-missing-imports --no-strict-optional $(MODULES)
# Test mode consumed by the test scripts (exported into their environment).
# Defaults to "standalone"; all_test/integration_test override it below.
export DSS_TEST_MODE?=standalone

# Every test script, split into the ones that must run serially (see the
# comment above safe_test: they each start a local Elasticsearch instance)
# and the remainder, which are safe to run in parallel.
tests:=$(wildcard tests/test_*.py)
serial_tests:=tests/test_search.py \
              tests/test_indexer.py \
              tests/test_subscriptions.py
parallel_tests:=$(filter-out $(serial_tests),$(tests))
# Run all standalone tests in parallel
#
# Each test script writes its own .coverage.* file (see the pattern rule
# below); combine them into one report afterwards.
test: $(tests) daemon-import-test
	coverage combine
	rm -f .coverage.*

# Smoke-check that every daemon is importable.
daemon-import-test:
	$(MAKE) -C daemons import-test

# Serialize the standalone tests that start a local Elasticsearch instance in
# order to prevent more than one such instance at a time.
#
safe_test: serial_test parallel_test
	coverage combine
	rm -f .coverage.*

parallel_test: $(parallel_tests) daemon-import-test

# Force -j1 so the Elasticsearch-backed tests never overlap.
serial_test:
	$(MAKE) -j1 $(serial_tests)
# A pattern rule that runs a single test script
#
# Coverage runs with -p so each script writes a distinct .coverage.* file for
# the later "coverage combine".  For integration runs, the reserved
# concurrency limit on the notify Lambda is removed before the test and
# restored to 0 afterwards.
# NOTE(review): "[[" and the "*pattern*" match require SHELL to be bash —
# confirm common.mk sets it.  The function name hard-codes the "dev" stage;
# verify against how common.mk derives DSS_DEPLOYMENT_STAGE.
$(tests): %.py : mypy lint
	@if [[ "${DSS_TEST_MODE}" == *"integration"* ]]; then \
	    aws lambda delete-function-concurrency --function-name dss-notify-dev; \
	fi
	coverage run -p --source=dss $*.py $(DSS_UNITTEST_OPTS)
	@if [[ "${DSS_TEST_MODE}" == *"integration"* ]]; then \
	    aws lambda put-function-concurrency --function-name dss-notify-dev --reserved-concurrent-executions 0; \
	fi
# Run standalone and integration tests
#
all_test:
	$(MAKE) DSS_TEST_MODE="standalone integration" test

# Run integration tests only
#
integration_test:
	$(MAKE) DSS_TEST_MODE="integration" test

# Quick end-to-end checks against the deployed stage.
smoketest:
	$(MAKE) DSS_TEST_MODE="integration" tests/test_smoketest.py

smoketest-prod:
	$(MAKE) DSS_TEST_MODE="integration" tests/test_prod_smoketest.py

# Scalability run: 10 requests/sec for 30 seconds (see the runner's --help).
scaletest:
	./tests/scalability/scale_test_runner.py -r 10 -d 30
# Full deployment: validate the environment and secrets first, then deploy
# the chalice API and the daemons.  Afterwards the notify Lambda's reserved
# concurrency is set back to 0 (the per-test pattern rule lifts it during
# integration runs).
# NOTE(review): "dss-notify-dev" hard-codes the dev stage — confirm.
deploy: check-env check-secrets deploy-chalice deploy-daemons
	aws lambda put-function-concurrency --function-name dss-notify-dev --reserved-concurrent-executions 0

# Deploy without the pre-flight checks (use with care).
force-deploy: deploy-chalice deploy-daemons

deploy-chalice:
	$(MAKE) -C chalice deploy

# Some daemons must be deployed one at a time; the rest go in parallel.
deploy-daemons: deploy-daemons-serial deploy-daemons-parallel

deploy-daemons-serial:
	$(MAKE) -j1 -C daemons deploy-serial

deploy-daemons-parallel:
	$(MAKE) -C daemons deploy-parallel
# Terraform plan/apply for all infrastructure components (see infra/).
plan-infra:
	$(MAKE) -C infra plan-all

deploy-infra:
	$(MAKE) -C infra apply-all

# Pre-deployment sanity checks on the shell environment and deployed secrets.
check-env:
	scripts/check_env.py --special

check-secrets:
	scripts/check_deployment_secrets.py

# Build the Lambda layer containing the pinned Python dependencies.
generate-dependencies:
	scripts/generate_upload_requirements_layer.sh
# Stage promotion: scripts/release.sh <from-branch> <to-branch> promotes code
# along the master -> integration -> staging -> prod pipeline.
release_integration:
	scripts/release.sh master integration

release_staging:
	scripts/release.sh integration staging

release_prod:
	scripts/release.sh staging prod
# Remove build products: gitignored files under the app/daemon/module trees,
# generated vendor directories, virtualenvs and dependency archives, and
# restore the chalice config files to their committed state.
# NOTE(review): the {a,b} brace expansions require SHELL to be bash —
# confirm common.mk sets it.
clean:
	git clean -Xdf chalice daemons $(MODULES)
	git clean -df {chalice,daemons/*}/{chalicelib,domovoilib,vendor}
	git checkout $$(git status --porcelain {chalice,daemons/*}/.chalice/config.json | awk '{print $$2}')
	-rm -rf .*-env
	-rm -rf node_modules
	rm -rf dependencies
	rm -rf temp_chalice
	rm -rf dss-dependencies-${DSS_DEPLOYMENT_STAGE}.zip
# Regenerate both pinned requirements files from scratch: truncate them, make
# the .in sources newer, then rebuild via the pattern rule below.
refresh_all_requirements:
	@echo -n '' >| requirements.txt
	@echo -n '' >| requirements-dev.txt
	@if [ $$(uname -s) = "Darwin" ]; then sleep 1; fi # this is required because Darwin HFS+ only has second-resolution for timestamps.
	@touch requirements.txt.in requirements-dev.txt.in
	@$(MAKE) requirements.txt requirements-dev.txt
# Static pattern rule: rebuild a pinned %.txt from its %.txt.in source.
# Installing the existing pins ($@) before the .in file ($<) keeps current
# pins stable while picking up new or changed direct dependencies; the
# throwaway virtualenv is then frozen into $@.
# NOTE(review): the guard checks ".requirements-env" but the env created
# below is named ".$<-env" (e.g. ".requirements.txt.in-env") — confirm the
# guard still protects anything.
requirements.txt requirements-dev.txt : %.txt : %.txt.in
	[ ! -e .requirements-env ] || exit 1
	virtualenv -p $(shell which python3) .$<-env
	.$<-env/bin/pip install -r $@
	.$<-env/bin/pip install -r $<
	echo "# You should not edit this file directly. Instead, you should edit $<." >| $@
	.$<-env/bin/pip freeze >> $@
	rm -rf .$<-env
#	scripts/find_missing_wheels.py requirements.txt # Disabled by akislyuk (circular dependency issues)

# The dev pins must also be refreshed when the base .in changes.
requirements-dev.txt : requirements.txt.in
# Declare all command-style targets phony so same-named files can never
# shadow them.  Fixed: the list named a nonexistent "_serial_test" while the
# real serial_test/parallel_test targets (and several other command targets
# such as clean, scaletest and the release_* family) were undeclared.
.PHONY: all lint mypy test safe_test serial_test parallel_test all_test
.PHONY: integration_test smoketest smoketest-prod scaletest daemon-import-test $(tests)
.PHONY: deploy force-deploy deploy-chalice deploy-daemons deploy-daemons-serial deploy-daemons-parallel
.PHONY: deploy-infra plan-infra check-env check-secrets generate-dependencies
.PHONY: release_integration release_staging release_prod clean refresh_all_requirements