This repository was archived by the owner on May 1, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 74
Expand file tree
/
Copy pathMakefile
More file actions
155 lines (117 loc) · 6.13 KB
/
Makefile
File metadata and controls
155 lines (117 loc) · 6.13 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
# Absolute path to the repository root. $(CURDIR) is built in to GNU Make,
# so no shell needs to be forked (the old `$(shell echo "$$PWD")` form also
# re-ran the shell on every expansion because it used recursive `=`).
ROOT := $(CURDIR)
COVERAGE_DIR := $(ROOT)/build/coverage
# Django database aliases that migrations and fixtures are applied to.
DATABASES := default analytics analytics_v1
.DEFAULT_GOAL := help

# When TOXENV is set, prefix tool invocations with `tox --` so each command
# runs inside the selected tox environment; otherwise run tools directly.
# (Previously TOX was set to the two literal characters '' and relied on the
# shell collapsing the empty quotes — an empty value is what was meant.)
TOX :=
ifdef TOXENV
TOX := tox -- # trailing space kept: value must separate from the command
endif
# Self-documenting help: perl prints every makefile line of the form
# `target: ## description`, and awk splits on ":.*?## " to print an aligned,
# colorized list of targets. Inline `##` comments on rule headers below are
# what feed this listing.
help: ## display this help message
	@echo "Please use \`make <target>' where <target> is one of"
	@perl -nle'print $& if m{^[\.a-zA-Z_-]+:.*?## .*$$}' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m %-25s\033[0m %s\n", $$1, $$2}'
# All of these are command targets, not files; declaring them phony ensures a
# stray file with the same name (e.g. `test` or `clean`) never masks them.
.PHONY: requirements production-requirements develop clean upgrade \
        help test main.test test.requirements tox.requirements \
        test.run_elasticsearch test.stop_elasticsearch \
        diff.report view.diff.report quality validate static docs check_keywords \
        run_check_isort run_pycodestyle run_pylint run_isort \
        migrate migrate-all loaddata create_indices demo github_ci \
        docker_build docker_tag docker_auth docker_push
# Install the runtime (base) Python dependencies.
requirements: ## install base requirements
	pip3 install -q -r requirements/base.txt

# Install the pinned production dependency set from the top-level file.
production-requirements: ## install production requirements
	pip3 install -r requirements.txt
# Start the Elasticsearch container (defined in docker-compose.yml) that the
# test suite talks to; runs detached so the make invocation can continue.
test.run_elasticsearch:
	docker-compose up -d

# Stop the Elasticsearch container once tests have finished.
test.stop_elasticsearch:
	docker-compose stop
# Test dependencies build on top of the base requirements (prerequisite).
test.requirements: requirements ## install base and test requirements
	pip3 install -q -r requirements/test.txt

# Minimal dependencies needed just to drive tox itself.
tox.requirements: ## install tox requirements
	pip3 install -q -r requirements/tox.txt

# Full local development setup: test requirements plus dev-only tools.
develop: test.requirements ## install test and dev requirements
	pip3 install -q -r requirements/dev.txt
# Re-pin every requirements/*.txt from its .in source at the latest allowed
# versions. pip.txt is compiled first with --allow-unsafe because it pins pip
# itself; the post-pip-compile script then post-processes each output file.
upgrade:
	pip3 install -q -r requirements/pip_tools.txt
	pip-compile --upgrade --allow-unsafe -o requirements/pip.txt requirements/pip.in
	pip-compile --upgrade -o requirements/pip_tools.txt requirements/pip_tools.in
	pip-compile --upgrade -o requirements/base.txt requirements/base.in
	pip-compile --upgrade -o requirements/doc.txt requirements/doc.in
	pip-compile --upgrade -o requirements/dev.txt requirements/dev.in
	pip-compile --upgrade -o requirements/production.txt requirements/production.in
	pip-compile --upgrade -o requirements/test.txt requirements/test.in
	pip-compile --upgrade -o requirements/tox.txt requirements/tox.in
	pip-compile --upgrade -o requirements/ci.txt requirements/ci.in
	scripts/post-pip-compile.sh \
	    requirements/pip_tools.txt \
	    requirements/base.txt \
	    requirements/doc.txt \
	    requirements/dev.txt \
	    requirements/production.txt \
	    requirements/test.txt \
	    requirements/tox.txt \
	    requirements/ci.txt
	## Let tox control the Django version for tests
	# Extract the pinned Django version into django.txt, then strip any Django
	# pin out of test.txt (via a temp file) so tox can install its own version.
	grep -e "^django==" requirements/base.txt > requirements/django.txt
	sed '/^[dD]jango==/d' requirements/test.txt > requirements/test.tmp
	mv requirements/test.tmp requirements/test.txt
# Remove collected coverage data and all compiled Python bytecode.
clean:
	$(TOX)coverage erase
	find . -name '*.pyc' -delete
# Run the pytest suite with HTML and XML coverage reports. COVERAGE_DIR is
# exported on the same shell line as pytest (note the `&&` continuation)
# because each recipe line otherwise runs in its own shell.
main.test: clean
	export COVERAGE_DIR=$(COVERAGE_DIR) && \
	$(TOX)pytest --cov-report html --cov-report xml

# Choose test prerequisites at parse time: in devstack an Elasticsearch
# service is assumed to already be running, so only the pytest run is needed;
# everywhere else a local container is started before and stopped after.
# NOTE(review): prerequisite order below is not guaranteed under `make -j` —
# presumably this target is only invoked serially; confirm before enabling -j.
test:
ifeq ($(DJANGO_SETTINGS_MODULE),analyticsdataserver.settings.devstack)
test: main.test
else
test: test.run_elasticsearch main.test test.stop_elasticsearch
endif
# Generate HTML reports showing coverage and lint quality for the current
# diff only (requires coverage.xml from a prior test run).
diff.report: test.requirements ## Show the diff in quality and coverage
	diff-cover $(COVERAGE_DIR)/coverage.xml --html-report $(COVERAGE_DIR)/diff_cover.html
	diff-quality --violations=pycodestyle --html-report $(COVERAGE_DIR)/diff_quality_pycodestyle.html
	diff-quality --violations=pylint --html-report $(COVERAGE_DIR)/diff_quality_pylint.html

# Open the three diff reports above in the default browser via xdg-open.
view.diff.report: ## Show the diff in quality and coverage using xdg
	xdg-open file:///$(COVERAGE_DIR)/diff_cover.html
	xdg-open file:///$(COVERAGE_DIR)/diff_quality_pycodestyle.html
	xdg-open file:///$(COVERAGE_DIR)/diff_quality_pylint.html
# Verify import ordering without modifying files (exits non-zero on drift).
# NOTE(review): `--recursive` suggests an isort 4.x pin — the flag was removed
# in isort 5; confirm against requirements before upgrading isort.
run_check_isort:
	$(TOX)isort --check-only --recursive --diff analytics_data_api/ analyticsdataserver/

# PEP 8 style check using the project's .pycodestyle configuration.
run_pycodestyle:
	$(TOX)pycodestyle --config=.pycodestyle analytics_data_api analyticsdataserver

# Static analysis; -j 0 lets pylint use all available CPU cores.
run_pylint:
	$(TOX)pylint -j 0 --rcfile=pylintrc analytics_data_api analyticsdataserver

# Rewrite files in place to fix import ordering.
run_isort:
	$(TOX)isort --recursive analytics_data_api/ analyticsdataserver/

# Aggregate quality gate: pylint, isort check, and pycodestyle.
quality: run_pylint run_check_isort run_pycodestyle ## run_pylint, run_check_isort, run_pycodestyle (Installs tox requirements.)

# Full validation: install test requirements, run tests, then quality checks.
validate: test.requirements test quality ## Runs make test and make quality. (Installs test requirements.)
# Collect Django static assets into the configured static root.
static: ## Runs collectstatic
	python manage.py collectstatic --noinput

# Apply migrations (with --run-syncdb for unmigrated apps) to the default DB.
migrate: ## Runs django migrations with syncdb and default database
	./manage.py migrate --noinput --run-syncdb --database=default

# Apply migrations to every alias in $(DATABASES); $(foreach ...) expands to
# one `./manage.py migrate ...;` invocation per database on a single line.
migrate-all: ## Runs migrations on all databases
	$(foreach db_name,$(DATABASES),./manage.py migrate --noinput --run-syncdb --database=$(db_name);)

# After migrating, load fixture data and generate fake course data in both
# analytics databases for local development/demo use.
loaddata: migrate-all ## Runs migrations and generates fake data
	python manage.py loaddata problem_response_answer_distribution --database=analytics
	python manage.py loaddata problem_response_answer_distribution_analytics_v1 --database=analytics_v1
	python manage.py generate_fake_course_data --database=analytics
	python manage.py generate_fake_course_data --database=analytics_v1
# Create the Elasticsearch indices used for learner analytics queries.
create_indices: ## Create ElasticSearch indices
	python manage.py create_elasticsearch_learners_indices

# One-shot demo environment: install deps, clean, migrate + load fake data,
# then set the API key/secret pair to edx/edx.
demo: requirements clean loaddata ## Runs make clean, requirements, and loaddata, sets api key to edx
	python manage.py set_api_key edx edx

# Target used by edx-analytics-dashboard during its testing.
github_ci: test.requirements clean migrate-all ## Used by CI for testing
	python manage.py set_api_key edx edx
	python manage.py loaddata problem_response_answer_distribution --database=analytics
	python manage.py generate_fake_course_data --num-weeks=2 --no-videos --course-id "edX/DemoX/Demo_Course"
# Build the image twice from the same Dockerfile: the default (latest) image
# and the `newrelic` build stage tagged latest-newrelic.
docker_build:
	docker build . -f Dockerfile -t openedx/analytics-data-api
	docker build . -f Dockerfile --target newrelic -t openedx/analytics-data-api:latest-newrelic

# Additionally tag both freshly built images with the commit SHA.
# GITHUB_SHA is inherited from the CI environment (env vars become make vars).
docker_tag: docker_build
	docker tag openedx/analytics-data-api openedx/analytics-data-api:${GITHUB_SHA}
	docker tag openedx/analytics-data-api:latest-newrelic openedx/analytics-data-api:${GITHUB_SHA}-newrelic

# Log in to Docker Hub; the password is piped via stdin so it never appears
# in the process list ($$ passes the literal $ through to the shell).
docker_auth:
	echo "$$DOCKERHUB_PASSWORD" | docker login -u "$$DOCKERHUB_USERNAME" --password-stdin

# Push all four tags (latest, SHA, and their -newrelic variants) to Docker Hub.
docker_push: docker_tag docker_auth ## push to docker hub
	docker push 'openedx/analytics-data-api:latest'
	docker push "openedx/analytics-data-api:${GITHUB_SHA}"
	docker push 'openedx/analytics-data-api:latest-newrelic'
	docker push "openedx/analytics-data-api:${GITHUB_SHA}-newrelic"
# Build the documentation via the dedicated tox `docs` environment.
docs: tox.requirements
	tox -e docs

# Scan Django models in all installed apps for field names that collide with
# restricted/reserved keywords, using the project's override file.
check_keywords: ## Scan the Django models in all installed apps in this project for restricted field names
	python manage.py check_reserved_keywords --override_file db_keyword_overrides.yml