# Default to the standalone docker-compose binary
DOCKER_COMPOSE ?= docker-compose
CELERY = cd contentcuration/ && celery -A contentcuration worker -l info --concurrency=3 --task-events
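# $(CELERY) is shared by the prodceleryworkers and devceleryworkers targets below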
# Fall back to the `docker compose` plugin when the standalone binary is not installed
ifeq (, $(shell command -v docker-compose 2>/dev/null))
DOCKER_COMPOSE := docker compose
endif
###############################################################
# PRODUCTION COMMANDS #########################################
###############################################################
# These are production commands which may be invoked in deployments
altprodserver: NUM_PROCS:=3
altprodserver: NUM_THREADS:=5
altprodserver: collectstatic compilemessages
	cd contentcuration/ && gunicorn contentcuration.wsgi:application --timeout=4000 --error-logfile=/var/log/gunicorn-error.log --workers=${NUM_PROCS} --threads=${NUM_THREADS} --bind=0.0.0.0:8081 --pid=/tmp/contentcuration.pid --log-level=debug || sleep infinity

prodceleryworkers:
	$(CELERY)

collectstatic:
	python contentcuration/manage.py collectstatic --noinput

compilemessages:
	python contentcuration/manage.py compilemessages

migrate:
	python contentcuration/manage.py migrate || true
	python contentcuration/manage.py loadconstants
# This is a special command that we'll reuse to run data migrations outside of the normal
# django migration process. This is useful for long running migrations which we don't want to block
# the CD build. Do not delete!
# Procedure:
# 1) Add a new management command for the migration
# 2) Call it here
# 3) Perform the release
# 4) Remove the management command from this `deploy-migrate` recipe
# 5) Repeat!
deploy-migrate:
	echo "Nothing to do here!"
contentnodegc:
	python contentcuration/manage.py garbage_collect

filedurations:
	python contentcuration/manage.py set_file_duration

learningactivities:
	python contentcuration/manage.py set_default_learning_activities

set-tsvectors:
	python contentcuration/manage.py set_channel_tsvectors
	python contentcuration/manage.py set_contentnode_tsvectors --published

reconcile:
	python contentcuration/manage.py reconcile_publishing_status
	python contentcuration/manage.py reconcile_change_tasks
###############################################################
# END PRODUCTION COMMANDS #####################################
###############################################################
###############################################################
# I18N COMMANDS ###############################################
###############################################################
i18n-extract-frontend:
	# generate frontend messages
	pnpm makemessages

i18n-extract-backend:
	# generate backend messages
	cd contentcuration && python manage.py makemessages --all

i18n-extract: i18n-extract-frontend i18n-extract-backend

i18n-transfer-context:
	pnpm transfercontext

i18n-django-compilemessages:
	# Change working directory to contentcuration/ such that compilemessages
	# finds only the .po files nested there.
	cd contentcuration && python manage.py compilemessages
CROWDIN_BRANCH ?= unstable
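# The branch can be overridden per invocation, e.g. (hypothetical branch name):
#   make i18n-upload CROWDIN_BRANCH=release-v1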
i18n-upload: i18n-extract
	pnpm exec crowdin upload sources --branch ${CROWDIN_BRANCH}

i18n-pretranslate:
	pnpm exec crowdin pre-translate --branch ${CROWDIN_BRANCH} --translate-untranslated-only --method=tm

i18n-pretranslate-approve-all:
	pnpm exec crowdin pre-translate --branch ${CROWDIN_BRANCH} --translate-untranslated-only --method=tm --auto-approve-option=all
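# The download target below checks its own success with a marker file: touch a
# marker, run the download, then use `find -newer` to require at least one .po
# or .csv file newer than the marker; if none is found, the download is treated
# as a silent failure and the recipe aborts.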
i18n-download-translations: i18n-extract-frontend
	touch contentcuration/locale/.crowdin-download-marker
	pnpm exec crowdin download --branch ${CROWDIN_BRANCH}
	@if [ -z "$$(find contentcuration/locale/*/LC_MESSAGES -type f \( -name '*.po' -o -name '*.csv' \) -newer contentcuration/locale/.crowdin-download-marker 2>/dev/null)" ]; then \
		echo "❌ ERROR: No translation files were downloaded - Crowdin download may have failed silently"; \
		echo "Check the output above for errors during the download process"; \
		rm -f contentcuration/locale/.crowdin-download-marker; \
		exit 1; \
	fi
	@echo "✅ Translation files downloaded successfully"
	rm -f contentcuration/locale/.crowdin-download-marker
	pnpm exec kolibri-i18n code-gen --output-dir ./contentcuration/contentcuration/frontend/shared/i18n
	$(MAKE) i18n-django-compilemessages
	pnpm exec kolibri-i18n create-message-files --namespace contentcuration --searchPath ./contentcuration/contentcuration/frontend

i18n-download: i18n-download-translations

i18n-download-glossary:
	pnpm exec crowdin glossary download

i18n-upload-glossary:
	pnpm exec crowdin glossary upload
###############################################################
# END I18N COMMANDS ###########################################
###############################################################
# When using apidocs, this should clean out all modules
clean-docs:
	$(MAKE) -C docs clean

docs: clean-docs
	# Adapt to apidocs
	# sphinx-apidoc -d 10 -H "Python Reference" -o docs/py_modules/ kolibri kolibri/test kolibri/deployment/ kolibri/dist/
	$(MAKE) -C docs html

setup:
	python contentcuration/manage.py setup
################################################################
# DEVELOPMENT COMMANDS #########################################
################################################################
test:
	pytest

dummyusers:
	cd contentcuration/ && python manage.py loaddata contentcuration/fixtures/admin_user.json
	cd contentcuration/ && python manage.py loaddata contentcuration/fixtures/admin_user_token.json

hascaptions:
	python contentcuration/manage.py set_orm_based_has_captions
BRANCH_NAME := $(shell git rev-parse --abbrev-ref HEAD | sed 's/[^a-zA-Z0-9_-]/-/g')
export COMPOSE_PROJECT_NAME=studio_$(BRANCH_NAME)
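# So a checkout of a hypothetical branch `my-feature` runs compose as project
# `studio_my-feature`, keeping each branch's containers and volumes separate.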
purge-postgres: .docker/pgpass
	-PGPASSFILE=.docker/pgpass dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost
	PGPASSFILE=.docker/pgpass createdb -U learningequality "kolibri-studio" --port 5432 -h localhost

destroy-and-recreate-database: purge-postgres setup

devceleryworkers:
	DJANGO_SETTINGS_MODULE=contentcuration.dev_settings $(CELERY) --pool=threads

run-services:
	$(MAKE) -j 2 dcservicesup devceleryworkers
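# `-j 2` runs the two targets in parallel, so the celery worker comes up
# alongside the docker services rather than waiting for `dcservicesup` (which
# blocks in the foreground) to exit.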
.docker/minio:
	mkdir -p $@

.docker/postgres:
	mkdir -p $@
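# .docker/pgpass uses the standard libpq password-file format
# (host:port:database:username:password) so the psql/pg_dump/dropdb commands in
# this Makefile can authenticate non-interactively via PGPASSFILE.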
.docker/pgpass:
	echo "localhost:5432:kolibri-studio:learningequality:kolibri" > $@
	chmod 600 $@

.docker/postgres/init.sql: .docker/pgpass
	# assumes postgres is running in a docker container
	PGPASSFILE=.docker/pgpass pg_dump --host localhost --port 5432 --username learningequality --dbname "kolibri-studio" --exclude-table-data=contentcuration_change --file $@
dcbuild:
	# build the studio docker image and all dependent services using docker-compose
	$(DOCKER_COMPOSE) build

dcup: .docker/minio .docker/postgres
	# run all services
	$(DOCKER_COMPOSE) up

dcdown:
	# stop all containers started by dcup
	$(DOCKER_COMPOSE) down

dcclean:
	# stop all containers and delete volumes
	$(DOCKER_COMPOSE) down -v
	docker image prune -f

dcshell:
	# interactive fish shell inside the (running!) studio-app container
	$(DOCKER_COMPOSE) exec studio-app /usr/bin/fish
devserver-stripe:
	# Start stripe CLI listener and dev server with webhook secret auto-configured.
	# Requires: stripe CLI installed and authenticated (stripe login).
	# The listener output is redirected to a temp file so we can extract the
	# signing secret, then tailed so it stays visible.
	@STRIPE_LOG=$$(mktemp); \
	stripe listen --api-key $$STRIPE_TEST_SECRET_KEY --forward-to localhost:8080/api/stripe/webhook/ > "$$STRIPE_LOG" 2>&1 & \
	STRIPE_PID=$$!; \
	trap "kill $$STRIPE_PID 2>/dev/null; rm -f $$STRIPE_LOG" EXIT; \
	echo "Waiting for Stripe CLI..."; \
	for i in 1 2 3 4 5 6 7 8 9 10; do \
		WEBHOOK_SECRET=$$(grep -o 'whsec_[a-zA-Z0-9_]*' "$$STRIPE_LOG" | head -1); \
		[ -n "$$WEBHOOK_SECRET" ] && break; \
		sleep 1; \
	done; \
	if [ -z "$$WEBHOOK_SECRET" ]; then \
		echo "ERROR: Could not extract webhook secret from Stripe CLI"; \
		exit 1; \
	fi; \
	echo "Stripe webhook secret: $$WEBHOOK_SECRET"; \
	tail -f "$$STRIPE_LOG" & \
	export STRIPE_TEST_WEBHOOK_SECRET=$$WEBHOOK_SECRET; \
	pnpm devserver
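# Usage sketch (the key value is illustrative, not a real secret):
#   STRIPE_TEST_SECRET_KEY=sk_test_xxx make devserver-stripe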
dcpsql: .docker/pgpass
	PGPASSFILE=.docker/pgpass psql --host localhost --port 5432 --username learningequality --dbname "kolibri-studio"

dctest: .docker/minio .docker/postgres
	# run backend tests inside docker, in new instances
	$(DOCKER_COMPOSE) run studio-app make test

dcservicesup: .docker/minio .docker/postgres
	# launch all of studio's dependent services using docker-compose
	$(DOCKER_COMPOSE) up minio postgres redis

dcservicesdown:
	# stop services that were started using dcservicesup
	$(DOCKER_COMPOSE) down