diff --git a/Dockerfile b/Dockerfile
index 5e99b511..fb854489 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -177,6 +177,7 @@ COPY --from=source $MW_ORIGIN_FILES $MW_ORIGIN_FILES
 # Default values
 ENV MW_ENABLE_JOB_RUNNER=true \
     MW_JOB_RUNNER_PAUSE=2 \
+    MW_JOB_RUNNER_MEMORY_LIMIT=512M \
     MW_ENABLE_TRANSCODER=true \
     MW_JOB_TRANSCODER_PAUSE=60 \
     MW_MAP_DOMAIN_TO_DOCKER_GATEWAY=true \
diff --git a/_sources/scripts/maintenance-scripts/mw_job_runner.sh b/_sources/scripts/maintenance-scripts/mw_job_runner.sh
index 32993344..33e58437 100755
--- a/_sources/scripts/maintenance-scripts/mw_job_runner.sh
+++ b/_sources/scripts/maintenance-scripts/mw_job_runner.sh
@@ -22,13 +22,13 @@ if [ -f "$MW_VOLUME/config/wikis.yaml" ]; then
         while true; do
             # Job types that need to be run ASAP no matter how many of them are in the queue
             # Those jobs should be very "cheap" to run
-            php $RJ --type="enotifNotify" --server="https://$wiki_url" --wiki="$wiki_id"
+            php $RJ --memory-limit="$MW_JOB_RUNNER_MEMORY_LIMIT" --type="enotifNotify" --server="https://$wiki_url" --wiki="$wiki_id"
             sleep 1
-            php $RJ --type="createPage" --server="https://$wiki_url" --wiki="$wiki_id"
+            php $RJ --memory-limit="$MW_JOB_RUNNER_MEMORY_LIMIT" --type="createPage" --server="https://$wiki_url" --wiki="$wiki_id"
             sleep 1
-            php $RJ --type="refreshLinks" --server="https://$wiki_url" --wiki="$wiki_id"
+            php $RJ --memory-limit="$MW_JOB_RUNNER_MEMORY_LIMIT" --type="refreshLinks" --server="https://$wiki_url" --wiki="$wiki_id"
             sleep 1
-            php $RJ --type="htmlCacheUpdate" --maxjobs=500 --server="https://$wiki_url" --wiki="$wiki_id"
+            php $RJ --memory-limit="$MW_JOB_RUNNER_MEMORY_LIMIT" --type="htmlCacheUpdate" --maxjobs=500 --server="https://$wiki_url" --wiki="$wiki_id"
             sleep 1
             # Everything else, limit the number of jobs on each batch
             # The --wait parameter will pause the execution here until new jobs are added,