diff --git a/.circleci/config.yml b/.circleci/config.yml
index 7277588bd4b..a956a110a2e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -3,7 +3,7 @@ jobs:
build_test_deploy:
machine:
image: ubuntu-2004:202111-02
- docker_layer_caching: false
+ docker_layer_caching: true
resource_class: large
environment:
USER_NAME: circleci
@@ -11,15 +11,7 @@ jobs:
USER_GID: 1000
TZ: Europe/Berlin
steps:
- - run:
- name: "Clone Repository"
- command: |
- if [ -n "$CIRCLE_BRANCH" ]; then
- git clone --depth 5 "$CIRCLE_REPOSITORY_URL" --branch "$CIRCLE_BRANCH" --no-tags .
- fi
- if [ -n "$CIRCLE_TAG" ]; then
- git clone --depth 5 "$CIRCLE_REPOSITORY_URL" --branch "$CIRCLE_TAG" .
- fi
+ - checkout
- run:
name: "Custom environment variables"
command: |
@@ -35,28 +27,48 @@ jobs:
- run:
name: Build webknossos-dev docker image
command: |
- docker pull scalableminds/webknossos-dev:$NORMALIZED_BRANCH || true
+ docker pull lincbrain/webknossos-dev:$NORMALIZED_BRANCH || true
DEV_CACHE=$NORMALIZED_BRANCH docker-compose build base
- run:
name: Prepare dependency folders
- command: mkdir -p project/target target ~/.ivy2 ~/.cache/coursier
+ command: mkdir -p project/target target ~/.ivy2 ~/.cache/coursier ~/.cache/yarn
+ - restore_cache:
+ name: Restore target cache
+ keys:
+ - target-cache-{{ checksum ".circleci/cache_version" }}-{{ .Branch }}
+ - target-cache-{{ checksum ".circleci/cache_version" }}-master
+ - restore_cache:
+ name: Restore sbt cache
+ keys:
+ - sbt-cache-{{ checksum ".circleci/cache_version" }}-{{ checksum "project/Dependencies.scala" }}
+ - sbt-cache-{{ checksum ".circleci/cache_version" }}-
+ - restore_cache:
+ name: Restore yarn cache
+ keys:
+ - yarn-cache-{{ checksum ".circleci/cache_version" }}-{{ checksum "yarn.lock" }}
+ - yarn-cache-{{ checksum ".circleci/cache_version" }}-
- run:
name: Install frontend dependencies
- command: docker-compose run -e PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true base yarn install --immutable
+ command: docker-compose run -e PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true base yarn install --frozen-lockfile
- run:
name: Assert unique evolution numbers
command: docker-compose run base tools/postgres/dbtool.js assert-unique-evolution-numbers
- - run:
- name: Assert schema.sql and evolutions are equal
- command: |
- docker-compose up -d postgres
- sleep 3
- docker-compose run compile tools/postgres/dbtool.js check-evolutions-schema
- - run:
- name: Build frontend documentation
- command: |
- WK_VERSION=${CIRCLE_TAG:-${CIRCLE_BUILD_NUM:-dev}}
- docker-compose run base yarn run docs --project-version $WK_VERSION
+ - restore_cache:
+ name: Restore webpack cache
+ keys:
+ - webpack-cache-{{ checksum ".circleci/cache_version" }}-{{ .Branch }}
+ - webpack-cache-{{ checksum ".circleci/cache_version" }}-master
+# - run:
+# name: Assert schema.sql and evolutions are equal
+# command: |
+# docker-compose up -d postgres
+# sleep 3
+# docker-compose run compile tools/postgres/dbtool.js check-evolutions-schema
+# - run:
+# name: Build frontend documentation
+# command: |
+# WK_VERSION=${CIRCLE_TAG:-${CIRCLE_BUILD_NUM:-dev}}
+# docker-compose run base yarn run docs --project-version $WK_VERSION
- run:
name: Build webknossos (webpack)
command: |
@@ -77,15 +89,35 @@ jobs:
name: Build webknossos-tracingstore (sbt)
command: docker-compose run base sbt -no-colors -DfailOnWarning "project webknossosTracingstore" copyMessages compile stage
- - run:
- name: Checksum App Dirs
- command: find app webknossos-datastore/app webknossos-tracingstore/app -type f -exec md5sum {} \; | sort -k 2 | md5sum > app_checksum.txt
+ - save_cache:
+ name: Save target cache
+ key: target-cache-{{ checksum ".circleci/cache_version" }}-{{ .Branch }}-{{ .Revision }}
+ paths:
+ - "project/target"
+ - "target"
+
+ - save_cache:
+ name: Save sbt cache
+ key: sbt-cache-{{ checksum ".circleci/cache_version" }}-{{ checksum "project/Dependencies.scala" }}
+ paths:
+ - "~/.ivy2"
+ - "~/.cache/coursier"
+
+ - save_cache:
+ name: Save yarn cache
+ key: yarn-cache-{{ checksum ".circleci/cache_version" }}-{{ checksum "yarn.lock" }}
+ paths:
+ - "~/.cache/yarn"
+
+ - save_cache:
+ name: Save webpack cache
+ key: webpack-cache-{{ checksum ".circleci/cache_version" }}-{{ .Branch }}-{{ .Revision }}
+ paths:
+ - "node_modules/.cache/webpack"
- run:
name: Build webknossos docker image
- command: |
- docker pull scalableminds/webknossos:$NORMALIZED_BRANCH || true
- DEV_CACHE=$NORMALIZED_BRANCH docker-compose build --pull webknossos
+ command: docker-compose build --pull webknossos
- run:
name: Build webknossos-datastore docker image
@@ -95,70 +127,84 @@ jobs:
name: Build webknossos-tracingstore docker image
command: docker-compose build --pull webknossos-tracingstore
- - run:
- name: Lint frontend code and check formatting
- command: |
- .circleci/not-on-master.sh docker-compose run base bash -c "yarn run check-frontend"
- - run:
- name: Check for cyclic dependencies in front-end
- command: |
- .circleci/not-on-master.sh docker-compose run base yarn check-cyclic-dependencies
- - run:
- name: Run frontend tests
- command: |
- .circleci/not-on-master.sh docker-compose run base yarn test-verbose
- - run:
- name: Lint backend code and check formatting
- command: |
- .circleci/not-on-master.sh docker-compose run backend-lint-format
- - run:
- name: Run backend tests
- command: |
- .circleci/not-on-master.sh docker-compose run backend-tests
- - run:
- name: Run end-to-end tests
- command: |
- for i in {1..3}; do # retry
- .circleci/not-on-master.sh docker-compose run e2e-tests && s=0 && break || s=$?
- done
- (exit $s)
- - run:
- name: Validate frontend types
- command: |
- .circleci/not-on-master.sh docker-compose run base yarn typecheck
- - run:
- name: Start webknossos, datastore and tracingstore
- command: docker-compose up -d webknossos && docker-compose up -d webknossos-datastore && docker-compose up -d webknossos-tracingstore
-
- - run:
- name: Run webknossos smoke test
- command: |
- for i in {1..20}; do # retry
- curl --fail -v http://localhost:9000/api/health && s=0 && break || s=$?
- sleep 5
- done
- (exit $s)
-
- - run:
- name: Run webknossos-datastore smoke test
- command: |
- for i in {1..20}; do # retry
- curl --fail -v http://localhost:9090/data/health && s=0 && break || s=$?
- sleep 5
- done
- (exit $s)
-
- - run:
- name: Run webknossos-tracingstore smoke test
- command: |
- for i in {1..20}; do # retry
- curl --fail -v http://localhost:9050/tracings/health && s=0 && break || s=$?
- sleep 5
- done
- (exit $s)
- - run:
- name: Stop webknossos, datastore and tracingstore
- command: docker-compose down --volumes --remove-orphans
+# - run:
+# name: Lint frontend code and check formatting
+# command: |
+# .circleci/not-on-master.sh docker-compose run base bash -c "yarn run check-frontend"
+# - run:
+# name: Check for cyclic dependencies in front-end
+# command: |
+# .circleci/not-on-master.sh docker-compose run base yarn check-cyclic-dependencies
+# - run:
+# name: Run frontend tests
+# command: |
+# .circleci/not-on-master.sh docker-compose run base yarn test-verbose
+# - run:
+# name: Lint backend code and check formatting
+# command: |
+# .circleci/not-on-master.sh docker-compose run backend-lint-format
+# - run:
+# name: Run backend tests
+# command: |
+# .circleci/not-on-master.sh docker-compose run backend-tests
+# - run:
+# name: Run end-to-end tests
+# command: |
+# for i in {1..3}; do # retry
+# .circleci/not-on-master.sh docker-compose run e2e-tests && s=0 && break || s=$?
+# done
+# (exit $s)
+# - run:
+# name: Validate frontend types
+# command: |
+# .circleci/not-on-master.sh docker-compose run base yarn typecheck
+# - run:
+# name: Start webknossos
+# background: true
+# command: docker-compose up webknossos
+# - run:
+# name: Run webknossos smoke test
+# command: |
+# for i in {1..10}; do # retry
+# sleep 10
+# curl --fail -v http://localhost:9000/api/health && s=0 && break || s=$?
+# done
+# (exit $s)
+# - run:
+# name: Stop webknossos
+# command: docker-compose down --volumes --remove-orphans
+#
+# - run:
+# name: Start webknossos-datastore
+# background: true
+# command: docker-compose up webknossos-datastore
+# - run:
+# name: Run webknossos-datastore smoke test
+# command: |
+# for i in {1..10}; do # retry
+# sleep 10
+# curl --fail -v http://localhost:9090/data/health && s=0 && break || s=$?
+# done
+# (exit $s)
+# - run:
+# name: Stop webknossos-datastore
+# command: docker-compose down --volumes --remove-orphans
+#
+# - run:
+# name: Start webknossos-tracingstore
+# background: true
+# command: docker-compose up webknossos-tracingstore
+# - run:
+# name: Run webknossos-tracingstore smoke test
+# command: |
+# for i in {1..10}; do # retry
+# sleep 10
+# curl --fail -v http://localhost:9050/tracings/health && s=0 && break || s=$?
+# done
+# (exit $s)
+# - run:
+# name: Stop webknossos-tracingstore
+# command: docker-compose down --volumes --remove-orphans
- run:
name: Push docker images
@@ -176,21 +222,17 @@ jobs:
retry docker-compose push webknossos-tracingstore
if [ -n "$CIRCLE_BRANCH" ]; then
docker tag \
- scalableminds/webknossos:${DOCKER_TAG} \
- scalableminds/webknossos:${NORMALIZED_BRANCH}
- retry docker push scalableminds/webknossos:${NORMALIZED_BRANCH}
- docker tag \
- scalableminds/webknossos-datastore:${DOCKER_TAG} \
- scalableminds/webknossos-datastore:${NORMALIZED_BRANCH}
- retry docker push scalableminds/webknossos-datastore:${NORMALIZED_BRANCH}
+ lincbrain/webknossos:${DOCKER_TAG} \
+ lincbrain/webknossos:${NORMALIZED_BRANCH}
+ retry docker push lincbrain/webknossos:${NORMALIZED_BRANCH}
docker tag \
- scalableminds/webknossos-tracingstore:${DOCKER_TAG} \
- scalableminds/webknossos-tracingstore:${NORMALIZED_BRANCH}
- retry docker push scalableminds/webknossos-tracingstore:${NORMALIZED_BRANCH}
+ lincbrain/webknossos-datastore:${DOCKER_TAG} \
+ lincbrain/webknossos-datastore:${NORMALIZED_BRANCH}
+ retry docker push lincbrain/webknossos-datastore:${NORMALIZED_BRANCH}
docker tag \
- scalableminds/webknossos-dev \
- scalableminds/webknossos-dev:${NORMALIZED_BRANCH}
- retry docker push scalableminds/webknossos-dev:${NORMALIZED_BRANCH}
+ lincbrain/webknossos-tracingstore:${DOCKER_TAG} \
+ lincbrain/webknossos-tracingstore:${NORMALIZED_BRANCH}
+ retry docker push lincbrain/webknossos-tracingstore:${NORMALIZED_BRANCH}
fi
docker logout
- run:
@@ -226,7 +268,7 @@ jobs:
- run:
name: Install dependencies and sleep at least 3min
command: |
- yarn install --immutable &
+ yarn install --frozen-lockfile &
sleep 180 &
wait
- run:
@@ -247,15 +289,6 @@ jobs:
- store_artifacts:
path: frontend/javascripts/test/snapshots/type-check
- - run:
- name: Bundle screenshots
- when: always
- command: |
- tar -czvf screenshots.tar frontend/javascripts/test/screenshots
-
- - store_artifacts:
- path: screenshots.tar
-
wkorg_nightly:
docker:
- image: scalableminds/puppeteer:master
@@ -265,24 +298,7 @@ jobs:
- run:
name: Install dependencies
command: |
- yarn install --immutable
-
- - run:
- name: Assert GZIP
- command: |
- # Test gzipped assets
- curl -s -I -H "Accept-Encoding: gzip" https://webknossos.org/assets/bundle/main.js | grep -q "content-encoding: gzip"
- curl -s -I -H "Accept-Encoding: gzip" https://webknossos.org/assets/bundle/main.css | grep -q "content-encoding: gzip"
- # Test gzipped buckets
- curl -s -i \
- -H 'accept: application/octet-stream' \
- -H 'Accept-Encoding: gzip' \
- -H 'content-type: application/json' \
- --data-raw '[{"position":[2752,4320,1728],"additionalCoordinates":[],"mag":[1,1,1],"cubeSize":32,"fourBit":false}]' \
- 'https://data-humerus.webknossos.org/data/datasets/scalable_minds/l4dense_motta_et_al_demo/layers/segmentation/data?token=' \
- | grep -q "content-encoding: gzip"
- echo Success.
-
+ yarn install --frozen-lockfile
- run:
name: Run screenshot-tests
command: |
diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index a020bbdfafb..62280d35084 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -11,6 +11,31 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
[Commits](https://github.com/scalableminds/webknossos/compare/24.10.0...HEAD)
### Added
+- Added the option for the owner to lock explorative annotations. Locked annotations cannot be modified by any user. An annotation can be locked in the annotations table and when viewing the annotation via the navbar dropdown menu. [#7801](https://github.com/scalableminds/webknossos/pull/7801)
+- Added the option to set a default mapping for a dataset in the dataset view configuration. The default mapping is loaded when the dataset is opened and the user / url does not configure something else. [#7858](https://github.com/scalableminds/webknossos/pull/7858)
+- Uploading an annotation into a dataset that it was not created for now also works if the dataset is in a different organization. [#7816](https://github.com/scalableminds/webknossos/pull/7816)
+- When downloading + reuploading an annotation that is based on a segmentation layer with active mapping, that mapping is now still selected after the reupload. [#7822](https://github.com/scalableminds/webknossos/pull/7822)
+- In the Voxelytics workflow list, the name of the WEBKNOSSOS user who started the job is displayed. [#7794](https://github.com/scalableminds/webknossos/pull/7795)
+- Start an alignment job (aligns the section in a dataset) via the "AI Analysis" button. [#7820](https://github.com/scalableminds/webknossos/pull/7820)
+- Added additional validation for the animation job modal. Bounding boxes must be larger than zero. [#7883](https://github.com/scalableminds/webknossos/pull/7883)
+
+### Changed
+- The "WEBKNOSSOS Changelog" modal now lazily loads its content potentially speeding up the initial loading time of WEBKNOSSOS and thus improving the UX. [#7843](https://github.com/scalableminds/webknossos/pull/7843)
+- Updated the min max settings for the histogram to allow floating point color layers to have negative min / max values. [#7873](https://github.com/scalableminds/webknossos/pull/7873)
+- Made the login, registration, forgot password and dataset dashboard pages more mobile friendly. [#7876](https://github.com/scalableminds/webknossos/pull/7876)
+- From now on only project owners get a notification email upon project overtime. The organization specific email list `overTimeMailingList` was removed. [#7842](https://github.com/scalableminds/webknossos/pull/7842)
+- Replaced skeleton comment tab component with antd's `<Tree />` component. [#7802](https://github.com/scalableminds/webknossos/pull/7802)
+
+### Fixed
+- Fixed a bug where the warning to zoom in to see the agglomerate mapping was shown to the user even when the 3D viewport was maximized and no volume data was shown. [#7865](https://github.com/scalableminds/webknossos/issues/7865)
+- Fixed a bug where brushing on a fallback segmentation with active mapping and with segment index file would lead to failed saves. [#7833](https://github.com/scalableminds/webknossos/pull/7833)
+- Fixed a bug where sometimes old mismatching javascript code would be served after upgrades. [#7854](https://github.com/scalableminds/webknossos/pull/7854)
+- Fixed a bug where dataset uploads of zipped tiff data via the UI would be rejected. [#7856](https://github.com/scalableminds/webknossos/pull/7856)
+- Fixed a bug with incorrect validation of layer names in the animation modal. [#7882](https://github.com/scalableminds/webknossos/pull/7882)
+
+- It is now possible to focus a bounding box in the bounding box tab by clicking its edges in a viewport or via a newly added context menu entry. [#8054](https://github.com/scalableminds/webknossos/pull/8054)
+### Added
+- Added an assertion to the backend to ensure unique keys in the metadata info of datasets and folders. [#8068](https://github.com/scalableminds/webknossos/issues/8068)
- It is now possible to add metadata in annotations to Trees and Segments. [#7875](https://github.com/scalableminds/webknossos/pull/7875)
- Added a summary row to the time tracking overview, where times and annotations/tasks are summed. [#8092](https://github.com/scalableminds/webknossos/pull/8092)
- Most sliders have been improved: Wheeling above a slider now changes its value and double-clicking its knob resets it to its default value. [#8095](https://github.com/scalableminds/webknossos/pull/8095)
diff --git a/Dockerfile b/Dockerfile
index 2a553862cc5..3b9642e09a4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,31 +1,40 @@
FROM eclipse-temurin:21-jammy
ARG VERSION_NODE="18.x"
+# Install dependencies, including Node.js
RUN curl -sL "https://deb.nodesource.com/setup_${VERSION_NODE}" | bash - \
+ && apt-get update \
&& apt-get -y install libblosc1 libbrotli1 postgresql-client libdraco4 git nodejs \
+ && apt-get clean \
&& rm -rf /var/lib/apt/lists/*
+# Set the working directory
RUN mkdir -p /webknossos
WORKDIR /webknossos
+# Copy the application's binaries to the container
COPY target/universal/stage .
+# Setup user and group
RUN addgroup --system --gid 999 webknossos \
&& adduser --system --uid 999 --ingroup webknossos webknossos \
&& mkdir disk \
- && chown -R webknossos . \
- && chmod go+x bin/webknossos \
- && chmod go+w .
+ && chown -R webknossos:webknossos . \
+ && chmod -R go+rwx .
-RUN echo '#!/bin/bash\numask 002\nbin/webknossos "$@"\n' > /docker-entrypoint.sh \
+# Create a custom entrypoint
+RUN echo '#!/bin/bash\numask 000\nbin/webknossos "$@"\n' > /docker-entrypoint.sh \
&& chmod +x /docker-entrypoint.sh
-HEALTHCHECK \
- --interval=1m --timeout=5s --retries=10 \
+# Health check to ensure the service is running
+HEALTHCHECK --interval=1m --timeout=5s --retries=10 \
CMD curl --fail http://localhost:9000/api/buildinfo || exit 1
+# Switch to non-root user
USER webknossos
+# Expose the port the app runs on
EXPOSE 9000
+# Set the container's entry point
ENTRYPOINT [ "/docker-entrypoint.sh" ]
diff --git a/LINC_DEPLOYMENT.md b/LINC_DEPLOYMENT.md
new file mode 100644
index 00000000000..c0c283ceeb5
--- /dev/null
+++ b/LINC_DEPLOYMENT.md
@@ -0,0 +1,241 @@
+# LINC | WebKNOSSOS Deployment
+
+This document is designed to help deploy a new version for LINC | WebKNOSSOS via AWS EC2
+
+### Create an instance in AWS EC2 with at least 32GB of memory
+
+Proceed to AWS and create an AWS Linux instance
+
+• r5.2xlarge is suggested for instance type
+• x86_64 architecture is suggested
+• Ensure that ports 80 and 443 are available.
+• Ensure that the instance is reachable via Public IP address
+
+### Connect the instance to a Route 53 Domain Record
+
+Proceed to Route 53 and create an A Record with the desired domain that is pointing to the Public IP address of the EC2 Instance
+
+### Return to AWS EC2 and ssh onto the instance
+
+Once the instance is running, SSH onto the instance.
+
+First, install the appropriate dependencies -- you'll need docker, docker-compose (and most likely git and vim for file management)
+
+```shell
+sudo yum install docker git vim -y
+
+sudo service docker start
+
+sudo curl -L "https://github.com/docker/compose/releases/download/$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep -oP '"tag_name": "\K(.*)(?=")')/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+
+sudo chmod +x /usr/local/bin/docker-compose
+```
+
+Next, proceed to do the following commands (These steps are mostly inline with https://docs.webknossos.org/webknossos/installation.html)
+
+```shell
+sudo mkdir opt && cd opt
+sudo mkdir webknossos && cd webknossos
+
+sudo mkdir certs && sudo mkdir certs-data
+
+sudo wget https://github.com/scalableminds/webknossos/raw/master/tools/hosting/docker-compose.yml
+
+sudo mkdir binaryData
+
+sudo chown -R 1000:1000 binaryData
+
+sudo touch nginx.conf
+```
+
+Next, you'll need to issue an SSL certificate directly on the server -- `certbot` is used here:
+
+```shell
+sudo docker run --rm -p 80:80 -v $(pwd)/certs:/etc/letsencrypt -v $(pwd)/certs-data:/data/letsencrypt certbot/certbot certonly --standalone -d webknossos.lincbrain.org --email admin@lincbrain.org --agree-tos --non-interactive
+```
+
+You'll need to next populate the nginx.conf -- replace `webknossos.lincbrain.org` with whatever A name you used in Route 53
+
+```shell
+events {}
+
+http {
+ # Main server block for the webknossos application
+ server {
+ listen 80;
+ server_name webknossos.lincbrain.org;
+
+ location /.well-known/acme-challenge/ {
+ root /data/letsencrypt;
+ }
+
+ location / {
+ return 301 https://$host$request_uri;
+ }
+ }
+
+ server {
+ listen 443 ssl http2;
+ server_name webknossos.lincbrain.org;
+
+ ssl_certificate /etc/letsencrypt/live/webknossos.lincbrain.org/fullchain.pem;
+ ssl_certificate_key /etc/letsencrypt/live/webknossos.lincbrain.org/privkey.pem;
+
+ # webknossos-specific overrides
+ client_max_body_size 0;
+ proxy_read_timeout 3600s;
+
+ location / {
+ set $cors '';
+ if ($http_origin ~* (https://staging--lincbrain-org\.netlify\.app|https://.*\.lincbrain\.org|https://lincbrain\.org)) {
+ set $cors 'true';
+ }
+
+ if ($cors = 'true') {
+ add_header 'Access-Control-Allow-Origin' "$http_origin" always;
+ add_header 'Access-Control-Allow-Credentials' 'true' always;
+ add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+ add_header 'Access-Control-Allow-Headers' 'Accept, Content-Type, X-Requested-With, Authorization, Cookie' always;
+ }
+
+ if ($request_method = 'OPTIONS') {
+ add_header 'Access-Control-Allow-Origin' "$http_origin" always;
+ add_header 'Access-Control-Allow-Credentials' 'true' always;
+ add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
+ add_header 'Access-Control-Allow-Headers' 'Accept, Content-Type, X-Requested-With, Authorization, Cookie' always;
+ add_header 'Content-Length' 0 always;
+ add_header 'Content-Type' 'text/plain' always;
+ return 204;
+ }
+
+ proxy_pass http://webknossos-webknossos-1:9000;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header Cookie $http_cookie;
+ proxy_set_header Transfer-Encoding "";
+ proxy_buffering off;
+
+ proxy_hide_header Access-Control-Allow-Origin;
+ proxy_hide_header Access-Control-Allow-Credentials;
+ proxy_hide_header Access-Control-Allow-Methods;
+ proxy_hide_header Access-Control-Allow-Headers;
+ }
+ }
+
+ # Separate server block for serving the binaryData directory
+ server {
+ listen 8080;
+ server_name webknossos.lincbrain.org;
+
+ location /binaryData/ {
+ alias /home/ec2-user/opt/webknossos/binaryData/;
+ autoindex on;
+ autoindex_exact_size off;
+ autoindex_localtime on;
+ allow all;
+ }
+ }
+}
+```
+
+You'll next want to alter the `docker-compose` pulled earlier via `wget`
+
+Remove the `nginx-letsencrypt` service, and alter the `nginx` as such:
+
+```
+nginx-proxy:
+ image: nginx:latest
+ container_name: nginx-proxy
+ ports:
+ - "8080:8080"
+ - "80:80"
+ - "443:443"
+ volumes:
+ - ./nginx.conf:/etc/nginx/nginx.conf:ro
+ - ./certs:/etc/letsencrypt
+ - /home/ec2-user/opt/webknossos/binaryData:/home/ec2-user/opt/webknossos/binaryData:ro
+ depends_on:
+ - webknossos
+```
+
+`nginx` should now be able to be called appropriately via HTTPS once `webknossos` API is running
+
+Lastly, you'll want to start the API and supporting containers:
+
+```shell
+DOCKER_TAG=xx.yy.z PUBLIC_HOST=webknossos.example.com LETSENCRYPT_EMAIL=admin@example.com \
+docker compose up -d webknossos nginx-proxy
+```
+
+You can check the health of the containers via:
+
+```
+docker ps
+
+# or
+
+docker logs -f
+```
+
+## Backups
+
+### FossilDB
+
+FossilDB is a scalableminds database that extends from the open-source RocksDB.
+
+Temp steps / commands for FossilDB backup:
+
+1. Exec into EC2 instance
+2. Grab `fossildb-client` via `docker pull scalableminds/fossildb-client:master`
+3. Determine the appropriate internal network that the `fossildb` instance is running in within the Dockerized setup on EC2: `docker inspect -f '{{range .NetworkSettings.Networks}}{{.NetworkID}} {{end}}' webknossos-fossildb-1`
+4. `docker run --network <network-id-from-step-3> scalableminds/fossildb-client:master webknossos-fossildb-1 backup` should create the backup
+5. The backup will be stored via `/home/ec2-user/opt/webknossos/persistent/fossildb/backup`
+
+## Creating a new WebKNOSSOS with pre-existing backups
+
+There are three different components that must be taken into account for a WebKNOSSOS clone:
+
+• mounted Docker volumes -- represented by the `binaryData` and `persistent` directories in the WebKNOSSOS file structure
+ - exported to AWS S3 via the `docker_volumes_backup.sh` cronjob script
+• FossilDB data (managed via `fossildb-client restore` commands)
+ - exported to AWS S3 via the `fossil_db_backup.sh` cronjob script
+• PostgresDB data (managed via `pg_dump` and `pg_restore` commands)
+ - exported to AWS S3 via the `postgres_backup.sh` cronjob script
+
+When setting up a new clone, first follow the standard deployment steps above, **however** do not create the `binaryData` folder
+
+You'll first want to restore the Docker volumes -- contained in the `webknosos_backups/` S3 subdirectory for wherever your cron jobs send the compressed backups
+
+Copy the appropriate assets from S3 to the EC2 instance via the `aws s3 cp` command
+
+For example:
+
+```
+aws s3 cp s3://linc-brain-mit-staging-us-east-2/fossildb_backups/backup_2024-08-20_02-00-02.tar.gz ./backup_2024-08-20_02-00-02.tar.gz
+```
+
+Once you decompress (can use a tool like `gunzip`) and then extract the files -- (e.g. `tar -xvzf /home/ec2-user/opt/webknossos/webknossos_backup.tar.gz`)
+you are ready to proceed; however, ensure that `persistent` and `binaryData` folders from the extracted files are in the same directory as your `docker-compose.yml` file
+
+Next, you want to restore the `fossildb` instance -- this can simply be done via the `docker-compose run fossil-db-restore` command
+
+Almost there! You'll next want to bring up the remainder of the WebKNOSSOS API (along with the nginx-proxy, postgres, etc.) via `docker-compose --env-file env.txt up webknossos nginx-proxy`
+
+Notably, this will bring up the `postgres` container (however, we've yet to restore the container!). Thus you'll want to:
+ - Mount the decompressed, unpacked backup (should be something like `.sql`). The mount command should be something similar to: `docker cp /local/path/to/postgres_backup.sql <postgres-container>:/tmp/postgres_backup.sql`
+ - Exec into the `postgres` container and open a `psql` shell via `psql -U postgres`
+ - Next, drop the `webknossos` database -- e.g. `DROP DATABASE webknossos`
+ - Create the database `webknossos` -- e.g. `CREATE DATABASE webknossos`
+ - Restore the database's state via psql -- e.g. `psql -U postgres -d webknossos -f /tmp/postgres_backup.sql`
+
+Your clone should be all set now!
+
+
+
+
+
+
+
diff --git a/LINC_DEVELOPMENT.md b/LINC_DEVELOPMENT.md
new file mode 100644
index 00000000000..2374006613c
--- /dev/null
+++ b/LINC_DEVELOPMENT.md
@@ -0,0 +1,37 @@
+# LINC | WebKNOSSOS Development
+
+This document is designed to help develop new features locally for LINC | WebKNOSSOS
+
+### CLI Installation
+
+WebKNOSSOS CLI tool docs say that there is support between Python>=3.8,<=3.11.
+There were some intractable failures with 3.11, so 3.10 is used here
+
+First, create a venv within 3.10 -- pyenv is a good tool to use if you are using 3.12 locally
+
+**Note: if you are on a Mac, you'll need to ensure you are emulating `x86_64` arch locally -- a quick fix for this is to prefix
+`arch -x86_64` with each command -- e.g. `arch -x86_64 pyenv exec python -m venv venv`**
+
+```shell
+# Set up local environment
+pyenv local 3.10
+pyenv exec python3 -m venv venv
+source venv/bin/activate
+
+# Install webknossos library
+pip install webknossos
+
+# Failure might occur with finding PIL import, thus possibly:
+pip install Pillow
+
+# At this point, you should be able to use the webknossos CLI --
+webknossos help
+```
+
+You'll need to set your authentication token as an env var `WK_TOKEN`: go to https://webknossos.lincbrain.org/auth/token
+(or https://webknossos-staging.lincbrain.org/auth/token if you are on staging)
+
+### Temp Links
+
+https://webknossos-staging.lincbrain.org/api/v5/user/annotations
+
diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala
index 887bf7439c9..7486098a1ce 100755
--- a/app/models/annotation/Annotation.scala
+++ b/app/models/annotation/Annotation.scala
@@ -590,7 +590,7 @@ class AnnotationDAO @Inject()(sqlClient: SqlClient, annotationLayerDAO: Annotati
} yield count
// update operations
-
+ // Aaron
def insertOne(a: Annotation): Fox[Unit] = {
val insertAnnotationQuery = q"""
INSERT INTO webknossos.annotations(_id, _dataset, _task, _team, _user, description, visibility,
diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala
index f3b56e813d3..a1a9e3d670e 100755
--- a/app/models/annotation/AnnotationService.scala
+++ b/app/models/annotation/AnnotationService.scala
@@ -400,6 +400,9 @@ class AnnotationService @Inject()(
teamId <- selectSuitableTeam(user, dataset) ?~> "annotation.create.forbidden"
annotation = Annotation(ObjectId.generate, datasetId, None, teamId, user._id, annotationLayers)
_ <- annotationDAO.insertOne(annotation)
+ teamIdValidated = List(teamId)
+ _ <- Fox.serialCombined(teamIdValidated)(teamDAO.findOne(_)) ?~> "updateSharedTeams.failed.accessingTeam"
+ _ <- this.updateTeamsForSharedAnnotation(annotation._id, teamIdValidated)
} yield annotation
def makeAnnotationHybrid(annotation: Annotation, organizationId: String, fallbackLayerName: Option[String])(
diff --git a/app/models/dataset/credential/CredentialService.scala b/app/models/dataset/credential/CredentialService.scala
index 9fdacdadbdd..0f8025185a0 100644
--- a/app/models/dataset/credential/CredentialService.scala
+++ b/app/models/dataset/credential/CredentialService.scala
@@ -10,13 +10,13 @@ import com.scalableminds.webknossos.datastore.storage.{
}
import net.liftweb.common.Box.tryo
import play.api.libs.json.Json
-import utils.ObjectId
+import utils.{ObjectId, WkConf}
import java.net.URI
import javax.inject.Inject
import scala.concurrent.ExecutionContext
-class CredentialService @Inject()(credentialDAO: CredentialDAO) {
+class CredentialService @Inject()(credentialDAO: CredentialDAO, conf: WkConf) {
def createCredentialOpt(uri: URI,
credentialIdentifier: Option[String],
@@ -33,10 +33,23 @@ class CredentialService @Inject()(credentialDAO: CredentialDAO) {
userId.toString,
organizationId))
case DataVaultService.schemeS3 =>
- (credentialIdentifier, credentialSecret) match {
- case (Some(keyId), Some(secretKey)) =>
- Some(S3AccessKeyCredential(uri.toString, keyId, secretKey, userId.toString, organizationId))
- case _ => None
+ val s3PrivateBucketConfigKeyword = conf.WebKnossos.S3PrivateBucketConfig.keyword
+ val isPrivateBucketEnabled = conf.WebKnossos.S3PrivateBucketConfig.enabled
+ if (uri.toString.contains(s3PrivateBucketConfigKeyword) && isPrivateBucketEnabled) {
+ val s3AccessIdKey = sys.env("AWS_ACCESS_KEY_ID")
+ val s3SecretAccessKey = sys.env("AWS_ACCESS_KEY")
+ Some(
+ S3AccessKeyCredential(uri.toString,
+ s3AccessIdKey,
+ s3SecretAccessKey,
+ userId.toString,
+ organizationId.toString))
+ } else {
+ (credentialIdentifier, credentialSecret) match {
+ case (Some(keyId), Some(secretKey)) =>
+ Some(S3AccessKeyCredential(uri.toString, keyId, secretKey, userId.toString, organizationId.toString))
+ case _ => None
+ }
}
case DataVaultService.schemeGS =>
for {
diff --git a/app/security/WkSilhouetteEnvironment.scala b/app/security/WkSilhouetteEnvironment.scala
index 2f5f92b9c1c..e1650a541af 100644
--- a/app/security/WkSilhouetteEnvironment.scala
+++ b/app/security/WkSilhouetteEnvironment.scala
@@ -29,7 +29,7 @@ class WkSilhouetteEnvironment @Inject()(
private val cookieSettings = CookieAuthenticatorSettings(
conf.Silhouette.CookieAuthenticator.cookieName,
conf.Silhouette.CookieAuthenticator.cookiePath,
- None,
+ Some(conf.Silhouette.CookieAuthenticator.domain),
conf.Silhouette.CookieAuthenticator.secureCookie,
conf.Silhouette.CookieAuthenticator.httpOnlyCookie,
Some(Cookie.SameSite.Lax),
diff --git a/app/utils/WkConf.scala b/app/utils/WkConf.scala
index c71a644185e..b14de946b55 100644
--- a/app/utils/WkConf.scala
+++ b/app/utils/WkConf.scala
@@ -97,6 +97,11 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader with L
val content: String = get[String]("webKnossos.securityTxt.content")
}
+ object S3PrivateBucketConfig {
+ val keyword: String = get[String]("webKnossos.s3PrivateBucketConfig.keyword")
+ val enabled: Boolean = get[Boolean]("webKnossos.s3PrivateBucketConfig.enabled")
+ }
+
val operatorData: String = get[String]("webKnossos.operatorData")
val children = List(User, Tasks, Cache, SampleOrganization, FetchUsedStorage, TermsOfService)
}
@@ -178,6 +183,7 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader with L
object CookieAuthenticator {
val cookieName: String = get[String]("silhouette.cookieAuthenticator.cookieName")
val cookiePath: String = get[String]("silhouette.cookieAuthenticator.cookiePath")
+ val domain: String = get[String]("silhouette.cookieAuthenticator.domain")
val secureCookie: Boolean = get[Boolean]("silhouette.cookieAuthenticator.secureCookie")
val httpOnlyCookie: Boolean = get[Boolean]("silhouette.cookieAuthenticator.httpOnlyCookie")
val useFingerprinting: Boolean = get[Boolean]("silhouette.cookieAuthenticator.useFingerprinting")
diff --git a/app/views/main.scala.html b/app/views/main.scala.html
index f78be6c1aae..d8a1693f7ae 100755
--- a/app/views/main.scala.html
+++ b/app/views/main.scala.html
@@ -28,7 +28,7 @@
} else {
}
-
+
> $LOGFILE 2>&1
diff --git a/backup_scripts/ec2_interact.sh b/backup_scripts/ec2_interact.sh
new file mode 100755
index 00000000000..fd365523e16
--- /dev/null
+++ b/backup_scripts/ec2_interact.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# to execute locally,
+# chmod +x ec2_interact.sh
+# ./ec2_interact.sh (start or stop)
+
+# Check if required argument (start/stop) is provided
+if [ "$#" -ne 1 ]; then
+ echo "Usage: $0 {start|stop}"
+ exit 1
+fi
+
+# Set AWS credentials in the script
+AWS_ACCESS_KEY_ID="temp"
+AWS_SECRET_ACCESS_KEY="temp/temp"
+AWS_DEFAULT_REGION="us-east-2"
+
+INSTANCE_ID="i-0fb27de75937dc7ea" # WebKNOSSOS prod
+
+export AWS_ACCESS_KEY_ID
+export AWS_SECRET_ACCESS_KEY
+export AWS_DEFAULT_REGION
+
+ACTION=$1
+
+case $ACTION in
+ start)
+ echo "Starting EC2 instance: $INSTANCE_ID"
+ aws ec2 start-instances --instance-ids $INSTANCE_ID
+ ;;
+ stop)
+ echo "Stopping EC2 instance: $INSTANCE_ID"
+ aws ec2 stop-instances --instance-ids $INSTANCE_ID
+ ;;
+ *)
+ echo "Invalid action: $ACTION. Use 'start' or 'stop'."
+ exit 1
+ ;;
+esac
+
+# Clear credentials after the command execution for security
+unset AWS_ACCESS_KEY_ID
+unset AWS_SECRET_ACCESS_KEY
+unset AWS_DEFAULT_REGION
+
+echo "Action $ACTION completed for instance $INSTANCE_ID."
diff --git a/backup_scripts/fossil_db_backup.sh b/backup_scripts/fossil_db_backup.sh
new file mode 100644
index 00000000000..22998776f3f
--- /dev/null
+++ b/backup_scripts/fossil_db_backup.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+# Log file for debugging
+LOGFILE="/home/ec2-user/opt/webknossos/backup.log"
+
+{
+ echo "Starting backup at $(date +"%Y-%m-%d_%H-%M-%S")"
+
+ # Set the environment variables
+ export AWS_ACCESS_KEY_ID=some-value
+ export AWS_SECRET_ACCESS_KEY=some-value
+ export AWS_DEFAULT_REGION=us-east-2
+ export S3_BUCKET=linc-brain-mit-prod-us-east-2
+
+ # Define the subdirectory to back up and the S3 bucket name
+ BACKUP_DIRECTORY="/home/ec2-user/opt/webknossos/persistent/fossildb/backup/private"
+ TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
+ BACKUP_NAME="backup_$TIMESTAMP"
+
+ # Set the working directory to where docker-compose.yml is located
+ cd /home/ec2-user/opt/webknossos
+
+ # Call the docker-compose step without TTY
+ /usr/local/bin/docker-compose up -d --no-deps --no-recreate fossil-db-backup
+
+ if [ $? -ne 0 ]; then
+ echo "Docker-compose step failed"
+ exit 1
+ fi
+
+ # Create a snapshot of the backup directory using rsync to avoid file changes during copy
+ SNAPSHOT_DIRECTORY="/tmp/$BACKUP_NAME"
+ /usr/bin/rsync -a --delete $BACKUP_DIRECTORY/ $SNAPSHOT_DIRECTORY/
+
+ if [ $? -ne 0 ]; then
+ echo "Failed to create a snapshot using rsync"
+ exit 1
+ fi
+
+ # Upload the snapshot directory to the S3 bucket recursively
+ /usr/bin/aws s3 cp $SNAPSHOT_DIRECTORY s3://$S3_BUCKET/fossil_backups/$BACKUP_NAME/ --recursive
+
+ if [ $? -ne 0 ]; then
+ echo "Failed to upload to S3"
+ exit 1
+ fi
+
+ # Clean up the snapshot directory
+ /bin/rm -rf $SNAPSHOT_DIRECTORY
+
+ echo "Backup completed and uploaded to S3 at $(date +"%Y-%m-%d_%H-%M-%S")"
+} >> $LOGFILE 2>&1
diff --git a/backup_scripts/postgres_backup.sh b/backup_scripts/postgres_backup.sh
new file mode 100644
index 00000000000..85dcb04d5b7
--- /dev/null
+++ b/backup_scripts/postgres_backup.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# Log file for debugging
+LOGFILE="/home/ec2-user/opt/webknossos/backup_postgres.log"
+
+{
+ echo "Starting Postgres backup at $(date +"%Y-%m-%d_%H-%M-%S")"
+
+ # Set the environment variables
+ export AWS_ACCESS_KEY_ID=some-value
+ export AWS_SECRET_ACCESS_KEY=some-value
+ export AWS_DEFAULT_REGION=us-east-2
+ export S3_BUCKET=linc-brain-mit-prod-us-east-2
+
+ # Define the backup name and S3 bucket
+ TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
+ BACKUP_NAME="postgres_backup_dump_prod_$TIMESTAMP.sql"
+
+ # Set the working directory to where docker-compose.yml is located
+ cd /home/ec2-user/opt/webknossos
+
+ # Run the pg_dump command inside the postgres container
+ docker exec webknossos-postgres-1 pg_dump -U postgres -F c -b -v -f /tmp/$BACKUP_NAME webknossos
+
+ if [ $? -ne 0 ]; then
+ echo "pg_dump command failed"
+ exit 1
+ fi
+
+ # Copy the backup file from the container to the host
+ docker cp webknossos-postgres-1:/tmp/$BACKUP_NAME /tmp/$BACKUP_NAME
+
+ if [ $? -ne 0 ]; then
+ echo "Failed to copy backup file from container"
+ exit 1
+ fi
+
+ # Upload the backup to the S3 bucket
+ /usr/bin/aws s3 cp /tmp/$BACKUP_NAME s3://$S3_BUCKET/postgres_backups/$BACKUP_NAME
+
+ if [ $? -ne 0 ]; then
+ echo "Failed to upload to S3"
+ exit 1
+ fi
+
+ # Clean up the temporary backup file on the host and container
+ /bin/rm /tmp/$BACKUP_NAME
+ docker exec webknossos-postgres-1 /bin/bash -c "rm /tmp/$BACKUP_NAME"
+
+ echo "Postgres backup completed and uploaded to S3 at $(date +"%Y-%m-%d_%H-%M-%S")"
+} >> $LOGFILE 2>&1
diff --git a/bin/webknossos b/bin/webknossos
new file mode 100755
index 00000000000..384d983a615
--- /dev/null
+++ b/bin/webknossos
@@ -0,0 +1,373 @@
+#!/usr/bin/env bash
+
+### ------------------------------- ###
+### Helper methods for BASH scripts ###
+### ------------------------------- ###
+
+die() {
+ echo "$@" 1>&2
+ exit 1
+}
+
+realpath () {
+(
+ TARGET_FILE="$1"
+ CHECK_CYGWIN="$2"
+
+ cd "$(dirname "$TARGET_FILE")"
+ TARGET_FILE=$(basename "$TARGET_FILE")
+
+ COUNT=0
+ while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ]
+ do
+ TARGET_FILE=$(readlink "$TARGET_FILE")
+ cd "$(dirname "$TARGET_FILE")"
+ TARGET_FILE=$(basename "$TARGET_FILE")
+ COUNT=$(($COUNT + 1))
+ done
+
+ if [ "$TARGET_FILE" == "." -o "$TARGET_FILE" == ".." ]; then
+ cd "$TARGET_FILE"
+ fi
+ TARGET_DIR="$(pwd -P)"
+ if [ "$TARGET_DIR" == "/" ]; then
+ TARGET_FILE="/$TARGET_FILE"
+ else
+ TARGET_FILE="$TARGET_DIR/$TARGET_FILE"
+ fi
+
+ # make sure we grab the actual windows path, instead of cygwin's path.
+ if [[ "x$CHECK_CYGWIN" == "x" ]]; then
+ echo "$TARGET_FILE"
+ else
+ echo $(cygwinpath "$TARGET_FILE")
+ fi
+)
+}
+
+# TODO - Do we need to detect msys?
+
+# Uses uname to detect if we're in the odd cygwin environment.
+is_cygwin() {
+ local os=$(uname -s)
+ case "$os" in
+ CYGWIN*) return 0 ;;
+ *) return 1 ;;
+ esac
+}
+
+# This can fix cygwin style /cygdrive paths so we get the
+# windows style paths.
+cygwinpath() {
+ local file="$1"
+ if is_cygwin; then
+ echo $(cygpath -w $file)
+ else
+ echo $file
+ fi
+}
+
+# Make something URI friendly
+make_url() {
+ url="$1"
+ local nospaces=${url// /%20}
+ if is_cygwin; then
+ echo "/${nospaces//\\//}"
+ else
+ echo "$nospaces"
+ fi
+}
+
+# This crazy function reads in a vanilla "linux" classpath string (only : are separators, and all /),
+# and returns a classpath with windows style paths, and ; separators.
+fixCygwinClasspath() {
+ OLDIFS=$IFS
+ IFS=":"
+ read -a classpath_members <<< "$1"
+ declare -a fixed_members
+ IFS=$OLDIFS
+ for i in "${!classpath_members[@]}"
+ do
+ fixed_members[i]=$(realpath "${classpath_members[i]}" "fix")
+ done
+ IFS=";"
+ echo "${fixed_members[*]}"
+ IFS=$OLDIFS
+}
+
+# Fix the classpath we use for cygwin.
+fix_classpath() {
+ cp="$1"
+ if is_cygwin; then
+ echo "$(fixCygwinClasspath "$cp")"
+ else
+ echo "$cp"
+ fi
+}
+# Detect if we should use JAVA_HOME or just try PATH.
+get_java_cmd() {
+ # High-priority override for Jlink images
+ if [[ -n "$bundled_jvm" ]]; then
+ echo "$bundled_jvm/bin/java"
+ elif [[ -n "$JAVA_HOME" ]] && [[ -x "$JAVA_HOME/bin/java" ]]; then
+ echo "$JAVA_HOME/bin/java"
+ else
+ echo "java"
+ fi
+}
+
+echoerr () {
+ echo 1>&2 "$@"
+}
+vlog () {
+ [[ $verbose || $debug ]] && echoerr "$@"
+}
+dlog () {
+ [[ $debug ]] && echoerr "$@"
+}
+execRunner () {
+ # print the arguments one to a line, quoting any containing spaces
+ [[ $verbose || $debug ]] && echo "# Executing command line:" && {
+ for arg; do
+ if printf "%s\n" "$arg" | grep -q ' '; then
+ printf "\"%s\"\n" "$arg"
+ else
+ printf "%s\n" "$arg"
+ fi
+ done
+ echo ""
+ }
+
+ # we use "exec" here for our pids to be accurate.
+ exec "$@"
+}
+addJava () {
+ dlog "[addJava] arg = '$1'"
+ java_args+=( "$1" )
+}
+addApp () {
+ dlog "[addApp] arg = '$1'"
+ app_commands+=( "$1" )
+}
+addResidual () {
+ dlog "[residual] arg = '$1'"
+ residual_args+=( "$1" )
+}
+addDebugger () {
+ addJava "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=$1"
+}
+
+require_arg () {
+ local type="$1"
+ local opt="$2"
+ local arg="$3"
+ if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
+ die "$opt requires <$type> argument"
+ fi
+}
+is_function_defined() {
+ declare -f "$1" > /dev/null
+}
+
+# Attempt to detect if the script is running via a GUI or not
+# TODO - Determine where/how we use this generically
+detect_terminal_for_ui() {
+ [[ ! -t 0 ]] && [[ "${#residual_args}" == "0" ]] && {
+ echo "true"
+ }
+ # SPECIAL TEST FOR MAC
+ [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]] && [[ "${#residual_args}" == "0" ]] && {
+ echo "true"
+ }
+}
+
+# Processes incoming arguments and places them in appropriate global variables. called by the run method.
+process_args () {
+ local no_more_snp_opts=0
+ while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --) shift && no_more_snp_opts=1 && break ;;
+ -h|-help) usage; exit 1 ;;
+ -v|-verbose) verbose=1 && shift ;;
+ -d|-debug) debug=1 && shift ;;
+
+ -no-version-check) no_version_check=1 && shift ;;
+
+ -mem) echo "!! WARNING !! -mem option is ignored. Please use -J-Xmx and -J-Xms" && shift 2 ;;
+ -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;;
+
+ -main) custom_mainclass="$2" && shift 2 ;;
+
+ -java-home) require_arg path "$1" "$2" && jre=`eval echo $2` && java_cmd="$jre/bin/java" && shift 2 ;;
+
+ -D*|-agentlib*|-agentpath*|-javaagent*|-XX*) addJava "$1" && shift ;;
+ -J*) addJava "${1:2}" && shift ;;
+ *) addResidual "$1" && shift ;;
+ esac
+ done
+
+ if [[ no_more_snp_opts ]]; then
+ while [[ $# -gt 0 ]]; do
+ addResidual "$1" && shift
+ done
+ fi
+
+ is_function_defined process_my_args && {
+ myargs=("${residual_args[@]}")
+ residual_args=()
+ process_my_args "${myargs[@]}"
+ }
+}
+
+# Actually runs the script.
+run() {
+ # TODO - check for sane environment
+
+ # process the combined args, then reset "$@" to the residuals
+ process_args "$@"
+ set -- "${residual_args[@]}"
+ argumentCount=$#
+
+ #check for jline terminal fixes on cygwin
+ if is_cygwin; then
+ stty -icanon min 1 -echo > /dev/null 2>&1
+ addJava "-Djline.terminal=jline.UnixTerminal"
+ addJava "-Dsbt.cygwin=true"
+ fi
+
+ # check java version
+ if [[ ! $no_version_check ]]; then
+ java_version_check
+ fi
+
+ if [ -n "$custom_mainclass" ]; then
+ mainclass=("$custom_mainclass")
+ else
+ mainclass=("${app_mainclass[@]}")
+ fi
+
+ # Fallback to custom mainclass if main class is not provided (this is the case if the JAR contains multiple apps)
+ if [ "$app_mainclass" = "" ] || [ $custom_mainclass ];then
+ if [ "$custom_mainclass" = "" ]; then
+ echo "You need to pass -main argument."
+ exit 1
+ fi
+
+ app_mainclass=$custom_mainclass
+ fi
+
+ # Now we check to see if there are any java opts on the environment. These get listed first, with the script able to override them.
+ if [[ "$JAVA_OPTS" != "" ]]; then
+ java_opts="${JAVA_OPTS}"
+ fi
+
+ # run sbt
+ execRunner "$java_cmd" \
+ ${java_opts[@]} \
+ "${java_args[@]}" \
+ -cp "$(fix_classpath "$app_classpath")" \
+ "${mainclass[@]}" \
+ "${app_commands[@]}" \
+ "${residual_args[@]}"
+
+ local exit_code=$?
+ if is_cygwin; then
+ stty icanon echo > /dev/null 2>&1
+ fi
+ exit $exit_code
+}
+
+# Loads a configuration file full of default command line options for this script.
+loadConfigFile() {
+ cat "$1" | sed $'/^\#/d;s/\r$//'
+}
+
+# Now check to see if it's a good enough version
+# TODO - Check to see if we have a configured default java version, otherwise use 1.8
+java_version_check() {
+ readonly java_version=$("$java_cmd" -version 2>&1 | awk -F '"' '/version/ {print $2}')
+ if [[ "$java_version" == "" ]]; then
+ echo
+ echo No java installations was detected.
+ echo Please go to http://www.java.com/getjava/ and download
+ echo
+ exit 1
+ else
+ local major=$(echo "$java_version" | cut -d'.' -f1)
+ if [[ "$major" -eq "1" ]]; then
+ local major=$(echo "$java_version" | cut -d'.' -f2)
+ fi
+ if [[ "$major" -lt "8" ]]; then
+ echo
+ echo The java installation you have is not up to date
+ echo $app_name requires at least version 1.8+, you have
+ echo version $java_version
+ echo
+ echo Please go to http://www.java.com/getjava/ and download
+ echo a valid Java Runtime and install before running $app_name.
+ echo
+ exit 1
+ fi
+ fi
+}
+
+### ------------------------------- ###
+### Start of customized settings ###
+### ------------------------------- ###
+usage() {
+  cat <<EOM
+Usage: $(basename "$0") [options]
+
+  -h | -help         print this message
+  -v | -verbose      this runner is chattier
+  -d | -debug        set sbt log level to debug
+  -no-version-check  Don't run the java version check.
+  -main <classname>  Define a custom main class
+  -jvm-debug <port>  Turn on JVM debugging, open at the given port.
+
+  # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
+  -java-home <path>  alternate JAVA_HOME
+
+  # jvm options and output control
+  JAVA_OPTS          environment variable, if unset uses "$java_opts"
+  -Dkey=val          pass -Dkey=val directly to the java runtime
+  -J-X               pass option -X directly to the java runtime
+                     (-J is stripped)
+
+  # special option
+  --                 To stop parsing built-in commands from the rest of the command-line.
+                     e.g.) enabling debug and sending -d as app argument
+                     \$ ./start-script -d -- -d
+
+In the case of duplicated or conflicting options, basically the order above
+shows precedence: JAVA_OPTS lowest, command line options highest except "--".
+Available main classes:
+	play.core.server.ProdServerStart
+EOM
+}
+
+### ------------------------------- ###
+### Main script ###
+### ------------------------------- ###
+
+declare -a residual_args
+declare -a java_args
+declare -a app_commands
+declare -r real_script_path="$(realpath "$0")"
+declare -r app_home="$(realpath "$(dirname "$real_script_path")")"
+# TODO - Check whether this is ok in cygwin...
+declare -r lib_dir="$(realpath "${app_home}/../lib")"
+declare -a app_mainclass=(play.core.server.ProdServerStart)
+
+declare -r script_conf_file="${app_home}/../conf/application.ini"
+declare -r app_classpath="$lib_dir/../conf/:$lib_dir/webknossos.webknossos-wk-sans-externalized.jar:$lib_dir/util.util-wk.jar:$lib_dir/webknossos-tracingstore.webknossos-tracingstore-wk.jar:$lib_dir/webknossos-datastore.webknossos-datastore-wk.jar:$lib_dir/webknossosjni.webknossosjni-wk.jar:$lib_dir/org.scala-lang.scala-library-2.13.11.jar:$lib_dir/org.playframework.twirl.twirl-api_2.13-2.0.3.jar:$lib_dir/org.playframework.play-server_2.13-3.0.1.jar:$lib_dir/org.playframework.play-pekko-http-server_2.13-3.0.1.jar:$lib_dir/org.playframework.play-logback_2.13-3.0.1.jar:$lib_dir/org.playframework.play-filters-helpers_2.13-3.0.1.jar:$lib_dir/commons-codec.commons-codec-1.16.0.jar:$lib_dir/org.playframework.silhouette.play-silhouette_2.13-10.0.0.jar:$lib_dir/org.playframework.silhouette.play-silhouette-crypto-jca_2.13-10.0.0.jar:$lib_dir/org.glassfish.jaxb.txw2-4.0.2.jar:$lib_dir/com.github.jwt-scala.jwt-play-json_2.13-9.2.0.jar:$lib_dir/com.typesafe.slick.slick_2.13-3.4.1.jar:$lib_dir/com.typesafe.slick.slick-hikaricp_2.13-3.4.1.jar:$lib_dir/com.typesafe.slick.slick-codegen_2.13-3.4.1.jar:$lib_dir/org.postgresql.postgresql-42.5.4.jar:$lib_dir/org.playframework.play_2.13-3.0.1.jar:$lib_dir/com.typesafe.play.play-json_2.13-2.10.1.jar:$lib_dir/org.apache.commons.commons-email-1.5.jar:$lib_dir/commons-io.commons-io-2.15.1.jar:$lib_dir/org.apache.commons.commons-lang3-3.14.0.jar:$lib_dir/net.liftweb.lift-common_2.13-3.5.0.jar:$lib_dir/org.reactivemongo.reactivemongo-bson-api_2.13-1.0.10.jar:$lib_dir/com.thesamet.scalapb.scalapb-runtime_2.13-0.11.13.jar:$lib_dir/com.typesafe.scala-logging.scala-logging_2.13-3.9.5.jar:$lib_dir/org.playframework.play-caffeine-cache_2.13-3.0.1.jar:$lib_dir/at.favre.lib.bcrypt-0.10.2.jar:$lib_dir/org.jgrapht.jgrapht-core-1.5.1.jar:$lib_dir/org.scala-lang.modules.scala-xml_2.13-2.2.0.jar:$lib_dir/org.playframework.play-streams_2.13-3.0.1.jar:$lib_dir/org.apache.pekko.pekko-http-core_2.13-1.0.0.jar:$lib_dir/ch.qos.logback.logback-classic-1.4.14.ja
r:$lib_dir/org.playframework.play-cache_2.13-3.0.1.jar:$lib_dir/org.playframework.play-ws_2.13-3.0.1.jar:$lib_dir/org.playframework.play-openid_2.13-3.0.0.jar:$lib_dir/com.auth0.java-jwt-3.19.4.jar:$lib_dir/org.codehaus.woodstox.woodstox-core-asl-4.0.6.jar:$lib_dir/com.github.jwt-scala.jwt-json-common_2.13-9.2.0.jar:$lib_dir/org.slf4j.slf4j-api-2.0.9.jar:$lib_dir/com.typesafe.config-1.4.3.jar:$lib_dir/org.reactivestreams.reactive-streams-1.0.4.jar:$lib_dir/org.scala-lang.modules.scala-collection-compat_2.13-2.9.0.jar:$lib_dir/com.zaxxer.HikariCP-4.0.3.jar:$lib_dir/org.checkerframework.checker-qual-3.37.0.jar:$lib_dir/org.playframework.play-build-link-3.0.1.jar:$lib_dir/org.playframework.play-configuration_2.13-3.0.1.jar:$lib_dir/org.slf4j.jul-to-slf4j-2.0.9.jar:$lib_dir/org.slf4j.jcl-over-slf4j-2.0.9.jar:$lib_dir/org.apache.pekko.pekko-actor_2.13-1.0.2.jar:$lib_dir/org.apache.pekko.pekko-actor-typed_2.13-1.0.2.jar:$lib_dir/org.apache.pekko.pekko-slf4j_2.13-1.0.2.jar:$lib_dir/org.apache.pekko.pekko-serialization-jackson_2.13-1.0.2.jar:$lib_dir/com.fasterxml.jackson.core.jackson-core-2.14.3.jar:$lib_dir/com.fasterxml.jackson.core.jackson-annotations-2.14.3.jar:$lib_dir/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.14.3.jar:$lib_dir/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.14.3.jar:$lib_dir/com.fasterxml.jackson.core.jackson-databind-2.14.3.jar:$lib_dir/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.14.3.jar:$lib_dir/com.fasterxml.jackson.module.jackson-module-parameter-names-2.14.3.jar:$lib_dir/com.fasterxml.jackson.module.jackson-module-scala_2.13-2.14.3.jar:$lib_dir/io.jsonwebtoken.jjwt-api-0.11.5.jar:$lib_dir/io.jsonwebtoken.jjwt-impl-0.11.5.jar:$lib_dir/io.jsonwebtoken.jjwt-jackson-0.11.5.jar:$lib_dir/org.playframework.play-json_2.13-3.0.1.jar:$lib_dir/com.google.guava.guava-32.1.3-jre.jar:$lib_dir/javax.inject.javax.inject-1.jar:$lib_dir/com.typesafe.ssl-config-core_2.13-0.6.1.jar:$lib_dir/org.scala-lang.modules.scala-parser-c
ombinators_2.13-1.1.2.jar:$lib_dir/com.typesafe.play.play-functional_2.13-2.10.1.jar:$lib_dir/org.scala-lang.scala-reflect-2.13.11.jar:$lib_dir/com.sun.mail.javax.mail-1.5.6.jar:$lib_dir/com.thesamet.scalapb.lenses_2.13-0.11.13.jar:$lib_dir/com.google.protobuf.protobuf-java-3.21.7.jar:$lib_dir/com.github.ben-manes.caffeine.caffeine-3.1.8.jar:$lib_dir/com.github.ben-manes.caffeine.jcache-3.1.8.jar:$lib_dir/javax.cache.cache-api-1.1.1.jar:$lib_dir/at.favre.lib.bytes-1.5.0.jar:$lib_dir/com.thesamet.scalapb.scalapb-runtime-grpc_2.13-0.11.13.jar:$lib_dir/io.grpc.grpc-netty-shaded-1.47.1.jar:$lib_dir/io.grpc.grpc-services-1.47.1.jar:$lib_dir/com.google.code.gson.gson-2.10.1.jar:$lib_dir/org.playframework.play-ahc-ws_2.13-3.0.1.jar:$lib_dir/org.playframework.play-guice_2.13-3.0.1.jar:$lib_dir/org.typelevel.spire_2.13-0.17.0.jar:$lib_dir/net.debasishg.redisclient_2.13-3.42.jar:$lib_dir/cisd.jhdf5-19.04.1.jar:$lib_dir/edu.ucar.cdm-core-5.4.2.jar:$lib_dir/com.amazonaws.aws-java-sdk-s3-1.12.470.jar:$lib_dir/com.google.cloud.google-cloud-storage-2.13.1.jar:$lib_dir/org.lasersonlab.jblosc-1.0.1.jar:$lib_dir/org.apache.commons.commons-compress-1.21.jar:$lib_dir/com.github.luben.zstd-jni-1.5.5-5.jar:$lib_dir/com.aayushatharva.brotli4j.brotli4j-1.11.0.jar:$lib_dir/com.aayushatharva.brotli4j.native-linux-x86_64-1.11.0.jar:$lib_dir/com.aayushatharva.brotli4j.native-osx-x86_64-1.11.0.jar:$lib_dir/com.aayushatharva.brotli4j.native-osx-aarch64-1.11.0.jar:$lib_dir/org.lz4.lz4-java-1.8.0.jar:$lib_dir/org.jheaps.jheaps-0.13.jar:$lib_dir/org.apache.pekko.pekko-stream_2.13-1.0.2.jar:$lib_dir/org.apache.pekko.pekko-parsing_2.13-1.0.0.jar:$lib_dir/org.parboiled.parboiled_2.13-2.5.0.jar:$lib_dir/ch.qos.logback.logback-core-1.4.14.jar:$lib_dir/org.playframework.play-ws-standalone_2.13-3.0.1.jar:$lib_dir/org.playframework.play-ws-standalone-xml_2.13-3.0.1.jar:$lib_dir/org.playframework.play-ws-standalone-json_2.13-3.0.1.jar:$lib_dir/stax.stax-api-1.0.1.jar:$lib_dir/org.codehaus.woodstox.stax2-api
-3.0.1.jar:$lib_dir/com.github.jwt-scala.jwt-core_2.13-9.2.0.jar:$lib_dir/org.playframework.play-exceptions-3.0.1.jar:$lib_dir/com.thoughtworks.paranamer.paranamer-2.8.jar:$lib_dir/org.playframework.play-functional_2.13-3.0.1.jar:$lib_dir/com.google.guava.failureaccess-1.0.1.jar:$lib_dir/com.google.guava.listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:$lib_dir/com.google.code.findbugs.jsr305-3.0.2.jar:$lib_dir/com.google.errorprone.error_prone_annotations-2.21.1.jar:$lib_dir/com.google.j2objc.j2objc-annotations-2.8.jar:$lib_dir/javax.activation.activation-1.1.jar:$lib_dir/org.osgi.org.osgi.service.component.annotations-1.5.1.jar:$lib_dir/jakarta.inject.jakarta.inject-api-2.0.1.jar:$lib_dir/io.grpc.grpc-stub-1.47.1.jar:$lib_dir/io.grpc.grpc-protobuf-1.47.1.jar:$lib_dir/io.perfmark.perfmark-api-0.25.0.jar:$lib_dir/io.grpc.grpc-core-1.47.1.jar:$lib_dir/com.google.protobuf.protobuf-java-util-3.21.7.jar:$lib_dir/org.playframework.play-ahc-ws-standalone_2.13-3.0.1.jar:$lib_dir/org.playframework.shaded-asynchttpclient-3.0.1.jar:$lib_dir/org.playframework.shaded-oauth-3.0.1.jar:$lib_dir/com.google.inject.guice-6.0.0.jar:$lib_dir/com.google.inject.extensions.guice-assistedinject-6.0.0.jar:$lib_dir/org.typelevel.spire-macros_2.13-0.17.0.jar:$lib_dir/org.typelevel.spire-platform_2.13-0.17.0.jar:$lib_dir/org.typelevel.spire-util_2.13-0.17.0.jar:$lib_dir/org.typelevel.algebra_2.13-2.0.1.jar:$lib_dir/org.apache.commons.commons-pool2-2.8.0.jar:$lib_dir/cisd.base-18.09.0.jar:$lib_dir/edu.ucar.udunits-5.4.2.jar:$lib_dir/edu.ucar.httpservices-5.4.2.jar:$lib_dir/com.beust.jcommander-1.78.jar:$lib_dir/com.google.re2j.re2j-1.3.jar:$lib_dir/joda-time.joda-time-2.10.3.jar:$lib_dir/org.jdom.jdom2-2.0.6.jar:$lib_dir/com.amazonaws.aws-java-sdk-kms-1.12.470.jar:$lib_dir/com.amazonaws.aws-java-sdk-core-1.12.470.jar:$lib_dir/com.amazonaws.jmespath-java-1.12.470.jar:$lib_dir/com.google.http-client.google-http-client-1.42.2.jar:$lib_dir/io.opencensus.opencensus-contrib-http-util-0.
31.1.jar:$lib_dir/com.google.http-client.google-http-client-jackson2-1.42.2.jar:$lib_dir/com.google.api-client.google-api-client-2.0.0.jar:$lib_dir/com.google.oauth-client.google-oauth-client-1.34.1.jar:$lib_dir/com.google.http-client.google-http-client-gson-1.42.2.jar:$lib_dir/com.google.http-client.google-http-client-apache-v2-1.42.2.jar:$lib_dir/com.google.apis.google-api-services-storage-v1-rev20220705-2.0.0.jar:$lib_dir/com.google.cloud.google-cloud-core-2.8.20.jar:$lib_dir/com.google.auto.value.auto-value-annotations-1.10.jar:$lib_dir/com.google.api.grpc.proto-google-common-protos-2.9.6.jar:$lib_dir/com.google.cloud.google-cloud-core-http-2.8.20.jar:$lib_dir/com.google.http-client.google-http-client-appengine-1.42.2.jar:$lib_dir/com.google.api.gax-httpjson-0.104.2.jar:$lib_dir/com.google.api.gax-2.19.2.jar:$lib_dir/com.google.auth.google-auth-library-credentials-1.11.0.jar:$lib_dir/com.google.auth.google-auth-library-oauth2-http-1.11.0.jar:$lib_dir/com.google.api.api-common-2.2.1.jar:$lib_dir/javax.annotation.javax.annotation-api-1.3.2.jar:$lib_dir/io.opencensus.opencensus-api-0.31.1.jar:$lib_dir/io.grpc.grpc-context-1.49.2.jar:$lib_dir/com.google.api.grpc.proto-google-iam-v1-1.6.2.jar:$lib_dir/org.threeten.threetenbp-1.6.2.jar:$lib_dir/junit.junit-4.12.jar:$lib_dir/net.java.dev.jna.jna-4.2.2.jar:$lib_dir/com.aayushatharva.brotli4j.service-1.11.0.jar:$lib_dir/org.apache.pekko.pekko-protobuf-v3_2.13-1.0.2.jar:$lib_dir/org.osgi.osgi.annotation-8.1.0.jar:$lib_dir/org.osgi.org.osgi.namespace.extender-1.0.1.jar:$lib_dir/org.osgi.org.osgi.util.function-1.0.0.jar:$lib_dir/org.osgi.org.osgi.util.promise-1.0.0.jar:$lib_dir/io.grpc.grpc-api-1.47.1.jar:$lib_dir/io.grpc.grpc-protobuf-lite-1.47.1.jar:$lib_dir/com.google.android.annotations-4.1.1.4.jar:$lib_dir/org.codehaus.mojo.animal-sniffer-annotations-1.19.jar:$lib_dir/org.playframework.cachecontrol_2.13-3.0.0.jar:$lib_dir/aopalliance.aopalliance-1.0.jar:$lib_dir/org.typelevel.cats-kernel_2.13-2.1.1.jar:$lib_dir/org.apa
che.httpcomponents.httpclient-4.5.13.jar:$lib_dir/org.apache.httpcomponents.httpcore-4.4.13.jar:$lib_dir/org.apache.httpcomponents.httpmime-4.5.13.jar:$lib_dir/commons-logging.commons-logging-1.2.jar:$lib_dir/software.amazon.ion.ion-java-1.0.2.jar:$lib_dir/org.hamcrest.hamcrest-core-1.3.jar:$lib_dir/webknossos.webknossos-wk-assets.jar"
+
+addJava "-Duser.dir=$(realpath "$(cd "${app_home}/.."; pwd -P)" $(is_cygwin && echo "fix"))"
+
+# java_cmd is overrode in process_args when -java-home is used
+declare java_cmd=$(get_java_cmd)
+
+# if configuration files exist, prepend their contents to $@ so it can be processed by this runner
+[[ -f "$script_conf_file" ]] && set -- $(loadConfigFile "$script_conf_file") "$@"
+
+run "$@"
diff --git a/bin/webknossos.bat b/bin/webknossos.bat
new file mode 100644
index 00000000000..8828fd3d492
--- /dev/null
+++ b/bin/webknossos.bat
@@ -0,0 +1,184 @@
+@REM webknossos launcher script
+@REM
+@REM Environment:
+@REM JAVA_HOME - location of a JDK home dir (optional if java on path)
+@REM CFG_OPTS - JVM options (optional)
+@REM Configuration:
+@REM WEBKNOSSOS_config.txt found in the WEBKNOSSOS_HOME.
+@setlocal enabledelayedexpansion
+@setlocal enableextensions
+
+@echo off
+
+
+if "%WEBKNOSSOS_HOME%"=="" (
+ set "APP_HOME=%~dp0\\.."
+
+ rem Also set the old env name for backwards compatibility
+ set "WEBKNOSSOS_HOME=%~dp0\\.."
+) else (
+ set "APP_HOME=%WEBKNOSSOS_HOME%"
+)
+
+set "APP_LIB_DIR=%APP_HOME%\lib\"
+
+rem Detect if we were double clicked, although theoretically a user could
+rem manually run cmd /c
+for %%x in (!cmdcmdline!) do if %%~x==/c set DOUBLECLICKED=1
+
+rem FIRST we load the config file of extra options.
+set "CFG_FILE=%APP_HOME%\WEBKNOSSOS_config.txt"
+set CFG_OPTS=
+call :parse_config "%CFG_FILE%" CFG_OPTS
+
+rem We use the value of the JAVA_OPTS environment variable if defined, rather than the config.
+set _JAVA_OPTS=%JAVA_OPTS%
+if "!_JAVA_OPTS!"=="" set _JAVA_OPTS=!CFG_OPTS!
+
+rem We keep in _JAVA_PARAMS all -J-prefixed and -D-prefixed arguments
+rem "-J" is stripped, "-D" is left as is, and everything is appended to JAVA_OPTS
+set _JAVA_PARAMS=
+set _APP_ARGS=
+
+set "APP_CLASSPATH=%APP_LIB_DIR%\..\conf\;%APP_LIB_DIR%\webknossos.webknossos-wk-sans-externalized.jar;%APP_LIB_DIR%\util.util-wk.jar;%APP_LIB_DIR%\webknossos-tracingstore.webknossos-tracingstore-wk.jar;%APP_LIB_DIR%\webknossos-datastore.webknossos-datastore-wk.jar;%APP_LIB_DIR%\webknossosjni.webknossosjni-wk.jar;%APP_LIB_DIR%\org.scala-lang.scala-library-2.13.11.jar;%APP_LIB_DIR%\org.playframework.twirl.twirl-api_2.13-2.0.3.jar;%APP_LIB_DIR%\org.playframework.play-server_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-pekko-http-server_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-logback_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-filters-helpers_2.13-3.0.1.jar;%APP_LIB_DIR%\commons-codec.commons-codec-1.16.0.jar;%APP_LIB_DIR%\org.playframework.silhouette.play-silhouette_2.13-10.0.0.jar;%APP_LIB_DIR%\org.playframework.silhouette.play-silhouette-crypto-jca_2.13-10.0.0.jar;%APP_LIB_DIR%\org.glassfish.jaxb.txw2-4.0.2.jar;%APP_LIB_DIR%\com.github.jwt-scala.jwt-play-json_2.13-9.2.0.jar;%APP_LIB_DIR%\com.typesafe.slick.slick_2.13-3.4.1.jar;%APP_LIB_DIR%\com.typesafe.slick.slick-hikaricp_2.13-3.4.1.jar;%APP_LIB_DIR%\com.typesafe.slick.slick-codegen_2.13-3.4.1.jar;%APP_LIB_DIR%\org.postgresql.postgresql-42.5.4.jar;%APP_LIB_DIR%\org.playframework.play_2.13-3.0.1.jar;%APP_LIB_DIR%\com.typesafe.play.play-json_2.13-2.10.1.jar;%APP_LIB_DIR%\org.apache.commons.commons-email-1.5.jar;%APP_LIB_DIR%\commons-io.commons-io-2.15.1.jar;%APP_LIB_DIR%\org.apache.commons.commons-lang3-3.14.0.jar;%APP_LIB_DIR%\net.liftweb.lift-common_2.13-3.5.0.jar;%APP_LIB_DIR%\org.reactivemongo.reactivemongo-bson-api_2.13-1.0.10.jar;%APP_LIB_DIR%\com.thesamet.scalapb.scalapb-runtime_2.13-0.11.13.jar;%APP_LIB_DIR%\com.typesafe.scala-logging.scala-logging_2.13-3.9.5.jar;%APP_LIB_DIR%\org.playframework.play-caffeine-cache_2.13-3.0.1.jar;%APP_LIB_DIR%\at.favre.lib.bcrypt-0.10.2.jar;%APP_LIB_DIR%\org.jgrapht.jgrapht-core-1.5.1.jar;%APP_LIB_DIR%\org.scala-lang.modules.scala-xml_2.13-2.2.0.ja
r;%APP_LIB_DIR%\org.playframework.play-streams_2.13-3.0.1.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-http-core_2.13-1.0.0.jar;%APP_LIB_DIR%\ch.qos.logback.logback-classic-1.4.14.jar;%APP_LIB_DIR%\org.playframework.play-cache_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-ws_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-openid_2.13-3.0.0.jar;%APP_LIB_DIR%\com.auth0.java-jwt-3.19.4.jar;%APP_LIB_DIR%\org.codehaus.woodstox.woodstox-core-asl-4.0.6.jar;%APP_LIB_DIR%\com.github.jwt-scala.jwt-json-common_2.13-9.2.0.jar;%APP_LIB_DIR%\org.slf4j.slf4j-api-2.0.9.jar;%APP_LIB_DIR%\com.typesafe.config-1.4.3.jar;%APP_LIB_DIR%\org.reactivestreams.reactive-streams-1.0.4.jar;%APP_LIB_DIR%\org.scala-lang.modules.scala-collection-compat_2.13-2.9.0.jar;%APP_LIB_DIR%\com.zaxxer.HikariCP-4.0.3.jar;%APP_LIB_DIR%\org.checkerframework.checker-qual-3.37.0.jar;%APP_LIB_DIR%\org.playframework.play-build-link-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-configuration_2.13-3.0.1.jar;%APP_LIB_DIR%\org.slf4j.jul-to-slf4j-2.0.9.jar;%APP_LIB_DIR%\org.slf4j.jcl-over-slf4j-2.0.9.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-actor_2.13-1.0.2.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-actor-typed_2.13-1.0.2.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-slf4j_2.13-1.0.2.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-serialization-jackson_2.13-1.0.2.jar;%APP_LIB_DIR%\com.fasterxml.jackson.core.jackson-core-2.14.3.jar;%APP_LIB_DIR%\com.fasterxml.jackson.core.jackson-annotations-2.14.3.jar;%APP_LIB_DIR%\com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.14.3.jar;%APP_LIB_DIR%\com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.14.3.jar;%APP_LIB_DIR%\com.fasterxml.jackson.core.jackson-databind-2.14.3.jar;%APP_LIB_DIR%\com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.14.3.jar;%APP_LIB_DIR%\com.fasterxml.jackson.module.jackson-module-parameter-names-2.14.3.jar;%APP_LIB_DIR%\com.fasterxml.jackson.module.jackson-module-scala_2.13-2.14.3.jar;%APP_LIB_DIR%\io.jsonwebtoken.jjwt-api-0.11.5.jar;%APP_LIB_DI
R%\io.jsonwebtoken.jjwt-impl-0.11.5.jar;%APP_LIB_DIR%\io.jsonwebtoken.jjwt-jackson-0.11.5.jar;%APP_LIB_DIR%\org.playframework.play-json_2.13-3.0.1.jar;%APP_LIB_DIR%\com.google.guava.guava-32.1.3-jre.jar;%APP_LIB_DIR%\javax.inject.javax.inject-1.jar;%APP_LIB_DIR%\com.typesafe.ssl-config-core_2.13-0.6.1.jar;%APP_LIB_DIR%\org.scala-lang.modules.scala-parser-combinators_2.13-1.1.2.jar;%APP_LIB_DIR%\com.typesafe.play.play-functional_2.13-2.10.1.jar;%APP_LIB_DIR%\org.scala-lang.scala-reflect-2.13.11.jar;%APP_LIB_DIR%\com.sun.mail.javax.mail-1.5.6.jar;%APP_LIB_DIR%\com.thesamet.scalapb.lenses_2.13-0.11.13.jar;%APP_LIB_DIR%\com.google.protobuf.protobuf-java-3.21.7.jar;%APP_LIB_DIR%\com.github.ben-manes.caffeine.caffeine-3.1.8.jar;%APP_LIB_DIR%\com.github.ben-manes.caffeine.jcache-3.1.8.jar;%APP_LIB_DIR%\javax.cache.cache-api-1.1.1.jar;%APP_LIB_DIR%\at.favre.lib.bytes-1.5.0.jar;%APP_LIB_DIR%\com.thesamet.scalapb.scalapb-runtime-grpc_2.13-0.11.13.jar;%APP_LIB_DIR%\io.grpc.grpc-netty-shaded-1.47.1.jar;%APP_LIB_DIR%\io.grpc.grpc-services-1.47.1.jar;%APP_LIB_DIR%\com.google.code.gson.gson-2.10.1.jar;%APP_LIB_DIR%\org.playframework.play-ahc-ws_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-guice_2.13-3.0.1.jar;%APP_LIB_DIR%\org.typelevel.spire_2.13-0.17.0.jar;%APP_LIB_DIR%\net.debasishg.redisclient_2.13-3.42.jar;%APP_LIB_DIR%\cisd.jhdf5-19.04.1.jar;%APP_LIB_DIR%\edu.ucar.cdm-core-5.4.2.jar;%APP_LIB_DIR%\com.amazonaws.aws-java-sdk-s3-1.12.470.jar;%APP_LIB_DIR%\com.google.cloud.google-cloud-storage-2.13.1.jar;%APP_LIB_DIR%\org.lasersonlab.jblosc-1.0.1.jar;%APP_LIB_DIR%\org.apache.commons.commons-compress-1.21.jar;%APP_LIB_DIR%\com.github.luben.zstd-jni-1.5.5-5.jar;%APP_LIB_DIR%\com.aayushatharva.brotli4j.brotli4j-1.11.0.jar;%APP_LIB_DIR%\com.aayushatharva.brotli4j.native-linux-x86_64-1.11.0.jar;%APP_LIB_DIR%\com.aayushatharva.brotli4j.native-osx-x86_64-1.11.0.jar;%APP_LIB_DIR%\com.aayushatharva.brotli4j.native-osx-aarch64-1.11.0.jar;%APP_LIB_DIR%\org.lz4.lz4-java-1.8.0.jar;%AP
P_LIB_DIR%\org.jheaps.jheaps-0.13.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-stream_2.13-1.0.2.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-parsing_2.13-1.0.0.jar;%APP_LIB_DIR%\org.parboiled.parboiled_2.13-2.5.0.jar;%APP_LIB_DIR%\ch.qos.logback.logback-core-1.4.14.jar;%APP_LIB_DIR%\org.playframework.play-ws-standalone_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-ws-standalone-xml_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.play-ws-standalone-json_2.13-3.0.1.jar;%APP_LIB_DIR%\stax.stax-api-1.0.1.jar;%APP_LIB_DIR%\org.codehaus.woodstox.stax2-api-3.0.1.jar;%APP_LIB_DIR%\com.github.jwt-scala.jwt-core_2.13-9.2.0.jar;%APP_LIB_DIR%\org.playframework.play-exceptions-3.0.1.jar;%APP_LIB_DIR%\com.thoughtworks.paranamer.paranamer-2.8.jar;%APP_LIB_DIR%\org.playframework.play-functional_2.13-3.0.1.jar;%APP_LIB_DIR%\com.google.guava.failureaccess-1.0.1.jar;%APP_LIB_DIR%\com.google.guava.listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar;%APP_LIB_DIR%\com.google.code.findbugs.jsr305-3.0.2.jar;%APP_LIB_DIR%\com.google.errorprone.error_prone_annotations-2.21.1.jar;%APP_LIB_DIR%\com.google.j2objc.j2objc-annotations-2.8.jar;%APP_LIB_DIR%\javax.activation.activation-1.1.jar;%APP_LIB_DIR%\org.osgi.org.osgi.service.component.annotations-1.5.1.jar;%APP_LIB_DIR%\jakarta.inject.jakarta.inject-api-2.0.1.jar;%APP_LIB_DIR%\io.grpc.grpc-stub-1.47.1.jar;%APP_LIB_DIR%\io.grpc.grpc-protobuf-1.47.1.jar;%APP_LIB_DIR%\io.perfmark.perfmark-api-0.25.0.jar;%APP_LIB_DIR%\io.grpc.grpc-core-1.47.1.jar;%APP_LIB_DIR%\com.google.protobuf.protobuf-java-util-3.21.7.jar;%APP_LIB_DIR%\org.playframework.play-ahc-ws-standalone_2.13-3.0.1.jar;%APP_LIB_DIR%\org.playframework.shaded-asynchttpclient-3.0.1.jar;%APP_LIB_DIR%\org.playframework.shaded-oauth-3.0.1.jar;%APP_LIB_DIR%\com.google.inject.guice-6.0.0.jar;%APP_LIB_DIR%\com.google.inject.extensions.guice-assistedinject-6.0.0.jar;%APP_LIB_DIR%\org.typelevel.spire-macros_2.13-0.17.0.jar;%APP_LIB_DIR%\org.typelevel.spire-platform_2.13-0.17.0.jar;%APP_LI
B_DIR%\org.typelevel.spire-util_2.13-0.17.0.jar;%APP_LIB_DIR%\org.typelevel.algebra_2.13-2.0.1.jar;%APP_LIB_DIR%\org.apache.commons.commons-pool2-2.8.0.jar;%APP_LIB_DIR%\cisd.base-18.09.0.jar;%APP_LIB_DIR%\edu.ucar.udunits-5.4.2.jar;%APP_LIB_DIR%\edu.ucar.httpservices-5.4.2.jar;%APP_LIB_DIR%\com.beust.jcommander-1.78.jar;%APP_LIB_DIR%\com.google.re2j.re2j-1.3.jar;%APP_LIB_DIR%\joda-time.joda-time-2.10.3.jar;%APP_LIB_DIR%\org.jdom.jdom2-2.0.6.jar;%APP_LIB_DIR%\com.amazonaws.aws-java-sdk-kms-1.12.470.jar;%APP_LIB_DIR%\com.amazonaws.aws-java-sdk-core-1.12.470.jar;%APP_LIB_DIR%\com.amazonaws.jmespath-java-1.12.470.jar;%APP_LIB_DIR%\com.google.http-client.google-http-client-1.42.2.jar;%APP_LIB_DIR%\io.opencensus.opencensus-contrib-http-util-0.31.1.jar;%APP_LIB_DIR%\com.google.http-client.google-http-client-jackson2-1.42.2.jar;%APP_LIB_DIR%\com.google.api-client.google-api-client-2.0.0.jar;%APP_LIB_DIR%\com.google.oauth-client.google-oauth-client-1.34.1.jar;%APP_LIB_DIR%\com.google.http-client.google-http-client-gson-1.42.2.jar;%APP_LIB_DIR%\com.google.http-client.google-http-client-apache-v2-1.42.2.jar;%APP_LIB_DIR%\com.google.apis.google-api-services-storage-v1-rev20220705-2.0.0.jar;%APP_LIB_DIR%\com.google.cloud.google-cloud-core-2.8.20.jar;%APP_LIB_DIR%\com.google.auto.value.auto-value-annotations-1.10.jar;%APP_LIB_DIR%\com.google.api.grpc.proto-google-common-protos-2.9.6.jar;%APP_LIB_DIR%\com.google.cloud.google-cloud-core-http-2.8.20.jar;%APP_LIB_DIR%\com.google.http-client.google-http-client-appengine-1.42.2.jar;%APP_LIB_DIR%\com.google.api.gax-httpjson-0.104.2.jar;%APP_LIB_DIR%\com.google.api.gax-2.19.2.jar;%APP_LIB_DIR%\com.google.auth.google-auth-library-credentials-1.11.0.jar;%APP_LIB_DIR%\com.google.auth.google-auth-library-oauth2-http-1.11.0.jar;%APP_LIB_DIR%\com.google.api.api-common-2.2.1.jar;%APP_LIB_DIR%\javax.annotation.javax.annotation-api-1.3.2.jar;%APP_LIB_DIR%\io.opencensus.opencensus-api-0.31.1.jar;%APP_LIB_DIR%\io.grpc.grpc-context-1.49.2.jar;%APP_
LIB_DIR%\com.google.api.grpc.proto-google-iam-v1-1.6.2.jar;%APP_LIB_DIR%\org.threeten.threetenbp-1.6.2.jar;%APP_LIB_DIR%\junit.junit-4.12.jar;%APP_LIB_DIR%\net.java.dev.jna.jna-4.2.2.jar;%APP_LIB_DIR%\com.aayushatharva.brotli4j.service-1.11.0.jar;%APP_LIB_DIR%\org.apache.pekko.pekko-protobuf-v3_2.13-1.0.2.jar;%APP_LIB_DIR%\org.osgi.osgi.annotation-8.1.0.jar;%APP_LIB_DIR%\org.osgi.org.osgi.namespace.extender-1.0.1.jar;%APP_LIB_DIR%\org.osgi.org.osgi.util.function-1.0.0.jar;%APP_LIB_DIR%\org.osgi.org.osgi.util.promise-1.0.0.jar;%APP_LIB_DIR%\io.grpc.grpc-api-1.47.1.jar;%APP_LIB_DIR%\io.grpc.grpc-protobuf-lite-1.47.1.jar;%APP_LIB_DIR%\com.google.android.annotations-4.1.1.4.jar;%APP_LIB_DIR%\org.codehaus.mojo.animal-sniffer-annotations-1.19.jar;%APP_LIB_DIR%\org.playframework.cachecontrol_2.13-3.0.0.jar;%APP_LIB_DIR%\aopalliance.aopalliance-1.0.jar;%APP_LIB_DIR%\org.typelevel.cats-kernel_2.13-2.1.1.jar;%APP_LIB_DIR%\org.apache.httpcomponents.httpclient-4.5.13.jar;%APP_LIB_DIR%\org.apache.httpcomponents.httpcore-4.4.13.jar;%APP_LIB_DIR%\org.apache.httpcomponents.httpmime-4.5.13.jar;%APP_LIB_DIR%\commons-logging.commons-logging-1.2.jar;%APP_LIB_DIR%\software.amazon.ion.ion-java-1.0.2.jar;%APP_LIB_DIR%\org.hamcrest.hamcrest-core-1.3.jar;%APP_LIB_DIR%\webknossos.webknossos-wk-assets.jar"
+set "APP_MAIN_CLASS=play.core.server.ProdServerStart"
+set "SCRIPT_CONF_FILE=%APP_HOME%\conf\application.ini"
+
+rem Bundled JRE has priority over standard environment variables
+if defined BUNDLED_JVM (
+ set "_JAVACMD=%BUNDLED_JVM%\bin\java.exe"
+) else (
+ if "%JAVACMD%" neq "" (
+ set "_JAVACMD=%JAVACMD%"
+ ) else (
+ if "%JAVA_HOME%" neq "" (
+ if exist "%JAVA_HOME%\bin\java.exe" set "_JAVACMD=%JAVA_HOME%\bin\java.exe"
+ )
+ )
+)
+
+if "%_JAVACMD%"=="" set _JAVACMD=java
+
+rem Detect if this java is ok to use.
+for /F %%j in ('"%_JAVACMD%" -version 2^>^&1') do (
+ if %%~j==java set JAVAINSTALLED=1
+ if %%~j==openjdk set JAVAINSTALLED=1
+)
+
+rem BAT has no logical or, so we do it OLD SCHOOL! Oppan Redmond Style
+set JAVAOK=true
+if not defined JAVAINSTALLED set JAVAOK=false
+
+if "%JAVAOK%"=="false" (
+ echo.
+ echo A Java JDK is not installed or can't be found.
+ if not "%JAVA_HOME%"=="" (
+ echo JAVA_HOME = "%JAVA_HOME%"
+ )
+ echo.
+ echo Please go to
+ echo http://www.oracle.com/technetwork/java/javase/downloads/index.html
+ echo and download a valid Java JDK and install before running webknossos.
+ echo.
+ echo If you think this message is in error, please check
+ echo your environment variables to see if "java.exe" and "javac.exe" are
+ echo available via JAVA_HOME or PATH.
+ echo.
+ if defined DOUBLECLICKED pause
+ exit /B 1
+)
+
+rem if configuration files exist, prepend their contents to the script arguments so it can be processed by this runner
+call :parse_config "%SCRIPT_CONF_FILE%" SCRIPT_CONF_ARGS
+
+call :process_args %SCRIPT_CONF_ARGS% %%*
+
+set _JAVA_OPTS=!_JAVA_OPTS! !_JAVA_PARAMS!
+
+if defined CUSTOM_MAIN_CLASS (
+ set MAIN_CLASS=!CUSTOM_MAIN_CLASS!
+) else (
+ set MAIN_CLASS=!APP_MAIN_CLASS!
+)
+
+rem Call the application and pass all arguments unchanged.
+"%_JAVACMD%" !_JAVA_OPTS! !WEBKNOSSOS_OPTS! -cp "%APP_CLASSPATH%" %MAIN_CLASS% !_APP_ARGS!
+
+@endlocal
+
+exit /B %ERRORLEVEL%
+
+
+rem Loads a configuration file full of default command line options for this script.
+rem First argument is the path to the config file.
+rem Second argument is the name of the environment variable to write to.
+:parse_config
+ set _PARSE_FILE=%~1
+ set _PARSE_OUT=
+ if exist "%_PARSE_FILE%" (
+ FOR /F "tokens=* eol=# usebackq delims=" %%i IN ("%_PARSE_FILE%") DO (
+ set _PARSE_OUT=!_PARSE_OUT! %%i
+ )
+ )
+ set %2=!_PARSE_OUT!
+exit /B 0
+
+
+:add_java
+ set _JAVA_PARAMS=!_JAVA_PARAMS! %*
+exit /B 0
+
+
+:add_app
+ set _APP_ARGS=!_APP_ARGS! %*
+exit /B 0
+
+
+rem Processes incoming arguments and places them in appropriate global variables
+:process_args
+ :param_loop
+ call set _PARAM1=%%1
+ set "_TEST_PARAM=%~1"
+
+ if ["!_PARAM1!"]==[""] goto param_afterloop
+
+
+ rem ignore arguments that do not start with '-'
+ if "%_TEST_PARAM:~0,1%"=="-" goto param_java_check
+ set _APP_ARGS=!_APP_ARGS! !_PARAM1!
+ shift
+ goto param_loop
+
+ :param_java_check
+ if "!_TEST_PARAM:~0,2!"=="-J" (
+ rem strip -J prefix
+ set _JAVA_PARAMS=!_JAVA_PARAMS! !_TEST_PARAM:~2!
+ shift
+ goto param_loop
+ )
+
+ if "!_TEST_PARAM:~0,2!"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1,*" %%G in ("!_TEST_PARAM!") DO (
+ if not ["%%H"] == [""] (
+ set _JAVA_PARAMS=!_JAVA_PARAMS! !_PARAM1!
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Dprop="42"
+ call set _PARAM1=%%1=%%2
+ set _JAVA_PARAMS=!_JAVA_PARAMS! !_PARAM1!
+ shift
+ )
+ )
+ ) else (
+ if "!_TEST_PARAM!"=="-main" (
+ call set CUSTOM_MAIN_CLASS=%%2
+ shift
+ ) else (
+ set _APP_ARGS=!_APP_ARGS! !_PARAM1!
+ )
+ )
+ shift
+ goto param_loop
+ :param_afterloop
+
+exit /B 0
diff --git a/conf/application.conf b/conf/application.conf
index 07d8b5d2dd1..ba79a4ef515 100644
--- a/conf/application.conf
+++ b/conf/application.conf
@@ -35,6 +35,7 @@ play {
font-src = "* data: blob: 'unsafe-inline'"
}
enabled += "play.filters.headers.SecurityHeadersFilter"
+ enabled += "play.filters.cors.CORSFilter"
disabled += "play.filters.csrf.CSRFFilter"
disabled += "play.filters.hosts.AllowedHostsFilter"
headers {
@@ -63,20 +64,47 @@ play {
}
}
+play.filters.cors {
+ # The allowed origins. If null, all origins are allowed.
+ allowedOrigins = [
+ "https://staging--lincbrain-org.netlify.app",
+ "https://*.lincbrain.org", # NOTE(review): Play's CORSFilter matches allowedOrigins exactly — wildcard subdomains are not supported; list each subdomain explicitly (or set allowedOrigins = null to allow all)
+ "https://lincbrain.org",
+ "https://staging.lincbrain.org",
+ "https://webknossos-staging.lincbrain.org",
+ "https://webknossos-backup.lincbrain.org",
+ "https://webknossos-r5.lincbrain.org",
+ "https://webknossos.lincbrain.org",
+ "https://webknossos-r5-24xlarge.lincbrain.org",
+ ]
+
+ # The allowed HTTP methods. If null, all methods are allowed.
+ allowedHttpMethods = ["GET", "POST", "PUT", "DELETE", "OPTIONS"]
+
+ # The allowed HTTP headers. If null, all headers are allowed.
+ allowedHttpHeaders = ["Accept", "Content-Type", "X-Requested-With"]
+
+ # The exposed headers
+ exposedHeaders = []
+
+ # Whether to support credentials
+ supportsCredentials = true
+}
+
pekko.actor.default-dispatcher {
# We use a compromise for our thread pool configuration
# Parts of our api are async, so they should not need many threads,
# but some parts are also blocking (file io, gcs, s3 access), causing new requests
# to wait despite idle cpu, if there are too few threads
fork-join-executor {
- parallelism-factor = 5.0 # Thread count = ceil(available processors * factor)
- parallelism-min = 8 # Min number of threads to cap factor-based parallelism number to
- parallelism-max = 300 # Max number of threads to cap factor-based parallelism number to
+ parallelism-factor = 8.0 # Aggressive thread scaling (8 threads per vCPU)
+ parallelism-min = 512 # High minimum to fully utilize the large number of vCPUs
+ parallelism-max = 4096 # High max threads for extreme stress testing
}
}
webKnossos {
- tabTitle = "WEBKNOSSOS"
+ tabTitle = "LINC | WEBKNOSSOS"
user {
timeTrackingPause = 60 seconds
timeTrackingOnlyWithSignificantChanges = false
@@ -129,6 +157,10 @@ Expires: 2024-07-03T10:00:00.000Z
Preferred-Languages: en,de
"""
}
+ s3PrivateBucketConfig {
+ keyword = "linc-brain-mit"
+ enabled = true
+ }
}
singleSignOn {
@@ -257,9 +289,10 @@ silhouette {
cookieAuthenticator {
cookieName = "id"
cookiePath = "/"
+ domain = ".lincbrain.org"
secureCookie = false
httpOnlyCookie = true
- useFingerprinting = true
+ useFingerprinting = false # Default set to `true` but disabled due to LINC Data Platform communication
authenticatorExpiry = 30 days
cookieMaxAge = 365 days
+      signerSecret = "`?IVa2TCaZAZ4TY]B0=tCs9mJdyaA0V…"
[NOTE(review): extraction garbling — everything between a `<` in the secret value and a later `>` was lost, dropping the rest of signerSecret, the end of the conf/application.conf hunk, and the start of a docker-compose override hunk; the healthcheck below is reconstructed from its surviving tail — verify against the original patch]
+    healthcheck:
+      test: bash -c "exec 3<> /dev/tcp/127.0.0.1/6379 && echo PING >&3 && head -1 <&3 | grep PONG"
+ timeout: 1s
+ interval: 5s
+ retries: 10
+
+ nginx-proxy:
+ image: nginx:latest
+ container_name: nginx-proxy
+ ports:
+ - "8080:8080"
+ - "80:80"
+ - "443:443"
+ volumes:
+ - ./nginx.conf:/etc/nginx/nginx.conf:ro
+ - ./certs:/etc/letsencrypt
+ - /home/ec2-user/opt/webknossos/binaryData:/home/ec2-user/opt/webknossos/binaryData:ro
+ depends_on:
+ - webknossos
+
+ fossil-db-backup:
+ image: scalableminds/fossildb-client:master
+ depends_on:
+ fossildb:
+ condition: service_healthy
+ command: ["webknossos-fossildb-1", "backup"]
+ networks:
+ - webknossos_default
+
+ fossil-db-restore:
+ image: scalableminds/fossildb-client:master
+ depends_on:
+ fossildb:
+ condition: service_healthy
+ command: ["webknossos-fossildb-1", "restore"]
+ networks:
+ - webknossos_default
+
+ postgres-backup:
+ image: postgres
+ command: ["/bin/bash", "-c", "PGPASSWORD=postgres pg_dump -Fc -h postgres -U postgres webknossos | gzip > /backups/backup_$(date +%Y-%m-%d_%H-%M).sql.gz"]
+ volumes:
+ - ./backups:/backups
+ depends_on:
+ - postgres
+
+# Explicitly declare networks for fossil-db-backup connectivity
+networks:
+ webknossos_default:
+ external: true
diff --git a/docker-compose.yml b/docker-compose.yml
index 76f802426bb..4064e3a5d77 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,13 +3,8 @@ version: "2.2"
services:
# Production
webknossos:
- build:
- context: .
- dockerfile: Dockerfile
- cache_from:
- - scalableminds/webknossos
- - scalableminds/webknossos:${DEV_CACHE:-master}
- image: scalableminds/webknossos:${DOCKER_TAG:-master}
+ build: .
+ image: lincbrain/webknossos:${DOCKER_TAG:-master}
ports:
- "9000:9000"
links:
@@ -47,7 +42,7 @@ services:
webknossos-datastore:
build: webknossos-datastore
- image: scalableminds/webknossos-datastore:${DOCKER_TAG:-master}
+ image: lincbrain/webknossos-datastore:${DOCKER_TAG:-master}
ports:
- "9090:9090"
volumes:
@@ -66,7 +61,7 @@ services:
webknossos-tracingstore:
build: webknossos-tracingstore
- image: scalableminds/webknossos-tracingstore:${DOCKER_TAG:-master}
+ image: lincbrain/webknossos-tracingstore:${DOCKER_TAG:-master}
ports:
- "9050:9050"
command:
@@ -88,7 +83,7 @@ services:
# Development
base:
- image: scalableminds/webknossos-dev
+ image: lincbrain/webknossos-dev
build:
context: .
dockerfile: Dockerfile.dev
@@ -269,7 +264,7 @@ services:
# FossilDB
fossildb:
- image: scalableminds/fossildb:master__484
+ image: scalableminds/fossildb:master__489
command:
- fossildb
- -c
diff --git a/frontend/javascripts/admin/auth/login_view.tsx b/frontend/javascripts/admin/auth/login_view.tsx
index 1336a9e059a..a8d779ceb1c 100644
--- a/frontend/javascripts/admin/auth/login_view.tsx
+++ b/frontend/javascripts/admin/auth/login_view.tsx
@@ -31,7 +31,17 @@ function LoginView({ history, redirect }: Props) {
Login
-
+
WebKNOSSOS for LINC is accessible only to approved users
+
+
+
Webknossos can be accessed by selecting the Webknossos links on the LINC
+ Data Platform homepage
+ or in the file browser next to each asset
+
+
Please ensure that you are not using an Incognito browser session as cookies are required for login
+ via the LINC Data Platform
+
+ {/**/}
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx b/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx
index e22972cb679..712a04776ea 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx
@@ -38,8 +38,8 @@ export default function SelectImportType({
In all three cases, you can tweak which layers should be used later.
-
diff --git a/frontend/javascripts/admin/welcome_ui.tsx b/frontend/javascripts/admin/welcome_ui.tsx
index 03df025e82e..105bac2f9a2 100644
--- a/frontend/javascripts/admin/welcome_ui.tsx
+++ b/frontend/javascripts/admin/welcome_ui.tsx
@@ -73,8 +73,9 @@ export const WhatsNextHeader = ({ activeUser, onDismiss }: WhatsNextHeaderProps)
-
Welcome to WEBKNOSSOS
+
Welcome to LINC | WEBKNOSSOS
+ Welcome to the WEBKNOSSOS open-source version for LINC.
Congratulations on your new WEBKNOSSOS account! To hit the ground running, we recommend
the following steps to you: