diff --git a/docs/inclusion_connect.md b/docs/inclusion_connect.md
index 896c6d67..e566492c 100644
--- a/docs/inclusion_connect.md
+++ b/docs/inclusion_connect.md
@@ -179,8 +179,7 @@ The _id token_ is a signed JWT object that notably contains:

 - **email**: their e-mail address.

-The token is signed with the `RS256` algorithm, and the public key can be retrieved from a Keycloak API
-to verify the signature.
+The token is signed with the `RS256` algorithm, and the public key can be retrieved to verify the signature.

 However, when the _access token_ is obtained through a direct call from the _backend_ to the _OpenID Connect provider_
 (Inclusion Connect), rather than, for example, being obtained by the _frontend_ and then passed on to the _backend_,
 validating the token does not protect much.
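To make the verification step described in the hunk above concrete, here is a minimal sketch of how a client could check the `RS256` signature of the _id token_ in Python. It assumes the PyJWT library and a provider that publishes its keys at a standard `jwks_uri`; the URL and client id below are placeholders, not values taken from this project.

```python
import jwt  # PyJWT
from jwt import PyJWKClient

# Placeholders: use the jwks_uri advertised in the provider's OIDC discovery
# document and your own client id.
JWKS_URL = "https://auth.example.org/jwks"
CLIENT_ID = "example-client"


def verify_id_token(raw_token: str) -> dict:
    # Fetch the public key whose "kid" matches the token header.
    signing_key = PyJWKClient(JWKS_URL).get_signing_key_from_jwt(raw_token)
    # Check the RS256 signature, expiry and audience, then return the claims.
    return jwt.decode(raw_token, signing_key.key, algorithms=["RS256"], audience=CLIENT_ID)
```

As the documentation notes just above, this check adds little when the backend obtains the token directly from the provider over TLS instead of receiving it from the frontend.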
diff --git a/inclusion_connect/accounts/views.py b/inclusion_connect/accounts/views.py
index 473b3f75..2f3eeb95 100644
--- a/inclusion_connect/accounts/views.py
+++ b/inclusion_connect/accounts/views.py
@@ -100,12 +100,6 @@ class RegisterView(BaseUserCreationView):
     template_name = "register.html"
     EVENT_NAME = "register"

-    # TODO: Remove keycloak compatibility
-    def dispatch(self, request, *args, **kwargs):
-        if all(param in oidc_params(request) for param in ["login_hint", "lastname", "firstname"]):
-            return HttpResponseRedirect(reverse("accounts:activate"))
-        return super().dispatch(request, *args, **kwargs)
-

 class ActivateAccountView(BaseUserCreationView):
     form_class = forms.ActivateAccountForm
diff --git a/inclusion_connect/middleware.py b/inclusion_connect/middleware.py
index 56f2794c..32596f53 100644
--- a/inclusion_connect/middleware.py
+++ b/inclusion_connect/middleware.py
@@ -1,5 +1,3 @@
-import logging
-
 from django.core.exceptions import PermissionDenied
 from django.urls import reverse
 from django.utils.cache import add_never_cache_headers
@@ -8,9 +6,6 @@ from inclusion_connect.utils.urls import add_url_params


-logger = logging.getLogger("keycloak_compat")
-
-
 def never_cache(get_response):
     def middleware(request):
         response = get_response(request)
diff --git a/scripts/import_from_kc_one_user.py b/scripts/import_from_kc_one_user.py
deleted file mode 100755
index 1cc45457..00000000
--- a/scripts/import_from_kc_one_user.py
+++ /dev/null
@@ -1,266 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import pathlib
-import sys
-
-
-sys.path.append(str(pathlib.Path(__file__).parent.parent))
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "inclusion_connect.settings.dev")
-import django
-
-
-django.setup()
-
-
-import datetime
-import json
-from collections import defaultdict
-
-import psycopg
-from django.conf import settings
-from django.db import transaction
-from elasticsearch import Elasticsearch
-from elasticsearch.helpers import bulk
-
-from inclusion_connect.oidc_overrides.models import Application
-from inclusion_connect.stats.models import Stats
-from inclusion_connect.users.models import EmailAddress, User, UserApplicationLink
-
-
-# Don't keep old users that did not validate their email after X days
-REALMS = ["inclusion-connect", "Demo"]
-user_id = "FILL_ME"
-
-KC_DBNAME = os.getenv("KC_DBNAME")
-KC_HOST = os.getenv("KC_HOST")
-KC_PORT = os.getenv("KC_PORT")
-KC_PASSWORD = os.getenv("KC_PASSWORD")
-KC_USER = os.getenv("KC_USER")
-
-
-def parse_keycloak_dt(value):
-    return datetime.datetime.fromtimestamp(value / 1000, datetime.UTC)
-
-
-class KeyCloakCursor:
-    def __init__(self):
-        self.cursor = None
-        self.connection = None
-
-    def __enter__(self):
-        self.connection = psycopg.connect(
-            host=KC_HOST,
-            dbname=KC_DBNAME,
-            port=KC_PORT,
-            password=KC_PASSWORD,
-            user=KC_USER,
-            keepalives=1,
-            keepalives_idle=30,
-            keepalives_interval=5,
-            keepalives_count=5,
-        )
-        self.connection.autocommit = True
-        self.cursor = self.connection.cursor()
-        return self.cursor
-
-    def __exit__(self, exc_type, exc_value, exc_traceback):
-        if self.cursor:
-            self.cursor.close()
-        if self.connection:
-            self.connection.close()
-
-
-with KeyCloakCursor() as cursor:
-    # Users
-    cursor.execute(
-        f"""
-        SELECT user_entity.id, username, email, first_name, last_name, email_verified, created_timestamp, realm.name
-        FROM user_entity
-        INNER JOIN realm ON user_entity.realm_id = realm.id
-        WHERE user_entity.id = '{user_id}'
-        """
-    )
-    users_data = cursor.fetchall()
-    if len(users_data) != 1:
-        print("User not found")
-        sys.exit()
-
-    # required actions
-    cursor.execute(
-        f"""
-        SELECT user_id, required_action
-        FROM user_required_action
-        WHERE user_id = '{user_id}'
-        """
-    )
-    actions = cursor.fetchall()
-    users_must_accept_terms = []
-    users_must_verify_email = []
-    users_must_reset_password = []
-    action_to_userlist = {
-        "VERIFY_EMAIL": users_must_verify_email,
-        "terms_and_conditions": users_must_accept_terms,
-        "UPDATE_PASSWORD": users_must_reset_password,
-    }
-    for user_id, required_action in actions:
-        action_to_userlist[required_action].append(user_id)
-
-    # credentials_data
-    cursor.execute(
-        f"""
-        SELECT user_id, secret_data, credential_data, created_date
-        FROM credential
-        WHERE user_id = '{user_id}'
-        ORDER BY created_date
-        """
-    )
-    credentials_data = cursor.fetchall()
-    credentials = {}
-    for user_id, secret_data, credential_data, created_date in credentials_data:
-        decoded_secret_data = json.loads(secret_data)
-        secret = decoded_secret_data["value"]
-        salt = decoded_secret_data["salt"]
-        decoded_credential_data = json.loads(credential_data)
-        iterations = decoded_credential_data["hashIterations"]
-        # Overwrite previous credentials if it exists
-        credentials[user_id] = "$".join(["keycloak-pbkdf2-sha256", str(iterations), salt, secret])
-
-    # application links
-    cursor.execute(
-        f"""
-        SELECT user_id, client_id, MAX(event_time)
-        FROM event_entity
-        WHERE type = 'LOGIN'
-        AND user_id = '{user_id}'
-        GROUP BY user_id, client_id
-        """
-    )
-    app_links_data = cursor.fetchall()
-    users_app_links = defaultdict(list)
-    for user_id, client_id, event_time in app_links_data:
-        users_app_links[user_id].append([client_id, parse_keycloak_dt(event_time)])
-
-    users_last_login = {}
-    for user_id, application_last_logins in users_app_links.items():
-        users_last_login[user_id] = max([event_time for client_id, event_time in application_last_logins])
-
-    # stats
-    cursor.execute(
-        f"""
-        SELECT user_id, client_id, event_time, type
-        FROM event_entity
-        WHERE type IN ('LOGIN', 'REGISTER')
-        AND user_id = '{user_id}'
-        """
-    )
-    stats_data = cursor.fetchall()
-
-applications = {application.client_id: application for application in Application.objects.all()}
-
-users = {}
-email_addresses = []
-app_links = []
-for user_id, username, email, first_name, last_name, email_verified, created_timestamp, realm_name in users_data:
-    if realm_name not in REALMS:
-        continue
-    created_at = parse_keycloak_dt(created_timestamp)
-    email_verified = email_verified and not user_id in users_must_verify_email
-    user = User(
-        username=user_id,
-        email=email if email_verified else "",
-        first_name=first_name,
-        last_name=last_name,
-        date_joined=created_at,
-        last_login=users_last_login.get(user_id),
-        password=credentials.get(user_id, ""),
-        must_reset_password=user_id in users_must_reset_password,
-        terms_accepted_at=None if user_id in users_must_accept_terms else max(created_at, settings.NEW_TERMS_DATE),
-    )
-    email_addresses.append(
-        EmailAddress(user=user, email=email, verified_at=created_at if email_verified else None, created_at=created_at)
-    )
-
-    for client_id, last_login in users_app_links[user_id]:
-        if client_id in applications:
-            app_links.append(
-                UserApplicationLink(
-                    user=user,
-                    application=applications[client_id],
-                    last_login=last_login,
-                )
-            )
-
-    users[user_id] = user
-
-stats = []
-stats_data_2 = set(
-    [
-        (user_id, client_id, parse_keycloak_dt(event_time).date().replace(day=1), action)
-        for user_id, client_id, event_time, action in stats_data
-    ]
-)
-for user_id, client_id, event_time, action in stats_data_2:
-    application = applications.get(client_id)
-    user = users.get(user_id)
-    if application and user:
-        stats.append(
-            Stats(
-                user=user,
-                application=application,
-                date=event_time,
-                action=action.lower(),
-            )
-        )
-
-
-# Write in db
-with transaction.atomic():
-    User.objects.bulk_create(users.values())
-    EmailAddress.objects.bulk_create(email_addresses)
-    UserApplicationLink.objects.bulk_create(app_links)
-    Stats.objects.bulk_create(stats, ignore_conflicts=True)
-
-
-print(f"Created {len(users)} Users")
-print(f"Created {len(email_addresses)} EmailAddresses")
-print(f"Created {len(app_links)} UserApplicationLinks")
-print(f"Created {len(stats)} Stats")
-
-
-print("Sending logs to ES")
-with KeyCloakCursor() as cursor:
-    cursor.execute(
-        f"""
-        SELECT event_time, ip_address, user_id, client_id, type
-        FROM event_entity
-        WHERE type IN ('LOGIN', 'REGISTER')
-        AND user_id = '{user_id}'
-        """
-    )
-    stats = cursor.fetchall()
-
-es_config = settings.LOGGING["handlers"]["elasticsearch"]
-es_client = Elasticsearch(es_config["host"], http_compress=True, request_timeout=5, max_retries=10)
-
-actions = []
-for event_time, ip_address, user_id, client_id, kind in stats:
-    application = applications.get(client_id)
-    user = users.get(user_id)
-    if application and user:
-        actions.append(
-            {
-                "_source": {
-                    "ip_address": ip_address,
-                    "application": client_id,
-                    "event": kind.lower(),
-                    "user": user_id,
-                    "@timestamp": parse_keycloak_dt(event_time),
-                    "name": "inclusion_connect.auth",
-                    "levelname": "INFO",
-                }
-            }
-        )
-print(f"Sending {len(actions)} logs")
-bulk(client=es_client, actions=actions, index=es_config["index_name"], stats_only=True)
-print("Done!")
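The deleted script stores each Keycloak credential as `keycloak-pbkdf2-sha256$<iterations>$<salt>$<secret>`, a Django-style encoded password that keeps Keycloak's PBKDF2-SHA256 parameters. As a rough sketch only (this is not the project's actual password hasher), and assuming Keycloak's usual base64-encoded salt and derived key, such a value could be verified along these lines:

```python
import base64
import hashlib
import hmac


def check_keycloak_password(password: str, encoded: str) -> bool:
    # Hypothetical helper for "keycloak-pbkdf2-sha256$<iterations>$<salt>$<secret>" values.
    algorithm, iterations, salt_b64, secret_b64 = encoded.split("$")
    if algorithm != "keycloak-pbkdf2-sha256":
        return False
    expected = base64.b64decode(secret_b64)
    derived = hashlib.pbkdf2_hmac(
        "sha256",
        password.encode(),
        base64.b64decode(salt_b64),
        int(iterations),
        dklen=len(expected),  # recover the derived-key length from the stored secret
    )
    return hmac.compare_digest(derived, expected)
```

In a Django project this logic would typically live in a custom password hasher, so imported users can log in with their Keycloak password and be transparently re-hashed to a native scheme on first login.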