From 1f51ce8716d6b72c4615471bd0a77e0b627f3bf2 Mon Sep 17 00:00:00 2001 From: Ines Date: Tue, 29 Jul 2025 15:28:23 +0200 Subject: [PATCH 01/73] Update serverapplication.yml.j2 --- roles/openaccess/templates/serverapplication.yml.j2 | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 16a480884..0191feeed 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -68,15 +68,8 @@ oidcng: resource-server-secret: secret base-url: {{ openconextaccess_base_domain }} -invite: - user: {{ openconextaccess.invite.api_username }} - password: {{ openconextaccess.invite.api_password }} - base-url: {{ openconextaccess.invite.host }} - manage-identifier: {{ openconextaccess.invite.manage_identifier }} - manage-type: oidc10_rp - jira: - enabled: false + enabled: true base-url: {{ openconextaccess.jira.base_url }} user-name: {{ openconextaccess.jira.username }} project-key: {{ openconextaccess.jira.project_key }} From 64c1a89c68411600956eb086a2130d1ec1f9aa6f Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Wed, 12 Nov 2025 08:09:29 +0100 Subject: [PATCH 02/73] Added email.serviceDeskEmail placeholder --- roles/openaccess/templates/serverapplication.yml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 5a71cc5f2..34f2d3ade 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -109,6 +109,7 @@ feature: email: from: "{{ noreply_email }}" contactEmail: "{{ support_email }}" + serviceDeskEmail: "{{ support_email }}" environment: "{{ environment_shortname }}" manage: From 961ffa805d7271dd37b4772b06641e2e1a71382a Mon Sep 17 00:00:00 2001 From: Leroy <3416288+Liemine@users.noreply.github.com> Date: Mon, 17 Nov 2025 12:02:01 +0100 Subject: [PATCH 03/73] #769 Make affiliation email more configurable --- roles/myconext/templates/application.yml.j2 | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 9f5a4d535..a4fea7074 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -114,6 +114,8 @@ feature: create_eduid_institution_landing: {{ myconext.feature_create_eduid_institution_landing }} # Do we default remember the user for a longer period default_remember_me: True + # Do we default add affiliate email address + default_affiliate_email: True # Does the SAMLIdpService expects authn requests to be signed requires_signed_authn_request: False # Do we support ID verify @@ -135,6 +137,8 @@ feature: # Set to true to use the BRIN code to add ui-roles and authentication scoped affiliations use_remote_creation_for_affiliation: {{ myconext.feature_use_remote_creation_for_affiliation }} + default_affiliate_email_domain: eduid.nl + captcha: sitekey: {{ myconext.captcha_sitekey }} apikey: {{ myconext.captcha_apikey }} From 3d1f8845b405a7914fb16abbd281ab64013ee42b Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Tue, 18 Nov 2025 16:20:30 +0100 Subject: [PATCH 04/73] Fixed indentation for myconext --- roles/myconext/templates/application.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 
a4fea7074..abcac9949 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -137,7 +137,7 @@ feature: # Set to true to use the BRIN code to add ui-roles and authentication scoped affiliations use_remote_creation_for_affiliation: {{ myconext.feature_use_remote_creation_for_affiliation }} - default_affiliate_email_domain: eduid.nl +default_affiliate_email_domain: eduid.nl captcha: sitekey: {{ myconext.captcha_sitekey }} From 06a6381c4984e34f8b3c24efb95a73447b933a11 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Wed, 19 Nov 2025 17:04:19 +0100 Subject: [PATCH 05/73] WIP for https://github.com/OpenConext/OpenConext-attribute-aggregation/issues/143 --- roles/attribute-aggregation/tasks/main.yml | 6 +++++- .../templates/serverapplication.yml.j2 | 1 - 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/roles/attribute-aggregation/tasks/main.yml b/roles/attribute-aggregation/tasks/main.yml index a8959123f..dcac53c08 100644 --- a/roles/attribute-aggregation/tasks/main.yml +++ b/roles/attribute-aggregation/tasks/main.yml @@ -53,6 +53,10 @@ command: "-Xmx128m --spring.config.location=./" etc_hosts: host.docker.internal: host-gateway + labels: + traefik.http.routers.aagui.rule: "Host(`aa.{{ base_domain }}`)" + traefik.http.routers.aagui.tls: "true" + traefik.enable: "true" healthcheck: test: [ @@ -61,7 +65,7 @@ "-no-verbose", "--tries=1", "--spider", - "http://localhost:8080/aa/api/internal/health", + "http://localhost:8080/internal/health", ] interval: 10s timeout: 10s diff --git a/roles/attribute-aggregation/templates/serverapplication.yml.j2 b/roles/attribute-aggregation/templates/serverapplication.yml.j2 index 8e49715b4..28fc29379 100644 --- a/roles/attribute-aggregation/templates/serverapplication.yml.j2 +++ b/roles/attribute-aggregation/templates/serverapplication.yml.j2 @@ -8,7 +8,6 @@ server: # The port to where this Spring Boot application listens to. e.g. 
http://localhost:{{ springapp_tcpport }} port: 8080 servlet: - context-path: /aa/api session: timeout: 28800 cookie: From 6b68917d4484cdfc5b5b7e9b6bf87840f588f306 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Thu, 20 Nov 2025 09:37:22 +0100 Subject: [PATCH 06/73] Fixes https://github.com/OpenConext/OpenConext-attribute-aggregation/issues/143 --- roles/attribute-aggregation/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/attribute-aggregation/tasks/main.yml b/roles/attribute-aggregation/tasks/main.yml index dcac53c08..2fb6277d0 100644 --- a/roles/attribute-aggregation/tasks/main.yml +++ b/roles/attribute-aggregation/tasks/main.yml @@ -54,8 +54,8 @@ etc_hosts: host.docker.internal: host-gateway labels: - traefik.http.routers.aagui.rule: "Host(`aa.{{ base_domain }}`)" - traefik.http.routers.aagui.tls: "true" + traefik.http.routers.aaserver.rule: "Host(`aa.{{ base_domain }}`)" + traefik.http.routers.aaserver.tls: "true" traefik.enable: "true" healthcheck: test: From 05b0831f5681faf4e5abdc7776ec99b7a458b260 Mon Sep 17 00:00:00 2001 From: Ricardo van der Heijden Date: Thu, 20 Nov 2025 10:50:29 +0100 Subject: [PATCH 07/73] Fixes https://github.com/OpenConext/OpenConext-myconext/issues/757 --- roles/myconext/templates/application.yml.j2 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index abcac9949..31b0f1a8e 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -136,6 +136,8 @@ feature: captcha_enabled: True # Set to true to use the BRIN code to add ui-roles and authentication scoped affiliations use_remote_creation_for_affiliation: {{ myconext.feature_use_remote_creation_for_affiliation }} + # Set to true to show the account linking related options on the personal-info page and home page (banner) + enable_account_linking: True default_affiliate_email_domain: eduid.nl From c930adfc53fb43f1dd7bf6ac6cdbba7dc2d360c8 Mon Sep 17 00:00:00 2001 From: Ricardo van der Heijden Date: Thu, 20 Nov 2025 14:07:23 +0100 Subject: [PATCH 08/73] #757 Replaces hardcoded value with variable --- environments/template/group_vars/template.yml | 3 ++- roles/myconext/templates/application.yml.j2 | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/environments/template/group_vars/template.yml b/environments/template/group_vars/template.yml index 1317bbd88..916db22db 100644 --- a/environments/template/group_vars/template.yml +++ b/environments/template/group_vars/template.yml @@ -290,6 +290,7 @@ myconext: feature_create_eduid_institution_landing: true feature_allowlist: false feature_dry_run_email_cron: true + feature_enable_account_linking: true sms_api_url: "https://rest.spryngsms.com/v1/messages" sms_api_route: "default" sp_entity_id: https://engine.{{ base_domain }}/authentication/sp/metadata @@ -613,4 +614,4 @@ dashboard_install: false # change them if necessary, for example on docker hosts they have to be a little higher # rsyslog_imjournal_ratelimitburst: 2000 # rsyslog_imjournal_ratelimitinterval: 600 -# rsyslog_maxmessagesize: 8000 \ No newline at end of file +# rsyslog_maxmessagesize: 8000 diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 31b0f1a8e..d30ffd3e2 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -137,7 +137,7 @@ feature: # Set to true to use the BRIN code to add ui-roles and 
authentication scoped affiliations use_remote_creation_for_affiliation: {{ myconext.feature_use_remote_creation_for_affiliation }} # Set to true to show the account linking related options on the personal-info page and home page (banner) - enable_account_linking: True + enable_account_linking: {{ myconext.feature_enable_account_linking }} default_affiliate_email_domain: eduid.nl From 48bdd9aeceba104ded5cc0bb8652180cb34d592e Mon Sep 17 00:00:00 2001 From: Ricardo van der Heijden <20791917+ricardovdheijden@users.noreply.github.com> Date: Mon, 24 Nov 2025 15:54:13 +0100 Subject: [PATCH 09/73] Fixes https://github.com/OpenConext/OpenConext-myconext/issues/759 Adds feature toggle use_app --- environments/template/group_vars/template.yml | 1 + roles/myconext/templates/application.yml.j2 | 2 ++ 2 files changed, 3 insertions(+) diff --git a/environments/template/group_vars/template.yml b/environments/template/group_vars/template.yml index 916db22db..55e659b57 100644 --- a/environments/template/group_vars/template.yml +++ b/environments/template/group_vars/template.yml @@ -291,6 +291,7 @@ myconext: feature_allowlist: false feature_dry_run_email_cron: true feature_enable_account_linking: true + feature_use_app: true sms_api_url: "https://rest.spryngsms.com/v1/messages" sms_api_route: "default" sp_entity_id: https://engine.{{ base_domain }}/authentication/sp/metadata diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index d30ffd3e2..42f0c64a5 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -138,6 +138,8 @@ feature: use_remote_creation_for_affiliation: {{ myconext.feature_use_remote_creation_for_affiliation }} # Set to true to show the account linking related options on the personal-info page and home page (banner) enable_account_linking: {{ myconext.feature_enable_account_linking }} + # Set to true to show the app login option + use_app: {{ myconext.feature_use_app }} default_affiliate_email_domain: eduid.nl From cf732253d7cc7b38eb070d92c18cab41fa2805cb Mon Sep 17 00:00:00 2001 From: Leroy <3416288+Liemine@users.noreply.github.com> Date: Tue, 25 Nov 2025 11:00:42 +0100 Subject: [PATCH 10/73] #1001 Add email addresses --- roles/myconext/templates/application.yml.j2 | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 9f5a4d535..f41395144 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -37,8 +37,11 @@ springdoc: enabled: true email: - from: eduID - error_mail: info@surfconext.nl + from_deprovisioning: + from_code: eduID + from_app_nudge: + from_new_device: + error: info@surfconext.nl magic-link-url: https://login.{{ myconext_base_domain }}/saml/guest-idp/magic my-surfconext-url: https://mijn.{{ myconext_base_domain }} idp-surfconext-url: https://login.{{ myconext_base_domain }} From 19d0a4828f95e8f8eae9341a13699b826109c5ca Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Tue, 25 Nov 2025 12:47:14 +0100 Subject: [PATCH 11/73] Added missing attributes from feature branch --- roles/myconext/templates/application.yml.j2 | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index f41395144..fab11c98d 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -137,6 +137,12 @@ 
feature: captcha_enabled: True # Set to true to use the BRIN code to add ui-roles and authentication scoped affiliations use_remote_creation_for_affiliation: {{ myconext.feature_use_remote_creation_for_affiliation }} + # Set to true to show the account linking related options on the personal-info page and home page (banner) + enable_account_linking: {{ myconext.feature_enable_account_linking }} + # Set to true to show the app login option + use_app: {{ myconext.feature_use_app }} + +default_affiliate_email_domain: eduid.nl captcha: sitekey: {{ myconext.captcha_sitekey }} From 9b0db989a02b3b2474a1a44371ba68d488541039 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Wed, 26 Nov 2025 09:08:17 +0100 Subject: [PATCH 12/73] Added languages for invite --- roles/invite/templates/serverapplication.yml.j2 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/roles/invite/templates/serverapplication.yml.j2 b/roles/invite/templates/serverapplication.yml.j2 index ce195b62d..e637ff0cf 100644 --- a/roles/invite/templates/serverapplication.yml.j2 +++ b/roles/invite/templates/serverapplication.yml.j2 @@ -118,6 +118,8 @@ config: past-date-allowed: {{ invite.past_date_allowed }} performance-seed-allowed: {{ invite.performance_seed_allowed }} eduid-idp-schac-home-organization: {{ invite.eduid_idp_schac_home_organization }} + # Determines the languages available for switching language, supported are 'nl', 'en' and 'pt' + languages: "nl, en" feature: limit-institution-admin-role-visibility: {{ invite.limit_institution_admin_role_visibility }} From 90262652a4073135bb53ee432f4497e91a0f8c96 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Sat, 29 Nov 2025 11:14:45 +0100 Subject: [PATCH 13/73] Added missing mongodb_db variable for myconext CRON jobs --- roles/myconext/templates/application.yml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 3d1719061..b45cc13bc 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -77,6 +77,7 @@ manage: base_url: "https://manage.{{ base_domain }}" enabled: True +mongodb_db: {{ myconext.mongo_database }} base_domain: {{ myconext_base_domain }} saml_metadata_base_path: https://login.{{ myconext_base_domain }} base_path: https://mijn.{{ myconext_base_domain }} From 6a30219f08a518f4b8d60c42fb7d42a61a41e44e Mon Sep 17 00:00:00 2001 From: Leroy <3416288+Liemine@users.noreply.github.com> Date: Mon, 1 Dec 2025 10:49:40 +0100 Subject: [PATCH 14/73] #1024 Add mail-institution-batch-size to 500 and set mail-institution-mail-usage-expression to daily --- roles/myconext/templates/application.yml.j2 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 9f5a4d535..515d50b3f 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -58,7 +58,8 @@ cron: manage-initial-delay-milliseconds: 15000 manage-fixed-rate-milliseconds: 300_000 # Runs on the first day of February, May, August, and November. - mail-institution-mail-usage-expression: "0 0 0 1 2,5,8,11 *" + mail-institution-mail-usage-expression: "0 0 0 * 2,5,8,11 *" + mail-institution-batch-size: 500 # Every day at 6:30AM nudge-app-mail-expression: "0 30 6 * * ?" 
# Number of days after creation of the eduID account which the nudge mail is send From 7a23f863f9b9afff0387f34110053626ced670ed Mon Sep 17 00:00:00 2001 From: Leroy <3416288+Liemine@users.noreply.github.com> Date: Tue, 2 Dec 2025 13:39:38 +0100 Subject: [PATCH 15/73] #802-differentiate-error_mail-configurable-for-prod-and-non-prod-develop --- roles/myconext/templates/application.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 46d31a2e3..d08d3d52b 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -41,7 +41,7 @@ email: from_code: eduID from_app_nudge: from_new_device: - error: info@surfconext.nl + error: {{ error_mail_to }} magic-link-url: https://login.{{ myconext_base_domain }}/saml/guest-idp/magic my-surfconext-url: https://mijn.{{ myconext_base_domain }} idp-surfconext-url: https://login.{{ myconext_base_domain }} From 10431c9fa1679064bada6734db11f66bad58e01d Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Tue, 2 Dec 2025 16:04:53 +0100 Subject: [PATCH 16/73] https://github.com/OpenConext/OpenConext-access/issues/322 --- roles/openaccess/templates/serverapplication.yml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 6224bf7fc..94d4a72f3 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -113,6 +113,7 @@ email: from: "{{ noreply_email }}" contactEmail: "{{ support_email }}" serviceDeskEmail: "{{ support_email }}" + supportEmail: "support@surfconext.nl" environment: "{{ environment_shortname }}" manage: From 4b29f864c26090b27bf1821f905d22e54d7a5fc9 Mon Sep 17 00:00:00 2001 From: Ines Date: Fri, 5 Dec 2025 13:31:44 +0100 Subject: [PATCH 17/73] test2 uit serverapplication.yml.j2 --- roles/openaccess/templates/serverapplication.yml.j2 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 94d4a72f3..ab9df15af 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -62,8 +62,8 @@ spring: host: {{ smtp_server }} oidcng: - discovery-url: "https://connect.test2.surfconext.nl/oidc/.well-known/openid-configuration" - introspect-url: "https://connect.test2.surfconext.nl/oidc/introspect" + discovery-url: "https://connect.{{ env }}.surfconext.nl/oidc/.well-known/openid-configuration" + introspect-url: "https://connect.{{ env }}.surfconext.nl/oidc/introspect" resource-server-id: myconext.rs resource-server-secret: secret base-url: {{ openconextaccess_base_domain }} @@ -90,7 +90,7 @@ config: client-url: "https://{{ openconextaccess_base_domain }}" base-url: "{{ base_domain }}" edu_id_schac_home_organization: "eduid.nl" - discovery: "https://connect.test2.surfconext.nl/oidc/.well-known/openid-configuration" + discovery: "https://connect.{{ env }}.surfconext.nl/oidc/.well-known/openid-configuration" invite: "https://invite.{{ base_domain }}" sram: "https://{{ env }}.sram.surf.nl/" serviceDesk: "https://servicedesk.surf.nl/jira/plugins/servlet/desk/user/requests?reporter=all" From 6c25ee4e03096ccfdbf02396cc933dfd7a2d5285 Mon Sep 17 00:00:00 2001 From: Chantal Rosmuller Date: Fri, 5 Dec 2025 13:38:37 +0100 Subject: [PATCH 18/73] do not show 
minio root ww --- roles/minio/tasks/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/minio/tasks/main.yml b/roles/minio/tasks/main.yml index 2c9ec4063..e99e15c45 100644 --- a/roles/minio/tasks/main.yml +++ b/roles/minio/tasks/main.yml @@ -24,6 +24,7 @@ mode: "0644" with_items: - config.env + no_log: true notify: Restart minio - name: Create and start the server container From b17b097ce1a64747f4dc226db890fd745fe8d8bc Mon Sep 17 00:00:00 2001 From: Chantal Rosmuller Date: Thu, 11 Dec 2025 10:58:41 +0100 Subject: [PATCH 19/73] Add some debugging --- roles/openaccess/tasks/main.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/roles/openaccess/tasks/main.yml b/roles/openaccess/tasks/main.yml index c3cfb6e4a..25d32245b 100644 --- a/roles/openaccess/tasks/main.yml +++ b/roles/openaccess/tasks/main.yml @@ -19,6 +19,12 @@ - serverapplication.yml notify: restart accessserver + +- name: Debug mariadb_in_docker # Show with -vv + ansible.builtin.debug: + msg: "{{ mariadb_in_docker }}" + verbosity: 2 + - name: Add the MariaDB docker network to the list of networks when MariaDB runs in Docker ansible.builtin.set_fact: invite_docker_networks: From 2e366f2f760fba6adc16b6f96f644d9e76865516 Mon Sep 17 00:00:00 2001 From: Chantal Rosmuller Date: Thu, 11 Dec 2025 11:04:59 +0100 Subject: [PATCH 20/73] network variable was not used in container creation --- roles/openaccess/tasks/main.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/roles/openaccess/tasks/main.yml b/roles/openaccess/tasks/main.yml index 25d32245b..4947442be 100644 --- a/roles/openaccess/tasks/main.yml +++ b/roles/openaccess/tasks/main.yml @@ -27,7 +27,7 @@ - name: Add the MariaDB docker network to the list of networks when MariaDB runs in Docker ansible.builtin.set_fact: - invite_docker_networks: + openaccess_docker_networks: - name: loadbalancer - name: openconext_mariadb when: mariadb_in_docker | default(false) | bool @@ -42,8 +42,7 @@ restart_policy: "{{ openaccess_server_restart_policy }}" restart_retries: "{{ openaccess_server_restart_retries }}" # Only for restart policy on-failure state: started - networks: - - name: "loadbalancer" + networks: "{{ openaccess_docker_networks }}" mounts: - source: /opt/openconext/openaccess/serverapplication.yml target: /application.yml From 1829d45c1c27a72aa6e2919e9f1a80ca0d29bac4 Mon Sep 17 00:00:00 2001 From: Ines Date: Thu, 12 Mar 2026 13:37:53 +0100 Subject: [PATCH 21/73] Update serverapplication.yml.j2 remove states --- roles/openaccess/templates/serverapplication.yml.j2 | 7 ------- 1 file changed, 7 deletions(-) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 4b9b673ca..ec4d8b68a 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -166,13 +166,6 @@ invite: user: {{ invite.access_user }} password: "{{ invite.access_secret }}" -# Todo relace with openconextaccess user -statistics: - enabled: True - url: {{ dashboard.stats_url }} - user: {{ dashboard.stats_user }} - password: {{ stats_dashboard_api_password }} - s3storage: url: {{ openconextaccess.s3_storage.url }} key: {{ openconextaccess.s3_storage.key }} From 338d4d225a75bcacd3c5d0f25ac00756b1ce3753 Mon Sep 17 00:00:00 2001 From: Ines Duits Date: Thu, 12 Mar 2026 14:01:11 +0100 Subject: [PATCH 22/73] docker fix en stats eruit? 
--- roles/openaccess/defaults/main.yml | 2 ++ roles/openaccess/tasks/main.yml | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/roles/openaccess/defaults/main.yml b/roles/openaccess/defaults/main.yml index ba813a4c8..888e97b36 100644 --- a/roles/openaccess/defaults/main.yml +++ b/roles/openaccess/defaults/main.yml @@ -1,3 +1,5 @@ --- openaccess_server_restart_policy: always openaccess_server_restart_retries: 0 +openaccess_docker_networks: + - name: loadbalancer diff --git a/roles/openaccess/tasks/main.yml b/roles/openaccess/tasks/main.yml index 4947442be..5f92ead18 100644 --- a/roles/openaccess/tasks/main.yml +++ b/roles/openaccess/tasks/main.yml @@ -100,4 +100,4 @@ S3_STORAGE_URL : "{{ openconextaccess.s3_storage.url }}" S3_STORAGE_KEY : "{{ openconextaccess.s3_storage.key }}" S3_STORAGE_SECRET : "{{ openconextaccess.s3_storage.secret }}" - S3_STORAGE_BUCKET : "{{ openconextaccess.s3_storage.bucket }}" \ No newline at end of file + S3_STORAGE_BUCKET : "{{ openconextaccess.s3_storage.bucket }}" From ed069e3118cd0f059a992d29d49fc6c1d43189b2 Mon Sep 17 00:00:00 2001 From: Ines Date: Thu, 12 Mar 2026 14:33:13 +0100 Subject: [PATCH 23/73] Update serverapplication.yml.j2 with stats variables --- roles/openaccess/templates/serverapplication.yml.j2 | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index ec4d8b68a..e5b7564be 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -172,6 +172,13 @@ s3storage: secret: {{ openconextaccess.s3_storage.secret }} bucket: {{ openconextaccess.s3_storage.bucket }} +statistics: + enabled: {{ openconextaccess.statistics.enabled }} + url: {{ openconextaccess.statistics.url }} + user: {{ openconextaccess.statistics.user }} + password: {{ openconextaccess.statistics.password }} + + management: health: mail: From 923793e4227346fbc251cee89870eb9ab5229dd6 Mon Sep 17 00:00:00 2001 From: Ines Date: Thu, 12 Mar 2026 15:13:06 +0100 Subject: [PATCH 24/73] Update serverapplication.yml.j2 voor OIDCNG --- roles/openaccess/templates/serverapplication.yml.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index e5b7564be..d0a03b9a3 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -29,8 +29,8 @@ spring: client: registration: oidcng: - client-id: {{ oidc_playground.client_id }} - client-secret: {{ oidc_playground.secret }} + client-id: {{ openconextaccess.oidcng.client_id }} + client-secret: {{ openconextaccess.oidcng.secret }} redirect-uri: "{baseUrl}/login/oauth2/code/{registrationId}" authorization-grant-type: "authorization_code" scope: openid From 0410d4a610c86dc94f43d3165f63746ec147f174 Mon Sep 17 00:00:00 2001 From: Ines Duits Date: Fri, 13 Mar 2026 14:57:59 +0100 Subject: [PATCH 25/73] het iig werkend maken voor test2 --- roles/openaccess/templates/serverapplication.yml.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index d0a03b9a3..e5b7564be 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -29,8 +29,8 @@ spring: client: registration: oidcng: - client-id: {{ 
openconextaccess.oidcng.client_id }} - client-secret: {{ openconextaccess.oidcng.secret }} + client-id: {{ oidc_playground.client_id }} + client-secret: {{ oidc_playground.secret }} redirect-uri: "{baseUrl}/login/oauth2/code/{registrationId}" authorization-grant-type: "authorization_code" scope: openid From 42882df5d857dbf410e85853b46ca3051527a1b5 Mon Sep 17 00:00:00 2001 From: Ines Duits Date: Mon, 30 Mar 2026 12:03:40 +0200 Subject: [PATCH 26/73] update van serviceapplication yml --- .../templates/serverapplication.yml.j2 | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index e5b7564be..1e5a2f9b9 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -29,17 +29,17 @@ spring: client: registration: oidcng: - client-id: {{ oidc_playground.client_id }} - client-secret: {{ oidc_playground.secret }} + client-id: {{ openconextaccess.oidcng.client_id }} + client-secret: {{ openconextaccess.oidcng.secret }} redirect-uri: "{baseUrl}/login/oauth2/code/{registrationId}" authorization-grant-type: "authorization_code" scope: openid provider: oidcng: - authorization-uri: "https://connect.{{ base_domain }}/oidc/authorize" - token-uri: "https://connect.{{ base_domain }}/oidc/token" - user-info-uri: "https://connect.{{ base_domain }}/oidc/userinfo" - jwk-set-uri: "https://connect.{{ base_domain }}/oidc/certs" + authorization-uri: {{ openconextaccess.oidcng.authorization_uri }} + token-uri: {{ openconextaccess.oidcng.token_uri }} + user-info-uri: {{ openconextaccess.oidcng.user_info_uri }} + jwk-set-uri: {{ openconextaccess.oidcng.jwk_set_uri }} user-name-attribute: sub user-info-authentication-method: client_secret_basic jpa: @@ -62,8 +62,8 @@ spring: host: {{ smtp_server }} oidcng: - discovery-url: "https://connect.{{ env }}.surfconext.nl/oidc/.well-known/openid-configuration" - introspect-url: "https://connect.{{ env }}.surfconext.nl/oidc/introspect" + discovery-url: {{ openconextaccess.oidcng.discovery_url }} + introspect-url: {{ openconextaccess.oidcng.introspect_url }} resource-server-id: myconext.rs resource-server-secret: secret base-url: {{ openconextaccess_base_domain }} @@ -105,7 +105,7 @@ config: entityid: "https://idp.diy.surfconext.nl" descriptionEN: "Een test-IdP met fictieve gebruikersaccounts. De metadata vind je hier" descriptionNL: "Een test-IdP met fictieve gebruikersaccounts. 
De metadata vind je hier" - idp_proxy_meta_data: https://metadata.test2.surfconext.nl/idp-metadata.xml + idp_proxy_meta_data: {{ openconextaccess.idp_proxy_meta_data }} minimal_stepup_acr_level: "http://{{ base_domain }}/assurance/loa2" features: - name: idp @@ -121,7 +121,7 @@ config: - "{{ loa }}" {% endfor %} -eduid-idp-entity-id: "https://login.{{ myconext_base_domain }}" +eduid-idp-entity-id: {{ openconextaccess.eduid_idp_entity_id }} super-admin: users: From 52b4fb2c33be4fb55dcbc71a6ca417e7bc59a9ff Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Thu, 9 Apr 2026 14:30:15 +0200 Subject: [PATCH 27/73] Do not purge Invite audit log yet (#648) --- roles/invite/templates/serverapplication.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/invite/templates/serverapplication.yml.j2 b/roles/invite/templates/serverapplication.yml.j2 index 26c97d432..1f5f2894d 100644 --- a/roles/invite/templates/serverapplication.yml.j2 +++ b/roles/invite/templates/serverapplication.yml.j2 @@ -87,7 +87,7 @@ cron: metadata-resolver-fixed-rate-milliseconds: 86_400_000 metadata-resolver-url: "https://metadata.{{ base_domain }}/idps-metadata.xml" # A value of 0 means no logs will be deleted - purge-audit-log-days: 365 + purge-audit-log-days: 0 # A value of 0 means no invitations will be deleted purge-expired-invitations-days: 365 From 5a6cdf437e36236d2fece80251ab0d4761ab9f19 Mon Sep 17 00:00:00 2001 From: crosmuller Date: Fri, 10 Apr 2026 09:59:13 +0200 Subject: [PATCH 28/73] Fix/pin traefik version (#645) we do not want different traefik versions hanging around --- roles/docker/defaults/main.yml | 1 + roles/docker/tasks/main.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/roles/docker/defaults/main.yml b/roles/docker/defaults/main.yml index 9e9b32ef9..e8b651210 100644 --- a/roles/docker/defaults/main.yml +++ b/roles/docker/defaults/main.yml @@ -8,5 +8,6 @@ docker_apt_gpg_key_checksum: "sha256:1500c1f56fa9e26b9b8f42452a553675796ade0807c docker_apt_filename: "docker" docker_install_traefik: true docker_traefik_ldaps: false +docker_traefik_version: 3.6.10 docker_traefik_ports: - 0.0.0.0:443:443 diff --git a/roles/docker/tasks/main.yml b/roles/docker/tasks/main.yml index a9067a5b1..fe0a22829 100644 --- a/roles/docker/tasks/main.yml +++ b/roles/docker/tasks/main.yml @@ -82,7 +82,7 @@ - name: Create the Traefik loadbalancer community.docker.docker_container: name: loadbalancer - image: traefik:latest + image: traefik:{{ docker_traefik_version }} published_ports: "{{ docker_traefik_ports }}" pull: true restart_policy: "always" From 3137cf06cb9b78bc0ea13735eb9a3b9014afe988 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 10:05:55 +0200 Subject: [PATCH 29/73] Use shedlock in invite --- roles/invite/templates/serverapplication.yml.j2 | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/roles/invite/templates/serverapplication.yml.j2 b/roles/invite/templates/serverapplication.yml.j2 index 1f5f2894d..ca74d23e0 100644 --- a/roles/invite/templates/serverapplication.yml.j2 +++ b/roles/invite/templates/serverapplication.yml.j2 @@ -7,6 +7,7 @@ logging: org.springframework.security: WARN com.zaxxer.hikari: ERROR invite: DEBUG + net.javacrumbs.shedlock: DEBUG server: port: 8080 @@ -74,11 +75,13 @@ crypto: private-key-location: file:///private_key_pkcs8.pem cron: - user-cleaner-expression: "0 0/30 * * * *" + user-cleaner-cron: "PT30M" + user-cleaner-cron-initiaal-delay: "PT10M" user-cleaner-lock-at-least-for: "PT5M" 
user-cleaner-lock-at-most-for: "PT28M" last-activity-duration-days: 1000 - role-expiration-notifier-expression: "0 0/30 * * * *" + role-expiration-notifier-cron: "PT30M" + role-expiration-notifier-cron-initial-delay: "PT15M" # Set to -1 to suppress role expiry notifications role-expiration-notifier-duration-days: 5 role-expiration-notifier-lock-at-least-for: "PT5M" From 4b4db87720c7c8d03573d59c4db466e8e6d6002d Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Tue, 4 Nov 2025 11:37:29 +0100 Subject: [PATCH 30/73] rsyslog: Only opdate the lastseen tabel for newer dates --- roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2 | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2 index b37f4720c..7e0bc7bcb 100644 --- a/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2 +++ b/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2 @@ -21,11 +21,17 @@ cursor = db.cursor() def update_lastseen(user_id, date): query = """ - REPLACE INTO last_login (userid, lastseen) + INSERT INTO last_login (userid, lastseen) VALUES (%s, %s) + ON DUPLICATE KEY UPDATE + lastseen = GREATEST(lastseen, VALUES(lastseen)) """ - cursor.execute(query, (user_id, date)) - db.commit() + try: + cursor.execute(query, (user_id, date)) + db.commit() + except Exception as e: + db.rollback() + print(f"Error updating last_login for user {user_id}: {e}") def load_in_mysql(a,b,c,d,e,f,g,h): sql = """insert into log_logins(idpentityid,spentityid,loginstamp,userid,keyid,sessionid,requestid,trustedproxyentityid) values(%s,%s,%s,%s,%s,%s,%s,%s)""" @@ -73,4 +79,3 @@ for filename in os.listdir(workdir): cursor.close() db.close() - From 0cff50e61faac0cb32f8366f5cd8fd8b30fa7641 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Tue, 4 Nov 2025 12:50:09 +0100 Subject: [PATCH 31/73] rsyslog: Also rotate and parse stepup-logs --- .../rsyslog/templates/logrotate_stepupauth.j2 | 16 +++ .../parse_stepupauthauth_to_mysql.py.j2 | 133 ++++++++++++++++++ 2 files changed, 149 insertions(+) create mode 100644 roles/rsyslog/templates/logrotate_stepupauth.j2 create mode 100644 roles/rsyslog/templates/parse_stepupauthauth_to_mysql.py.j2 diff --git a/roles/rsyslog/templates/logrotate_stepupauth.j2 b/roles/rsyslog/templates/logrotate_stepupauth.j2 new file mode 100644 index 000000000..be1a50652 --- /dev/null +++ b/roles/rsyslog/templates/logrotate_stepupauth.j2 @@ -0,0 +1,16 @@ +{{ rsyslog_dir }}/log_logins/{{ item.name }}/stepup-authentication.log +{ + missingok + daily + rotate 180 + sharedscripts + dateext + dateyesterday + compress + delaycompress + create 0640 root {{ rsyslog_read_group }} + postrotate + /usr/local/sbin/parse_stepupauth_to_mysql_{{ item.name }}.py > /dev/null + systemctl kill -s HUP rsyslog.service + endscript +} diff --git a/roles/rsyslog/templates/parse_stepupauthauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauthauth_to_mysql.py.j2 new file mode 100644 index 000000000..2fe55d6ab --- /dev/null +++ b/roles/rsyslog/templates/parse_stepupauthauth_to_mysql.py.j2 @@ -0,0 +1,133 @@ +#!/usr/bin/python3 +# This script parses rotated stepup-authentication.log files produced by engineblock. +# It filters for successful logins (authentication_result:OK) and inserts the data +# into the log_logins and last_login MySQL tables. +# This script is intended to be run separately during logrotate. 
+ +import os +import sys +import json +import MySQLdb +from dateutil.parser import parse + +# Configuration variables (to be injected by Ansible/Jinja2) +mysql_host="{{ item.db_loglogins_host }}" +mysql_user="{{ item.db_loglogins_user }}" +mysql_password="{{ item.db_loglogins_password }}" +mysql_db="{{ item.db_loglogins_name }}" +workdir="{{ rsyslog_dir }}/log_logins/{{ item.name}}/" + +# Establish database connection +try: + db = MySQLdb.connect(mysql_host,mysql_user,mysql_password,mysql_db ) + cursor = db.cursor() +except Exception as e: + print(f"Error connecting to MySQL: {e}") + sys.exit(1) + +# --- Database Functions --- + +def update_lastseen(user_id, date): + """ + Updates the last_login table. + Uses GREATEST() to ensure only newer dates overwrite the existing 'lastseen' value. + """ + query = """ + INSERT INTO last_login (userid, lastseen) + VALUES (%s, %s) + ON DUPLICATE KEY UPDATE + lastseen = GREATEST(lastseen, VALUES(lastseen)) + """ + try: + cursor.execute(query, (user_id, date)) + db.commit() + except Exception as e: + db.rollback() + print(f"Error updating last_login for user {user_id}: {e}") + +def load_stepup_in_mysql(idp, sp, loginstamp, userid, requestid): + """ + Inserts Step-up login data into the log_logins table. + Fills keyid, sessionid, and trustedproxyentityid with NULL. + """ + # Columns in log_logins: idpentityid, spentityid, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid + + keyid = None + sessionid = None + trustedproxyentityid = None + + sql = """ + INSERT INTO log_logins(idpentityid, spentityid, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid) + VALUES(%s, %s, %s, %s, %s, %s, %s, %s) + """ + try: + cursor.execute(sql, (idp, sp, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid)) + db.commit() + except Exception as e: + db.rollback() + print(f"Error inserting stepup data: {e}") + # Print the data that failed insertion + print((idp, sp, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid)) + +# --- Parsing Function --- + +def parse_stepup_lines(a): + """ + Opens the stepup log file, parses each line, filters for successful logins, + and loads the data into MySQL. + """ + input_file = open((a), 'r') + for line in input_file: + try: + # Assumes JSON data starts after the first ']:' + jsonline = line.split(']:',2)[1] + data = json.loads(jsonline) + except: + continue + + # 1. Filtering condition: Only parse logs having authentication_result:OK + if data.get("authentication_result") != "OK": + continue + + # 2. Extract required fields + user_id = data.get("identity_id") + timestamp = data.get("datetime") + request_id = data.get("request_id") + sp_entity_id = data.get("requesting_sp") + idp_entity_id = data.get("authenticating_idp") + + # Basic data validation + if not user_id or not timestamp: + continue + + try: + # 3. Format date and time for MySQL + loginstamp = parse(timestamp).strftime("%Y-%m-%d %H:%M:%S") + last_login_date = parse(timestamp).strftime("%Y-%m-%d") + except: + continue + + # 4. Insert into MySQL + load_stepup_in_mysql(idp_entity_id, sp_entity_id, loginstamp, user_id, request_id) + + # 5. 
Update last login date + update_lastseen(user_id, last_login_date) + + +# --- Main Execution --- + +## Loop over the files and parse them one by one +for filename in os.listdir(workdir): + filetoparse=(os.path.join(workdir, filename)) + + # Check for Stepup files, ignore compressed files + if os.path.isfile(filetoparse) and filename.startswith("stepup-authentication.log-") and not filename.endswith(".gz"): + print(f"Parsing stepup log file: {filename}") + parse_stepup_lines(filetoparse) + else: + continue + +# Close database connection +cursor.close() +db.close() +print("Stepup log parsing complete.") From c255aa48e62fa9b47af185621d3a026669a091b9 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Tue, 4 Nov 2025 12:53:04 +0100 Subject: [PATCH 32/73] rsyslog: Add ansible tasks for stepup log parsing --- roles/rsyslog/tasks/process_auth_logs.yml | 24 +++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/roles/rsyslog/tasks/process_auth_logs.yml b/roles/rsyslog/tasks/process_auth_logs.yml index e62027530..e123fb33f 100644 --- a/roles/rsyslog/tasks/process_auth_logs.yml +++ b/roles/rsyslog/tasks/process_auth_logs.yml @@ -39,7 +39,7 @@ state: present when: ansible_os_family == "Debian" -- name: Create a python script that parses log_logins per environment +- name: Create a python script that parses eb log_logins per environment ansible.builtin.template: src: parse_ebauth_to_mysql.py.j2 dest: /usr/local/sbin/parse_ebauth_to_mysql_{{ item.name }}.py @@ -49,7 +49,17 @@ with_items: "{{ rsyslog_environments }}" when: item.db_loglogins_name is defined -- name: Put log_logins logrotate scripts +- name: Create a python script that parses stepup log_logins per environment + ansible.builtin.template: + src: parse_ebauth_to_mysql.py.j2 + dest: /usr/local/sbin/parse_stepupauth_to_mysql_{{ item.name }}.py + mode: 0740 + owner: root + group: root + with_items: "{{ rsyslog_environments }}" + when: item.db_loglogins_name is defined + +- name: Put log_logins logrotate scripts for eb ansible.builtin.template: src: logrotate_ebauth.j2 dest: /etc/logrotate.d/logrotate_ebauth_{{ item.name }} @@ -59,6 +69,16 @@ with_items: "{{ rsyslog_environments }}" when: item.db_loglogins_name is defined +- name: Put log_logins logrotate scripts for stepup + ansible.builtin.template: + src: logrotate_ebauth.j2 + dest: /etc/logrotate.d/logrotate_stepupauth_{{ item.name }} + mode: 0644 + owner: root + group: root + with_items: "{{ rsyslog_environments }}" + when: item.db_loglogins_name is defined + - name: Create logdirectory for log_logins cleanup script ansible.builtin.file: path: "{{ rsyslog_dir }}/apps/{{ item.name }}/loglogins_cleanup/" From ff6862d2eade667d5d060d98d12e40705e387e0e Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 13:31:18 +0200 Subject: [PATCH 33/73] Fix name for parse_stepupauth template --- roles/rsyslog/tasks/process_auth_logs.yml | 2 +- ...pauthauth_to_mysql.py.j2 => parse_stepupauth_to_mysql.py.j2} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename roles/rsyslog/templates/{parse_stepupauthauth_to_mysql.py.j2 => parse_stepupauth_to_mysql.py.j2} (100%) diff --git a/roles/rsyslog/tasks/process_auth_logs.yml b/roles/rsyslog/tasks/process_auth_logs.yml index e123fb33f..808d8fa9f 100644 --- a/roles/rsyslog/tasks/process_auth_logs.yml +++ b/roles/rsyslog/tasks/process_auth_logs.yml @@ -51,7 +51,7 @@ - name: Create a python script that parses stepup log_logins per environment ansible.builtin.template: - src: parse_ebauth_to_mysql.py.j2 + src: 
parse_stepupauth_to_mysql.py.j2 dest: /usr/local/sbin/parse_stepupauth_to_mysql_{{ item.name }}.py mode: 0740 owner: root diff --git a/roles/rsyslog/templates/parse_stepupauthauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 similarity index 100% rename from roles/rsyslog/templates/parse_stepupauthauth_to_mysql.py.j2 rename to roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 From 4089da9cddacd192a7bfeb21b3a8a4aad084abf1 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 13:48:43 +0200 Subject: [PATCH 34/73] Fix template name --- roles/rsyslog/tasks/process_auth_logs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/rsyslog/tasks/process_auth_logs.yml b/roles/rsyslog/tasks/process_auth_logs.yml index 808d8fa9f..804bf629b 100644 --- a/roles/rsyslog/tasks/process_auth_logs.yml +++ b/roles/rsyslog/tasks/process_auth_logs.yml @@ -71,7 +71,7 @@ - name: Put log_logins logrotate scripts for stepup ansible.builtin.template: - src: logrotate_ebauth.j2 + src: logrotate_stepupauth.j2 dest: /etc/logrotate.d/logrotate_stepupauth_{{ item.name }} mode: 0644 owner: root From 5e0ec792b6d1fd30c569caefdfa593ce13edb346 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 13:49:02 +0200 Subject: [PATCH 35/73] Disable check for authentication_result --- roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 index 2fe55d6ab..a6aa8b707 100644 --- a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 +++ b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 @@ -86,8 +86,13 @@ def parse_stepup_lines(a): continue # 1. Filtering condition: Only parse logs having authentication_result:OK - if data.get("authentication_result") != "OK": - continue + # Only successful authentications are logged, so this check is not + # necessary. There is currently a bug in the Stepup-Gateway where + # FAILED is logged, even though the result is OK, making this check + # do the wrong thing now. + # + #if data.get("authentication_result") != "OK": + # continue # 2. Extract required fields user_id = data.get("identity_id") From 5a7b328be91cd9ecae7fb228cdf0d78672265f65 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 13:52:23 +0200 Subject: [PATCH 36/73] Get data from the context object --- .../rsyslog/templates/parse_stepupauth_to_mysql.py.j2 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 index a6aa8b707..cef39191e 100644 --- a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 +++ b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 @@ -95,11 +95,11 @@ def parse_stepup_lines(a): # continue # 2. 
Extract required fields - user_id = data.get("identity_id") - timestamp = data.get("datetime") - request_id = data.get("request_id") - sp_entity_id = data.get("requesting_sp") - idp_entity_id = data.get("authenticating_idp") + user_id = data.get("context").("identity_id") + timestamp = data.get("context").("datetime") + request_id = data.get("context").("request_id") + sp_entity_id = data.get("context").("requesting_sp") + idp_entity_id = data.get("context").("authenticating_idp") # Basic data validation if not user_id or not timestamp: From adf12d2352e036dcc13892d6ba5f85cc0130cccb Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 13:53:41 +0200 Subject: [PATCH 37/73] Get data from the context object --- roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 index cef39191e..3893cc5a1 100644 --- a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 +++ b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 @@ -91,7 +91,7 @@ def parse_stepup_lines(a): # FAILED is logged, even though the result is OK, making this check # do the wrong thing now. # - #if data.get("authentication_result") != "OK": + #if data.get("context").("authentication_result") != "OK": # continue # 2. Extract required fields From a7aa40d279c09213b4b858bd04f91a6bff30c706 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 14:00:20 +0200 Subject: [PATCH 38/73] Log if data checks failed --- roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 index 3893cc5a1..eef7fdff5 100644 --- a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 +++ b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 @@ -103,6 +103,10 @@ def parse_stepup_lines(a): # Basic data validation if not user_id or not timestamp: + print( + "Skipping line: validation failed " + f"(user_id={user_id!r}, timestamp={timestamp!r}, request_id={request_id!r})" + ) continue try: @@ -110,6 +114,10 @@ def parse_stepup_lines(a): loginstamp = parse(timestamp).strftime("%Y-%m-%d %H:%M:%S") last_login_date = parse(timestamp).strftime("%Y-%m-%d") except: + print( + "Skipping line: timestamp parsing failed " + f"(timestamp={timestamp!r}, user_id={user_id!r}, error={e})" + ) continue # 4. Insert into MySQL From 6669321a8932cdd5a5de68d26155c08701730594 Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 14:12:24 +0200 Subject: [PATCH 39/73] more robust parsing of data --- .../templates/parse_stepupauth_to_mysql.py.j2 | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 index eef7fdff5..843fe44bc 100644 --- a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 +++ b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 @@ -95,11 +95,17 @@ def parse_stepup_lines(a): # continue # 2. 
Extract required fields - user_id = data.get("context").("identity_id") - timestamp = data.get("context").("datetime") - request_id = data.get("context").("request_id") - sp_entity_id = data.get("context").("requesting_sp") - idp_entity_id = data.get("context").("authenticating_idp") + context = data.get("context") + + if not isinstance(context, dict): + print("Skipping line: context is missing or invalid") + continue + + user_id = context.get("identity_id") + timestamp = context.get("datetime") + request_id = context.get("request_id") + sp_entity_id = context.get("requesting_sp") + idp_entity_id = context.get("authenticating_idp") # Basic data validation if not user_id or not timestamp: From 0107394b42fab8fe83a48ecf2f5c5202d48c41ca Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Fri, 10 Apr 2026 15:05:27 +0200 Subject: [PATCH 40/73] Add python3-dateutil to rsyslog role --- roles/rsyslog/tasks/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/roles/rsyslog/tasks/main.yml b/roles/rsyslog/tasks/main.yml index 1fc0608dc..a531fd677 100644 --- a/roles/rsyslog/tasks/main.yml +++ b/roles/rsyslog/tasks/main.yml @@ -1,9 +1,10 @@ -- name: Install rsyslog +- name: Install rsyslog and python modules ansible.builtin.package: name: - rsyslog - rsyslog-gnutls - rsyslog-relp + - python3-dateutil state: present notify: - "restart rsyslog" From eccfa6c13410f1549d71a299f02874a012fa24d9 Mon Sep 17 00:00:00 2001 From: Thijs Kinkhorst Date: Mon, 13 Apr 2026 10:37:50 +0200 Subject: [PATCH 41/73] Replace Teams with Invite as default provider for voot (#632) --- environments/template/group_vars/template.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/environments/template/group_vars/template.yml b/environments/template/group_vars/template.yml index 0a4308714..9b8606268 100644 --- a/environments/template/group_vars/template.yml +++ b/environments/template/group_vars/template.yml @@ -156,15 +156,15 @@ voot: - { name: "voot", level: "DEBUG" } externalGroupProviders: - { - type: "teams", - url: "https://teams.{{ base_domain }}/api/voot", - credentials: { - username: "{{ teams.voot_api_user }}", - secret: "{{ external_group_provider_secrets.teams }}" - }, - schacHomeOrganization: "{{ base_domain}}", - name: "SURFteams", - timeoutMillis: 15000 + type: "invite", + url: "https://invite.{{ base_domain }}/api/external/v1/voot", + credentials: { + username: "{{ invite.vootuser }}", + secret: "{{ invite.vootsecret }}" + }, + schacHomeOrganization: "N/A", + name: "Invite", + timeoutMillis: 3000 } oidc_playground: From ca39d8e5c6ceb33febe0610363b7e16036835223 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Mon, 13 Apr 2026 15:11:42 +0200 Subject: [PATCH 42/73] Fixed typo --- roles/invite/templates/serverapplication.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/invite/templates/serverapplication.yml.j2 b/roles/invite/templates/serverapplication.yml.j2 index ca74d23e0..81e7dae5d 100644 --- a/roles/invite/templates/serverapplication.yml.j2 +++ b/roles/invite/templates/serverapplication.yml.j2 @@ -76,7 +76,7 @@ crypto: cron: user-cleaner-cron: "PT30M" - user-cleaner-cron-initiaal-delay: "PT10M" + user-cleaner-cron-initial-delay: "PT10M" user-cleaner-lock-at-least-for: "PT5M" user-cleaner-lock-at-most-for: "PT28M" last-activity-duration-days: 1000 From 5bcf8925f42a9330aff6804a69a84d9ed7cc519d Mon Sep 17 00:00:00 2001 From: Leroy <3416288+Liemine@users.noreply.github.com> Date: Tue, 14 Apr 2026 13:22:06 +0200 Subject: [PATCH 
43/73] #1042 Add create-from-institution return-url-allowed-domains --- roles/myconext/templates/application.yml.j2 | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index f00d7b668..6af4e6ae9 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -104,6 +104,12 @@ mobile_app_redirect: eduid:///client/mobile # For this RP we nudge the user to use the magic link mobile_app_rp_entity_id: {{ myconext.mobile_app_rp_entity_id }} +create-from-institution: + return-url-allowed-domains: + {% for url in create_from_institution_return_url_allowed_domains %} + - "{{ url }}" + {% endfor %} + # The host headers to identify the service the user is logged in host_headers: service_desk: servicedesk.{{ myconext_base_domain }} From b0aeac06941204a50b4e55770ed4d0439c40fcff Mon Sep 17 00:00:00 2001 From: Bas Zoetekouw Date: Wed, 15 Apr 2026 12:45:19 +0200 Subject: [PATCH 44/73] fix config --- roles/invite/templates/serverapplication.yml.j2 | 1 + roles/openaccess/templates/serverapplication.yml.j2 | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/roles/invite/templates/serverapplication.yml.j2 b/roles/invite/templates/serverapplication.yml.j2 index 78b8c5727..26c97d432 100644 --- a/roles/invite/templates/serverapplication.yml.j2 +++ b/roles/invite/templates/serverapplication.yml.j2 @@ -137,6 +137,7 @@ config: languages: "nl, en" environment: {{ environment_shortname }} + feature: enable-performance-seed: False # Allow non-eduID accounts to accept invites for rolemanager and inviter diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 1e5a2f9b9..522240035 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -90,7 +90,7 @@ config: client-url: "https://{{ openconextaccess_base_domain }}" base-url: "{{ base_domain }}" edu_id_schac_home_organization: "eduid.nl" - discovery: "https://connect.{{ env }}.surfconext.nl/oidc/.well-known/openid-configuration" + discovery: "https://connect.surfconext.nl/oidc/.well-known/openid-configuration" invite: "https://invite.{{ base_domain }}" sram: "https://{{ env }}.sram.surf.nl/" service_desk: "https://servicedesk.surf.nl/jira/plugins/servlet/desk/user/requests?reporter=all" From 3f39b6d765f301e6090e42732a9dd242b3287fad Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Wed, 15 Apr 2026 13:15:42 +0200 Subject: [PATCH 45/73] Stats down feature toggle https://github.com/OpenConext/OpenConext-dashboard/issues/685 --- roles/dashboard/templates/serverapplication.yml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/dashboard/templates/serverapplication.yml.j2 b/roles/dashboard/templates/serverapplication.yml.j2 index 410d97f70..45109b554 100644 --- a/roles/dashboard/templates/serverapplication.yml.j2 +++ b/roles/dashboard/templates/serverapplication.yml.j2 @@ -80,6 +80,7 @@ dashboard.feature.consent={{ dashboard.feature_consent }} # Valid choices are 'MOCK', 'PDP' or 'MANAGE', 'MOCK' is for local development dashboard.feature.pdpSource={{ dashboard.pdp_source }} dashboard.feature.statistics=true +dashboard.feature.statisticsDown={{ dashboard.feature_statsdown }} dashboard.feature.mail={{ dashboard.feature_mail }} dashboard.feature.oidc={{ dashboard.feature_oidc }} dashboard.feature.stepup={{ dashboard.feature_stepup }} From 03c5b75bca27dc58682fa1f0742efc4c9ee45d6d Mon 
Sep 17 00:00:00 2001 From: Okke Harsta Date: Fri, 17 Apr 2026 10:36:43 +0200 Subject: [PATCH 46/73] Added surf_schac_home_organization for access stats --- roles/openaccess/templates/serverapplication.yml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 522240035..e4319e35a 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -90,6 +90,7 @@ config: client-url: "https://{{ openconextaccess_base_domain }}" base-url: "{{ base_domain }}" edu_id_schac_home_organization: "eduid.nl" + surf_schac_home_organization: "example.com" discovery: "https://connect.surfconext.nl/oidc/.well-known/openid-configuration" invite: "https://invite.{{ base_domain }}" sram: "https://{{ env }}.sram.surf.nl/" From b17b13d0127d981f5f4fcf9a86d8940ff03eaeb8 Mon Sep 17 00:00:00 2001 From: Pieter van der Meulen Date: Fri, 17 Apr 2026 15:00:15 +0200 Subject: [PATCH 47/73] Always clear AzureMFA federation-metadata so that an updated institutions.yaml is used --- roles/stepupazuremfa/tasks/main.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/roles/stepupazuremfa/tasks/main.yml b/roles/stepupazuremfa/tasks/main.yml index 7e01fdc56..dbee925c5 100644 --- a/roles/stepupazuremfa/tasks/main.yml +++ b/roles/stepupazuremfa/tasks/main.yml @@ -26,15 +26,16 @@ - "{{ current_release_config_dir_name }}" - "{{ current_release_appdir }}/public/images" -- name: Create federation-metadata cache dir +- name: Create and empty the federation-metadata cache dir ansible.builtin.file: - state: directory - dest: "{{ item }}" + state: "{{ item }}" + dest: "{{ current_release_appdir }}/federation-metadata" owner: "{{ appname }}" group: root mode: "0755" with_items: - - "{{ current_release_appdir }}/federation-metadata" + - absent + - directory - name: Install images ansible.builtin.include_role: From 7f408c947b8cb00f1fbe020ec0b72f3c8be9bafc Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Wed, 22 Apr 2026 08:56:28 +0200 Subject: [PATCH 48/73] Access cron jobs --- roles/openaccess/templates/serverapplication.yml.j2 | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index e4319e35a..3865a2044 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -68,6 +68,12 @@ oidcng: resource-server-secret: secret base-url: {{ openconextaccess_base_domain }} +cron: + user-cleaner-cron: "PT30M" + user-cleaner-cron-initial-delay: "PT10S" + user-cleaner-lock-at-least-for: "PT5M" + user-cleaner-lock-at-most-for: "PT28M" + lifecycle: user: lifecycle password: {{ openconextaccess_lifecycle_secret }} From ed9f9423ee426d9fce2a192a04162c9e1ec1f6cf Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Wed, 22 Apr 2026 16:11:25 +0200 Subject: [PATCH 49/73] Cron properties aaccess --- roles/openaccess/templates/serverapplication.yml.j2 | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index e4319e35a..18ae3f48e 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -20,6 +20,8 @@ spring: jdbc: cleanup-cron: "-" initialize-schema: always + flush-mode: on_save + save-mode: on_set_attribute store-type: 
jdbc timeout: 8h mvc: @@ -68,6 +70,16 @@ oidcng: resource-server-secret: secret base-url: {{ openconextaccess_base_domain }} +cron: + user-cleaner-cron: "PT30M" + user-cleaner-cron-initial-delay: "PT10S" + user-cleaner-lock-at-least-for: "PT5M" + user-cleaner-lock-at-most-for: "PT28M" + org-contact-reminder-days: 180 + org-delete-after-days: 365 + user-inactivity-warn-days: 60 + user-inactivity-delete-days: 90 + lifecycle: user: lifecycle password: {{ openconextaccess_lifecycle_secret }} @@ -141,6 +153,7 @@ email: contactEmail: "{{ support_email }}" serviceDeskEmail: "{{ support_email }}" supportEmail: "support@surfconext.nl" + jiraErrorEmail: "{{ support_email }}" environment: "{{ environment_shortname }}" manage: From 4ee77f18d3e64b4ae82a0e8b6d5c4e76fc10fcc1 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Thu, 23 Apr 2026 09:11:31 +0200 Subject: [PATCH 50/73] Cron properties access --- roles/openaccess/templates/serverapplication.yml.j2 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2 index 18ae3f48e..80da77a94 100644 --- a/roles/openaccess/templates/serverapplication.yml.j2 +++ b/roles/openaccess/templates/serverapplication.yml.j2 @@ -71,14 +71,14 @@ oidcng: base-url: {{ openconextaccess_base_domain }} cron: - user-cleaner-cron: "PT30M" - user-cleaner-cron-initial-delay: "PT10S" + user-cleaner-cron: "PT60M" + user-cleaner-cron-initial-delay: "PT10M" user-cleaner-lock-at-least-for: "PT5M" user-cleaner-lock-at-most-for: "PT28M" - org-contact-reminder-days: 180 + org-contact-reminder-days: 365 org-delete-after-days: 365 - user-inactivity-warn-days: 60 - user-inactivity-delete-days: 90 + user-inactivity-warn-days: 365 + user-inactivity-delete-days: 1095 lifecycle: user: lifecycle From 2ba4d1adec81692b6dd86300de0b5cc57a8ce785 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Thu, 23 Apr 2026 15:16:46 +0200 Subject: [PATCH 51/73] Added sram_rp_entity_id to manage for push functionality --- environments/template/group_vars/template.yml | 1 + roles/manage/templates/application.yml.j2 | 3 +++ 2 files changed, 4 insertions(+) diff --git a/environments/template/group_vars/template.yml b/environments/template/group_vars/template.yml index 9b8606268..dc2642d3b 100644 --- a/environments/template/group_vars/template.yml +++ b/environments/template/group_vars/template.yml @@ -338,6 +338,7 @@ manage: features: push, validation, push_preview, orphans, find_my_data, edugain, auto_refresh environment: template super_user_team_names: "urn:collab:group:test.surfteams.nl:nl:surfnet:diensten:surfconext_tpm_core" + sram_rp_entity_id: "sbs.test.sram.surf.nl" apiUsers: - { name: "dashboard", diff --git a/roles/manage/templates/application.yml.j2 b/roles/manage/templates/application.yml.j2 index 5790737b0..5b7a7980d 100644 --- a/roles/manage/templates/application.yml.j2 +++ b/roles/manage/templates/application.yml.j2 @@ -83,6 +83,9 @@ policies: allowed_attributes: file://{{ manage_dir }}/policies/allowed_attributes.json extra_saml_attributes: file://{{ manage_dir }}/policies/extra_saml_attributes.json +sram: + sram_rp_entity_id: ""{{ manage.sram_rp_entity_id }}" + spring: mail: host: {{ smtp_server }} From e709961b0207ab9076cc251e8a85bb66c972479d Mon Sep 17 00:00:00 2001 From: crosmuller Date: Mon, 4 May 2026 17:30:01 +0200 Subject: [PATCH 52/73] Remove obsolete certificate task and stop patching during a deploy (#660) - Remove obsolete certificate task - Remove obsolete template - 
Stop Haproxy updates during deploys --- roles/haproxy/tasks/main.yml | 13 +------------ roles/haproxy/templates/certlist.lst.j2 | 5 ----- roles/haproxy/templates/update_ocsp.j2 | 11 ----------- 3 files changed, 1 insertion(+), 28 deletions(-) delete mode 100644 roles/haproxy/templates/update_ocsp.j2 diff --git a/roles/haproxy/tasks/main.yml b/roles/haproxy/tasks/main.yml index 55f38f8f1..d765af2bc 100644 --- a/roles/haproxy/tasks/main.yml +++ b/roles/haproxy/tasks/main.yml @@ -16,7 +16,7 @@ - name: Install haproxy and socat ansible.builtin.apt: name: - - "haproxy=3.0.*" + - "haproxy" - "socat" - "git" state: "present" @@ -88,17 +88,6 @@ group: haproxy mode: "0770" -- name: Create combined key and certificate file for HAproxy - ansible.builtin.copy: - content: > - {{ item.key_content }}{{ lookup('file', '{{ inventory_dir }}/files/certs/{{ item.crt_name }}') }} - dest: "/etc/haproxy/certs/{{ item.name }}_haproxy.pem" - mode: "0600" - with_items: "{{ haproxy_sni_ip.certs }}" - when: haproxy_sni_ip.certs is defined - notify: - - "reload haproxy" - - name: Create backend CA directory ansible.builtin.file: path: "{{ tls_backend_ca | dirname }}" diff --git a/roles/haproxy/templates/certlist.lst.j2 b/roles/haproxy/templates/certlist.lst.j2 index 3e8bb226d..800a79b39 100644 --- a/roles/haproxy/templates/certlist.lst.j2 +++ b/roles/haproxy/templates/certlist.lst.j2 @@ -3,11 +3,6 @@ /etc/haproxy/certs/{{ host }}.pem [ocsp-update on] {% endfor %} {% endif %} -{% if haproxy_sni_ip.certs is defined %} -{% for cert in haproxy_sni_ip.certs %} -/etc/haproxy/certs/{{ cert.name }}_haproxy.pem [ocsp-update on] -{% endfor %} -{% endif %} {% if haproxy_extra_certs is defined %} {% for cert in haproxy_extra_certs %} {{ cert }} [ocsp-update on] diff --git a/roles/haproxy/templates/update_ocsp.j2 b/roles/haproxy/templates/update_ocsp.j2 deleted file mode 100644 index 2ed61f528..000000000 --- a/roles/haproxy/templates/update_ocsp.j2 +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/sh -# Call hapos-upd to update OCSP stapling info foreach of our haproxy certificates - -# probably we want to continue even if one fails -set -e - -{% for cert in haproxy_sni_ip.certs %} -/usr/local/sbin/hapos-upd --partial-chain --good-only --socket /var/lib/haproxy/haproxy.stats \ - --VAfile /etc/pki/haproxy/{{ cert.name }}_haproxy.pem \ - --cert /etc/pki/haproxy/{{ cert.name }}_haproxy.pem -{% endfor %} From f68cf270b4ffaf45b95bd58142f6684e021fa11d Mon Sep 17 00:00:00 2001 From: Peter Havekes Date: Tue, 5 May 2026 13:10:04 +0200 Subject: [PATCH 53/73] Default create_from_institution_return_url_allowed_domains voor eduID --- roles/myconext/templates/application.yml.j2 | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2 index 6af4e6ae9..9e2af3722 100644 --- a/roles/myconext/templates/application.yml.j2 +++ b/roles/myconext/templates/application.yml.j2 @@ -106,9 +106,11 @@ mobile_app_rp_entity_id: {{ myconext.mobile_app_rp_entity_id }} create-from-institution: return-url-allowed-domains: - {% for url in create_from_institution_return_url_allowed_domains %} +{% for url in myconext.create_from_institution_return_url_allowed_domains | default([]) %} - "{{ url }}" - {% endfor %} +{% else %} + [] # lege lijst wanneer er geen URLs zijn +{% endfor %} # The host headers to identify the service the user is logged in host_headers: From 523926f5e74a5a158ad83b02f2b9c32ae3e65548 Mon Sep 17 00:00:00 2001 From: Okke Harsta Date: Wed, 6 May 2026 06:10:09 
+0200 Subject: [PATCH 54/73] Bugfix for double quote --- roles/manage/templates/application.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/manage/templates/application.yml.j2 b/roles/manage/templates/application.yml.j2 index 5b7a7980d..aec21cfdf 100644 --- a/roles/manage/templates/application.yml.j2 +++ b/roles/manage/templates/application.yml.j2 @@ -84,7 +84,7 @@ policies: extra_saml_attributes: file://{{ manage_dir }}/policies/extra_saml_attributes.json sram: - sram_rp_entity_id: ""{{ manage.sram_rp_entity_id }}" + sram_rp_entity_id: "{{ manage.sram_rp_entity_id }}" spring: mail: From 112b538b74ea02ad3c7ac879873ed6429af0b608 Mon Sep 17 00:00:00 2001 From: Bas Zoetekouw Date: Tue, 8 Apr 2025 11:02:49 +0200 Subject: [PATCH 55/73] Add engineblock parameters for SBS integration --- roles/engine/defaults/main.yml | 9 +++++++++ roles/engine/templates/parameters.yml.j2 | 14 ++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/roles/engine/defaults/main.yml b/roles/engine/defaults/main.yml index cdc888cd5..3721e2f54 100644 --- a/roles/engine/defaults/main.yml +++ b/roles/engine/defaults/main.yml @@ -17,6 +17,7 @@ engine_api_feature_consent_remove: 0 engine_api_feature_metadata_api: 1 engine_api_feature_deprovision: 1 engine_feature_send_user_attributes: 0 +engine_feature_enable_sbs_interrupt: 0 # Cutoff point for showing unfiltered IdPs on the WAYF engine_wayf_cutoff_point_for_showing_unfiltered_idps: 50 @@ -76,6 +77,14 @@ engine_stepup_gateway_sfo_entity_id: "https://{{ engine_stepup_gateway_domain }} # The single sign-on endpoint used for Stepup Gateway SFO callouts engine_stepup_gateway_sfo_sso_location: "https://{{ engine_stepup_gateway_domain }}/second-factor-only/single-sign-on" +# SBS interrupt settings +engine_sbs_base_url: "sbs.{{ base_domain }}" +engine_sbs_attributes_allowed: + - 'urn:mace:dir:attribute-def:eduPersonEntitlement' + - 'urn:mace:dir:attribute-def:uid' + - 'urn:mace:dir:attribute-def:eduPersonPrincipalName' + - 'urn:oid:1.3.6.1.4.1.24552.500.1.1.1.13' + ## The minimum priority of messages that will be logged engine_logging_passthru_level: NOTICE diff --git a/roles/engine/templates/parameters.yml.j2 b/roles/engine/templates/parameters.yml.j2 index 77903de1e..9b416fe66 100644 --- a/roles/engine/templates/parameters.yml.j2 +++ b/roles/engine/templates/parameters.yml.j2 @@ -228,6 +228,7 @@ parameters: feature_stepup_sfo_override_engine_entityid: {{ engine_feature_stepup_override_entityid | bool | to_json }} feature_enable_idp_initiated_flow: {{ engine_feature_idp_initiated_flow | bool | to_json }} feature_stepup_send_user_attributes: {{ engine_feature_send_user_attributes | bool | to_json }} + feature_enable_sram_interrupt: { { engine_feature_enable_sbs_interrupt | bool | to_json } } ########################################################################################## ## PROFILE SETTINGS ########################################################################################## @@ -310,3 +311,16 @@ parameters: # used in the authentication log record. The attributeName will be searched in the response attributes and if present # the log data will be enriched. The values of the response attributes are the final values after ARP and Attribute Manipulation. 
auth.log.attributes: {{ engine_log_attributes }} + + + ########################################################################################## + ## SBS external authorization/attribute enrichtment + ########################################################################################## + sram.api_token: "{{ engine_sbs_api_token | default('') }}" + sram.base_url: "https://{{ engine_sbs_base_url }}/api/users/" + sram.authz_location: "authz_eb" + sram.attributes_location: "authz_eb" + sram.attributes_location: "attributes_eb" + sram.interrupt_location: "interrupt" + sram.verify_peer: true + sram.allowed_attributes: {{ engine_sbs_attributes_allowed }} From 165e8263238fbf359b0daa7c76a9053a1a063f52 Mon Sep 17 00:00:00 2001 From: Bas Zoetekouw Date: Wed, 8 Apr 2026 15:19:04 +0200 Subject: [PATCH 56/73] Fix duplicate keys --- roles/engine/templates/parameters.yml.j2 | 1 - 1 file changed, 1 deletion(-) diff --git a/roles/engine/templates/parameters.yml.j2 b/roles/engine/templates/parameters.yml.j2 index 9b416fe66..edd6d80e6 100644 --- a/roles/engine/templates/parameters.yml.j2 +++ b/roles/engine/templates/parameters.yml.j2 @@ -319,7 +319,6 @@ parameters: sram.api_token: "{{ engine_sbs_api_token | default('') }}" sram.base_url: "https://{{ engine_sbs_base_url }}/api/users/" sram.authz_location: "authz_eb" - sram.attributes_location: "authz_eb" sram.attributes_location: "attributes_eb" sram.interrupt_location: "interrupt" sram.verify_peer: true From 2f16f085c611a270cf538217c272c6a1d244ef2e Mon Sep 17 00:00:00 2001 From: Bas Zoetekouw Date: Wed, 8 Apr 2026 15:37:15 +0200 Subject: [PATCH 57/73] Fix jinja template --- roles/engine/templates/parameters.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/engine/templates/parameters.yml.j2 b/roles/engine/templates/parameters.yml.j2 index edd6d80e6..526104405 100644 --- a/roles/engine/templates/parameters.yml.j2 +++ b/roles/engine/templates/parameters.yml.j2 @@ -228,7 +228,7 @@ parameters: feature_stepup_sfo_override_engine_entityid: {{ engine_feature_stepup_override_entityid | bool | to_json }} feature_enable_idp_initiated_flow: {{ engine_feature_idp_initiated_flow | bool | to_json }} feature_stepup_send_user_attributes: {{ engine_feature_send_user_attributes | bool | to_json }} - feature_enable_sram_interrupt: { { engine_feature_enable_sbs_interrupt | bool | to_json } } + feature_enable_sram_interrupt: {{ engine_feature_enable_sbs_interrupt | bool | to_json }} ########################################################################################## ## PROFILE SETTINGS ########################################################################################## From abfe916bb954d4a6096ff29c30a2daf22d09094f Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 19 Mar 2026 16:48:37 +0100 Subject: [PATCH 58/73] WIP --- roles/redis/defaults/main.yml | 11 + roles/redis/handlers/main.yml | 6 + roles/redis/tasks/main.yml | 61 ++++ roles/redis/templates/redis.conf.j2 | 3 + roles/redis/vars/main.yml | 1 + roles/sbs/defaults/main.yml | 159 +++++++++++ roles/sbs/files/yarn.gpg | 243 ++++++++++++++++ roles/sbs/handlers/main.yml | 9 + roles/sbs/tasks/main.yml | 171 ++++++++++++ roles/sbs/templates/alembic.ini.j2 | 72 +++++ roles/sbs/templates/config.yml.j2 | 264 ++++++++++++++++++ roles/sbs/templates/disclaimer.css.j2 | 6 + .../templates/saml_advanced_settings.json.j2 | 35 +++ roles/sbs/templates/saml_settings.json.j2 | 22 ++ roles/sbs/templates/sbs-apache.conf.j2 | 30 ++ roles/sbs/templates/sbs.service.j2 | 32 +++ 
roles/sbs/vars/main.yml | 1 + 17 files changed, 1126 insertions(+) create mode 100644 roles/redis/defaults/main.yml create mode 100644 roles/redis/handlers/main.yml create mode 100644 roles/redis/tasks/main.yml create mode 100644 roles/redis/templates/redis.conf.j2 create mode 100644 roles/redis/vars/main.yml create mode 100644 roles/sbs/defaults/main.yml create mode 100644 roles/sbs/files/yarn.gpg create mode 100644 roles/sbs/handlers/main.yml create mode 100644 roles/sbs/tasks/main.yml create mode 100644 roles/sbs/templates/alembic.ini.j2 create mode 100644 roles/sbs/templates/config.yml.j2 create mode 100644 roles/sbs/templates/disclaimer.css.j2 create mode 100644 roles/sbs/templates/saml_advanced_settings.json.j2 create mode 100644 roles/sbs/templates/saml_settings.json.j2 create mode 100644 roles/sbs/templates/sbs-apache.conf.j2 create mode 100644 roles/sbs/templates/sbs.service.j2 create mode 100644 roles/sbs/vars/main.yml diff --git a/roles/redis/defaults/main.yml b/roles/redis/defaults/main.yml new file mode 100644 index 000000000..d4eb4b182 --- /dev/null +++ b/roles/redis/defaults/main.yml @@ -0,0 +1,11 @@ +--- +redis: "{{ redis_defaults | combine(redis_overrides, recursive=true) }}" +redis_defaults: + image: "docker.io/library/redis:7" + conf_dir: "{{ current_release_appdir }}/redis" + data_dir: "{{ current_release_appdir }}/redis/data" + user: redis + group: redis + redis_user: default + redis_password: changethispassword + max_memory: 100mb diff --git a/roles/redis/handlers/main.yml b/roles/redis/handlers/main.yml new file mode 100644 index 000000000..5ed78e133 --- /dev/null +++ b/roles/redis/handlers/main.yml @@ -0,0 +1,6 @@ +--- +- name: Restart redis container + community.docker.docker_container: + name: redis + state: started + restart: true diff --git a/roles/redis/tasks/main.yml b/roles/redis/tasks/main.yml new file mode 100644 index 000000000..65e7392ef --- /dev/null +++ b/roles/redis/tasks/main.yml @@ -0,0 +1,61 @@ +--- +- name: "Create redis group" + group: + name: "{{ redis.group }}" + state: "present" + register: "result" + +- name: "Save redis group gid" + set_fact: + redis_group_gid: "{{ result.gid }}" + +- name: "Create redis user" + user: + name: "{{ redis.user }}" + group: "{{ redis.group }}" + comment: "User to run SRAM Redis service" + shell: "/bin/false" + password: "!" 
+ home: "{{ redis.conf_dir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save redis user uid" + set_fact: + redis_user_uid: "{{ result.uid }}" + +- name: "Create directories" + file: + path: "{{item.path}}" + state: "directory" + owner: "{{ redis.user }}" + group: "{{ redis.group }}" + mode: "{{item.mode}}" + with_items: + - { path: "{{redis.conf_dir}}", mode: "0755" } + - { path: "{{redis.data_dir}}", mode: "0755" } + +- name: "Create redis config" + template: + src: "redis.conf.j2" + dest: "{{ redis.conf_dir }}/redis.conf" + owner: "{{ redis.user }}" + group: "{{ redis.group }}" + mode: "0644" + notify: "Restart redis container" + +- name: "Create redis container" + community.docker.docker_container: + name: "redis" + image: "{{ redis.image }}" + restart_policy: "always" + state: "started" + user: "{{ redis_user_uid }}:{{ redis_group_gid }}" + command: | + redis-server /usr/local/etc/redis/redis.conf + volumes: + - "{{ redis.conf_dir }}:/usr/local/etc/redis" + - "{{ redis.data_dir }}:/data" + networks: + - name: loadbalancer diff --git a/roles/redis/templates/redis.conf.j2 b/roles/redis/templates/redis.conf.j2 new file mode 100644 index 000000000..ba231dc58 --- /dev/null +++ b/roles/redis/templates/redis.conf.j2 @@ -0,0 +1,3 @@ +user {{redis.redis_user}} on +@all ~* &* >{{redis.redis_password}} +maxmemory {{ redis.max_memory }} +maxmemory-policy allkeys-lru diff --git a/roles/redis/vars/main.yml b/roles/redis/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/redis/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml new file mode 100644 index 000000000..f21addf5f --- /dev/null +++ b/roles/sbs/defaults/main.yml @@ -0,0 +1,159 @@ +--- +sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}" +sbs_defaults: + openidc_timeout: 86400 + sram_conf_dir: "{{ current_release_appdir }}/sram" + + work_dir: "{{ sram_conf_dir }}/sbs" + git_dir: "{{ sbs.work_dir }}/sbs" + env_dir: "{{ sbs.work_dir }}/sbs-env" + conf_dir: "{{ sbs.work_dir }}/config" + log_dir: "{{ sbs.work_dir }}/log" + cert_dir: "{{ sbs.work_dir }}/cert" + apache_conf: "{{ sbs.work_dir }}/sbs.conf" + nginx_conf: "{{ sbs.work_dir }}/nginx.conf" + + + db_name: "sbs" + db_user: "sbsrw" + dbbackup_user: "sbs_backupper" + migration_user: "sbs_migrater" + + db_connection: "\ + mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_db_name }}\ + ?ssl=true&charset=utf8mb4" + db_connection_sbs: "{{ sbs_db_connection | format(sbs_db_user, sbs_db_password) }}" + db_connection_migration: "\ + {{ sbs_db_connection | format(sbs_migration_user, sbs_migration_password) }}" + + redis_host: redis + redis_port: 6379 + redis_ssl: false + redis_user: default + + mail_host: "{{ mail.relay_to }}" + mail_port: "{{ mail.relay_port }}" + + user: "sbs" + group: "sbs" + + session_lifetime: 1440 + secret_key_suffix: "" + + oidc_crypto_password: "CHANGEME" + uid_attribute: "sub" + + disclaimer_color: "#a29c13" + disclaimer_label: wsgi + + urn_namespace: "urn:example:sbs" + eppn_scope: "sbs.example.edu" + restricted_co_default_org: "example.org" + + mail_sender_name: "SURF" + mail_sender_email: "no-reply@localhost" + exceptions_mail: "root@localhost" + + support_email: "sram-support@localhost" + admin_email: "sram-beheer@localhost" + ticket_email: "sram-support@surf.nl" + eduteams_email: "eduteams@localhost" + + wiki_link: "https://www.example.org/wiki" + + backend_port: 8080 + num_workers: 2 + + 
cron_hour_of_day: 4 + seed_allowed: True + api_keys_enabled: True + feedback_enabled: True + audit_trail_notifications_enabled: True + send_exceptions: False + send_js_exceptions: False + second_factor_authentication_required: True + totp_token_name: "SRAM-example" + notifications_enabled: True + invitation_reminders_enabled: True + invitation_expirations_enabled: True + open_requests_enabled: True + scim_sweep: False + impersonation_allowed: True + admin_platform_backdoor_totp: True + past_dates_allowed: True + mock_scim_enabled: True + log_to_stdout: True + + delete_orphaned: True + suspension_inactive_days: 365 + suspension_reminder_days: 14 + suspension_notify_admin: False + + oidc_config_url: "http://localhost/.well-known/openid-configuration" + oidc_authz_endpoint: "http://localhost/OIDC/authorization" + oidc_token_endpoint: "http://localhost/OIDC/token" + oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" + oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" + oidc_redirect_uri: "https://sbs.scz-vm.net/api/users/resume-session" + mfa_idp_allowed: false + eduteams_continue_endpoint: "https://localhost/continue" + eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" + oidc_jwt_audience: "https://localhost" + continue_eduteams_redirect_uri: "https://localhost/continue" + oidc_verify_peer: False + oidc_scopes: + - openid + + manage_base_enabled: False + manage_base_url: "https://manage.test2.surfconext.nl" + manage_sram_rp_entity_id: "sbs.test2.sram.surf.nl" + manage_verify_peer: False + + idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " + backup_dir: "{{backup_base}}/sbs" + + swagger_enabled: true + + ssid_identity_providers: [] + surf_secure_id: + environment: "unknown.example.org" + sp_entity_id: "https://sbs.{{base_domain}}" + acs_url: "https://{{base_domain}}/api/users/acs" + sa_gw_environment: "sa-gw.unknown.example.org" + sa_idp_certificate: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + priv: | + -----BEGIN RSA PRIVATE KEY----- + abcde + -----END RSA PRIVATE KEY----- + pub: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + + ssid_authncontext: "\ + http://{{ sbs.surf_secure_id.environment }}/assurance/sfo-level2" + ssid_entityid: "\ + https://{{ sbs.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" + ssid_sso_endpoint: "\ + https://{{ sbs.surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" + + mfa_sso_minutes: 10 + mfa_fallback_enabled: true + + ldap_url: "ldap://ldap.example.com/dc=example,dc=com" + ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl" + + csp_style_hashes: + - 'sha256-0+ANsgYUJdh56RK8gGvTF2vnriYqvFHfWqtA8xXa+bA=' + - 'sha256-3SnfHQolDHbZMbDAPmhrZf1keHiXfj/KJyh2phhFAAY=' + - 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' + - 'sha256-Ng6y+QCkPChG4Q49SIfXB5ToIDcDhITtQNFkDBPpCTw=' + - 'sha256-orBPipbqpMvkNi+Z+m6qEn0XS6ymmAQE6+FwCNs1FbQ=' + - 'sha256-vFt3L2qLqpJmRpcXGbYr2UVSmgSp9VCUzz2lnqWIATw=' + - 'sha256-SU3XCwbQ/8qgzoGOWCYdkwIr3xRrl5rsvdFcpw8NSiE=' # on /new-service-request + - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request + + engine_block_api_token: secret diff --git a/roles/sbs/files/yarn.gpg b/roles/sbs/files/yarn.gpg new file mode 100644 index 000000000..3e9e7d155 --- /dev/null +++ b/roles/sbs/files/yarn.gpg @@ -0,0 +1,243 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1 + +mQINBFf0j5oBEADS6cItqCbf4lOLICohq2aHqM5I1jsz3DC4ddIU5ONbKXP1t0wk 
+FEUPRzd6m80cTo7Q02Bw7enh4J6HvM5XVBSSGKENP6XAsiOZnY9nkXlcQAPFRnCn +CjEfoOPZ0cBKjn2IpIXXcC+7xh4p1yruBpOsCbT6BuzA+Nm9j4cpRjdRdWSSmdID +TyMZClmYm/NIfCPduYvNZxZXhW3QYeieP7HIonhZSHVu/jauEUyHLVsieUIvAOJI +cXYpwLlrw0yy4flHe1ORJzuA7EZ4eOWCuKf1PgowEnVSS7Qp7lksCuljtfXgWelB +XGJlAMD90mMbsNpQPF8ywQ2wjECM8Q6BGUcQuGMDBtFihobb+ufJxpUOm4uDt0y4 +zaw+MVSi+a56+zvY0VmMGVyJstldPAcUlFYBDsfC9+zpzyrAqRY+qFWOT2tj29R5 +ZNYvUUjEmA/kXPNIwmEr4oj7PVjSTUSpwoKamFFE6Bbha1bzIHpdPIRYc6cEulp3 +dTOWfp+Cniiblp9gwz3HeXOWu7npTTvJBnnyRSVtQgRnZrrtRt3oLZgmj2fpZFCE +g8VcnQOb0iFcIM7VlWL0QR4SOz36/GFyezZkGsMlJwIGjXkqGhcEHYVDpg0nMoq1 +qUvizxv4nKLanZ5jKrV2J8V09PbL+BERIi6QSeXhXQIui/HfV5wHXC6DywARAQAB +tBxZYXJuIFBhY2thZ2luZyA8eWFybkBkYW4uY3g+iQI5BBMBCAAjBQJX9I+aAhsD +BwsJCAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQFkawG4blAxB52Q/9FcyGIEK2 +QamDhookuoUGGYjIeN+huQPWmc6mLPEKS2Vahk5jnJKVtAFiaqINiUtt/1jZuhF2 +bVGITvZK79kM6lg42xQcnhypzQPgkN7GQ/ApYqeKqCh1wV43KzT/CsJ9TrI0SC34 +qYHTEXXUprAuwQitgAJNi5QMdMtauCmpK+Xtl/72aetvL8jMFElOobeGwKgfLo9+ +We2EkKhSwyiy3W5TYI1UlV+evyyT+N0pmhRUSH6sJpzDnVYYPbCWa2b+0D/PHjXi +edKcely/NvqyVGoWZ+j41wkp5Q0wK2ybURS1ajfaKt0OcMhRf9XCfeXAQvU98mEk +FlfPaq0CXsjOy8eJXDeoc1dwxjDi2YbfHel0CafjrNp6qIFG9v3JxPUU19hG9lxD +Iv7VXftvMpjJCo/J4Qk+MOv7KsabgXg1iZHmllyyH3TY4AA4VA+mlceiiOHdXbKk +Q3BfS1jdXPV+2kBfqM4oWANArlrFTqtop8PPsDNqh/6SrVsthr7WTvC5q5h/Lmxy +Krm4Laf7JJMvdisfAsBbGZcR0Xv/Vw9cf2OIEzeOWbj5xul0kHT1vHhVNrBNanfe +t79RTDGESPbqz+bTS7olHWctl6TlwxA0/qKlI/PzXfOg63Nqy15woq9buca+uTcS +ccYO5au+g4Z70IEeQHsq5SC56qDR5/FvYyu5Ag0EV/SPmgEQANDSEMBKp6ER86y+ +udfKdSLP9gOv6hPsAgCHhcvBsks+ixeX9U9KkK7vj/1q6wodKf9oEbbdykHgIIB1 +lzY1l7u7/biAtQhTjdEZPh/dt3vjogrJblUEC0rt+fZe325ociocS4Bt9I75Ttkd +nWgkE4uOBJsSllpUbqfLBfYR58zz2Rz1pkBqRTkmJFetVNYErYi2tWbeJ59GjUN7 +w1K3GhxqbMbgx4dF5+rjGs+KI9k6jkGeeQHqhDk+FU70oLVLuH2Dmi9IFjklKmGa +3BU7VpNxvDwdoV7ttRYEBcBnPOmL24Sn4Xhe2MDCqgJwwyohd9rk8neV7GtavVea +Tv6bnzi1iJRgDld51HFWG8X+y55i5cYWaiXHdHOAG1+t35QUrczm9+sgkiKSk1II +TlEFsfwRl16NTCMGzjP5kGCm/W+yyyvBMw7CkENQcd23fMsdaQ/2UNYJau2PoRH/ +m+IoRehIcmE0npKeLVTDeZNCzpmfY18T542ibK49kdjZiK6G/VyBhIbWEFVu5Ll9 ++8GbcO9ucYaaeWkFS8Hg0FZafMk59VxKiICKLZ5he/C4f0UssXdyRYU6C5BH8UTC +QLg0z8mSSL+Wb2iFVPrn39Do7Zm8ry6LBCmfCf3pI99Q/1VaLDauorooJV3rQ5kC +JEiAeqQtLOvyoXIex1VbzlRUXmElABEBAAGJAh8EGAEIAAkFAlf0j5oCGwwACgkQ +FkawG4blAxAUUQ//afD0KLHjClHsA/dFiW+5qVzI8kPMHwO1QcUjeXrB6I3SluOT +rLSPhOsoS72yAaU9hFuq8g9ecmFrl3Skp/U4DHZXioEmozyZRp7eVsaHTewlfaOb +6g7+v52ktYdomcp3BM5v/pPZCnB5rLrH2KaUWbpY6V6tqtCHbF7zftDqcBENJDXf +hiCqS19J08GZFjDEqGDrEj3YEmEXZMN7PcXEISPIz6NYI6rw4yVH8AXfQW6vpPzm +ycHwI0QsVW2NQdcZ6zZt+phm6shNUbN2iDdg3BJICmIvQf8qhO3bOh0Bwc11FLHu +MKuGVxnWN82HyIsuUB7WDLBHEOtg61Zf1nAF1PQK52YuQz3EWI4LL9OqVqfSTY1J +jqIfj+u1PY2UHrxZfxlz1M8pXb1grozjKQ5aNqBKRrcMZNx71itR5rv18qGjGR2i +Sciu/xah7zAroEQrx72IjYt03tbk/007CvUlUqFIFB8kY1bbfX8JAA+TxelUniUR +2CY8eom5HnaPpKE3kGXZ0jWkudbWb7uuWcW1FE/bO+VtexpBL3SoXmwbVMGnJIEi +Uvy8m6ez0kzLXzJ/4K4b8bDO4NjFX2ocKdzLA89Z95KcZUxEG0O7kaDCu0x3BEge +uArJLecD5je2/2HXAdvkOAOUi6Gc/LiJrtInc0vUFsdqWCUK5Ao/MKvdMFW5Ag0E +V/SP2AEQALRcYv/hiv1n3VYuJbFnEfMkGwkdBYLGo3hiHKY8xrsFVePl9SkL8aqd +C310KUFNI42gGY/lz54RUHOqfMszTdafFrmwU18ECWGo4oG9qEutIKG7fkxcvk2M +tgsOMZFJqVDS1a9I4QTIkv1ellLBhVub9S7vhe/0jDjXs9IyOBpYQrpCXAm6SypC +fpqkDJ4qt/yFheATcm3s8ZVTsk2hiz2jnbqfvpte3hr3XArDjZXr3mGAp3YY9JFT +zVBOhyhT/92e6tURz8a/+IrMJzhSyIDel9L+2sHHo9E+fA3/h3lg2mo6EZmRTuvE +v9GXf5xeP5lSCDwS6YBXevJ8OSPlocC8Qm8ziww6dy/23XTxPg4YTkdf42i7VOpS +pa7EvBGne8YrmUzfbrxyAArK05lo56ZWb9ROgTnqM62wfvrCbEqSHidN3WQQEhMH +N7vtXeDPhAd8vaDhYBk4A/yWXIwgIbMczYf7Pl7oY3bXlQHb0KW/y7N3OZCr5mPW 
+94VLLH/v+T5R4DXaqTWeWtDGXLih7uXrG9vdlyrULEW+FDSpexKFUQe83a+Vkp6x +GX7FdMC9tNKYnPeRYqPF9UQEJg+MSbfkHSAJgky+bbacz+eqacLXMNCEk2LXFV1B +66u2EvSkGZiH7+6BNOar84I3qJrU7LBD7TmKBDHtnRr9JXrAxee3ABEBAAGJBEQE +GAEIAA8FAlf0j9gCGwIFCQHhM4ACKQkQFkawG4blAxDBXSAEGQEIAAYFAlf0j9gA +CgkQ0QH3iZ1B88PaoA//VuGdF5sjxRIOAOYqXypOD9/Kd7lYyxmtCwnvKdM7f8O5 +iD8oR2Pk1RhYHjpkfMRVjMkaLfxIRXfGQsWfKN2Zsa4zmTuNy7H6X26XW3rkFWpm +dECz1siGRvcpL6NvwLPIPQe7tST72q03u1H7bcyLGk0sTppgMoBND7yuaBTBZkAO +WizR+13x7FV+Y2j430Ft/DOe/NTc9dAlp6WmF5baOZClULfFzCTf9OcS2+bo68oP +gwWwnciJHSSLm6WRjsgoDxo5f3xBJs0ELKCr4jMwpSOTYqbDgEYOQTmHKkX8ZeQA +7mokc9guA0WK+DiGZis85lU95mneyJ2RuYcz6/VDwvT84ooe1swVkC2palDqBMwg +jZSTzbcUVqZRRnSDCe9jtpvF48WK4ZRiqtGO6Avzg1ZwMmWSr0zHQrLrUMTq/62W +KxLyj2oPxgptRg589hIwXVxJRWQjFijvK/xSjRMLgg73aNTq6Ojh98iyKAQ3HfzW +6iXBLLuGfvxflFednUSdWorr38MspcFvjFBOly+NDSjPHamNQ2h19iHLrYT7t4ve +nU9PvC+ORvXGxTN8mQR9btSdienQ8bBuU/mg/c417w6WbY7tkkqHqUuQC9LoaVdC +QFeE/SKGNe+wWN/EKi0QhXR9+UgWA41Gddi83Bk5deuTwbUeYkMDeUlOq3yyemcG +VxAA0PSktXnJgUj63+cdXu7ustVqzMjVJySCKSBtwJOge5aayonCNxz7KwoPO34m +Gdr9P4iJfc9kjawNV79aQ5aUH9uU2qFlbZOdO8pHOTjy4E+J0wbJb3VtzCJc1Eaa +83kZLFtJ45Fv2WQQ2Nv3Fo+yqAtkOkaBZv9Yq0UTaDkSYE9MMzHDVFx11TT21NZD +xu2QiIiqBcZfqJtIFHN5jONjwPG08xLAQKfUNROzclZ1h4XYUT+TWouopmpNeay5 +JSNcp5LsC2Rn0jSFuZGPJ1rBwB9vSFVA/GvOj8qEdfhjN3XbqPLVdOeChKuhlK0/ +sOLZZG91SHmT5SjP2zM6QKKSwNgHX4xZt4uugSZiY13+XqnrOGO9zRH8uumhsQmI +eFEdT27fsXTDTkWPI2zlHTltQjH1iebqqM9gfa2KUt671WyoL1yLhWrgePvDE+He +r002OslvvW6aAIIBki3FntPDqdIH89EEB4UEGqiA1eIZ6hGaQfinC7/IOkkm/mEa +qdeoI6NRS521/yf7i34NNj3IaL+rZQFbVWdbTEzAPtAs+bMJOHQXSGZeUUFrEQ/J +ael6aNg7mlr7cacmDwZWYLoCfY4w9GW6JHi6i63np8EA34CXecfor7cAX4XfaokB +XjyEkrnfV6OWYS7f01JJOcqYANhndxz1Ph8bxoRPelf5q+W5Ag0EWBU7dwEQAL1p +wH4prFMFMNV7MJPAwEug0Mxf3OsTBtCBnBYNvgFB+SFwKQLyDXUujuGQudjqQPCz +/09MOJPwGCOi0uA0BQScJ5JAfOq33qXi1iXCj9akeCfZXCOWtG3Izc3ofS6uee7K +fWUF1hNyA3PUwpRtM2pll+sQEO3y/EN7xYGUOM0mlCawrYGtxSNMlWBlMk/y5HK9 +upz+iHwUaEJ4PjV+P4YmDq0PnPvXE4qhTIvxx0kO5oZF0tAJCoTg1HE7o99/xq9Z +rejDR1JJj6btNw1YFQsRDLxRZv4rL9He10lmLhiQE8QN7zOWzyJbRP++tWY2d2zE +yFzvsOsGPbBqLDNkbb9d8Bfvp+udG13sHAEtRzI2UWe5SEdVHobAgu5l+m10WlsN +TG/L0gJe1eD1bwceWlnSrbqw+y+pam9YKWqdu18ETN6CeAbNo4w7honRkcRdZyoG +p9zZf3o1bGBBMla6RbLuJBoRDOy2Ql7B+Z87N0td6KlHI6X8fNbatbtsXR7qLUBP +5oRb6nXX4+DnTMDbvFpE2zxnkg+C354Tw5ysyHhM6abB2+zCXcZ3holeyxC+BUrO +gGPyLH/s01mg2zmttwC1UbkaGkQ6SwCoQoFEVq9Dp96B6PgZxhEw0GMrKRw53LoX +4rZif9Exv6qUFsGY8U9daEdDPF5UHYe7t/nPpfW3ABEBAAGJBD4EGAEIAAkFAlgV +O3cCGwICKQkQFkawG4blAxDBXSAEGQEIAAYFAlgVO3cACgkQRsITDf0kl/VynQ/+ +P3Vksu4fno26vA7ml9bzV3mu/X/gzU1HqySqYv9Zwzk2o512Z4QkoT/8lRepIG7v +AFRQzPn56Pz/vpMfiMDaf6thxs8wpv4y3m+rcQIQKO4sN3wwFPPbvM8wGoY6fGav +IkLKKIXy1BpzRGltGduf0c29+ycvzccQpyuTrZk4Zl73kLyBS8fCt+MZWejMMolD +uuLJiHbXci6+Pdi3ImabyStbNnJYmSyruNHcLHlgIbyugTiAcdTy0Bi/z8MfeYwj +VAwEkX4b2NwtuweYLzupBOTv0SqYCmBduZObkS5LHMZ+5Yh9Hfrd04uMdO5cIiy0 +AsGehTRC3Xyaea7Qk993rNcGEzX7LNB1GB2BXSq9FYPb+q0ewf8k8Lr9E0WG0dvD +OaJSkSGedgdA1QzvTgpAAkVWsXlksShVf4NVskxNUGDRaPLeRB+IV/5jO+kRsFuO +g5Tlkn6cgu1+Bn5gIfv0ny9K7TeC697gRQIcK8db1t8XidgSKbRmsSYEaRCy3c9x +w2/N7DLU/Js3gV8FUd7cZpaYN+k/erMdyfqLA7oFd+HLbA5Du/971yF8/6Bof8zp +jB9+QPRIARpcROEcQXz09dtl8wW8M0r09xpna+0Jk6JxF+stD97+hzikQXIxUtCX +j35ps9USSxv1cuz0MaFdWGW13OugtN4bQ2DNgelbTDUEKg//YTbBl9oGYQxHv9S5 +qvZVNvV3DuI18E5VW5ddyo/JfW24+Tukli/ZjPQYnMOP86nnIqo/LPGb4nV1uWL4 +KhmOCbH7t43+TkAwdwoxLjYP7iOqQp9VRPFjomUfvtmLjHp4r3cVEt5QeJEZLiSC +zSKMjPKqRMo5nNs3Et+/FyWCMRYdSggwhBfkbKKo44H9pmL3bTLqyir7EJAcArla +zjKMyZqRsK3gZfQgoASN5xAhemVWHnnecVSAqrOW599EBkc7Kf6lXjTVHtHN02vX 
+YYRZ16zrEjrfwb23LR+lAxSfWxLDovKLBg2SPbpduEv1GxyEFgF7v9fco4aQbuh/ +fOGvA8nuXkC5nI6ukw4c4zwmJ5+SNQthFUYKWLd4hR4qrCoJkMEWZmsCRtqxjVCJ +/i9ygRJHOGAWaam7bS+U7pdmq2mgF+qTxb2vX6mSzI3q3M7drGUA3EdaZo1hPA5u +kWi7tMCGqPQmtUFRnUvHPzCDuXLYT8lRxhTxDi3T5MXdIUlAUTcNpwG8Ill0xkGc +pMlh0D5p44GEdMFfJiXw6AUETHcqC2qZr2rP9kpzvVlapIrsPRg/DU+s70YnccI3 +iMCVm4/WrghFeK232zkjiwRVOm+IEWBlDFrm4MMjfguUeneYbK9WhqJnss9nc4QK +Vhzuyn3GTtg1w/T6CaYVXBjcHFmJBEQEGAEIAA8CGwIFAlokZSMFCQQWmKMCKcFd +IAQZAQgABgUCWBU7dwAKCRBGwhMN/SSX9XKdD/4/dWSy7h+ejbq8DuaX1vNXea79 +f+DNTUerJKpi/1nDOTajnXZnhCShP/yVF6kgbu8AVFDM+fno/P++kx+IwNp/q2HG +zzCm/jLeb6txAhAo7iw3fDAU89u8zzAahjp8Zq8iQsoohfLUGnNEaW0Z25/Rzb37 +Jy/NxxCnK5OtmThmXveQvIFLx8K34xlZ6MwyiUO64smIdtdyLr492LciZpvJK1s2 +cliZLKu40dwseWAhvK6BOIBx1PLQGL/Pwx95jCNUDASRfhvY3C27B5gvO6kE5O/R +KpgKYF25k5uRLkscxn7liH0d+t3Ti4x07lwiLLQCwZ6FNELdfJp5rtCT33es1wYT +Nfss0HUYHYFdKr0Vg9v6rR7B/yTwuv0TRYbR28M5olKRIZ52B0DVDO9OCkACRVax +eWSxKFV/g1WyTE1QYNFo8t5EH4hX/mM76RGwW46DlOWSfpyC7X4GfmAh+/SfL0rt +N4Lr3uBFAhwrx1vW3xeJ2BIptGaxJgRpELLdz3HDb83sMtT8mzeBXwVR3txmlpg3 +6T96sx3J+osDugV34ctsDkO7/3vXIXz/oGh/zOmMH35A9EgBGlxE4RxBfPT122Xz +BbwzSvT3Gmdr7QmTonEX6y0P3v6HOKRBcjFS0JePfmmz1RJLG/Vy7PQxoV1YZbXc +66C03htDYM2B6VtMNQkQFkawG4blAxCiVRAAhq/1L5YlsmItiC6MROtPP+lfAWRm +MSkoIuAtzkV/orqPetwWzjYLgApOvVXBuf9FdJ5vAx1IXG3mDx6mQQWkr4t9onwC +UuQ7lE29qmvCHB3FpKVJPKiGC6xK38t5dGAJtbUMZBQb1vDuQ7new8dVLzBSH1VZ +7gx9AT+WEptWznb1US1AbejO0uT8jsVc/McK4R3LQmVy9+hbTYZFz1zCImuv9SCN +ZPSdLpDe41QxcMfKiW7XU4rshJULKd4HYG92KjeJU80zgCyppOm85ENiMz91tPT7 ++A4O7XMlOaJEH8t/2SZGBE/dmHjSKcWIpJYrIZKXTrNv7rSQGvweNG5alvCAvnrL +J2cRpU1Rziw7auEU1YiSse+hQ1ZBIzWhPMunIdnkL/BJunBTVE7hPMMG7alOLy5Z +0ikNytVewasZlm/dj5tEsfvF7tisVTZWVjWCvEMTP5fecNMEAwbZdBDyQBAN00y7 +xp4Pwc/kPLuaqESyTTt8jGek/pe7/+6fu0GQmR2gZKGagAxeZEvXWrxSJp/q81XS +QGcO6QYMff7VexY3ncdjSVLro+Z3ZtYt6aVIGAEEA5UE341yCGIeN+nr27CXD4fH +F28aPh+AJzYh+uVjQhHbL8agwcyCMLgU88u1U0tT5Qtjwnw+w+3UNhROvn495REp +eEwD60iVeiuF5FW5Ag0EWbWWowEQALCiEk5Ic40W7/v5hqYNjrRlxTE/1axOhhzt +8eCB7eOeNOMQKwabYxqBceNmol/guzlnFqLtbaA6yZQkzz/K3eNwWQg7CfXO3+p/ +dN0HtktPfdCk+kY/t7StKRjINW6S9xk9KshiukmdiDq8JKS0HgxqphBB3tDjmo6/ +RiaOEFMoUlXKSU+BYYpBpLKg53P8F/8nIsK2aZJyk8XuBd0UXKI+N1gfCfzoDWnY +Hs73LQKcjrTaZQauT81J7+TeWoLI28vkVxyjvTXAyjSBnhxTYfwUNGSoawEXyJ1u +KCwhIpklxcCMI9Hykg7sKNsvmJ4uNcRJ7cSRfb0g5DR9dLhR+eEvFd+o4PblKk16 +AI48N8Zg1dLlJuV2cAtl0oBPk+tnbZukvkS5n1IzTSmiiPIXvK2t506VtfFEw4iZ +rJWf2Q9//TszBM3r1FPATLH7EAeG5P8RV+ri7L7NvzP6ZQClRDUsxeimCSe8v/t0 +OpheCVMlM9TpVcKGMw8ig/WEodoLOP4iqBs4BKR7fuydjDqbU0k/sdJTltp7IIdK +1e49POIQ7pt+SUrsq/HnPW4woLC1WjouBWyr2M7/a0SldPidZ2BUAK7O9oXosidZ +MJT7dBp3eHrspY4bdkSxsd0nshj0ndtqNktxkrSFRkoFpMz0J/M3Q93CjdHuTLpT +HQEWjm/7ABEBAAGJBEQEGAEIAA8FAlm1lqMCGwIFCQJ2LQACKQkQFkawG4blAxDB +XSAEGQEIAAYFAlm1lqMACgkQ4HTRbrb/TeMpDQ//eOIsCWY2gYOGACw42JzMVvuT +DrgRT4hMhgHCGeKzn1wFL1EsbSQV4Z6pYvnNayuEakgIz14wf4UFs5u1ehfBwatm +akSQJn32ANcAvI0INAkLEoqqy81mROjMc9FFrOkdqjcN7yN0BzH9jNYL/gsvmOOw +Ou+dIH3C1Lgei844ZR1BZK1900mohuRwcji0sdROMcrKrGjqd4yb6f7yl0wbdAxA +3IHT3TFGczC7Y41P2OEpaJeVIZZgxkgQsJ14qK/QGpdKvmZAQpjHBipeO/H+qxyO +T5Y+f15VLWGOOVL090+ZdtF7h3m4X2+L7xWsFIgdOprfO60gq3e79YFfgNBYU5BG +tJGFGlJ0sGtnpzx5QCRka0j/1E5lIu00sW3WfGItFd48hW6wHCloyoi7pBR7xqSE +oU/U5o7+nC8wHFrDYyqcyO9Q3mZDw4LvlgnyMOM+qLv/fNgO9USE4T30eSvc0t/5 +p1hCKNvyxHFghdRSJqn70bm6MQY+kd6+B/k62Oy8eCwRt4PR+LQEIPnxN7xGuNpV +O1oMyhhO41osYruMrodzw81icBRKYFlSuDOQ5jlcSajc6TvF22y+VXy7nx1q/CN4 +tzB/ryUASU+vXS8/QNM6qI/QbbgBy7VtHqDbs2KHp4cP0j9KYQzMrKwtRwfHqVrw +FLkCp61EHwSlPsEFiglpMg/8DQ92O4beY0n7eSrilwEdJg89IeepTBm1QYiLM33q 
+WLR9CABYAIiDG7qxviHozVfX6kUwbkntVpyHAXSbWrM3kD6jPs3u/dimLKVyd29A +VrBSn9FC04EjtDWsj1KB7HrFN4oo9o0JLSnXeJb8FnPf3MitaKltvj/kZhegozIs ++zvpzuri0LvoB4fNA0T4eAmxkGkZBB+mjNCrUHIakyPZVzWGL0QGsfK1Q9jvw0OE +rqHJYX8A1wLre/HkBne+e5ezS6Mc7kFW33Y1arfbHFNAe12juPsOxqK76qNilUbQ +pPtNvWP3FTpbkAdodMLq/gQ+M5yHwPe8SkpZ8wYCfcwEemz/P+4QhQB8tbYbpcPx +J+aQjVjcHpsLdrlSY3JL/gqockR7+97GrCzqXbgvsqiWr16Zyn6mxYWEHn9HXMh3 +b+2IYKFFXHffbIBq/mfibDnZtQBrZpn2uyh6F2ZuOsZh0LTD7RL53KV3fi90nS00 +Gs1kbMkPycL1JLqvYQDpllE2oZ1dKDYkwivGyDQhRNfERL6JkjyiSxfZ2c84r2HP +gnJTi/WBplloQkM+2NfXrBo6kLHSC6aBndRKk2UmUhrUluGcQUyfzYRFH5kVueIY +fDaBPus9gb+sjnViFRpqVjefwlXSJEDHWP3Cl2cuo2mJjeDghj400U6pjSUW3bIC +/PK5Ag0EXCxEEQEQAKVjsdljwPDGO+48879LDa1d7GEu/Jm9HRK6INCQiSiS/0mH +keKa6t4DRgCY2ID9lFiegx2Er+sIgL0chs16XJrFO21ukw+bkBdm2HYUKSsUFmr/ +bms8DkmAM699vRYVUAzO9eXG/g8lVrAzlb3RT7eGHYKd15DT5KxXDQB+T+mWE9qD +5RJwEyPjSU+4WjYF+Rr9gbSuAt5UySUb9jTR5HRNj9wtb4YutfP9jbfqy8esQVG9 +R/hpWKb2laxvn8Qc2Xj93qNIkBt/SILfx9WDJl0wNUmu+zUwpiC2wrLFTgNOpq7g +9wRPtg5mi8MXExWwSF2DlD54yxOOAvdVACJFBXEcstQ3SWg8gxljG8eLMpDjwoIB +ax3DZwiYZjkjJPeydSulh8vKoFBCQkf2PcImXdOk2HqOV1L7FROM6fKydeSLJbx1 +7SNjVdQnq1OsyqSO0catAFNptMHBsN+tiCI29gpGegaoumV9cnND69aYvyPBgvdt +mzPChjSmc6rzW1yXCJDm2qzwm/BcwJNXW5B3EUPxc0qSWste9fUna0G4l/WMuaIz +VkuTgXf1/r9HeQbjtxAztxH0d0VgdHAWPDkUYmztcZ4sd0PWkVa18qSrOvyhI96g +CzdvMRLX17m1kPvP5PlPulvqizjDs8BScqeSzGgSbbQVm5Tx4w2uF4/n3FBnABEB +AAGJBEQEGAECAA8FAlwsRBECGwIFCQIKEgACKQkQFkawG4blAxDBXSAEGQECAAYF +AlwsRBEACgkQI+cWZ4i2Ph6B0g//cPis3v2M6XvAbVoM3GIMXnsVj1WAHuwA/ja7 +UfZJ9+kV/PiMLkAbW0fBj0/y0O3Ry12VVQGXhC+Vo4j6C8qwFP4OXa6EsxHXuvWM +IztBaX1Kav613aXBtxp6tTrud0FFUh4sDc1RREb3tMr6y5cvFJgnrdWcX1gsl6OD +cgWBGNc6ZX7H7j48hMR6KmNeZocW7p8W+BgDQJqXYwVNL15qOHzVAh0dWsFLE9gw +BTmDCY03x9arxSNDGCXyxt6E77LbNVIoSRlEbkvi6j33nEbuERICYl6CltXQCyiV +KjheJcLMjbgv5+bLCv2zfeJ/WyOmOGKpHRu+lBV1GvliRxUblVlmjWPhYPBZXGyj +II16Tqr+ilREcZFW+STccbrVct75JWLbxwlEmix+W1HwSRCR+KHx3Cur4ZPMOBlP +sFilOOsNa7ROUB56t7zv21Ef3BeeaCd9c4kzNGN8d1icEqSXoWWPqgST0LZPtZyq +WZVnWrHChVHfrioxhSnw8O3wY1A2GSahiCSvvjvOeEoJyU21ZMw6AVyHCh6v42oY +adBfGgFwNo5OCMhNxNy/CcUrBSDqyLVTM5QlNsT75Ys7kHHnc+Jk+xx4JpiyNCz5 +LzcPhlwpqnJQcjJdY1hDhK75Ormj/NfCMeZ8g1aVPX4xEq8AMyZYhZ5/lmM+13Rd +v8ZW6FK7HQ/+IAKzntxOjw0MzCXkksKdmIOZ2bLeOVI8aSLaUmoT5CLuoia9g7iF +HlYrSY+01riRrAaPtYx0x8onfyVxL9dlW/Fv5+qc1fF5FxdhyIgdqgzm82TnXHu/ +haUxYmUvNrbsmmNl5UTTOf+YQHMccKFdYfZ2rCBtbN2niXG1tuz2+k83pozu4mJ1 +rOOLNAsQoY3yR6OODte1FyOgp7blwDhTIoQb8/UiJ7CMBI3OPrfoXFAnhYoxeRSA +N4UFu9/HIkqfaQgRPCZS1gNerWF6r6yz9AZWUZqjSJssjBqXCtK9bGbTYBZk+pw3 +H9Nd0RJ2WJ9qPqmlmUr1wdqct0ChsJx1xAT86QrssicJ/HFFmF45hlnGkHUBWLaV +Jt8YkLb/DqOIbVbwyCLQtJ80VQLEeupfmu5QNsTpntRYNKf8cr00uc8vSYXYFRxa +5H5oRT1eoFEEjDDvokNnHXfT+Hya44IjYpzaqvAgeDp6sYlOdtWIv/V3s+trxACw +TkRN7zw3lLTbT8PK9szK0fYZ5KHG1/AKH+mbZ6qNc/25PNbAFRtttLGuEIC3HJ12 +IAp2JdjioeD2OnWLu4ZeCT2CKKFsleZPrSyCrn3gyZPmfYvv5h2JbQNO6uweOrZE +NWX5SU43OBoplbuKJZsMP6p6NahuGnIeJLlv509JYAf/HN4ARyvvOpO5Ag0EXDf1 +bwEQAKBByJMoxQ7H6AsQP29qjY8/pfDiNloQDHasUXoOyTfUetam3rY/UWCHFrMD +0jvOHNIqEVJPsSWrxBYf+i4NNECsCSj39JHdVLOkn6pJcRnMzmljS8ojOybYRUTT +KdKlV+jYy6hqAjTvnf/pzZOrNseKyxAo/xETphN2UEBKOZwV5j5YV6VXptt6xn1x +EL1wzahZr6qz/gXn5//mg6aPPUCJt7BPBtC34HGoyHUn4Cx/jSU7zlQLV11VyTyt +/TY69Wgc1k21oS0tm44uw8D+4bIXYewxNq0utt75c75JK5rPKCpIkaSgE3YUPAhM +fpoUxSgo+hrTaocLbQm3/fDfRqYhw9IWrOuWLYEEI5NqS0etq2X+nM2oEXymxUM1 +45dicUv27B1YU5IciRaoA3Bwkl3uyvLhkwBNgJGpBoRsgyWKhlUpdMOSAFPHag0D +HNCKbFTGxZOJ1+BoDsIscK864AodI0YvhMFByWGRwQMszQpK/vg9uUdIMDYTzI0i +nvCrOht4R91z/2VZXHlv4D38UYsVE5P6u7N8T6T4SzERBKSktWhnJmMRJK5FQQwM 
+zWCnSj9TGMC5+JYeMjRV1pUwpZw8iOlDg0x8LfMQ3XbZ0/bvlPsXOjiYmHAjrLZf +qL0vR5jPyrfVUxF/XHJBBC9SEvvXrEDK+G+V9NmNavUNrhLnABEBAAGJBEQEGAEC +AA8FAlw39W8CGwIFCQH+NIACKQkQFkawG4blAxDBXSAEGQECAAYFAlw39W8ACgkQ +T3dnk2lHW6p0eg/+K2JJu1RbTSLJPFYQhLcxX+5d2unkuNLIy3kArtZuB992E2Fw +00okPGtuPdSyk2ygh4DeYnwmabIWChi7LDp+YnqcI4GfMxNG6RsHs+A/77rLBST3 +BB1sejZppmKCQZDSC2pvYaZBpS80UvftCZ9RFdY+kTC22Btn/5ekiQOfIqhUH9Cy +GWS/YlGciomVIVn1hSPN8l4EpBCDtceRaephvzjQIZT3AxOfSlpwJviYjAOkSX4q +WyIjC5Ke5kfEOldUuBN1JGAm45tKlrz/LD/+VOc2IWpbkOIAVSldUgpRyiIJQAZ8 +0trNxrJI7ncaID8lAa7pBptJiL0KorRjk3c6Y7p830Nwe0J5e5+W1RzN4wlR8+9u +uRyP8Mcwz/Hz2jwMiv38Vk4tAOe4PYNZuDnpjZ28yCpF3UUgvzjarubFAcg2jd8S +auCQFlmOfvT+1qIMSeLmWBOdlzJTUpJRcZqnkEE4WtiMSlxyWVFvUwOmKSGi8CLo +GW1Ksh9thQ9zKhvVUiVoKn4Z79HXr4pX6rnp+mweJ2dEZtlqD7HxjVTlCHn9fzCl +t/Nt0h721fJbS587AC/ZMgg5GV+GKu6Mij0sPAowUJVCIwN9uK/GHICZEAoMSngP +8xzKnhU5FD38vwBvsqbKxTtICrv2NuwnQ0WBBQ58w5mv2RCMr2W6iegSKIDjwxAA +hDpCw0dlUOodY4omJB19Ra9zIZO5IGxT2+oksks3uWkT/l+I7FY0+YNtIZnC01Ge +RJxJtuDwQXigYEKn1UEJ7ymBKrAdCEY0OC344AffLx81aOYWbbW7XaO6rZn8nyZu +0oC95dGlQQdWYJBLcTwANx50iQQGkR5a+XF87yVciFm6x5Cf78pzJ5OBvN3qLJzN +4YBftPMKIgbozGm6/3I6DDT0SMeCOhamshoBf7Ksqd6N+XUjRHZr7UwprWDJlhSC +XFF1e6tjlf22NwZ9UH29VswFkepT99tfBFpobjbzfABO0YnAj72WcR2ZKP7oYHf7 +EkhI2ssWQ9PRPTwdOSXZDEH0s4cJqO+ZzRoAPE+3hbHlGukAqZiiHRlNpOvPdO6Q +mgVBRsURs5i+4vylfat59HUtzQWbTF1bnZbMlefttb5CHRJNb3PTuxHR562Uzp9/ +/SZfDhAx7SYgwRF+FANWJsvX+I7CbP4qvOzutvIYTsNchbCxrOl+0PxMxWaYZzVb +ZW45mO0LFUNCFqcnr3Sot5e9n0C0vjKBV9XgICHKKgeHaMwOMirb1MKvvMpJ3+NI +BYZJ6d+LyhFXL0xJXccUnEXsmk2h4SBEEZYIhAk9ntRmzOXhXFLAOS8agWlmvYwh +xeeb76cVOYlpLw1utXV9hbuo+oM109vMs73mpF88g4g= +=oMDY +-----END PGP PUBLIC KEY BLOCK----- diff --git a/roles/sbs/handlers/main.yml b/roles/sbs/handlers/main.yml new file mode 100644 index 000000000..012cba535 --- /dev/null +++ b/roles/sbs/handlers/main.yml @@ -0,0 +1,9 @@ +--- +- name: Restart sbs containers + community.docker.docker_container: + name: "{{ item }}" + state: started + restart: true + loop: + - sbs + - sbs_server diff --git a/roles/sbs/tasks/main.yml b/roles/sbs/tasks/main.yml new file mode 100644 index 000000000..8a52adc4e --- /dev/null +++ b/roles/sbs/tasks/main.yml @@ -0,0 +1,171 @@ +--- +- name: "Create SBS group" + group: + name: "{{ sbs.group }}" + state: "present" + register: "result" + +- name: "Save SBS group gid" + set_fact: + sbs_group_gid: "{{ result.gid }}" + +- name: "Create SBS user" + user: + name: "{{ sbs.user }}" + group: "{{ sbs.group }}" + comment: "User to run SBS service" + shell: "/bin/false" + password: "!" 
+ home: "{{ sbs.conf_dir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save sbs user uid" + set_fact: + sbs_user_uid: "{{ result.uid }}" + +- name: "Create directories" + file: + path: "{{item.path}}" + state: "directory" + owner: "root" + group: "{{sbs_group_gid}}" + mode: "{{item.mode}}" + with_items: + - { path: "{{sbs.work_dir}}", mode: "0755" } + - { path: "{{sbs.conf_dir}}", mode: "0755" } + - { path: "{{sbs.conf_dir}}/saml", mode: "0755" } + - { path: "{{sbs.log_dir}}", mode: "0775" } + - { path: "{{sbs.cert_dir}}", mode: "0755" } + +- name: "Fix file permissions" + file: + path: "{{sbs.log_dir}}/{{item}}" + owner: "root" + group: "{{sbs_group_gid}}" + mode: "0664" + state: "touch" + modification_time: "preserve" + access_time: "preserve" + with_items: + - "sbs.log" + - "sbs_debug.log" + +- name: "Copy wildcard backend cert" + copy: + content: "{{wildcard_backend_cert.pub}}" + dest: "{{sbs.cert_dir}}/backend.crt" + owner: "root" + group: "root" + mode: "0644" + notify: "Restart sbs containers" + +- name: "Copy https cert" + copy: + content: "{{https_cert.cert}}" + dest: "{{sbs.cert_dir}}/frontend.crt" + owner: "root" + group: "root" + mode: "0644" + notify: "Restart sbs containers" + +- name: "Install database certificate" + copy: + dest: "{{sbs.db_cert_path}}" + content: "{{ sbs.db_tls_cert }}" + owner: "root" + group: "root" + mode: "0644" + +- name: "Create SBS config files" + template: + src: "{{item.name}}.j2" + dest: "{{ sbs.conf_dir }}/{{item.name}}" + owner: "root" + group: "{{sbs_group_gid}}" + mode: "{{item.mode}}" + with_items: + - { name: "config.yml", mode: "0644" } + - { name: "alembic.ini", mode: "0644" } + - { name: "disclaimer.css", mode: "0644" } + - { name: "sbs-apache.conf", mode: "0644" } + no_log: "{{sram_ansible_nolog}}" + notify: "Restart sbs containers" + +- name: "Run SBS migrations" + throttle: 1 + community.docker.docker_container: + name: "sbs_migration" + image: "{{ sbs.server_image }}" + pull: "never" + state: "started" + restart_policy: "no" + detach: false + env: + RUNAS_UID: "{{ sbs_user_uid | string }}" + RUNAS_GID: "{{ sbs_group_gid | string }}" + CONFIG: "/opt/sbs/server/config/config.yml" + MIGRATIONS_ONLY: "1" + # don't actually run the server + command: "/bin/true" + volumes: + - "{{ sbs.conf_dir }}:/sbs-config" + - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.log_dir }}:/opt/sbs/log" + networks: + - name: "{{internal_network}}" + register: "result" + failed_when: "'container' not in result or result.container.State.ExitCode != 0" + changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" + notify: "Restart sbs containers" + +# Remove the migration container; we can't do that with auto_remove, because if we use that, ansible +# will not save the output in result +- name: "Remove migration container" + community.docker.docker_container: + name: "sbs_migration" + state: "absent" + # TODO: fix this by only running this if "sbs_image is changed" + changed_when: false + +- name: "Start sbs container" + community.docker.docker_container: + name: "sbs" + image: "{{ sbs.image }}" + pull: "never" + restart_policy: "always" + state: "started" + env: + RUN_MIGRATIONS: "0" + volumes: + - "{{ sbs.conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" + networks: + - name: loadbalancer + labels: + traefik.enable: "true" + traefik.docker.network: "{{traefik_network}}" + traefik.http.routers.sbs.rule: "Host(`{{ sbs.base_domain }}`)" + traefik.http.routers.sbs.tls: "true" + +- 
name: "Start SBS server container" + community.docker.docker_container: + name: "sbs_server" + image: "{{ sbs.server_image }}" + restart_policy: "always" + state: "started" + env: + RUNAS_UID: "{{ sbs_user_uid | string }}" + RUNAS_GID: "{{ sbs_group_gid | string }}" + CONFIG: "/opt/sbs/server/config/config.yml" + REQUESTS_CA_BUNDLE: "/etc/ssl/certs/ca-certificates.crt" + RUN_MIGRATIONS: "0" + pull: "always" + command: "/usr/local/bin/gunicorn --preload --worker-class eventlet --workers 8 --bind 0.0.0.0:8080 server.__main__:app" + volumes: + - "{{ sbs.conf_dir }}:/sbs-config" + - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.log_dir }}:/opt/sbs/log" + - "/tmp/ci-runner:/tmp/ci-runner" + networks: + - name: loadbalander diff --git a/roles/sbs/templates/alembic.ini.j2 b/roles/sbs/templates/alembic.ini.j2 new file mode 100644 index 000000000..7849e4f89 --- /dev/null +++ b/roles/sbs/templates/alembic.ini.j2 @@ -0,0 +1,72 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = {{ sbs_db_connection_migration }} + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = NOTSET +handlers = console + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = DEBUG +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 new file mode 100644 index 000000000..11f029af7 --- /dev/null +++ b/roles/sbs/templates/config.yml.j2 @@ -0,0 +1,264 @@ +--- +database: + uri: {{ sbs.db_connection_sbs }} + +redis: +{% if environment_name == 'tst2' %} + uri: "redis://{{redis_vhost}}/" +{% else %} + uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" +{% endif %} + +# add a per-release suffix here to invalidate sessions on new releases +secret_key: {{ sbs.db_secret }}{{sbs.secret_key_suffix}} +# Must be a base64 encoded key of 128, 192, or 256 bits. 
+# Generate: base64.b64encode(os.urandom(256 // 8)).decode() +encryption_key: {{ sbs.encryption_key }} + +# Lifetime of session in minutes (one day is 60 * 24) +permanent_session_lifetime: {{ sbs.session_lifetime }} + +logging: + log_to_stdout: {{ sbs.log_to_stdout }} + +# Valid scopes are "READ" and "WRITE" +api_users: +{% for name, user in sbs.api_users.items() %} + - name: "{{ name }}" + password: "{{ user.password }}" + scopes: "[ {{ user.scopes | join(', ') }} ]" +{% endfor %} + +oidc: + client_id: "{{ sbs.client_id }}" + client_secret: "{{ sbs.client_secret }}" + audience: "{{ sbs.oidc_jwt_audience }}" + verify_peer: {{ sbs.oidc_verify_peer }} + authorization_endpoint: "{{ sbs.oidc_authz_endpoint}}" + token_endpoint: "{{ sbs.oidc_token_endpoint }}" + userinfo_endpoint: "{{ sbs.oidc_userinfo_endpoint }}" + jwks_endpoint: "{{ sbs.oidc_jwks_endpoint }}" + #Note that the paths for these uri's is hardcoded and only domain and port differ per environment + redirect_uri: "{{ sbs.oidc_redirect_uri }}" + continue_eduteams_redirect_uri: "{{ sbs.eduteams_continue_endpoint }}" + continue_eb_redirect_uri: "{{ sbs.eb_continue_endpoint }}" + second_factor_authentication_required: {{ sbs.second_factor_authentication_required }} + totp_token_name: "{{ sbs.totp_token_name }}" + # The service_id in the proxy_authz endpoint when logging into SBS. Most likely to equal the oidc.client_id + sram_service_entity_id: "{{ sbs.client_id }}" + scopes: {{ sbs.oidc_scopes }} + +base_scope: "{{ base_domain }}" +entitlement_group_namespace: "{{ sbs.urn_namespace }}" +eppn_scope: " {{ sbs.eppn_scope }}" +scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" +collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" + +{% if environment_name == "prd" %} +environment_disclaimer: "" +{% else %} +environment_disclaimer: "{{ sbs.disclaimer_label }}" +{% endif %} + +# All services in the white list can be requested in the create-restricted-co API +# The default organisation is a fallback for when the administrator has no schac_home_org +restricted_co: + services_white_list: [ "https://cloud" ] + default_organisation: "{{ sbs.restricted_co_default_org }}" + +mail: + host: {{ sbs.mail_host }} + port: {{ sbs.mail_port }} + sender_name: {{ sbs.mail_sender_name }} + sender_email: {{ sbs.mail_sender_email }} + suppress_sending_mails: False + info_email: {{ sbs.support_email }} + beheer_email: {{ sbs.admin_email }} + ticket_email: {{ sbs.ticket_email }} + eduteams_email: {{ sbs.eduteams_email }} + # Do we mail a summary of new Organizations and Services to the beheer_email? 
+ audit_trail_notifications_enabled: {{ sbs.audit_trail_notifications_enabled }} + account_deletion_notifications_enabled: True + send_exceptions: {{ sbs.send_exceptions }} + send_js_exceptions: {{ sbs.send_js_exceptions }} + send_exceptions_recipients: [ "{{ sbs.exceptions_mail }}" ] + environment: "{{ base_domain }}" + +manage: + enabled: {{ sbs.manage_base_enabled }} + # The entity_id of the SRAM RP in Manage for API retrieval, e.g "sbs.test2.sram.surf.nl" + sram_rp_entity_id: "{{ sbs.manage_sram_rp_entity_id }}" + base_url: "{{ sbs.manage_base_url }}" + user: "{{ sbs.manage_user }}" + password: "{{ sbs.manage_password }}" + verify_peer: {{ sbs.manage_verify_peer }} + +aup: + version: 1 + url_aup_en: "https://edu.nl/6wb63" + url_aup_nl: "https://edu.nl/6wb63" + +base_url: {{ sbs.base_url }} +socket_url: {{ sbs.base_url }} +base_server_url: {{ sbs.base_url }} +wiki_link: {{ sbs.wiki_link }} + +admin_users: +{% for admin_user in sbs.admin_users %} + - uid: "{{ admin_user.uid }}" +{% endfor %} + +organisation_categories: + - "HBO" + - "MBO" + - "UMC" + - "University" + - "Research" + - "SURF" + +feature: + seed_allowed: {{ sbs.seed_allowed }} + api_keys_enabled: {{ sbs.api_keys_enabled }} + feedback_enabled: {{ sbs.feedback_enabled }} + impersonation_allowed: {{ sbs.impersonation_allowed }} + sbs_swagger_enabled: {{ sbs.swagger_enabled }} + admin_platform_backdoor_totp: {{ sbs.admin_platform_backdoor_totp }} + past_dates_allowed: {{ sbs.past_dates_allowed }} + mock_scim_enabled: {{ sbs.mock_scim_enabled }} + +metadata: + idp_url: "{{sbs.idp_metadata_url}}" + parse_at_startup: True + # No need for environment specific values + scope_override: + knaw.nl: "Koninklijke Nederlandse Akademie van Wetenschappen (KNAW)" + +platform_admin_notifications: + # Do we daily check for CO join_requests and CO requests and send a summary mail to beheer_email? + enabled: False + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long before we include open join_requests in the summary + outstanding_join_request_days_threshold: 7 + # How long before we include open CO requests in the summary + outstanding_coll_request_days_threshold: 7 + +user_requests_retention: + # Do we daily check for CO join_requests and CO requests and delete approved and denied? 
+ enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long before we delete approved / denied join_requests + outstanding_join_request_days_threshold: 90 + # How long before we delete approved / denied CO requests + outstanding_coll_request_days_threshold: 90 + +# The retention config determines how long users may be inactive, how long the reminder email is valid and when do we resent the magic link +retention: + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # how many days of inactivity before a user is suspended + # 0 allows for any last_login_date in the past to trigger suspension notification + allowed_inactive_period_days: {{ sbs.suspension_inactive_days }} + # how many days before suspension do we send a warning + # -1 will suspend notified users on second suspension cron + reminder_suspend_period_days: {{ sbs.suspension_reminder_days }} + # how many days after suspension do we delete the account + remove_suspended_users_period_days: 90 + # how many days before deletion do we send a reminder + reminder_expiry_period_days: 7 + # whether to send a notification of the result of the retention process to the beheer_email + admin_notification_mail: {{ sbs.suspension_notify_admin }} + +collaboration_expiration: + # Do we daily check for CO's that will be deleted because they have been expired? + enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after expiration do we actually delete expired collaborations + expired_collaborations_days_threshold: 90 + # How many days before actual expiration do we mail the organisation members + expired_warning_mail_days_threshold: 10 + +collaboration_suspension: + # Do we daily check for CO's that will be suspended because of inactivity? + enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # After how many days of inactivity do we suspend collaborations + collaboration_inactivity_days_threshold: 365 + # How many days before actual suspension do we mail the organisation members + inactivity_warning_mail_days_threshold: 10 + # After how many days after suspension do we actually delete the collaboration + collaboration_deletion_days_threshold: 90 + +membership_expiration: + # Do we daily check for memberships that will be deleted because they have been expired? + enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after expiration do we actually delete expired memberships + expired_memberships_days_threshold: 90 + # How many days before actual expiration do we mail the co admin and member + expired_warning_mail_days_threshold: 10 + +invitation_reminders: + # Do we daily check for invitations that need a reminder? + enabled: {{ sbs.invitation_reminders_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How many days before expiration of an invitation do we remind the user? + invitation_reminders_threshold: 5 + +invitation_expirations: + # Do we daily check for invitations that are expired / accepted and are eligible for deletion ? + enabled: {{ sbs.invitation_expirations_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after expiration of an invitation do we delete the invitation? + nbr_days_remove_expired_invitations: 10 + # How long after expiration of an API created invitation do we delete the invitation? + nbr_days_remove_api_expired_invitations: 30 + +orphan_users: + # Do we daily check for users that are orphans soo they can be deleted? 
+ enabled: {{ sbs.delete_orphaned }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after created do we delete orphan users + delete_days_threshold: 14 + +open_requests: + # Do we weekly check for all open requests? + enabled: {{ sbs.open_requests_enabled }} + cron_day_of_week: 1 + +scim_sweep: + # Do we enable scim sweeps? + enabled: {{ sbs.scim_sweep }} + # How often do we check if scim sweeps are needed per service + cron_minutes_expression: "*/15" + +ldap: + url: "{{ sbs.ldap_url }}" + bind_account: "{{ sbs.ldap_bind_account }}" + +# A MFA login in a different flow is valid for X minutes +mfa_sso_time_in_minutes: {{sbs.mfa_sso_minutes}} + +# whether to fall back to TOTP MFA +mfa_fallback_enabled: {{sbs.mfa_fallback_enabled}} + +# Lower case entity ID's and schac_home allowed skipping MFA. +# Note that for a login directly into SRAM only schac_home can be used as the entity_idp of the IdP is unknown +mfa_idp_allowed: {{sbs.mfa_idp_allowed}} + +# Lower case schachome organisations / entity ID's where SURFSecure ID is used for step-up +ssid_identity_providers: {{sbs.ssid_identity_providers}} + +ssid_config_folder: saml + +pam_web_sso: + session_timeout_seconds: 300 + +rate_limit_totp_guesses_per_30_seconds: 10 + +# The uid's of user that will never be suspended or deleted +excluded_user_accounts: +{% for excluded_user in sbs.excluded_users %} + - uid: "{{ excluded_user.uid }}" +{% endfor %} + +engine_block: + api_token: {{ sbs.engine_block_api_token }} diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sbs/templates/disclaimer.css.j2 new file mode 100644 index 000000000..e89bbfce7 --- /dev/null +++ b/roles/sbs/templates/disclaimer.css.j2 @@ -0,0 +1,6 @@ +{% if environment_name!="prd" -%} +body::after { + background: {{ sbs_disclaimer_color }}; + content: "{{ sbs_disclaimer_label }}"; +} +{% endif %} diff --git a/roles/sbs/templates/saml_advanced_settings.json.j2 b/roles/sbs/templates/saml_advanced_settings.json.j2 new file mode 100644 index 000000000..bdde32050 --- /dev/null +++ b/roles/sbs/templates/saml_advanced_settings.json.j2 @@ -0,0 +1,35 @@ +{ + "security": { + "nameIdEncrypted": false, + "authnRequestsSigned": true, + "logoutRequestSigned": false, + "logoutResponseSigned": false, + "signMetadata": false, + "wantMessagesSigned": false, + "wantAssertionsSigned": true, + "wantNameId" : true, + "wantNameIdEncrypted": false, + "wantAttributeStatement": false, + "wantAssertionsEncrypted": false, + "requestedAuthnContext": ["{{sbs_ssid_authncontext}}"], + "requestedAuthnContextComparison": "minimum", + "failOnAuthnContextMismatch": false, + "allowSingleLabelDomains": false, + "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256", + "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256", + "rejectDeprecatedAlgorithm": true + }, + "contactPerson": { + "technical": { + "givenName": "{{ mail.admin_name }}", + "emailAddress": "{{ mail.admin_address }}" + } + }, + "organization": { + "en-US": { + "name": "{{ org.name }}", + "displayname": "{{ org.name }}", + "url": "{{ org.url }}" + } + } +} diff --git a/roles/sbs/templates/saml_settings.json.j2 b/roles/sbs/templates/saml_settings.json.j2 new file mode 100644 index 000000000..bb5788e97 --- /dev/null +++ b/roles/sbs/templates/saml_settings.json.j2 @@ -0,0 +1,22 @@ +{ + "strict": true, + "debug": true, + "sp": { + "entityId": "{{ sbs_surf_secure_id.sp_entity_id }}", + "assertionConsumerService": { + "url": "{{ sbs_surf_secure_id.acs_url }}", + "binding": 
"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" + }, + "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified", + "x509cert": "{{ sbs_surf_secure_id.pub | barepem }}", + "privateKey": "{{ sbs_surf_secure_id.priv | barepem }}" + }, + "idp": { + "entityId": "{{ sbs_ssid_entityid }}", + "singleSignOnService": { + "url": "{{ sbs_ssid_sso_endpoint }}", + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" + }, + "x509cert": "{{ sbs_surf_secure_id.sa_idp_certificate | barepem }}" + } +} diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sbs/templates/sbs-apache.conf.j2 new file mode 100644 index 000000000..13752ee56 --- /dev/null +++ b/roles/sbs/templates/sbs-apache.conf.j2 @@ -0,0 +1,30 @@ +ServerName {{ hostnames.sbs }} +#ErrorLog /proc/self/fd/2 +#CustomLog /proc/self/fd/1 common +DocumentRoot /opt/sbs/client/build + +Header set Content-Security-Policy "default-src 'self'; base-uri 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; frame-src 'none'; form-action 'self' https://*.{{ base_domain }}; frame-ancestors 'none'; block-all-mixed-content;" +Header set Permissions-Policy "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(self), gamepad=(), speaker-selection=()" + +RewriteEngine On +RewriteCond %{REQUEST_URI} !^/(api|pam-weblogin|flasgger_static|swagger|health|config|info|socket.io) +RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f +RewriteRule ^/(.*)$ /index.html [L] + +ProxyRequests off +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ +ProxyPassReverse / http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ +ProxyPass /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ +ProxyPassReverse /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ + + + Header set Cache-Control: "public, max-age=31536000, immutable" + + + Header set Cache-Control: "no-cache, private" + + + + Require all granted + Options -Indexes + diff --git a/roles/sbs/templates/sbs.service.j2 b/roles/sbs/templates/sbs.service.j2 new file mode 100644 index 000000000..2920ddc8d --- /dev/null +++ b/roles/sbs/templates/sbs.service.j2 @@ -0,0 +1,32 @@ +[Unit] +Description=SBS +After=network.target + +[Service] +DynamicUser=true +User=_sram_sbs +Group=_sram_sbs +SupplementaryGroups={{sbs_group}} + +WorkingDirectory={{sbs_git_dir}} +ReadWritePaths={{sbs_log_dir}} +NoNewPrivileges=true +PrivateTmp=true + +Environment="CONFIG=config/config.yml" +Environment="PROFILE=log_to_stdout" +# the python requests module uses the CAs provided by the certifi package by default +# we'll just take the OS-provided CAs, thankyouverymuch +Environment="REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt" + +Type=notify +ExecStart={{sbs_env_dir}}/bin/gunicorn --worker-class eventlet --workers {{sbs_num_workers}} --bind 127.0.0.1:8080 server.__main__:app + +Restart=on-failure +RestartSec=10 + +KillMode=mixed +TimeoutStopSec=5 + +[Install] +WantedBy=multi-user.target diff --git 
a/roles/sbs/vars/main.yml b/roles/sbs/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/sbs/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext From 150b22e60dbdb367e8728fe9c8e146006a97f807 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Wed, 25 Mar 2026 13:59:09 +0100 Subject: [PATCH 59/73] WIP --- roles/sbs/defaults/main.yml | 69 ++++++++----- roles/sbs/handlers/main.yml | 4 +- roles/sbs/tasks/main.yml | 129 +++++++++++++++---------- roles/sbs/templates/alembic.ini.j2 | 2 +- roles/sbs/templates/config.yml.j2 | 6 +- roles/sbs/templates/disclaimer.css.j2 | 6 +- roles/sbs/templates/sbs-apache.conf.j2 | 14 +-- 7 files changed, 139 insertions(+), 91 deletions(-) diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index f21addf5f..7a907df66 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -1,38 +1,49 @@ --- -sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}" +sbs: "{{ sbs_defaults | combine(sbs_overrides) }}" + sbs_defaults: + base_domain: "test2.sram.surf.nl" + environment_name: test + ansible_nolog: true + base_url: "https://{{ sbs_defaults.base_domain }}" + server_image: "ghcr.io/surfscz/sram-sbs-server:main" + client_image: "ghcr.io/surfscz/sram-sbs-client:main" + openidc_timeout: 86400 sram_conf_dir: "{{ current_release_appdir }}/sram" - work_dir: "{{ sram_conf_dir }}/sbs" - git_dir: "{{ sbs.work_dir }}/sbs" - env_dir: "{{ sbs.work_dir }}/sbs-env" - conf_dir: "{{ sbs.work_dir }}/config" - log_dir: "{{ sbs.work_dir }}/log" - cert_dir: "{{ sbs.work_dir }}/cert" - apache_conf: "{{ sbs.work_dir }}/sbs.conf" - nginx_conf: "{{ sbs.work_dir }}/nginx.conf" - + work_dir: "{{ sbs_defaults.sram_conf_dir }}/sbs" + git_dir: "{{ sbs_defaults.work_dir }}/sbs" + env_dir: "{{ sbs_defaults.work_dir }}/sbs-env" + conf_dir: "{{ sbs_defaults.work_dir }}/config" + log_dir: "{{ sbs_defaults.work_dir }}/log" + cert_dir: "{{ sbs_defaults.work_dir }}/cert" + apache_conf: "{{ sbs_defaults.work_dir }}/sbs_defaults.conf" + nginx_conf: "{{ sbs_defaults.work_dir }}/nginx.conf" db_name: "sbs" db_user: "sbsrw" - dbbackup_user: "sbs_backupper" - migration_user: "sbs_migrater" + # dbbackup_user: "sbs_backupper" + migration_user: "sbsmigrate" db_connection: "\ - mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_db_name }}\ + mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_defaults.db_name }}\ ?ssl=true&charset=utf8mb4" - db_connection_sbs: "{{ sbs_db_connection | format(sbs_db_user, sbs_db_password) }}" + db_connection_sbs: "{{ sbs_defaults.db_connection | format(sbs_defaults.db_user, mysql_passwords.sbs) }}" db_connection_migration: "\ - {{ sbs_db_connection | format(sbs_migration_user, sbs_migration_password) }}" + {{ sbs_defaults.db_connection | format(sbs_defaults.migration_user, mysql_passwords.sbsmigrate) }}" + + db_secret: secret + secret_key_suffix: suffix + encryption_key: encryption_key redis_host: redis redis_port: 6379 redis_ssl: false redis_user: default - mail_host: "{{ mail.relay_to }}" - mail_port: "{{ mail.relay_port }}" + mail_host: "host.docker.internal" + mail_port: 25 user: "sbs" group: "sbs" @@ -94,7 +105,7 @@ sbs_defaults: oidc_token_endpoint: "http://localhost/OIDC/token" oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" - oidc_redirect_uri: "https://sbs.scz-vm.net/api/users/resume-session" + oidc_redirect_uri: "https://{{sbs_defaults.base_domain}}/api/users/resume-session" mfa_idp_allowed: false 
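+  # Illustrative note (hypothetical values): because sbs_overrides is merged
+  # into sbs_defaults non-recursively above, an inventory override replaces a
+  # whole top-level key, e.g. in group_vars:
+  #
+  #   sbs_overrides:
+  #     base_domain: "sram.example.org"
+  #     environment_name: "prd"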
eduteams_continue_endpoint: "https://localhost/continue" eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" @@ -105,20 +116,20 @@ sbs_defaults: - openid manage_base_enabled: False - manage_base_url: "https://manage.test2.surfconext.nl" - manage_sram_rp_entity_id: "sbs.test2.sram.surf.nl" + manage_base_url: "https://manage.{{base_domain}}" + manage_sram_rp_entity_id: "sbs.{{sbs_defaults.base_domain}}" manage_verify_peer: False idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " - backup_dir: "{{backup_base}}/sbs" + # backup_dir: "{{backup_base}}/sbs" swagger_enabled: true ssid_identity_providers: [] surf_secure_id: environment: "unknown.example.org" - sp_entity_id: "https://sbs.{{base_domain}}" - acs_url: "https://{{base_domain}}/api/users/acs" + sp_entity_id: "https://sbs.{{sbs_defaults.base_domain}}" + acs_url: "https://{{sbs_defaults.base_domain}}/api/users/acs" sa_gw_environment: "sa-gw.unknown.example.org" sa_idp_certificate: | -----BEGIN CERTIFICATE----- @@ -134,11 +145,11 @@ sbs_defaults: -----END CERTIFICATE----- ssid_authncontext: "\ - http://{{ sbs.surf_secure_id.environment }}/assurance/sfo-level2" + http://{{ sbs_defaults.surf_secure_id.environment }}/assurance/sfo-level2" ssid_entityid: "\ - https://{{ sbs.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" + https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" ssid_sso_endpoint: "\ - https://{{ sbs.surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" + https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" mfa_sso_minutes: 10 mfa_fallback_enabled: true @@ -157,3 +168,9 @@ sbs_defaults: - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request engine_block_api_token: secret + + # wildcard_backend_cert: + # pub: | + # -----BEGIN CERTIFICATE----- + # 12345 + # -----END CERTIFICATE----- diff --git a/roles/sbs/handlers/main.yml b/roles/sbs/handlers/main.yml index 012cba535..2d7710d43 100644 --- a/roles/sbs/handlers/main.yml +++ b/roles/sbs/handlers/main.yml @@ -5,5 +5,5 @@ state: started restart: true loop: - - sbs - - sbs_server + - sbs-client + - sbs-server diff --git a/roles/sbs/tasks/main.yml b/roles/sbs/tasks/main.yml index 8a52adc4e..ecc82bff7 100644 --- a/roles/sbs/tasks/main.yml +++ b/roles/sbs/tasks/main.yml @@ -1,4 +1,9 @@ --- +# - name: "Initialize database" +# throttle: 1 +# import_tasks: "database_init.yml" +# when: "is_dev" + - name: "Create SBS group" group: name: "{{ sbs.group }}" @@ -29,7 +34,7 @@ file: path: "{{item.path}}" state: "directory" - owner: "root" + owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "{{item.mode}}" with_items: @@ -42,7 +47,7 @@ - name: "Fix file permissions" file: path: "{{sbs.log_dir}}/{{item}}" - owner: "root" + owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "0664" state: "touch" @@ -50,39 +55,43 @@ access_time: "preserve" with_items: - "sbs.log" - - "sbs_debug.log" - -- name: "Copy wildcard backend cert" - copy: - content: "{{wildcard_backend_cert.pub}}" - dest: "{{sbs.cert_dir}}/backend.crt" - owner: "root" - group: "root" - mode: "0644" - notify: "Restart sbs containers" + - "sbs.debug.log" -- name: "Copy https cert" - copy: - content: "{{https_cert.cert}}" - dest: "{{sbs.cert_dir}}/frontend.crt" - owner: "root" - group: "root" - mode: "0644" - notify: "Restart sbs containers" +# - name: "Copy wildcard backend cert" +# copy: +# content: "{{wildcard_backend_cert.pub}}" +# dest: 
"{{sbs.cert_dir}}/backend.crt" +# owner: "root" +# group: "root" +# mode: "0644" +# notify: "Restart sbs containers" -- name: "Install database certificate" - copy: - dest: "{{sbs.db_cert_path}}" - content: "{{ sbs.db_tls_cert }}" - owner: "root" - group: "root" - mode: "0644" +# - name: "Copy https cert" +# copy: +# content: "{{https_cert.cert}}" +# dest: "{{sbs.cert_dir}}/frontend.crt" +# owner: "root" +# group: "root" +# mode: "0644" +# notify: "Restart sbs containers" + +# - name: "Install database certificate" +# copy: +# dest: "{{sbs.db_cert_path}}" +# content: "{{ sbs.db_tls_cert }}" +# owner: "root" +# group: "root" +# mode: "0644" +- name: "Touch file in {{ sbs.cert_dir }}" + ansible.builtin.file: + path: "{{sbs.cert_dir}}/dummy" + state: file - name: "Create SBS config files" template: src: "{{item.name}}.j2" dest: "{{ sbs.conf_dir }}/{{item.name}}" - owner: "root" + owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "{{item.mode}}" with_items: @@ -90,13 +99,35 @@ - { name: "alembic.ini", mode: "0644" } - { name: "disclaimer.css", mode: "0644" } - { name: "sbs-apache.conf", mode: "0644" } - no_log: "{{sram_ansible_nolog}}" + no_log: "{{sbs.ansible_nolog}}" notify: "Restart sbs containers" +- name: "Pull sbs image" + community.docker.docker_image_pull: + name: "{{ item }}" + with_items: + - "{{ sbs.client_image }}" + - "{{ sbs.server_image }}" + register: "sbs_image" + +# We need to remove sram-static so it gets repopulated +# with new SBS image static content +- name: "Clean up old containers" + block: + - name: "Stop and remove sbs and sbs-server containers" + community.docker.docker_container: + name: "{{ item }}" + state: "absent" + with_items: + - "sbs-client" + - "sbs-server" + + when: "sbs_image is changed" + - name: "Run SBS migrations" throttle: 1 community.docker.docker_container: - name: "sbs_migration" + name: "sbs-migration" image: "{{ sbs.server_image }}" pull: "never" state: "started" @@ -105,52 +136,49 @@ env: RUNAS_UID: "{{ sbs_user_uid | string }}" RUNAS_GID: "{{ sbs_group_gid | string }}" - CONFIG: "/opt/sbs/server/config/config.yml" MIGRATIONS_ONLY: "1" # don't actually run the server command: "/bin/true" volumes: - "{{ sbs.conf_dir }}:/sbs-config" - - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" - "{{ sbs.log_dir }}:/opt/sbs/log" networks: - - name: "{{internal_network}}" + - name: "loadbalancer" register: "result" failed_when: "'container' not in result or result.container.State.ExitCode != 0" changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" notify: "Restart sbs containers" -# Remove the migration container; we can't do that with auto_remove, because if we use that, ansible +# Remove the migration container; we can do that with auto_remove, because if we use that, ansible # will not save the output in result - name: "Remove migration container" community.docker.docker_container: - name: "sbs_migration" + name: "sbs-migration" state: "absent" # TODO: fix this by only running this if "sbs_image is changed" changed_when: false -- name: "Start sbs container" +- name: "Start sbs client container" community.docker.docker_container: - name: "sbs" - image: "{{ sbs.image }}" + name: "sbs-client" + image: "{{ sbs.client_image }}" pull: "never" restart_policy: "always" state: "started" - env: - RUN_MIGRATIONS: "0" volumes: - "{{ sbs.conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" + - "{{ sbs.conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro" 
networks: - - name: loadbalancer + - name: "loadbalancer" labels: + traefik.http.routers.sbsclient.rule: "Host(`{{ sbs.base_domain }}`)" + traefik.http.routers.sbsclient.tls: "true" traefik.enable: "true" - traefik.docker.network: "{{traefik_network}}" - traefik.http.routers.sbs.rule: "Host(`{{ sbs.base_domain }}`)" - traefik.http.routers.sbs.tls: "true" - name: "Start SBS server container" community.docker.docker_container: - name: "sbs_server" + name: "sbs-server" image: "{{ sbs.server_image }}" restart_policy: "always" state: "started" @@ -160,12 +188,15 @@ CONFIG: "/opt/sbs/server/config/config.yml" REQUESTS_CA_BUNDLE: "/etc/ssl/certs/ca-certificates.crt" RUN_MIGRATIONS: "0" - pull: "always" - command: "/usr/local/bin/gunicorn --preload --worker-class eventlet --workers 8 --bind 0.0.0.0:8080 server.__main__:app" + pull: "never" volumes: - "{{ sbs.conf_dir }}:/sbs-config" - - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" - "{{ sbs.log_dir }}:/opt/sbs/log" - "/tmp/ci-runner:/tmp/ci-runner" networks: - - name: loadbalander + - name: "loadbalancer" + # TODO: fix this: this is only for dev + etc_hosts: + oidc-op.scz-vm.net: "172.20.1.24" + host.docker.internal: host-gateway diff --git a/roles/sbs/templates/alembic.ini.j2 b/roles/sbs/templates/alembic.ini.j2 index 7849e4f89..9ccd51979 100644 --- a/roles/sbs/templates/alembic.ini.j2 +++ b/roles/sbs/templates/alembic.ini.j2 @@ -35,7 +35,7 @@ script_location = migrations # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = {{ sbs_db_connection_migration }} +sqlalchemy.url = {{ sbs.db_connection_migration }} # Logging configuration [loggers] diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index 11f029af7..4c38c4100 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -3,8 +3,8 @@ database: uri: {{ sbs.db_connection_sbs }} redis: -{% if environment_name == 'tst2' %} - uri: "redis://{{redis_vhost}}/" +{% if sbs.environment_name == 'test2' %} + uri: "redis://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{sbs.redis_host}}/" {% else %} uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" {% endif %} @@ -54,7 +54,7 @@ eppn_scope: " {{ sbs.eppn_scope }}" scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" -{% if environment_name == "prd" %} +{% if sbs.environment_name == "prd" %} environment_disclaimer: "" {% else %} environment_disclaimer: "{{ sbs.disclaimer_label }}" diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sbs/templates/disclaimer.css.j2 index e89bbfce7..04c62aa86 100644 --- a/roles/sbs/templates/disclaimer.css.j2 +++ b/roles/sbs/templates/disclaimer.css.j2 @@ -1,6 +1,6 @@ -{% if environment_name!="prd" -%} +{% if sbs.environment_name!="prd" -%} body::after { - background: {{ sbs_disclaimer_color }}; - content: "{{ sbs_disclaimer_label }}"; + background: {{ sbs.disclaimer_color }}; + content: "{{ sbs.disclaimer_label }}"; } {% endif %} diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sbs/templates/sbs-apache.conf.j2 index 13752ee56..99fda3d13 100644 --- a/roles/sbs/templates/sbs-apache.conf.j2 +++ b/roles/sbs/templates/sbs-apache.conf.j2 @@ -1,7 +1,7 @@ -ServerName {{ hostnames.sbs }} +ServerName {{ sbs.base_domain }} #ErrorLog /proc/self/fd/2 #CustomLog /proc/self/fd/1 common -DocumentRoot /opt/sbs/client/build 
+DocumentRoot /opt/sbs/client/dist Header set Content-Security-Policy "default-src 'self'; base-uri 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; frame-src 'none'; form-action 'self' https://*.{{ base_domain }}; frame-ancestors 'none'; block-all-mixed-content;" Header set Permissions-Policy "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(self), gamepad=(), speaker-selection=()" @@ -12,10 +12,10 @@ RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f RewriteRule ^/(.*)$ /index.html [L] ProxyRequests off -ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ -ProxyPassReverse / http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ -ProxyPass /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ -ProxyPassReverse /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sbs-server:{{sbs.backend_port}}/ +ProxyPassReverse / http://sbs-server:{{sbs.backend_port}}/ +ProxyPass /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ +ProxyPassReverse /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ Header set Cache-Control: "public, max-age=31536000, immutable" @@ -24,7 +24,7 @@ ProxyPassReverse /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port Header set Cache-Control: "no-cache, private" - + Require all granted Options -Indexes From 28686c48e25b760ddbee2781224fc395caaef410 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 26 Mar 2026 10:15:14 +0100 Subject: [PATCH 60/73] WIP --- roles/sbs/defaults/main.yml | 9 --------- roles/sbs/tasks/main.yml | 27 ++------------------------ roles/sbs/templates/config.yml.j2 | 6 +++--- roles/sbs/templates/sbs-apache.conf.j2 | 8 ++++---- 4 files changed, 9 insertions(+), 41 deletions(-) diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index 7a907df66..c42b80948 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -72,9 +72,6 @@ sbs_defaults: wiki_link: "https://www.example.org/wiki" - backend_port: 8080 - num_workers: 2 - cron_hour_of_day: 4 seed_allowed: True api_keys_enabled: True @@ -168,9 +165,3 @@ sbs_defaults: - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request engine_block_api_token: secret - - # wildcard_backend_cert: - # pub: | - # -----BEGIN CERTIFICATE----- - # 12345 - # -----END CERTIFICATE----- diff --git a/roles/sbs/tasks/main.yml b/roles/sbs/tasks/main.yml index ecc82bff7..560191f8a 100644 --- a/roles/sbs/tasks/main.yml +++ b/roles/sbs/tasks/main.yml @@ -57,31 +57,8 @@ - "sbs.log" - "sbs.debug.log" -# - name: "Copy wildcard backend cert" -# copy: -# content: "{{wildcard_backend_cert.pub}}" -# dest: "{{sbs.cert_dir}}/backend.crt" -# owner: "root" -# group: "root" -# mode: "0644" -# notify: "Restart sbs containers" - -# - name: "Copy https cert" -# copy: -# content: "{{https_cert.cert}}" -# dest: "{{sbs.cert_dir}}/frontend.crt" 
-# owner: "root" -# group: "root" -# mode: "0644" -# notify: "Restart sbs containers" - -# - name: "Install database certificate" -# copy: -# dest: "{{sbs.db_cert_path}}" -# content: "{{ sbs.db_tls_cert }}" -# owner: "root" -# group: "root" -# mode: "0644" +# Create dummy file in certs dir to pacify container pre-init script +# https://github.com/SURFscz/SBS/pull/2312 - name: "Touch file in {{ sbs.cert_dir }}" ansible.builtin.file: path: "{{sbs.cert_dir}}/dummy" diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index 4c38c4100..eba293de9 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -30,8 +30,8 @@ api_users: {% endfor %} oidc: - client_id: "{{ sbs.client_id }}" - client_secret: "{{ sbs.client_secret }}" + client_id: "{{ sbs.oidc_client_id }}" + client_secret: "{{ sbs.oidc_client_secret }}" audience: "{{ sbs.oidc_jwt_audience }}" verify_peer: {{ sbs.oidc_verify_peer }} authorization_endpoint: "{{ sbs.oidc_authz_endpoint}}" @@ -45,7 +45,7 @@ oidc: second_factor_authentication_required: {{ sbs.second_factor_authentication_required }} totp_token_name: "{{ sbs.totp_token_name }}" # The service_id in the proxy_authz endpoint when logging into SBS. Most likely to equal the oidc.client_id - sram_service_entity_id: "{{ sbs.client_id }}" + sram_service_entity_id: "{{ sbs.oidc_client_id }}" scopes: {{ sbs.oidc_scopes }} base_scope: "{{ base_domain }}" diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sbs/templates/sbs-apache.conf.j2 index 99fda3d13..0743c2ddb 100644 --- a/roles/sbs/templates/sbs-apache.conf.j2 +++ b/roles/sbs/templates/sbs-apache.conf.j2 @@ -12,10 +12,10 @@ RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f RewriteRule ^/(.*)$ /index.html [L] ProxyRequests off -ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sbs-server:{{sbs.backend_port}}/ -ProxyPassReverse / http://sbs-server:{{sbs.backend_port}}/ -ProxyPass /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ -ProxyPassReverse /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sbs-server:8080/ +ProxyPassReverse / http://sbs-server:8080/ +ProxyPass /socket.io/ ws://sbs-server:8080/socket.io/ +ProxyPassReverse /socket.io/ ws://sbs-server:8080/socket.io/ Header set Cache-Control: "public, max-age=31536000, immutable" From 114a7a97a728b61dc99fe91fd701aa39ccbf286d Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 26 Mar 2026 16:58:29 +0100 Subject: [PATCH 61/73] Add ldap role --- .../haproxy/templates/haproxy_backend.cfg.j2 | 16 + .../haproxy/templates/haproxy_frontend.cfg.j2 | 46 ++- roles/ldap/defaults/main.yml | 41 ++ roles/ldap/files/eduMember.ldif | 27 ++ roles/ldap/files/eduPerson.ldif | 83 ++++ roles/ldap/files/groupOfMembers.ldif | 19 + roles/ldap/files/ldap-add | 51 +++ roles/ldap/files/ldapPublicKey.ldif | 21 + roles/ldap/files/logrotate_slapd | 13 + roles/ldap/files/rsyslog_slapd.conf | 2 + roles/ldap/files/sczGroup.ldif | 23 ++ roles/ldap/files/sramPerson.ldif | 23 ++ roles/ldap/files/voPerson.ldif | 44 +++ roles/ldap/handlers/main.yml | 24 ++ roles/ldap/tasks/admins.yml | 82 ++++ roles/ldap/tasks/main.yml | 369 ++++++++++++++++++ roles/ldap/templates/ldap-backup.sh.j2 | 19 + roles/ldap/templates/ldap.conf.j2 | 16 + roles/ldap/templates/slapd.service.j2 | 20 + roles/ldap/vars/main.yml | 1 + roles/sbs/defaults/main.yml | 1 - roles/sbs/templates/config.yml.j2 | 4 +- 
 roles/sbs/templates/disclaimer.css.j2 | 2 +-
 23 files changed, 928 insertions(+), 19 deletions(-)
 create mode 100644 roles/ldap/defaults/main.yml
 create mode 100644 roles/ldap/files/eduMember.ldif
 create mode 100644 roles/ldap/files/eduPerson.ldif
 create mode 100644 roles/ldap/files/groupOfMembers.ldif
 create mode 100644 roles/ldap/files/ldap-add
 create mode 100644 roles/ldap/files/ldapPublicKey.ldif
 create mode 100644 roles/ldap/files/logrotate_slapd
 create mode 100644 roles/ldap/files/rsyslog_slapd.conf
 create mode 100644 roles/ldap/files/sczGroup.ldif
 create mode 100644 roles/ldap/files/sramPerson.ldif
 create mode 100644 roles/ldap/files/voPerson.ldif
 create mode 100644 roles/ldap/handlers/main.yml
 create mode 100644 roles/ldap/tasks/admins.yml
 create mode 100644 roles/ldap/tasks/main.yml
 create mode 100644 roles/ldap/templates/ldap-backup.sh.j2
 create mode 100644 roles/ldap/templates/ldap.conf.j2
 create mode 100644 roles/ldap/templates/slapd.service.j2
 create mode 100644 roles/ldap/vars/main.yml

diff --git a/roles/haproxy/templates/haproxy_backend.cfg.j2 b/roles/haproxy/templates/haproxy_backend.cfg.j2
index d2387c033..ea52ac5b0 100644
--- a/roles/haproxy/templates/haproxy_backend.cfg.j2
+++ b/roles/haproxy/templates/haproxy_backend.cfg.j2
@@ -67,3 +67,18 @@
 {% endfor %}
 {% endif %}
 {% endfor %}
+
+{% if haproxy_ldap_servers is defined %}
+#---------------------------------------------------------------------
+# ldap backend
+#---------------------------------------------------------------------
+backend ldap_servers
+    mode tcp
+    option tcpka
+
+    option ldap-check
+
+    {% for server in haproxy_ldap_servers -%}
+    server {{server.label}} {{server.ip}}:{{server.port}} ssl verify none check weight 10 {% if loop.index==1 %}on-marked-up shutdown-backup-sessions{% else %}backup{% endif %}
+    {% endfor %}
+{% endif %}
diff --git a/roles/haproxy/templates/haproxy_frontend.cfg.j2 b/roles/haproxy/templates/haproxy_frontend.cfg.j2
index 6082e9c03..4909a0074 100644
--- a/roles/haproxy/templates/haproxy_frontend.cfg.j2
+++ b/roles/haproxy/templates/haproxy_frontend.cfg.j2
@@ -12,8 +12,8 @@ frontend stats
 # -------------------------------------------------------------------
 frontend internet_ip
-    bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
-    bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
+    bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
+    bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
     bind {{ haproxy_sni_ip.ipv4 }}:80 transparent
     bind {{ haproxy_sni_ip.ipv6 }}:80 transparent
     # Logging is done in the local_ip backend, otherwise all requests are logged twice
@@ -30,7 +30,7 @@ frontend internet_ip
     http-request redirect scheme https code 301 if !{ ssl_fc }
     # Log the user agent in the httplogs
     capture request header User-agent len 256
-    # Put the useragent header in a variable, shared between request and response.
+    # Put the useragent header in a variable, shared between request and response.
http-request set-var(txn.useragent) req.fhdr(User-Agent) # The ACL below makes sure only supported http methods are allowed acl valid_method method {{ haproxy_supported_http_methods }} @@ -51,7 +51,7 @@ frontend internet_ip http-response replace-header Set-Cookie (?i)(^(?!.*samesite).*$) \1;\ SameSite=None if !no_same_site_uas # Remove an already present SameSite cookie attribute for unsupported browsers http-response replace-value Set-Cookie (^.*)(?i);\ *SameSite=(Lax|Strict|None)(.*$) \1\3 if no_same_site_uas - # Log whether the no_same_site_uas ACL has been hit + # Log whether the no_same_site_uas ACL has been hit http-request set-header samesitesupport samesite_notsupported if no_same_site_uas http-request set-header samesitesupport samesite_supported if !no_same_site_uas # We need a dummy backend in order to be able to rewrite the loadbalancer cookies @@ -66,7 +66,7 @@ frontend local_ip acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsunrestricted.acl acl staging req.cook(staging) -m str true acl staging src -f /etc/haproxy/acls/stagingips.acl - acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map + acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)] option httplog @@ -82,7 +82,7 @@ frontend local_ip http-request capture sc_http_req_rate(0) len 4 # Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }} - # Measure and log the request rate per path and ip + # Measure and log the request rate per path and ip http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path http-request capture sc_http_req_rate(1) len 4 # Some paths allow for a higher ratelimit. These are in a seperate mapfile @@ -96,7 +96,7 @@ frontend local_ip http-request deny if ! 
valid_vhost # Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m + # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist # Create some http redirects {% if haproxy_securitytxt_target_url is defined %} @@ -111,8 +111,8 @@ frontend local_ip ## ------------------------------------------------------------------- frontend internet_restricted_ip - bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent - bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent bind {{ haproxy_sni_ip_restricted.ipv4 }}:80 transparent bind {{ haproxy_sni_ip_restricted.ipv6 }}:80 transparent # Logging is done in the local_ip_restriced backend, otherwise all requests are logged twice @@ -128,8 +128,8 @@ frontend internet_restricted_ip # We redirect all port 80 to port 443 http-request redirect scheme https code 301 if !{ ssl_fc } # Log the user agent in the httplogs - capture request header User-agent len 256 - # Put the useragent header in a variable, shared between request and response. + capture request header User-agent len 256 + # Put the useragent header in a variable, shared between request and response. 
http-request set-var(txn.useragent) req.fhdr(User-Agent) # The ACL below makes sure only supported http methods are allowed acl valid_method method {{ haproxy_supported_http_methods }} @@ -155,12 +155,12 @@ frontend internet_restricted_ip # frontend restricted ip addresses localhost # traffic coming back from the dummy backend ends up here # ------------------------------------------------------------------- -frontend localhost_restricted +frontend localhost_restricted bind 127.0.0.1:82 accept-proxy acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsrestricted.acl acl staging req.cook(staging) -m str true acl staging src -f /etc/haproxy/acls/stagingips.acl - acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map + acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)] option httplog @@ -177,7 +177,7 @@ frontend localhost_restricted # Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }} http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Measure and log the request rate per path and ip + # Measure and log the request rate per path and ip http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path http-request capture sc_http_req_rate(1) len 4 # Some paths allow for a higher ratelimit. These are in a seperate mapfile @@ -191,7 +191,7 @@ frontend localhost_restricted http-request deny if ! valid_vhost # Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m + # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist # Create some http redirects {% if haproxy_securitytxt_target_url is defined %} @@ -201,3 +201,19 @@ frontend localhost_restricted http-request redirect location %[base,map_reg(/etc/haproxy/maps/redirects.map)] if { base,map_reg(/etc/haproxy/maps/redirects.map) -m found } {% endif %} + +{% if haproxy_ldap_servers is defined %} +#-------------------------------------------------------------------- +# frontend public ips ldap +# ------------------------------------------------------------------- +listen ldap + mode tcp + no option dontlognull + option tcplog + option logasap + timeout client 900s + timeout server 901s + bind {{ haproxy_sni_ip.ipv4 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent + bind {{ haproxy_sni_ip.ipv6 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent + use_backend ldap_servers +{% endif %} diff --git a/roles/ldap/defaults/main.yml b/roles/ldap/defaults/main.yml new file mode 100644 index 000000000..e60ab603c --- /dev/null +++ b/roles/ldap/defaults/main.yml @@ -0,0 +1,41 @@ +--- +ldap: "{{ ldap_defaults | combine(ldap_overrides) }}" + +ldap_defaults: + image: "ghcr.io/surfscz/sram-ldap:main" + conf_dir: "{{ current_release_appdir }}/ldap" + ldif_dir: 
"{{ ldap_defaults.conf_dir }}/schema" + certs_dir: "{{ ldap_defaults.conf_dir }}/certs" + backup_dir: "{{ ldap_defaults.conf_dir }}/ldap" + data_dir: "{{ ldap_defaults.conf_dir}}/data" + uri: "ldap://localhost/" + + user: "openldap" + group: "openldap" + + # admin_group: "ldap_admin" + admins: + - name: Admin + uid: admin + pw_hash: + sshkey: "" + + loglevel: "stats stats2 filter" + + services_password: secret + monitor_password: secret + ldap_monitor_password: secret + + uri: "ldap://localhost/" + rid_prefix: "ldap://" + + base_domain: "{{ base_domain }}" + base_dn: >- + {{ ((ldap_defaults.base_domain.split('.')|length)*['dc=']) | + zip(ldap_defaults.base_domain.split('.')) | list | map('join', '') | list | join(',') }} + services_dn: + basedn: "dc=services,{{ ldap_defaults.base_dn }}" + o: "Services" + binddn: "cn=admin,{{ ldap_defaults.base_dn }}" + + hosts: {} diff --git a/roles/ldap/files/eduMember.ldif b/roles/ldap/files/eduMember.ldif new file mode 100644 index 000000000..42894d596 --- /dev/null +++ b/roles/ldap/files/eduMember.ldif @@ -0,0 +1,27 @@ +dn: cn=eduMember,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: eduMember +# Internet X.500 Schema for Ldappc +# Includes the eduMember ObjectClass schema +# +# +# An auxiliary object class, "eduMember," is a convenient container +# for an extensible set of attributes concerning group memberships. +# At this time, the only attributes specified as belonging to the +# object class are "isMemberOf" and "hasMember." +# +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.1 + NAME 'isMemberOf' + DESC 'identifiers for groups to which containing entity belongs' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.2 + NAME 'hasMember' + DESC 'identifiers for entities that are members of the group' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcObjectClasses: ( 1.3.6.1.4.1.5923.1.5.2.1 + NAME 'eduMember' + AUXILIARY + MAY ( isMemberOf $ hasMember ) + ) diff --git a/roles/ldap/files/eduPerson.ldif b/roles/ldap/files/eduPerson.ldif new file mode 100644 index 000000000..e4f2c96a0 --- /dev/null +++ b/roles/ldap/files/eduPerson.ldif @@ -0,0 +1,83 @@ +dn: cn=eduperson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: eduperson +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.1 + NAME 'eduPersonAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.7 + NAME 'eduPersonEntitlement' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.2 + NAME 'eduPersonNickName' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.3 + NAME 'eduPersonOrgDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.4 + NAME 'eduPersonOrgUnitDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.5 + NAME 'eduPersonPrimaryAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.8 + NAME 'eduPersonPrimaryOrgUnitDN' 
+ DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.6 + NAME 'eduPersonPrincipalName' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.12 + NAME 'eduPersonPrincipalNamePrior' + DESC 'eduPersonPrincipalNamePrior per Internet2' + EQUALITY caseIgnoreMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.9 + NAME 'eduPersonScopedAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.10 + NAME 'eduPersonTargetedID' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.11 + NAME 'eduPersonAssurance' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.13 + NAME 'eduPersonUniqueId' + DESC 'eduPersonUniqueId per Internet2' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.16 + NAME 'eduPersonOrcid' + DESC 'ORCID researcher identifiers belonging to the principal' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcObjectClasses: ( 1.3.6.1.4.1.5923.1.1.2 + NAME 'eduPerson' + AUXILIARY + MAY ( + eduPersonAffiliation $ eduPersonNickname $ eduPersonOrgDN $ + eduPersonOrgUnitDN $ eduPersonPrimaryAffiliation $ + eduPersonPrincipalName $ eduPersonEntitlement $ eduPersonPrimaryOrgUnitDN $ + eduPersonScopedAffiliation $ eduPersonTargetedID $ eduPersonAssurance $ + eduPersonPrincipalNamePrior $ eduPersonUniqueId $ eduPersonOrcid ) + ) diff --git a/roles/ldap/files/groupOfMembers.ldif b/roles/ldap/files/groupOfMembers.ldif new file mode 100644 index 000000000..aa10094d3 --- /dev/null +++ b/roles/ldap/files/groupOfMembers.ldif @@ -0,0 +1,19 @@ +# Internet X.500 Schema for Ldappc +# Includes the groupOfMembers ObjectClass schema +# +# Taken from RFC2307bis draft 2 +# https://tools.ietf.org/html/draft-howard-rfc2307bis-02 +# +# An structural object class, "groupOfMembers" is a convenient container +# for an extensible set of attributes concerning group memberships. +# +dn: cn=groupOfMembers,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: groupOfMembers +olcObjectClasses: ( 1.3.6.1.1.1.2.18 SUP top STRUCTURAL + NAME 'groupOfMembers' + DESC 'A group with members (DNs)' + MUST cn + MAY ( businessCategory $ seeAlso $ owner $ ou $ o $ + description $ member ) + ) diff --git a/roles/ldap/files/ldap-add b/roles/ldap/files/ldap-add new file mode 100644 index 000000000..3d0c5e487 --- /dev/null +++ b/roles/ldap/files/ldap-add @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +# Copyright (C) 2015-2019 Maciej Delmanowski +# Copyright (C) 2015-2019 DebOps +# SPDX-License-Identifier: GPL-3.0-only + +# Check if specified LDAP schema file is loaded in the local slapd cn=config +# database. If not, try loading it in the server. + + +set -o nounset -o pipefail -o errexit + +schema_file="${1}" + +if [ -z "${schema_file}" ] ; then + printf "Error: You need to specify schema file to load\\n" && exit 1 +fi + +if [ ! -e "${schema_file}" ] ; then + printf "Error: %s does not exist\\n" "${schema_file}" && exit 1 +fi + +if [ ! 
-r "${schema_file}" ] ; then + printf "Error: %s is unreadable\\n" "${schema_file}" && exit 1 +fi + +# The schema file is already converted, we can deal with them directly +if [[ "${schema_file}" == *.ldif ]] ; then + + # Get the DN of the schema + schema_dn="$(grep -E '^^dn:\s' "${schema_file}")" + + # Get list of already installed schemas from local LDAP server + schema_list() { + ldapsearch -Y EXTERNAL -H ldapi:/// -LLLQ -b 'cn=schema,cn=config' dn \ + | sed -e '/^$/d' -e 's/{[0-9]\+}//' + } + + if schema_list | grep -q "${schema_dn}" ; then + + # Schema is already installed, do nothing + exit 80 + + else + + # Try installing the schema in the database + ldapadd -Y EXTERNAL -H ldapi:/// -f "${schema_file}" + + fi + +fi diff --git a/roles/ldap/files/ldapPublicKey.ldif b/roles/ldap/files/ldapPublicKey.ldif new file mode 100644 index 000000000..8968b6e96 --- /dev/null +++ b/roles/ldap/files/ldapPublicKey.ldif @@ -0,0 +1,21 @@ +dn: cn=openssh-lpk-openldap,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: openssh-lpk-openldap +# +# LDAP Public Key Patch schema for use with openssh-ldappubkey +# useful with PKA-LDAP also +# +# Author: Eric AUGE +# +# Based on the proposal of : Mark Ruijter +# +# octetString SYNTAX +olcAttributeTypes: ( 1.3.6.1.4.1.24552.500.1.1.1.13 NAME 'sshPublicKey' + DESC 'MANDATORY: OpenSSH Public key' + EQUALITY octetStringMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 ) +# printableString SYNTAX yes|no +olcObjectClasses: ( 1.3.6.1.4.1.24552.500.1.1.2.0 NAME 'ldapPublicKey' SUP top AUXILIARY + DESC 'MANDATORY: OpenSSH LPK olcObjectClasses:' + MUST ( sshPublicKey $ uid ) + ) diff --git a/roles/ldap/files/logrotate_slapd b/roles/ldap/files/logrotate_slapd new file mode 100644 index 000000000..f225a935f --- /dev/null +++ b/roles/ldap/files/logrotate_slapd @@ -0,0 +1,13 @@ +/var/log/slapd.log +{ + rotate 7 + daily + missingok + notifempty + delaycompress + compress + postrotate + invoke-rc.d rsyslog rotate > /dev/null + endscript +} + diff --git a/roles/ldap/files/rsyslog_slapd.conf b/roles/ldap/files/rsyslog_slapd.conf new file mode 100644 index 000000000..a3435617f --- /dev/null +++ b/roles/ldap/files/rsyslog_slapd.conf @@ -0,0 +1,2 @@ +if $programname == 'slapd' then /var/log/slapd.log +if $programname == 'slapd' then ~ diff --git a/roles/ldap/files/sczGroup.ldif b/roles/ldap/files/sczGroup.ldif new file mode 100644 index 000000000..d1b5cb332 --- /dev/null +++ b/roles/ldap/files/sczGroup.ldif @@ -0,0 +1,23 @@ +# Internet X.500 Schema for Ldappc +# Includes the sczGroup ObjectClass schema +# +# An auxiliary object class, "sczGroup," is a convenient container +# for an extensible set of attributes concerning group memberships. +# At this time, the only attribute specified as belonging to the +# object class is "sczMember." +# +# It is specifically configured to support the memberOf overlay. 
+# +dn: cn=sczGroup,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: sczGroup +olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.40.50.1.1 + NAME 'sczMember' + DESC 'DN identifiers for entities that are members of the group' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 ) +olcObjectClasses: ( 1.3.6.1.4.1.1076.20.40.50.1 + NAME 'sczGroup' + AUXILIARY + MAY ( sczMember ) + ) diff --git a/roles/ldap/files/sramPerson.ldif b/roles/ldap/files/sramPerson.ldif new file mode 100644 index 000000000..e194381d1 --- /dev/null +++ b/roles/ldap/files/sramPerson.ldif @@ -0,0 +1,23 @@ +# Internet X.500 Schema for Ldappc +# Includes the sramPerson ObjectClass schema +# +# An auxiliary object class, "sramPerson," is a convenient container +# for an extensible set of attributes concerning sram persons. +# At this time, the only attribute specified as belonging to the +# object class is "sramInactiveDays". +# +dn: cn=sramPerson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: sramPerson +olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.100.20.2.1 NAME 'sramInactiveDays' + DESC 'Number of days this entity was inactive' + EQUALITY IntegerMatch + ORDERING IntegerOrderingMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 + ) +olcObjectClasses: ( 1.3.6.1.4.1.1076.20.100.20.1.1 NAME 'sramPerson' + AUXILIARY + MAY ( + sramInactiveDays + ) + ) diff --git a/roles/ldap/files/voPerson.ldif b/roles/ldap/files/voPerson.ldif new file mode 100644 index 000000000..bdce11ed8 --- /dev/null +++ b/roles/ldap/files/voPerson.ldif @@ -0,0 +1,44 @@ +dn: cn=voperson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: voperson +olcAttributeTypes: {0}( 1.3.6.1.4.1.34998.3.3.1.1 NAME 'voPersonApplicationUID + ' DESC 'voPerson Application-Specific User Identifier' EQUALITY caseIgnoreMat + ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {1}( 1.3.6.1.4.1.34998.3.3.1.2 NAME 'voPersonAuthorName' DE + SC 'voPerson Author Name' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.1 + 15.121.1.15' ) +olcAttributeTypes: {2}( 1.3.6.1.4.1.34998.3.3.1.3 NAME 'voPersonCertificateDN' + DESC 'voPerson Certificate Distinguished Name' EQUALITY distinguishedNameMat + ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' ) +olcAttributeTypes: {3}( 1.3.6.1.4.1.34998.3.3.1.4 NAME 'voPersonCertificateIss + uerDN' DESC 'voPerson Certificate Issuer DN' EQUALITY distinguishedNameMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' ) +olcAttributeTypes: {4}( 1.3.6.1.4.1.34998.3.3.1.5 NAME 'voPersonExternalID' DE + SC 'voPerson Scoped External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3 + .6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {5}( 1.3.6.1.4.1.34998.3.3.1.6 NAME 'voPersonID' DESC 'voPe + rson Unique Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115 + .121.1.15' ) +olcAttributeTypes: {6}( 1.3.6.1.4.1.34998.3.3.1.7 NAME 'voPersonPolicyAgreemen + t' DESC 'voPerson Policy Agreement Indicator' EQUALITY caseIgnoreMatch SYNTAX + '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {7}( 1.3.6.1.4.1.34998.3.3.1.8 NAME 'voPersonSoRID' DESC 'v + oPerson External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.146 + 6.115.121.1.15' ) +olcAttributeTypes: {8}( 1.3.6.1.4.1.34998.3.3.1.9 NAME 'voPersonStatus' DESC ' + voPerson Status' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1. 
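+# Illustrative example (hypothetical DNs) of a group entry that combines
+# groupOfMembers with this auxiliary class:
+#
+#   dn: cn=researchers,dc=services,dc=example,dc=org
+#   objectClass: groupOfMembers
+#   objectClass: sczGroup
+#   cn: researchers
+#   sczMember: uid=jdoe,dc=services,dc=example,dc=org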
+ 15' ) +olcAttributeTypes: {9}( 1.3.6.1.4.1.34998.3.3.1.10 NAME 'voPersonAffiliation' + DESC 'voPerson Affiliation Within Local Scope' EQUALITY caseIgnoreMatch SYNTA + X '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {10}( 1.3.6.1.4.1.34998.3.3.1.11 NAME 'voPersonExternalAffi + liation' DESC 'voPerson Scoped External Affiliation' EQUALITY caseIgnoreMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {11}( 1.3.6.1.4.1.34998.3.3.1.12 NAME 'voPersonScopedAffili + ation' DESC 'voPerson Affiliation With Explicit Local Scope' EQUALITY caseIgn + oreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcObjectClasses: {0}( 1.3.6.1.4.1.34998.3.3.1 NAME 'voPerson' AUXILIARY MAY ( + voPersonAffiliation $ voPersonApplicationUID $ voPersonAuthorName $ voPerson + CertificateDN $ voPersonCertificateIssuerDN $ voPersonExternalAffiliation $ v + oPersonExternalID $ voPersonID $ voPersonPolicyAgreement $ voPersonScopedAffi + liation $ voPersonSoRID $ voPersonStatus ) ) diff --git a/roles/ldap/handlers/main.yml b/roles/ldap/handlers/main.yml new file mode 100644 index 000000000..0510176a6 --- /dev/null +++ b/roles/ldap/handlers/main.yml @@ -0,0 +1,24 @@ +--- +- name: restart rsyslog + service: + name: rsyslog + state: restarted + listen: "restart rsyslog" + +- name: systemd daemon-reload + systemd: + name: slapd + daemon_reload: yes + +- name: restart LDAP + systemd: + name: slapd + state: restarted + enabled: true + daemon-reload: true + +- name: Restart the ldap container + community.docker.docker_container: + name: "{{ containers.ldap }}" + restart: true + state: started diff --git a/roles/ldap/tasks/admins.yml b/roles/ldap/tasks/admins.yml new file mode 100644 index 000000000..e00115c04 --- /dev/null +++ b/roles/ldap/tasks/admins.yml @@ -0,0 +1,82 @@ +--- +# - name: Initialize DIT admin +# community.general.ldap_entry: +# dn: "{{ services_ldap.binddn }}" +# objectClass: organizationalRole +# attributes: +# cn: "{{ services_ldap.binddn | regex_replace('^cn=([^,]+).*', '\\1') }}" + +# determine which users need to be admin +# check for each role of each user if it leads to membership of group {{ldap_admin_group}} +# - name: determine ldap admins +# set_fact: +# ldap_admins: "{{ ldap_admins | default([]) + [item.0] }}" +# when: ldap_admin_group in role_to_groups[item.1] or ldap_admin_group in item.0.groups +# loop: "{{ users | subelements('roles') }}" + +- name: determine ldap admins + set_fact: + ldap_admins: "{{ ldap.admins }}" + +# Find existing ldap admins +- name: Initialize admins (I) + community.general.ldap_search: + dn: "{{ ldap.services_dn.basedn }}" + scope: "onelevel" + filter: "(objectClass=organizationalRole)" + attrs: + - "cn" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + register: "existing_ldap_admins_result" + +# ansible sucks like this: we need to extract the results from the result +- name: Initialize admins (Ia) + set_fact: + existing_ldap_admins: "{{ existing_ldap_admins_result.results }}" + +- debug: + var: "existing_ldap_admins" +- debug: + var: "ldap_admins" + +# Remove LDAP non-admins +- name: Initialize admins (II) + community.general.ldap_entry: + dn: "cn={{ item.cn }},{{ services_ldap.basedn }}" + state: absent + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + when: > + item.cn not in ldap_admins | map(attribute='uid') + and item.cn != 'admin' + loop: "{{existing_ldap_admins}}" + +# Insert LDAP admins +- name: Initialize 
admins (III) + community.general.ldap_entry: + dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + objectClass: + - simpleSecurityObject + - organizationalRole + attributes: + description: An LDAP administrator + userPassword: "{{ item.pw_hash }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + loop: "{{ ldap_admins }}" + +# Make sure passwords are updated for existing admins +- name: Initialize admins (IV) + community.general.ldap_attrs: + dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + attributes: + userPassword: "{{ item.pw_hash }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + loop: "{{ ldap_admins }}" + diff --git a/roles/ldap/tasks/main.yml b/roles/ldap/tasks/main.yml new file mode 100644 index 000000000..f58c21ee7 --- /dev/null +++ b/roles/ldap/tasks/main.yml @@ -0,0 +1,369 @@ +--- +# playbook to install and configure all components of the LDAP +- name: Install LDAP utils + apt: + state: "present" + name: + - "python3-ldap" # for ansible ldap modules + install_recommends: false + +- name: Ensure that a number of directories exist + file: + path: "{{ item.path }}" + state: "directory" + # owner: "{{ ldap.user }}" + # group: "{{ ldap.group }}" + mode: "{{ item.mode }}" + with_items: + - { path: "{{ldap.ldif_dir}}", mode: "0755" } + - { path: "{{ldap.certs_dir}}", mode: "0755" } + - { path: "{{ldap.data_dir}}", mode: "0777" } + notify: Restart the ldap container + +- name: Copy schemas + copy: + src: "{{ item }}" + dest: "{{ ldap.ldif_dir }}/{{ item }}" + mode: "0644" + with_items: + - sczGroup.ldif + - groupOfMembers.ldif + - eduPerson.ldif + - ldapPublicKey.ldif + - eduMember.ldif + - voPerson.ldif + - sramPerson.ldif + notify: Restart the ldap container + +- name: Copying ldap-add script + copy: + src: "{{ item }}" + dest: "{{ ldap.conf_dir }}/{{ item }}" + mode: "0755" + with_items: + - ldap-add + +# # cert is used for communication between ldap for sync +# # is generated in roles/certificates/tasks/main.yml +# - name: Copy wildcard frontend cert +# copy: +# src: "/etc/ssl/certs/sram-https.pem" # was installed here by update-ca-certificates +# remote_src: true +# dest: "{{ldap.certs_dir}}/frontend.crt" +# mode: "0644" +# when: "is_dev" +# notify: Restart the ldap container + +- name: Setup ldap hosts + vars: + host: + key: "%s.{{ ldap.base_domain }}" + value: "%s" + etc_hosts: {} + set_fact: + etc_hosts: >- + {{ etc_hosts | + combine({ host.key | format(item.key): host.value | format(item.value) }) }} + with_dict: "{{ ldap.hosts }}" + +- name: Create the ldap container + community.docker.docker_container: + name: "ldap" + image: "{{ ldap.image }}" + restart_policy: "always" + state: started + pull: true + ports: + - 0.0.0.0:389:389 + env: + LDAP_ORGANISATION: "{{ env }}" + LDAP_DOMAIN: "{{ ldap.base_domain }}" + LDAP_ROOTPASS: "{{ ldap.services_password }}" + etc_hosts: "{{ etc_hosts }}" + volumes: + # For now the target side /opt/ldap is hard-coded + - "{{ ldap.conf_dir }}:/opt/ldap" + networks: + - name: "loadbalancer" + labels: + traefik.enable: "true" + traefik.tcp.routers.ldap.entrypoints: "ldaps" + traefik.tcp.routers.ldap.rule: "HostSNI(`*`)" + traefik.tcp.routers.ldap.tls: "true" + traefik.tcp.services.ldap.loadbalancer.server.port: "389" + healthcheck: + test: + - "CMD" + - "bash" + - "-c" + - "[[ -S /var/run/slapd/ldapi ]]" + register: "ldap_container" + +- name: Wait for LDAP initialization + ansible.builtin.wait_for: + 
port: 389 + delay: 5 + +- name: Wait for 5 seconds + ansible.builtin.wait_for: + timeout: 5 + when: "ldap_container is changed" + +- name: Ensure the schemas are added to LDAP + ansible.builtin.shell: + # For now the target side /opt/ldap is hard-coded + cmd: "docker exec ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" + register: "result" + failed_when: "result.rc not in [0,80]" + changed_when: "result.rc != 80" + become: true + loop: + - "sczGroup.ldif" + - "groupOfMembers.ldif" + - "eduPerson.ldif" + - "ldapPublicKey.ldif" + - "eduMember.ldif" + - "voPerson.ldif" + - "sramPerson.ldif" + +- name: Set indices + community.general.ldap_attrs: + dn: "olcDatabase={1}mdb,cn=config" + attributes: + olcDbIndex: "{{item}}" + state: "present" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + with_items: + - "entryUUID eq" + - "o eq" + - "dc eq" + - "entryCSN eq" + +- name: Set olcDatabase={-1}frontend olcSizeLimit + community.general.ldap_attrs: + dn: "olcDatabase={-1}frontend,cn=config" + state: "exact" + attributes: + olcSizeLimit: "unlimited" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Set config + community.general.ldap_attrs: + dn: "cn=config" + state: "present" + attributes: + olcServerID: "{{ ldap.server_id }}" + olcSizeLimit: "unlimited" + olcLogLevel: "{{ ldap.loglevel }}" + olcAttributeOptions: "time-" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# # cert is used for communication between ldap for sync +# # is generated in roles/certificates/tasks/main.yml +# - name: Set TLS config +# community.general.ldap_attrs: +# dn: "cn=config" +# state: "exact" +# attributes: +# olcTLSCACertificateFile: "/opt/ldap/certs/frontend.crt" +# bind_dn: "cn=admin,cn=config" +# bind_pw: "{{ ldap.services_password }}" +# server_uri: "{{ ldap.uri }}" + +- name: Setup Modules + community.general.ldap_attrs: + dn: cn=module{0},cn=config + attributes: + olcModuleLoad: + - syncprov + - dynlist.so + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Setup Dynlist + community.general.ldap_entry: + dn: olcOverlay=dynlist,olcDatabase={1}mdb,cn=config + objectClass: + - olcOverlayConfig + - olcDynamicList + attributes: + olcDlAttrSet: "voPerson labeledURI member+memberOf@groupOfMembers" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Setup Syncprov + community.general.ldap_entry: + dn: olcOverlay=syncprov,olcDatabase={1}mdb,cn=config + objectClass: + - olcOverlayConfig + - olcSyncProvConfig + attributes: + olcSpCheckpoint: 100 10 + olcSpSessionLog: 100 + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# Leave this here in case we do want to build our own +# root database from scratch instead of relying on the +# domain based Debian slapd package setup +# +# - name: Setup main database +# community.general.ldap_attrs: +# dn: olcDatabase={1}mdb,cn=config +# attributes: +# olcSuffix: "{{ services_ldap.basedn }}" +# olcRootDN: "{{ services_ldap.binddn }}" +# olcRootPW: "{{ '%s' | format(services_ldap_password) | slapd_hash }}" +# state: exact +# +# - name: Set root credentials +# community.general.ldap_attrs: +# dn: olcDatabase={0}config,cn=config +# attributes: +# olcAccess: >- +# {0}to * +# by 
dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage +# {% if environment_name=="vm" %} +# by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage +# {% endif %} +# by dn.exact="{{ services_ldap.binddn }}" manage +# by * break +# state: exact +# bind_dn: "cn=admin,cn=config" +# bind_pw: "{{ services_ldap_password }}" +# server_uri: "{{ ldap_uri }}" +# +# +# - name: Get uid of openldap user +# ansible.builtin.getent: +# database: "passwd" +# key: "openldap" +# +# # store it in a nice var (so line below doesn't get too long) +# - name: Save gid of openldap user +# ansible.builtin.set_fact: +# openldap_auth: "gidNumber={{ansible_facts.getent_passwd['openldap'][2]}}+\ +# uidNumber={{ansible_facts.getent_passwd['openldap'][1]}}" +# + +- name: Set ACLs + community.general.ldap_attrs: + dn: "olcDatabase={1}mdb,cn=config" + attributes: + olcAccess: + - >- + to dn.regex="(([^,]+),{{ ldap.services_dn.basedn }})$" + by dn.exact="{{ ldap.services_dn.binddn }}" write + by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth write + by dn.exact,expand="cn=admin,$1" read + by * break + - >- + to * + by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage + by dn.regex="cn=[^,]+,{{ ldap.services_dn.basedn }}" read + {% if env=="vm" %} + by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage + {% endif %} + by * break + - >- + to attrs=userPassword + by self write + by anonymous auth + by * break + state: exact + ordered: true + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# ldap_rids: +# 101: ldaps://ldap1.scz-vm.net/ +# 102: ldaps://ldap2.scz-vm.net/ + +- name: Set rids + vars: + start: 101 + rid: + key: "%d" + value: "{{ ldap.rid_prefix }}%s.{{ ldap.base_domain }}/" + ldap_rids: {} + set_fact: + ldap_rids: >- + {{ ldap_rids | combine({ rid.key | format(start|int): + rid.value | format(item.key) }) }} + start: "{{ start|int + 1 }}" + with_dict: "{{ ldap.hosts | dict2items | sort(attribute='key') }}" + +# For future Claude users: the construct below ends up producing a string +# representation of the dict, which can then no longer be used +# in the subsequent 'Setup rids' task... +# - name: Set rids +# set_fact: +# ldap_rids: >- +# {%- set result = {} %} +# {%- for host in (ldap_hosts | dict2items | sort(attribute='key')) %} +# {%- set _ = result.update({(101 + loop.index0)|string: \ +# ldap_rid_prefix ~ host.key ~ '.'
~ base_domain ~ '/'}) %} +# {%- endfor %} +# {{ result }} + +- name: Setup rids + vars: + rid: >- + rid={} + provider="{}" + searchbase="{{ ldap.services_dn.basedn }}" + type=refreshAndPersist + bindmethod=simple + binddn="{{ ldap.services_dn.binddn }}" + credentials={{ ldap.services_password }} + retry="30 +" + timeout=30 + network-timeout=5 + rids: [] + set_fact: + rids: "{{ rids + [ rid.format(item.key, item.value) ] }}" + with_dict: "{{ dict(ldap_rids) }}" + +- name: Setup Syncrepl + community.general.ldap_attrs: + dn: olcDatabase={1}mdb,cn=config + attributes: + olcSyncrepl: "{{ rids }}" + olcMultiProvider: "TRUE" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# We now have Syncrepl in place, so only write to primary +- name: Initialize DIT + community.general.ldap_entry: + dn: "{{ ldap.services_dn.basedn }}" + state: "present" + objectClass: + - "top" + - "dcObject" + - "organization" + attributes: + dc: "{{ ldap.services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}" + o: "{{ ldap.services_dn.o }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + when: > + inventory_hostname in groups['ldap_primary'] + +# We now have Syncrepl in place, so only write to primary +- name: Add ldap admins + include_tasks: "admins.yml" + when: > + inventory_hostname in groups['ldap_primary'] diff --git a/roles/ldap/templates/ldap-backup.sh.j2 b/roles/ldap/templates/ldap-backup.sh.j2 new file mode 100644 index 000000000..2c6aa9201 --- /dev/null +++ b/roles/ldap/templates/ldap-backup.sh.j2 @@ -0,0 +1,19 @@ +#!/bin/bash +# vim:ft=sh +set -e + +if [ $UID -ne 0 ] +then + echo "Sorry, this script must run as root" + exit 1 +fi + +BACKUP_DIR="{{ldap_backup_dir}}" +BACKUP_FILE="$BACKUP_DIR/ldap_$(/bin/date +%Y-%m-%d_%H:%M)" + +mkdir -p -m 0755 "$BACKUP_DIR" + +/usr/sbin/slapcat -o ldif-wrap=no -n0 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db0.ldif.bz2" +/usr/sbin/slapcat -o ldif-wrap=no -n1 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db1.ldif.bz2" + +exit 0 diff --git a/roles/ldap/templates/ldap.conf.j2 b/roles/ldap/templates/ldap.conf.j2 new file mode 100644 index 000000000..d7fa7c227 --- /dev/null +++ b/roles/ldap/templates/ldap.conf.j2 @@ -0,0 +1,16 @@ +# +# LDAP Defaults +# + +# See ldap.conf(5) for details +# This file should be world readable but not world writable. + +#BASE dc=example,dc=com +#URI ldap://ldap.example.com ldap://ldap-master.example.com:666 + +#SIZELIMIT 12 +#TIMELIMIT 15 +#DEREF never + +# TLS certificates (needed for GnuTLS) +TLS_CACERT {{ ssl_certs_dir }}/{{ internal_base_domain }}.crt diff --git a/roles/ldap/templates/slapd.service.j2 b/roles/ldap/templates/slapd.service.j2 new file mode 100644 index 000000000..7e0f79397 --- /dev/null +++ b/roles/ldap/templates/slapd.service.j2 @@ -0,0 +1,20 @@ +[Unit] +Description = LDAP server + +[Service] +Type = forking +User = root +SupplementaryGroups = ssl-cert +ExecStartPre=-/bin/mkdir -p /var/run/slapd +ExecStartPre=-/bin/chown openldap. 
/var/run/slapd +ExecStart = /usr/sbin/slapd -F /etc/ldap/slapd.d -u openldap -g openldap -h 'ldapi:/// ldap://localhost/ ldaps://{{inventory_hostname}}/' +Restart = always +RestartSec = 30 +PIDFile = /run/slapd/slapd.pid +# defaults are 1024:524288 which is too small for slapd +# see https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=378261 and +# https://www.openldap.org/lists/openldap-software/200802/msg00186.html +LimitNOFILE=4096:524288 + +[Install] +WantedBy = multi-user.target diff --git a/roles/ldap/vars/main.yml b/roles/ldap/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/ldap/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index c42b80948..9bcd62c87 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -3,7 +3,6 @@ sbs: "{{ sbs_defaults | combine(sbs_overrides) }}" sbs_defaults: base_domain: "test2.sram.surf.nl" - environment_name: test ansible_nolog: true base_url: "https://{{ sbs_defaults.base_domain }}" server_image: "ghcr.io/surfscz/sram-sbs-server:main" diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index eba293de9..d052565c2 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -3,7 +3,7 @@ database: uri: {{ sbs.db_connection_sbs }} redis: -{% if sbs.environment_name == 'test2' %} +{% if env == 'test2' %} uri: "redis://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{sbs.redis_host}}/" {% else %} uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" @@ -54,7 +54,7 @@ eppn_scope: " {{ sbs.eppn_scope }}" scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" -{% if sbs.environment_name == "prd" %} +{% if env == "prd" %} environment_disclaimer: "" {% else %} environment_disclaimer: "{{ sbs.disclaimer_label }}" diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sbs/templates/disclaimer.css.j2 index 04c62aa86..7922f5e5b 100644 --- a/roles/sbs/templates/disclaimer.css.j2 +++ b/roles/sbs/templates/disclaimer.css.j2 @@ -1,4 +1,4 @@ -{% if sbs.environment_name!="prd" -%} +{% if env!="prd" -%} body::after { background: {{ sbs.disclaimer_color }}; content: "{{ sbs.disclaimer_label }}"; From 96bad9f7f78ada857859e3a4cfbcca75715183ae Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Fri, 27 Mar 2026 14:55:12 +0100 Subject: [PATCH 62/73] Add plsc and mailpit roles --- roles/ldap/defaults/main.yml | 2 +- roles/mailpit/defaults/main.yml | 8 +++++++ roles/mailpit/tasks/main.yml | 37 +++++++++++++++++++++++++++++++ roles/plsc/defaults/main.yml | 15 +++++++++++++ roles/plsc/handlers/main.yml | 18 +++++++++++++++ roles/plsc/tasks/main.yml | 28 +++++++++++++++++++++++ roles/plsc/templates/plsc.yml.j2 | 25 +++++++++++++++++++++ roles/plsc/vars/main.yml | 1 + roles/sbs/defaults/main.yml | 4 +++- roles/sbs/templates/config.yml.j2 | 2 +- 10 files changed, 137 insertions(+), 3 deletions(-) create mode 100644 roles/mailpit/defaults/main.yml create mode 100644 roles/mailpit/tasks/main.yml create mode 100644 roles/plsc/defaults/main.yml create mode 100644 roles/plsc/handlers/main.yml create mode 100644 roles/plsc/tasks/main.yml create mode 100644 roles/plsc/templates/plsc.yml.j2 create mode 100644 roles/plsc/vars/main.yml diff --git a/roles/ldap/defaults/main.yml b/roles/ldap/defaults/main.yml index 
e60ab603c..7d02ebe4a 100644 --- a/roles/ldap/defaults/main.yml +++ b/roles/ldap/defaults/main.yml @@ -1,5 +1,5 @@ --- -ldap: "{{ ldap_defaults | combine(ldap_overrides) }}" +ldap: "{{ ldap_defaults | combine(ldap_overrides, recursive=true) }}" ldap_defaults: image: "ghcr.io/surfscz/sram-ldap:main" diff --git a/roles/mailpit/defaults/main.yml b/roles/mailpit/defaults/main.yml new file mode 100644 index 000000000..7647de9dc --- /dev/null +++ b/roles/mailpit/defaults/main.yml @@ -0,0 +1,8 @@ +--- +mailpit: "{{ mailpit_defaults | combine(mailpit_overrides, recursive=true) }}" + +mailpit_defaults: + image: "axllent/mailpit" + hostname: "mailpit.{{ base_domain }}" + user: "mailpit" + group: "mailpit" diff --git a/roles/mailpit/tasks/main.yml b/roles/mailpit/tasks/main.yml new file mode 100644 index 000000000..7fb32d8a7 --- /dev/null +++ b/roles/mailpit/tasks/main.yml @@ -0,0 +1,37 @@ +--- +- name: "Create mailpit group" + group: + name: "{{ mailpit.group }}" + state: "present" + register: "result" + +- name: "Create mailpit user" + user: + name: "{{ mailpit.user }}" + group: "{{ mailpit.group }}" + comment: "User to run Mailpit service" + shell: "/bin/false" + password: "!" + create_home: false + state: "present" + register: "result" + +- name: "Save mailpit user uid" + set_fact: + mailpit_user_uid: "{{ result.uid }}" + +- name: "Create mailpit container" + docker_container: + name: "mailpit" + image: "{{ mailpit.image }}" + restart_policy: "always" + state: "started" + user: "{{ mailpit_user_uid }}" + ports: + networks: + - name: "loadbalancer" + labels: + traefik.enable: "true" + traefik.http.routers.mailpit.rule: "Host(`{{ mailpit.hostname }}`)" + traefik.http.routers.mailpit.tls: "true" + traefik.http.services.mailpit.loadbalancer.server.port: 8025 diff --git a/roles/plsc/defaults/main.yml b/roles/plsc/defaults/main.yml new file mode 100644 index 000000000..2a3711b0c --- /dev/null +++ b/roles/plsc/defaults/main.yml @@ -0,0 +1,15 @@ +--- +plsc: "{{ plsc_defaults | combine(plsc_overrides, recursive=true) }}" + +plsc_defaults: + image: "ghcr.io/surfscz/sram-plsc:main" + conf_dir: "{{current_release_appdir}}/sram/plsc" + ansible_nolog: false + ldap_uri: "ldap://ldap:389/" + ldap_basedn: "dc=services,dc=vnet" + ldap_binddn: "cn=admin,dc=vnet" + ldap_password: "secret" + sbs_host: "http://sbs-server:8080" + sbs_user: "sysread" + sbs_password: "secret" + retry: 3 diff --git a/roles/plsc/handlers/main.yml b/roles/plsc/handlers/main.yml new file mode 100644 index 000000000..9ce03e899 --- /dev/null +++ b/roles/plsc/handlers/main.yml @@ -0,0 +1,18 @@ +--- +# - name: enable plsc job +# systemd: +# name: "plsc.timer" +# enabled: true +# state: "restarted" +# daemon_reload: true + +# - name: "restart zabbix-agent" +# systemd: +# name: "zabbix-agent2.service" +# state: "restarted" + +- name: Restart the plsc container + community.docker.docker_container: + name: "plsc" + restart: true + state: started diff --git a/roles/plsc/tasks/main.yml b/roles/plsc/tasks/main.yml new file mode 100644 index 000000000..14e7b40b7 --- /dev/null +++ b/roles/plsc/tasks/main.yml @@ -0,0 +1,28 @@ +--- +- name: Make sure clients sync directory exists + file: + path: "{{ plsc.conf_dir }}" + state: directory + mode: "0755" + +- name: "Create plsc.yml source if it doesn't exist" + template: + src: "plsc.yml.j2" + dest: "{{ plsc.conf_dir }}/plsc.yml" + mode: "0640" + no_log: "{{plsc.ansible_nolog}}" + notify: "Restart the plsc container" + +- name: Create the plsc container + community.docker.docker_container: + name: "plsc" + 
image: "{{ plsc.image }}" + restart_policy: "always" + state: started + pull: true + mounts: + - type: bind + source: "{{ plsc.conf_dir }}/plsc.yml" + target: "/opt/plsc/plsc.yml" + networks: + - name: "loadbalancer" diff --git a/roles/plsc/templates/plsc.yml.j2 b/roles/plsc/templates/plsc.yml.j2 new file mode 100644 index 000000000..a42c00807 --- /dev/null +++ b/roles/plsc/templates/plsc.yml.j2 @@ -0,0 +1,25 @@ +--- +ldap: + src: + uri: "{{ plsc.ldap_uri }}" + basedn: "{{ plsc.ldap_basedn }}" + binddn: "{{ plsc.ldap_binddn }}" + passwd: "{{ plsc.ldap_password }}" + sizelimit: 500 + dst: + uri: "{{ plsc.ldap_uri }}" + basedn: "{{ plsc.ldap_basedn }}" + binddn: "{{ plsc.ldap_binddn }}" + passwd: "{{ plsc.ldap_password }}" + sizelimit: 500 +sbs: + src: + host: "{{ plsc.sbs_host }}" + user: "{{ plsc.sbs_user }}" + passwd: "{{ plsc.sbs_password }}" + verify_ssl: {{ false if env=='vm' else true }} + timeout: 60 + retry: {{ plsc.retry }} +pwd: "{CRYPT}!" +uid: 1000 +gid: 1000 diff --git a/roles/plsc/vars/main.yml b/roles/plsc/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/plsc/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index 9bcd62c87..aedcee959 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -1,5 +1,5 @@ --- -sbs: "{{ sbs_defaults | combine(sbs_overrides) }}" +sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}" sbs_defaults: base_domain: "test2.sram.surf.nl" @@ -69,6 +69,8 @@ sbs_defaults: ticket_email: "sram-support@surf.nl" eduteams_email: "eduteams@localhost" + suppress_mails: False + wiki_link: "https://www.example.org/wiki" cron_hour_of_day: 4 diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index d052565c2..7d4c92bf4 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -71,7 +71,7 @@ mail: port: {{ sbs.mail_port }} sender_name: {{ sbs.mail_sender_name }} sender_email: {{ sbs.mail_sender_email }} - suppress_sending_mails: False + suppress_sending_mails: {{ sbs.suppress_mails }} info_email: {{ sbs.support_email }} beheer_email: {{ sbs.admin_email }} ticket_email: {{ sbs.ticket_email }} From 3eee2dcd397beaaea389f22de4f1e0b4b279c008 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Fri, 27 Mar 2026 16:32:42 +0100 Subject: [PATCH 63/73] Add sram-metadata and rename apps+roles --- roles/{ldap => sram-ldap}/defaults/main.yml | 2 +- .../{ldap => sram-ldap}/files/eduMember.ldif | 0 .../{ldap => sram-ldap}/files/eduPerson.ldif | 0 .../files/groupOfMembers.ldif | 0 roles/{ldap => sram-ldap}/files/ldap-add | 0 .../files/ldapPublicKey.ldif | 0 .../{ldap => sram-ldap}/files/logrotate_slapd | 0 .../files/rsyslog_slapd.conf | 0 roles/{ldap => sram-ldap}/files/sczGroup.ldif | 0 .../{ldap => sram-ldap}/files/sramPerson.ldif | 0 roles/{ldap => sram-ldap}/files/voPerson.ldif | 0 roles/{ldap => sram-ldap}/handlers/main.yml | 0 roles/{ldap => sram-ldap}/tasks/admins.yml | 0 roles/{ldap => sram-ldap}/tasks/main.yml | 4 +- .../templates/ldap-backup.sh.j2 | 0 .../templates/ldap.conf.j2 | 0 .../templates/slapd.service.j2 | 0 roles/{ldap => sram-ldap}/vars/main.yml | 0 roles/sram-metadata/defaults/main.yml | 81 +++++++++++++ roles/sram-metadata/files/01_idps.fd | 23 ++++ roles/sram-metadata/files/02_backend.fd | 14 +++ roles/sram-metadata/files/03_frontend.fd | 14 +++ roles/sram-metadata/files/surf.png | Bin 0 -> 16016 bytes roles/sram-metadata/files/surf.svg | 24 
++++ roles/sram-metadata/files/surf_bimi.svg | 15 +++ roles/sram-metadata/files/surfconext.crt | 3 + roles/sram-metadata/files/transform.xslt | 47 ++++++++ .../sram-metadata/files/transform_proxy.xslt | 50 +++++++++ roles/sram-metadata/handlers/main.yml | 19 ++++ roles/sram-metadata/tasks/http.yml | 48 ++++++++ roles/sram-metadata/tasks/main.yml | 49 ++++++++ roles/sram-metadata/tasks/pyff.yml | 106 ++++++++++++++++++ roles/sram-metadata/templates/index.html.j2 | 11 ++ .../templates/pyff-metadata.service.j2 | 12 ++ .../templates/pyff-metadata.timer.j2 | 8 ++ roles/sram-metadata/templates/vhosts.conf.j2 | 15 +++ roles/{plsc => sram-metadata}/vars/main.yml | 0 roles/sram-midproxy/defaults/main.yml | 8 ++ .../files/internal_attributes.yaml | 22 ++++ .../files/plugins/attribute-maps/basic.py | 51 +++++++++ .../plugins/backends/openid_backend.yaml | 14 +++ .../files/plugins/backends/saml2_backend.yaml | 1 + .../plugins/frontends/ping_frontend.yaml | 3 + .../plugins/frontends/saml2_frontend.yaml | 63 +++++++++++ .../microservices/generate_attributes.yaml | 8 ++ .../microservices/regex_attributes.yaml | 10 ++ roles/sram-midproxy/files/proxy_conf.yaml | 74 ++++++++++++ roles/sram-midproxy/tasks/main.yml | 59 ++++++++++ roles/{plsc => sram-plsc}/defaults/main.yml | 0 roles/{plsc => sram-plsc}/handlers/main.yml | 2 +- roles/{plsc => sram-plsc}/tasks/main.yml | 2 +- .../{plsc => sram-plsc}/templates/plsc.yml.j2 | 0 roles/{redis => sram-plsc}/vars/main.yml | 0 roles/{redis => sram-redis}/defaults/main.yml | 4 +- roles/{redis => sram-redis}/handlers/main.yml | 2 +- roles/{redis => sram-redis}/tasks/main.yml | 2 +- .../templates/redis.conf.j2 | 0 roles/{sbs => sram-redis}/vars/main.yml | 0 roles/{sbs => sram-sbs}/defaults/main.yml | 2 +- roles/{sbs => sram-sbs}/files/yarn.gpg | 0 roles/{sbs => sram-sbs}/handlers/main.yml | 4 +- roles/{sbs => sram-sbs}/tasks/main.yml | 14 +-- .../templates/alembic.ini.j2 | 0 .../{sbs => sram-sbs}/templates/config.yml.j2 | 0 .../templates/disclaimer.css.j2 | 0 .../templates/saml_advanced_settings.json.j2 | 0 .../templates/saml_settings.json.j2 | 0 .../templates/sbs-apache.conf.j2 | 8 +- .../templates/sbs.service.j2 | 0 roles/sram-sbs/vars/main.yml | 1 + 70 files changed, 876 insertions(+), 23 deletions(-) rename roles/{ldap => sram-ldap}/defaults/main.yml (95%) rename roles/{ldap => sram-ldap}/files/eduMember.ldif (100%) rename roles/{ldap => sram-ldap}/files/eduPerson.ldif (100%) rename roles/{ldap => sram-ldap}/files/groupOfMembers.ldif (100%) rename roles/{ldap => sram-ldap}/files/ldap-add (100%) rename roles/{ldap => sram-ldap}/files/ldapPublicKey.ldif (100%) rename roles/{ldap => sram-ldap}/files/logrotate_slapd (100%) rename roles/{ldap => sram-ldap}/files/rsyslog_slapd.conf (100%) rename roles/{ldap => sram-ldap}/files/sczGroup.ldif (100%) rename roles/{ldap => sram-ldap}/files/sramPerson.ldif (100%) rename roles/{ldap => sram-ldap}/files/voPerson.ldif (100%) rename roles/{ldap => sram-ldap}/handlers/main.yml (100%) rename roles/{ldap => sram-ldap}/tasks/admins.yml (100%) rename roles/{ldap => sram-ldap}/tasks/main.yml (99%) rename roles/{ldap => sram-ldap}/templates/ldap-backup.sh.j2 (100%) rename roles/{ldap => sram-ldap}/templates/ldap.conf.j2 (100%) rename roles/{ldap => sram-ldap}/templates/slapd.service.j2 (100%) rename roles/{ldap => sram-ldap}/vars/main.yml (100%) create mode 100644 roles/sram-metadata/defaults/main.yml create mode 100644 roles/sram-metadata/files/01_idps.fd create mode 100644 roles/sram-metadata/files/02_backend.fd create mode 100644 
roles/sram-metadata/files/03_frontend.fd create mode 100644 roles/sram-metadata/files/surf.png create mode 100644 roles/sram-metadata/files/surf.svg create mode 100644 roles/sram-metadata/files/surf_bimi.svg create mode 100644 roles/sram-metadata/files/surfconext.crt create mode 100644 roles/sram-metadata/files/transform.xslt create mode 100644 roles/sram-metadata/files/transform_proxy.xslt create mode 100644 roles/sram-metadata/handlers/main.yml create mode 100644 roles/sram-metadata/tasks/http.yml create mode 100644 roles/sram-metadata/tasks/main.yml create mode 100644 roles/sram-metadata/tasks/pyff.yml create mode 100644 roles/sram-metadata/templates/index.html.j2 create mode 100644 roles/sram-metadata/templates/pyff-metadata.service.j2 create mode 100644 roles/sram-metadata/templates/pyff-metadata.timer.j2 create mode 100644 roles/sram-metadata/templates/vhosts.conf.j2 rename roles/{plsc => sram-metadata}/vars/main.yml (100%) create mode 100644 roles/sram-midproxy/defaults/main.yml create mode 100644 roles/sram-midproxy/files/internal_attributes.yaml create mode 100644 roles/sram-midproxy/files/plugins/attribute-maps/basic.py create mode 100644 roles/sram-midproxy/files/plugins/backends/openid_backend.yaml create mode 100644 roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml create mode 100644 roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml create mode 100644 roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml create mode 100644 roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml create mode 100644 roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml create mode 100644 roles/sram-midproxy/files/proxy_conf.yaml create mode 100644 roles/sram-midproxy/tasks/main.yml rename roles/{plsc => sram-plsc}/defaults/main.yml (100%) rename roles/{plsc => sram-plsc}/handlers/main.yml (94%) rename roles/{plsc => sram-plsc}/tasks/main.yml (96%) rename roles/{plsc => sram-plsc}/templates/plsc.yml.j2 (100%) rename roles/{redis => sram-plsc}/vars/main.yml (100%) rename roles/{redis => sram-redis}/defaults/main.yml (67%) rename roles/{redis => sram-redis}/handlers/main.yml (83%) rename roles/{redis => sram-redis}/tasks/main.yml (98%) rename roles/{redis => sram-redis}/templates/redis.conf.j2 (100%) rename roles/{sbs => sram-redis}/vars/main.yml (100%) rename roles/{sbs => sram-sbs}/defaults/main.yml (99%) rename roles/{sbs => sram-sbs}/files/yarn.gpg (100%) rename roles/{sbs => sram-sbs}/handlers/main.yml (76%) rename roles/{sbs => sram-sbs}/tasks/main.yml (96%) rename roles/{sbs => sram-sbs}/templates/alembic.ini.j2 (100%) rename roles/{sbs => sram-sbs}/templates/config.yml.j2 (100%) rename roles/{sbs => sram-sbs}/templates/disclaimer.css.j2 (100%) rename roles/{sbs => sram-sbs}/templates/saml_advanced_settings.json.j2 (100%) rename roles/{sbs => sram-sbs}/templates/saml_settings.json.j2 (100%) rename roles/{sbs => sram-sbs}/templates/sbs-apache.conf.j2 (88%) rename roles/{sbs => sram-sbs}/templates/sbs.service.j2 (100%) create mode 100644 roles/sram-sbs/vars/main.yml diff --git a/roles/ldap/defaults/main.yml b/roles/sram-ldap/defaults/main.yml similarity index 95% rename from roles/ldap/defaults/main.yml rename to roles/sram-ldap/defaults/main.yml index 7d02ebe4a..e20f4553d 100644 --- a/roles/ldap/defaults/main.yml +++ b/roles/sram-ldap/defaults/main.yml @@ -3,7 +3,7 @@ ldap: "{{ ldap_defaults | combine(ldap_overrides, recursive=true) }}" ldap_defaults: image: "ghcr.io/surfscz/sram-ldap:main" - conf_dir: "{{ 
current_release_appdir }}/ldap" + conf_dir: "{{ current_release_appdir }}/sram/ldap" ldif_dir: "{{ ldap_defaults.conf_dir }}/schema" certs_dir: "{{ ldap_defaults.conf_dir }}/certs" backup_dir: "{{ ldap_defaults.conf_dir }}/ldap" diff --git a/roles/ldap/files/eduMember.ldif b/roles/sram-ldap/files/eduMember.ldif similarity index 100% rename from roles/ldap/files/eduMember.ldif rename to roles/sram-ldap/files/eduMember.ldif diff --git a/roles/ldap/files/eduPerson.ldif b/roles/sram-ldap/files/eduPerson.ldif similarity index 100% rename from roles/ldap/files/eduPerson.ldif rename to roles/sram-ldap/files/eduPerson.ldif diff --git a/roles/ldap/files/groupOfMembers.ldif b/roles/sram-ldap/files/groupOfMembers.ldif similarity index 100% rename from roles/ldap/files/groupOfMembers.ldif rename to roles/sram-ldap/files/groupOfMembers.ldif diff --git a/roles/ldap/files/ldap-add b/roles/sram-ldap/files/ldap-add similarity index 100% rename from roles/ldap/files/ldap-add rename to roles/sram-ldap/files/ldap-add diff --git a/roles/ldap/files/ldapPublicKey.ldif b/roles/sram-ldap/files/ldapPublicKey.ldif similarity index 100% rename from roles/ldap/files/ldapPublicKey.ldif rename to roles/sram-ldap/files/ldapPublicKey.ldif diff --git a/roles/ldap/files/logrotate_slapd b/roles/sram-ldap/files/logrotate_slapd similarity index 100% rename from roles/ldap/files/logrotate_slapd rename to roles/sram-ldap/files/logrotate_slapd diff --git a/roles/ldap/files/rsyslog_slapd.conf b/roles/sram-ldap/files/rsyslog_slapd.conf similarity index 100% rename from roles/ldap/files/rsyslog_slapd.conf rename to roles/sram-ldap/files/rsyslog_slapd.conf diff --git a/roles/ldap/files/sczGroup.ldif b/roles/sram-ldap/files/sczGroup.ldif similarity index 100% rename from roles/ldap/files/sczGroup.ldif rename to roles/sram-ldap/files/sczGroup.ldif diff --git a/roles/ldap/files/sramPerson.ldif b/roles/sram-ldap/files/sramPerson.ldif similarity index 100% rename from roles/ldap/files/sramPerson.ldif rename to roles/sram-ldap/files/sramPerson.ldif diff --git a/roles/ldap/files/voPerson.ldif b/roles/sram-ldap/files/voPerson.ldif similarity index 100% rename from roles/ldap/files/voPerson.ldif rename to roles/sram-ldap/files/voPerson.ldif diff --git a/roles/ldap/handlers/main.yml b/roles/sram-ldap/handlers/main.yml similarity index 100% rename from roles/ldap/handlers/main.yml rename to roles/sram-ldap/handlers/main.yml diff --git a/roles/ldap/tasks/admins.yml b/roles/sram-ldap/tasks/admins.yml similarity index 100% rename from roles/ldap/tasks/admins.yml rename to roles/sram-ldap/tasks/admins.yml diff --git a/roles/ldap/tasks/main.yml b/roles/sram-ldap/tasks/main.yml similarity index 99% rename from roles/ldap/tasks/main.yml rename to roles/sram-ldap/tasks/main.yml index f58c21ee7..97977e06a 100644 --- a/roles/ldap/tasks/main.yml +++ b/roles/sram-ldap/tasks/main.yml @@ -68,7 +68,7 @@ - name: Create the ldap container community.docker.docker_container: - name: "ldap" + name: "sram-ldap" image: "{{ ldap.image }}" restart_policy: "always" state: started @@ -112,7 +112,7 @@ - name: Ensure the schemas are added to LDAP ansible.builtin.shell: # For now the target side /opt/ldap is hard-coded - cmd: "docker exec ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" + cmd: "docker exec sram-ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" register: "result" failed_when: "result.rc not in [0,80]" changed_when: "result.rc != 80" diff --git a/roles/ldap/templates/ldap-backup.sh.j2 b/roles/sram-ldap/templates/ldap-backup.sh.j2 similarity index 100% 
rename from roles/ldap/templates/ldap-backup.sh.j2 rename to roles/sram-ldap/templates/ldap-backup.sh.j2 diff --git a/roles/ldap/templates/ldap.conf.j2 b/roles/sram-ldap/templates/ldap.conf.j2 similarity index 100% rename from roles/ldap/templates/ldap.conf.j2 rename to roles/sram-ldap/templates/ldap.conf.j2 diff --git a/roles/ldap/templates/slapd.service.j2 b/roles/sram-ldap/templates/slapd.service.j2 similarity index 100% rename from roles/ldap/templates/slapd.service.j2 rename to roles/sram-ldap/templates/slapd.service.j2 diff --git a/roles/ldap/vars/main.yml b/roles/sram-ldap/vars/main.yml similarity index 100% rename from roles/ldap/vars/main.yml rename to roles/sram-ldap/vars/main.yml diff --git a/roles/sram-metadata/defaults/main.yml b/roles/sram-metadata/defaults/main.yml new file mode 100644 index 000000000..5355989bc --- /dev/null +++ b/roles/sram-metadata/defaults/main.yml @@ -0,0 +1,81 @@ +--- +metadata: "{{ metadata_defaults | combine(metadata_overrides, recursive=true) }}" + +metadata_defaults: + image_server: "ghcr.io/openconext/openconext-basecontainers/apache2:latest" + image_pyff: "ghcr.io/surfscz/sram-pyff:main" + hostname: "meta.{{ base_domain }}" + basedir: "{{current_release_appdir}}/sram/metadata" + + # server_name: "metadata-server" + + user: "sram-metadata" + group: "sram-metadata" + +# idps_source: "https://metadata.surfconext.nl/idps-metadata.xml" +# idps_cert: | +# -----BEGIN CERTIFICATE----- +# MIIEKjCCAhICEG12w6QqayYAWntxDN59dU0wDQYJKoZIhvcNAQELBQAwPDELMAkG +# A1UEBhMCTkwxEDAOBgNVBAoMB1NVUkZuZXQxGzAZBgNVBAMMElNVUkZjb25leHQg +# Um9vdCBDQTAeFw0xOTAxMTQxNjM5MDVaFw0yNDAxMTgxNjM5MDVaMGsxCzAJBgNV +# BAYTAk5MMRAwDgYDVQQIDAdVdHJlY2h0MRAwDgYDVQQKDAdTVVJGbmV0MRMwEQYD +# VQQLDApTVVJGY29uZXh0MSMwIQYDVQQDDBpTVVJGY29uZXh0IG1ldGFkYXRhIHNp +# Z25lcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMckFyqXzW7dbMt4 +# wDdSLaAjFAbNziUgQaivu4dl9Uf/cZ4f36a9DfQBUSraNoIR76ruwK3TPfFalemp +# xmWTsoVSQpb3AOsWbU+i0YKS1cmcqMUC1fef2j1IbuK4B4nEu9S5saGNVGNvUJ+Y +# jDUpC5vyyp7boW9E1md2jIBI6Mw+ZhlmkPucqaphxurWnm0KbxTZrYLOBZ1IXj6r +# yrRoFwwtjEH+CW8cRn8OATK0q4yb0BVr2gY2tp/lTpASHZ3WVWBK0prwK0KkusY6 +# ck+/vvlk46IdEr803NB0Dm3ECh3i65mfCaWzVTtd/md874paK+65f1JeVyd5I5al +# M2KEpvkCAwEAATANBgkqhkiG9w0BAQsFAAOCAgEAjvJXXkxOqh3K0k2NdDG5EOTy +# bA+koRbAqhdY/qJoSnqTzwBxJc6aPs+L4q2PIoLo0gNJj1Nm1taLusaaK+CBx3ar +# 1kxEika5FM0dqFjD3i7Y5U0FMeDB5cReo8TNdo31VGoY7CbRjtqHLRTuKzNmIfEm +# ahLnHIBtarE82b7Mpg0aLxjrRR+t8wSCriy+e9AEPzC5bWxtPJA+OhU8U9hMuOs5 +# SzKmHwYue4WY3q1rRaDpK3fqgXRDRfznNn9/RDDbBos7CRMSAPEmAO28qLKBW/1z +# a2TKQLddZ3uoCurFNbToSTueKYVEnveQNO2P5X6uy4rcYkjeSiwbmHo7jYuHAxx4 +# uGzHMpoqoGNx+2iYjtUo3dJUXzcZai3X+RuuMKXXvqGzrxJsoKayNVAE1dWoUHJl +# RouPhDLTdZq/pblORhFS8r10rKhSScgrNuN9LTTV7EPFeVr8trocNwl8IruH+eNL +# 6/7b5Y7fb7rvpxeHjWrTz8a9BXAIAv+bgyrg4OHGRcNIQb0XF438HD9r8Zb92B6Z +# VCR3aVS5496+1td+8aN/Blzo59LhKPiHyGZCPHFV/oBqG7nxp603kcWmJOcG+AgB +# 9bFiAimF5LLk/LnMfplK9w0vvxWVcdQkDgVPYvEGNtttj0QC7/jM4ZeihGb6Oyzy +# DZA6aeg73/ygOATQ13A= +# -----END CERTIFICATE----- + idps_filters: [] + + idps_files: + - name: "dummy-idp" + metadata: | + + + + + + + SRAM VM Dummy IdP + SRAM VM Dummy IdP + https://test-idp.sram.example.org/ + + + Administrator + mailto:sinterklaas@example.nl + + + +# idps_xrd: "{{metadata_defaults.basedir}}/certs/surfconext.xrd" +# idps_source_dir: "/opt/metadata-src" +# idps_feed: "{{ metadata_defaults.basedir }}/idps_feed.fd" +# idps_file: "idps.xml" +## +# proxy_frontend_source: "https://satosa.local/frontend.xml" +# proxy_frontend_feed: "{{ metadata_defaults.basedir }}/frontend_feed.fd" +# 
proxy_frontend_file: "proxy_idp.xml" +# +# proxy_backend_source: "https://satosa.local/metadata/backend.xml" +# proxy_backend_feed: "{{ metadata_defaults.basedir }}/backend_feed.fd" +# proxy_backend_file: "proxy_sp.xml" diff --git a/roles/sram-metadata/files/01_idps.fd b/roles/sram-metadata/files/01_idps.fd new file mode 100644 index 000000000..46d58b663 --- /dev/null +++ b/roles/sram-metadata/files/01_idps.fd @@ -0,0 +1,23 @@ +- load fail_on_error True: + #- "https://metadata.test.surfconext.nl/idps-metadata.xml verify certs/surfconext.crt" + - "https://metadata.test.surfconext.nl/idps-metadata.xml" + - "src/" +- select: + - "https://idp.diy.surfconext.nl/saml2/idp/metadata.php" + - "http://mock-idp" + - "https://login.test.eduid.nl" + - "https://idp-acc.surfnet.nl" + - "https://login.uaccess-a.leidenuniv.nl/nidp/saml2/metadata" + - "test-idp.lab.surf.nl" + - "https://test-idp.sram.surf.nl/saml/saml2/idp/metadata.php" + - "https://idp.ci-runner.sram.surf.nl/saml/saml2/idp/metadata.php" +- xslt: + stylesheet: "xslt/transform.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/idps.xml.new" +- stats diff --git a/roles/sram-metadata/files/02_backend.fd b/roles/sram-metadata/files/02_backend.fd new file mode 100644 index 000000000..698d615a4 --- /dev/null +++ b/roles/sram-metadata/files/02_backend.fd @@ -0,0 +1,14 @@ +- load fail_on_error True: + - "https://proxy.acc.sram.eduteams.org/metadata/backend.xml" +- select +- xslt: + stylesheet: "xslt/transform_proxy.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D + name: "SURF Research Access Management" +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/proxy_sp.xml.new" +- stats diff --git a/roles/sram-metadata/files/03_frontend.fd b/roles/sram-metadata/files/03_frontend.fd new file mode 100644 index 000000000..252206d42 --- /dev/null +++ b/roles/sram-metadata/files/03_frontend.fd @@ -0,0 +1,14 @@ +- load fail_on_error True: + - "https://proxy.acc.sram.eduteams.org/metadata/frontend.xml" +- select +- xslt: + stylesheet: "xslt/transform_proxy.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D + name: "SURF Research Access Management" +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/proxy_idp.xml.new" +- stats diff --git a/roles/sram-metadata/files/surf.png b/roles/sram-metadata/files/surf.png new file mode 100644 index 0000000000000000000000000000000000000000..e2bc4a3c2b61cabcd3babac9f9b67ef8c860dfbc GIT binary patch literal 16016 zcmeHu2UJs8*XX4Ps5CzdA_6Lih%^Cd0fM41i~>3c7!j}}0VFhogiah$9BB#yhHga! zMT2w#2+~PFkP?!BfHZ*wX_5ZUW!}vH&3p5|nQ#67dh4yXzU5+h?>>9)b9OoV?7R2L zFQ+ZdL^kc(1VNC9`47i`f}jmd_)mx*Kw=HTIN(3t%O}l_Lu=tN%@yEnqt_28JOmX* z!hbx?^QCG4DClEuaYC?vvw*1FmeeWVb_m)7nIAuT)~{!RHF;giF6niDc-|uN@D_2E z4aPqFVRGWTdHRL;PwY|THQBQBZsloy0Xg-Z#7`Ib`Amrmd+7;i^I@~?ZwMXNUmY1w z(acQdbqZnSzjfu@Lv%h1XW2xP#6MC$K-*ae$ih6;%o>adI|mt5F$X( z?*JGT2K=9Ce`ofe(ynv=ZQkFR!PERXExfYd=KXWp-e(_&0?3X9fJOO;}fdPW#tF{DJnbt@yh}{&^w)6)XM? 
zA^zaTe`<+;QHcK^T~Q8%vEOv_*GBqtZ~23w|JlHQ!$|-3wEu>9@$aDb+d24Obwx!+ zMPx>H!IFEdgSPBipOhSrU&hDuwUIpy>eX8{c|$Z&Bcp&$GbBp4Pb>(0%_O?U2rl%|!RoXV--tD>m+S|xC7wQHcW;|A@g zCPD#Lp$gY6djd+GQXV|@iUM!~wU=xXijN2pMfIp!*7nX&f3I}YN?YenB2B;LQx9Sf z)2%R&>^IW9lgLIiX<0Az^7K1qPKLPRg^gpnb@_z2nh2p>g>eu80$n0HJy=ABKIX3iu%cL8zs5Q-yFtRVmy;qUvK4oHi$ zpLH9e$^zYmp`dt%4VdLlM+nvq0rhz&_$zd>ZnP}NY}wt(Ztz))!wG&a^_aGPzQo!Z z2d!SNS;@%B=kp?DqRtTnSJ5>q7k#a;ojp87;f&=6r0aywa);Q0WV)5~vQQ;o3T7-t za*r)j3|+o#b4JY|nZbN}8yX5(Cx zw`a#SW(9nESUmlz3?AbWzAA@b-)MF?FoW4iu4~G;XjLkL zZb)kMg`$TQ;s|ZhVF*IKZ;Uw>O?u%%)p0Q=h;<)I`LeMtsfXfe~%7@ru z+P#RA0>tSHXbe?azv1o@9~Ce-QF1L#$>+g#t{`eMRD&P7J9kpSmUXl!4#&M>_2Y(& zipH_=bTeKVSyZ#dAr+?asO-b)N_5G$X6QNFB87d)&5h2 z(H)1_TrEvrXqn0z7C5$>ccZc4Yxl$(mTzr_Wl|LQj2$fVXsy1Iqe1}hvz8?E^_p9$ zun^biyuO71@#_W55cRq^dxacGR_BM5v|8A|93*;i?$?w`nLruGbWnafxBC2?Y|TNJ zuEp$yP9H3MaZKl&cST(B7U=XVMmKw-!^N5v(at4V5gy{pm87>v2rp8$a}jSWd59Gl zqL*54&*6K}S&ovMC;6rqN8fE5^gU2Oh&zDt6wHvyDF!gz@vYGJn$i(fyfe6`;wl%7 z4o5)J*OOk!?R|92mRZpll?C~9x!ZP58W$02dH^ax6l_9=af zvJYWgl;}}fahM+p_^e=`i~-@Hd65rV9R?V@zRNtV683UXzbca_BghRrII1C-JfmWz zU^+XTJ3lmghf4yFZf^YGbHKQ0X@dZ?%H|QnU&b>sSG#nf(fBpQVZIl)wsWm;Xar<9 zIDb~0eSLLWgBJ>@#Y&2^L;Qh>f{9+-rp-{&n?1vrJRpBmd|-_+>OUn3E!Gul zoQ?>Ud@t*3)tq5IBDp*nT1bt04{hUeJ*RrN85h}MW@y4PTgL|F;dnhIyEenX&Ge`J z?4V5_DzR?YUBdA?EKRI>^~Vb9&vHF+ROzrlhXPYCj*k@P-kDnNs%OZZrEd}kD;iXj zao6mJv@A7gii45miIqv3sda7z$0%{w1HRQhcbjQgo6IG!JBgcon za}K|>$0TMy5e-r;jX5dA3QC*^Omp_Akq#^M;v_jsI#0(_#@(|niEyqk$N(n^bQ1EaQ6P7)Z5~rQ+X)XbazXDrn?^2wXpKTtS7ULnhy!N1PH( zns5?W*3aH&JyL#j>D#`;*B#`IT9$dnH1!%cG&=OHDDD+0MA?)U1m1Uz;FQE)Bjwj8 zr!C*EXg*eS(@TQw;FaL2AaCr+91#oYfEqG zQq9hpDInW9^rgs_yQojrI7msLHWI|yRg_g1HP5+gaW;FiIU^|KKn0~fTSmU;JMmB> zm-SA*PgZ}#=0IUxP~iKEJvR+GRlqpcO#=mcf>$HTz7$p^qNM!=zq)qD1j$}LfAoa| zdaiztv^0(~UHBD$Eg6$=E0<%mx>R1<^zzJz9jhg82iO+l1I1aPdQD|N=QmxqHDlgb zoO#k0p@Tb`nlH;nyhlEw)Vd-C=w{@C~8RXSxY>C~e9LwR>e! z!hkQKJ6E=Q*66ASK!B zh_zXGIkcx~S=lwB%)piVE-}I~{hCJO%X`R`8P~~X9q@oV2CRiPr@(lTA^WoOGCSnLn zT@Nw!J~KEqPZC57dWdnHoZ*>xBd4_8Ra$tbqhM_nDcAKGap;hz@#dm#%xa%CdZM&( zQM+s=%6HYh?~=1@lS!XB0U*XS2Y##NevsHTCQ9_m#3ma0`*Gjc9f)o%^a=G4f~rzq zBam%W_qvPJRCZFSy5XCsEdXd@KRbF_^T4H6%t@V#vY%)|P(SUGA;+d!H2D44ESK9R zE?R#V;DC^EksR|DLTjqsol0Th3VT7CCpt<_9BB|DEMB@urJQIg)8;oW!v8QR6GCG! 
zzgM+y?PM~@^_xwgyWiC0M#CG9uH|3%r7O^}OEu4`_f+3>Fx4%OTHT3Gu&Zr-<1p-h zMHo!`MihU6sW6VqTdmdSjszaJ(ri?`8L4@7a!xt8f2^aX*vqp`J6rjhn>avjt$lT1 zNhv$f{Zuj4=5=h@kJs*>sg0_rjgQipZB%cW?%cWS4NXBRk3S6IAk4)i6o-5=D{8cH zJWyP(8z(H~Z-b~S{8ou}t5s|mttr(7oDh!1o`UjE|kgP<@{z6QlqCr z_`986i*CDF#Q{+i$EaG;`5zbwA4R8QihP1Z1S*`Y=hdQqA+o z3nSN8{AkD-!N~U?H!bAw_v3p7I2dF z&HaJIRPJoG4C?Cn^m;8s_7<`+g;V?n+jVQkYLX8+a*xmAWv@q*?lOU=90j29o1J42#P zZ*K-eVvL&;9u*PxAESNNs=8fx^qXb4G=!%C^M*oOFuk#kqJAED|0@Ag=tNTCwW{Xs zi#%b7B5|Su3(G#R4GaY`mjx{R+pFYAw`<|K#1c&*5mfOz0pp^XaEn1%Z`_O1xFUuU z9}voRrbRdP3+e4K6J$CyvlNd3UcT&l-`a#|niB=85j5GPK4r~%C1-)nA}%2{!7eX= zQqSjm+s-PE*UC=v2Y*_|YWKtH>3kj^qhGJD=1#g{W-kTufmAWySHWLUouc7kmqU;7 zh_laP+Xw1ou=o3F5C&D9LZC7i%ma0fK#;+u{)_g-N&`($ZuwvjMLA zF{xjFSYxZ_DbMsRU*;l$s=MJimofgILWA%9!s1NXP}h#V=7L~jH`(=GA)^v~n2+AV;w8Yn&0{MdPxh5=BEXST?IFG){+d+F!tYHfjKACW$ z{H6I0`jW6}kcsxYKI@r?D@%B#Y`fFSDwy_~5>6HfMnsuwn%RcRTtq5G|ie+7@b7z&AgOswd zGs>GoI#JtUa4|U6d;)<&-__?KQQn1P$d<`e#m@hU95w7|sqcDfAwV%qP zDm5{`Pk)rmBo$uUNSVRMq90ybE&4`?e-jHgYzC8+G?S+FZW9G_z-_`3>$z-?McI=d29 z<88Y@zjXiXmwia*_G!7+&e~-MO-?6ImGoH1?snXCvqbDb>JTQxbJF*d6KpGp=b2|kj%Cj#%=z|% z@EtIyjl?85424ENLj+?eD@GLum(zfgcYuH~6uSBhv4WO6RmmTY#VJt;d*WamT9_HQ#3rF+=ly(PY+MgD@Fv`O~1BT0E+Md%UrX;&~4qx4%VXQ;t zkvA3~DpdC|zs%JIIoWMZQwyfweS73*TXOK_y}~O>smaS{E9Xh({ryJD zQ*K=oZs(USxhU$TGI~F313NUR9js%AS&xyXssZy=(M*dkY3LR%^xZ9jGN{z9P3Y$u z83oR@E*QB6<(m|5vU~fIdzIT~-PGKJ zOUK$dgz|5)my%sW$eq3Q@&|4MAM$f4m%YjfY`?_xxf8v4X;Et>Ag2h))EF!~a$6ur z({};XHvIg`D;h&J3@fRZEy4ra_nz<3zu2BzcXz2;qy{V3cOzoiB^F3l7mzF=nZKL! zQ-E~QiMofY%Zh+GJqzV$(JS^bNSy2p`Ht9}lSc+bUr zT<}d~W3!OV&WUVgjfRSu#`B0)$u$k$AAjMdlN%pDbM}qi)q9}K)o+RVE@S)qHqW8S zWN+1L9dBE*CX?F>Wyt(O1Q!M)C-3S|Wc+hALG7rD@}^_aOM#u=O(pwR&*jl>Ej`Dl zZ>hZ0tz>e>L(_HtY*O^c>w~#(9#A!?6`}xTz8Hj-u`?w)(rg`--5XKIQd0B9*`2ns zEk*>O&s)!z-hKD-yh5df>Dr%F&om+r!%5(29TomBuGO-hjQ6^wF8yatYd89gkVS#1 zoIVB>=>#{8m)vbx{O+yDEWL&jto){Db9*04JLL1__MYxy!{#PAnavAjx8~-13L0~y zid%QSdqdBTaBcz}wXr*G)rP2iFS!xrY&k>c*_>x{^Nhyw@uXs}U4>qDCnQ?%f}cZh z@sCcI4mkCf#K#2WOlnLTEMDzs=k)rP-v#mr5ft0re7Ebid=Nc7v&ASe-JY({-sctC zUPWzM7x&<&qVINojGq!LXO=6|8!(3g$#E7jvIZ5np(p3@g5vDcDKoa(9$N0Kwz|}j zX<~X!UT48bPQbEs84i<|cmyQHqBSJjZ%T~K?KSV)R>;rt`^&?ZUV(uyM6g%qv7$on zwD0b7VYLzR#hLy?E>vkE7KG7EqrSU8^*jo8|A0oM_m|v~+q;%j^=8PLc6gj}ANOj|A?5q6QkAQl&zF4B zr9Q5Sh(>pfM`?s!2*}^U2E^iQpI%oiDP$y}*s8XzZscA_3~W0VP2gx zebQ2D`Hu6#{_M_{2B*bGZj+_ez)kus!zn?P_`_x-=hVI#182d2_{@c8`NF*g^DjNc z?!Ab^O_T5YA*oHa+@ZDjNu7|1Smrw)^ou+=36h>0ms@QX(`-?w7{ZKab6Uu*hZPG| z%BO31%5UFnCO0&hIUG+N8cOXW_$x0l7%S{IaF#gsG*N{4Rj*Nbd^s&qWvJ>z2$kHC zfx}x(S-6Q^UOZp67TXk z$h7D0+nrVI4HJiM7IcM5sHEJSQHeW6{{H>Q_e=vLs_v6W(eGbZ9}2pM5_IiB6mLJA zsUrVrPu!_kK&w42n9dWLWNu2@jTZ51pRTKD{0c4AB3lZ)vE~KzkmZTa^yRhvH-$Cz zTNf7-a|%NjSa02j+CI@WK>lOtig)|4C+*;KlaF<4U;5oey{Z~4cKAvk zFi+TEP?fFaZuq<@2?2JiW}5W=IDBjVY^iSZ#sd)?qu!TlzMYJUZ?HCqb@-R3!lNQT z6Z=EP6Po=TzZO;q5?%#e;7o%^sec*AulMB%(XlXOD{id7e@Kcgm3X}=A*42S1hxXv z`Vd>Tdc|w8d^Pr`g|iSOvmL$*;JU(P>;N7GGS3K{RoiF(8G<)!T)KDj(yNN`Mi6N|&1Mfz z=foV!n7PT~y^vSXyGGin0^mKGW4ebI<}-9YBwUQwDQB^L&UDc}K5b8tRE0lRqZJH# z)5awzNvpN0;|RC$SP$Ggp!t&O{JvAw*ycHO-8L3COkKXgbi3_VKoG1RB|KQ8^LU-D9rx=~D%oRgr#l?aH zup`r^Z@xIOz<w$}shb-;HO4-?sv3`$h5TQnKN#*}GD3zJw~?V>fA*=5eSaA! zqUWD1n_V$@@xE-t$=6ufch`$lESesVw6f6!@K-^qOVG#ut=NPO!(X~6jYT~!bZo5M ziE^?+SCSaFjFoKKPFdMJqmcdq_XPepmMv>|dNWj=KpH-ABA2T_sFdzf+W4J#CJ;H! 
zi)?dH-nFv$Nz1w4@e8Nz02WAE_4r$OGa{c|-uR*<9AXV;HkNNHZJx1y&vC^a5@}4X zy%rdJFXH9!aGB^3y-br2uKrv%m8Nsbo8FD4EU(&Kxl>iA7hffA1|@oJ*8 zabE4Mx2Q^x!Ka2^VWY*qw5S14)XWYV%5IK`X($ayLkR$>9d_KxeHXe&VSd=(IxCd; zvoi`gJzhbJ5e&?i5IN3Ol?y+*W2`IE>J(3-a}A}?V|w2cWhKNAI@LVn>#2fJFuS5h z-I8iVgj?NB=c z$qj=k*aY+2C(07s6SmiOxY|k@e*Fm(^sec>lZL-nftBcTygqgK3`ZO{eG_@)Rse~# z9n>L@kS1~img(u(*l#!a9=n%b$y{{8*$uY<_lOLMBLpWtH`}hiJ zVTr2ozSrjTYM=Ta^uTsuBIwE=<^q> zS@{LMmgzJkzOIa~Ut>m)>TmacCJIhsvkx;W4u>I4E~lKSH7j=q1yS2nC+D?NlZvIc zjW54$4OqRFFhuEkT}aw-FOF+}9XUTq=T@}p))vUH1CJJnvq7s*qpw0cESG;-{fXJ6 zOnHSvCw*3fE|$v4&dK|F)-n~FFlMN#8*zNq1Nr_5WW z^*{bv)oplm##9pf&^vpLrazuTNnNc;Z$DGb2?;7v|H@9W(xlGH+JzBN*fZTHnoeN* z7B(K5w2Ucrkay1;?XJ0Gm+GtnIsjd0%GyV9_GjhlNR4_$SNXw0deE}$`_&a>_MkA5 zwt5NQi%quZH1t0(s0_;K(;gocl5v~Mae6KCPg+9A1@vt}kLE7fv!=4~>=uH(&T_h! zAZRr#i9fg*Bbkm|^9x0ccFC^22o-AhAo5kTX6B zmtx&9Tm0{%tvk-o`3rPVu&dL3nK!v`=VjlT+fHaU);aT-h=JaAD8SR1A5@30T>_1Z zwpvgm>%zFekDcp}7k}+xBzociESAq7^fj=vpM;^&%&52yfQz15{Ui>JIu}Xt2ysgy zBO#(=&n_Mz0=Q%`1Zos8EDFG`1u6+~d#YG`={%!Z`3!5Iry-`$f6%Styavs$#v;S* z#E=pV(DS%187QPMSuCHz7ht@7>?C6e0(i&lJMJ21$vU026N6}d4{GfZ|fCDJlY3SfUBDS#cils`Ai1e$v^ z-b}F&TOX&Z@)_(}|zTupE1KccHa3pZ-gM=&Gvxo=7JnRDKuVf58?NhMGyeE7}0 zfmI>Aa|vJTNjsWPxZIn8RmJ2L`NCST_LY<@8=%cLX{d3;`;^zYf<}Y3Q8cJ*&$ekv zjfaBpw3t5>BnbtKX>T2AB6{UEO?-hpz`3L%4!BV^`5v%fTfPt2psZip+a-L&|LrZ> zM-Unlt#Vi&lllBXuI2nM+!gTAnh{PTH;;bU4Vrc6;?PTA+E&*D$&+wgrcWFwJ|X}G zjEDNK@`rF-+M=l+kZ8*!m6ZRt3h& zKF>%VqD)D>@)ZDZDB}ayFF@Hv5Q2wWzLk?lq=}BTOY-1y20-^Xw;>~!rqT4_%zY5L z$jsc7nRQWPT}?ZC;TT-%RNI4qf_>7m@_{<@QIlQs6yO0Z8VMXg(8RUr-Ngq{@JZ&i zG9CO?l=7fHbiC9}D1)G?LF?`%gdwie*>-fa`;sugO7J$7bXNSlh5ZSg*N2YF-|hzm zjA#u;YBbi1`TY6AqiJoq=%V+@VnkC1;N-=J&$!JsO2 z@p7*)By%;6mA+Oe-^+UXk4#y$OSdKE!O8O$!3XY>&8iM_OLO9I& z%z~Rfqv>LVU9a4&o|5mZc^!)(O0c2UrZ6YxxG28BEY;5kqPsz1v$cy@$ZO?5)1ZA? 
zgoC3|k0FX;Kl1A6X`6B@jr>K>v^Fj`5us^PKb%PdW!J?d&LK1NpW>TN?cRG3G_=zT zmM{q(4$t;UkbZ%~V{ksxKG3X84h(G?Uui2Ryu8abe?9<*RiryNqU^B1OYekR@i6Lu z^wlY4eE5k?1W257y1f0AW+EG&L5hP9?9Hn6AJ(SAQsmBsq`6(m_(vis@OT_p;p%zz z2+cq9ADJlhW!4H2zV(~2yFHEyT+evB$BgxGCs7Rai=b`Jl=Pw6oiD2V6KEto@k4Cr zbjr7e;SVK<>qRu(Lxm^nZ0PebgRhH}6vADJFFw8YB#FdoS*Juuv= z3e&xt3;CKN`SN06AOlAESbHEjQ=-;*!)r}iZIV|64BIfg)OFiS3bNl=#tyw`j%SXlCWjj!;5*T0kRu@rHdUD ze_NnHrHHshz%!>rrv=D!)*0UO+gdhiTI3CgLLH>EWt`A94u~`*I*stSYhJqFW2G7d zwWs~8)D@VWmOe_4o(dL_#M+8?xxJkLy-2VjT=lvie}roaFW@TbdZvIK$^M{67(AE+ z2B+L=ZCSEZn4!Cmh7SK-=gh#Zusm}qz+Er+q2bLe^QF{Q!cW}N`UB^dP;)A3I6>J)ZcXWi4f#lM8egRmaZ4=t~!WTw& zz`93M=2p#2GF&WuLc!G-s8!Fm#Jc3x76T$YhArddirn}ag|9%uW9CgMyLdO`4FkU> z$~R1*uL4F&Zc<2ggRLYqs?5MB(ofiGH!&k%TIJ*|igX)o+k3YV&^>v%%%;=to^4@9 zywZSLFO?n`B0#Ew6tBkApI}}FQMdN%i`C0`2>6TzS%K!f9laOMp++HB_ z6zN+7Y47Q$`CGt7{A_J4m+^yd@n-Ni;}v>U;DO7p_zz6>9&C?VN5NkKC~k$BU`v0t zj*GK4$uLI9fqS!+5@R0x+v+E?VE$FzLW-@OLLhQY75~!CJb3Sw1`G}?k>aT-*5Cz^ zA;=ZSzs+U*f+YOBg>9Me=!2IIJg}_7idw%&1F|tVRRiS#z@}BKIu36FK+pqEH2*fN z8!U!6`$3D`LwJ(yS3v#*kBusW*bW(sfNT&TDE762jFk@>k^*Tb0m=w6Sw{m`Ydowr z0Q^stC~#);tml1D#zs*8hnWC>hmm=0LFZQq&taGjV}G>jh%pRH7dr&zy{pL7vi zL;0USLV)Kn?3Z7ke#3;fAV38rlYhDi4dQWt z1z;CY$WD3C&s_HmLt8!Xjo^SGDxDj9dgtU1>kPQ4v8Vhdzdar=0TsLEK_WjHdDK(n+B2HJs12 zt3pLS;4~z9{mgOH-?h$ctLkpB1@@$T{x|`y@IaNRQSj$NVie4#?}LzTY!>YDpw3g0 z7m0hY1z@lMh6qn$B+UQx)Di3Tg&OjG&64zT8L?bP2^Q%^YVwN)7rVedT2%3Ni`T`i zoEat^6bT?AQiNN0yYl@7?4>8osVHwj$msA9UeS7yk1$8}HqO2) zH06pU0)dKJaNM=S{Gy-t9U7nHUIrWeJ4ozBuUzHlqT!dHj}UqY2~l(NfpGK;dY^=t z%&S;g1tAFvy+Td(MR~4=&B{Jut29Yol_aV6d?0>7<(Erk>om{0hR>j7hSwOSPX<4{ zQWvgHZzGOEwMx+Si~8zWl|B~>c;hzNjuBt&dHVBdUVEZs+l@lrp?x}Zk{`~PSGdKx zDs=8^b7*u5tmrFeyN^CsTMrc(h?&bcq1n#|Vw5!GNMN1&Fj!41l7j3C+i}AqliyGX=h*VTjf)W^6 z)#_*!AFaHCC9f#@f_Fj=!%5_ZTvXo63nvYuw*udgjvHtY*Zq-H>jnp&(Z0t;I{eY< zR_+$VF18qmC^n@oA3h_5)`8b7tKJRb^iARR?{+W2OqR|+1s&m^x5c`a!r>?L2;MxN zk{68m^{r4*M&yQ}78xFL-gJ1379LDJTR^DK)_^M06ebBKpZVx)t1Q+vV224;n<$YR z1cOGkCcA{{@DHfLv9-vrX+C0ay)V5N0nd3KTO)fgs{c95*}4Iq=0chI+bo&L zeykzJMz~yf&=oUht}bNh7PGmlkw^AOQt#$w8X8tz;(inpI!|<1)D( zu&;=IBgWOunt8iHQb}aY1ay@tJDl4-33JpuS1`WpDlC!RtZZFj_Ya^n^XY)XAe8^u z%76hnDGLqj5y{AJ%9$=adV%PTC6$B$AAx!cT)RNF&RtQyO?pl{P6mMl;t3_aH!@E( zH{Eds_+aHD9&nSfbYh~CkaH^aj6jz4W>|x*&m2md^c#lzo)UIdBwjK#kJWz?`ICJ( zSSDm#5ptOKpJF;d|2?Mvd0hT2$ipYgder~z>{tf?0oOz7@3g-G{%}D2f%dNth(FN& zjl<=yh4{n41iS@As{iGOcJMU+f)MMdzs>tQGgw!Ds(`;C#6Q#iz7SYfe@^??Li~aD zudVpkLi~$X{2M~Ry83 + + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/surf_bimi.svg b/roles/sram-metadata/files/surf_bimi.svg new file mode 100644 index 000000000..f49b7a035 --- /dev/null +++ b/roles/sram-metadata/files/surf_bimi.svg @@ -0,0 +1,15 @@ + + + surf + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/surfconext.crt b/roles/sram-metadata/files/surfconext.crt new file mode 100644 index 000000000..0e8f074e8 --- /dev/null +++ b/roles/sram-metadata/files/surfconext.crt @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- 
+MIIFbjCCA1agAwIBAgIQagXJvtKqIRRO8zD41OktRjANBgkqhkiG9w0BAQsFADB8MQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MSAwHgYDVQQDDBdTVVJGY29uZXh0IENBIDIwMjMgVEVTVDAeFw0yMzA2MDcxMTQxNDRaFw0yNTA2MDYxMTQxNDRaMIGVMQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MTkwNwYDVQQDDDBTVVJGY29uZXh0IHRlc3QgZW52aXJvbm1lbnQgbWV0YWRhdGEgc2lnbmVyIDIwMjMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC1Wj1MYwzY646Wa9td4zUZb5W27+cbARhNbIZsteUIPV6unxoO6tHCLJhRxC4pBTQsdrhfhh3+s5rwm8mjhJs2rciQkCdPiTl860jqihhWi5bFXyGX5o1U5mZgomUT+o7+nUj0et1l/kbFJ0GqIKtf0uPj7R/zqTpqeT0c6VFxchU6LA8GOI9w5XIISEGi/IWlDKyM69I3DmbCip/rm8u6kIQ0qqXh58lNNOsZw8WYokCXP0IPFQWpPkKC1VGYtivwKLzzvNxSGcuvp39ui+37hrdjqiTxK68Z48vJ6l+KsJP+jpDXYBYE/NsSVYez3vbVTB/l664yvBfKyGIHHDdTq5akLCQDgYQzjeNOU1oSZbcsub0k+osp7MFGkslYRhLb0V9tX0Xu+7jXzGthPUWicN0XdlHS0JOlSgOBftPn8kcqYNMF0IZVe6V/AVgfj4/4iDk3OKl9FRctFp3kSa8GzLIbjqmYXpGGIEse6U2gfqHS9WHu4odfKH7rhD3hZssCAwEAAaNSMFAwHQYDVR0OBBYEFNclSgPTrGp4QJQZGjFu6VEBTX4PMB8GA1UdIwQYMBaAFI5kmzwW92s2rRY2B5NNjSYI2oj1MA4GA1UdDwEB/wQEAwIHgDANBgkqhkiG9w0BAQsFAAOCAgEAORNL7FGBkeq6u/rmcNf+jZZz27vw86COPOiN6ygTyxaBq5fmJ4JZlDnlfO4C/4iek2QjKdgPlpvATGUUMXJdO6a7A3/vXNuoIGu3Ug9GW4vpTVPulaYZedPHC8zBsxwRKwxpSTda7ubWDxH3vUxHz/zDOD2O71O6KFj6Ph8JXwa3TLH0xRN5CXa0UMKX0S+ck8MahCYnMtd99EBL/uOr0+D4q2HwxDRDpL4I9yRwyWxCafoR+6OfzO/vc/SGcjEk/9s0DrMKDkDTJlE9eZbaaWFFCkAkg3LHHLMYjykcTvjDEV75OohYcEC5/6uKHcB/ZQjHwkPBqv9pUF897yZ7sxS66GEJmqqVIC+ayWRvC8N+UmvMGWAdohrY7r7CPeTE+iVHaeB7xGTSI9BhTEv3yMNHhqzqIOvgr8h5iCv7B5hQL+V7MRqD7e7X9uRR7wbyGmwT4p4VFbz5VqthCOFobsMxam9Axt+saebRyH6Mg3Ro9D5WgGoZmTP1yyiMrmEHQdf9+iblbfTbRW0irlaX5t58fWB1u4QZqcamlhVcl65Fub0g+QkSyGDMD9G57z3CKOluNy6TxFZOxMynY6CEtaozDaiETm7NaNC1lkhi+SOHKRX5+q0KqJdnEC7GOX69hSDsCT905dpVnr8JgFKoUfXWSmbwTMj45190dw7RMzk= +-----END CERTIFICATE----- diff --git a/roles/sram-metadata/files/transform.xslt b/roles/sram-metadata/files/transform.xslt new file mode 100644 index 000000000..80673b688 --- /dev/null +++ b/roles/sram-metadata/files/transform.xslt @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/transform_proxy.xslt b/roles/sram-metadata/files/transform_proxy.xslt new file mode 100644 index 000000000..6e6f55d09 --- /dev/null +++ b/roles/sram-metadata/files/transform_proxy.xslt @@ -0,0 +1,50 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + Security Response Team + mailto:securityincident@surf.nl + + + + + + + + + + + https://refeds.org/sirtfi2 + https://refeds.org/sirtfi + + + + + + diff --git a/roles/sram-metadata/handlers/main.yml b/roles/sram-metadata/handlers/main.yml new file mode 100644 index 000000000..2787eba12 --- /dev/null +++ b/roles/sram-metadata/handlers/main.yml @@ -0,0 +1,19 @@ +--- +# - name: "enable pyff-metadata job" +# systemd: +# name: "pyff-metadata.timer" +# enabled: true +# state: "started" +# daemon_reload: true + +# - name: "run pyff-metadata job" +# systemd: +# name: "pyff-metadata.service" +# state: "started" +# daemon_reload: true + +- name: Restart the pyFF container + community.docker.docker_container: + name: "sram-metadata-pyff" + restart: true + state: started diff --git a/roles/sram-metadata/tasks/http.yml b/roles/sram-metadata/tasks/http.yml new file mode 100644 index 000000000..befcc1d18 --- /dev/null +++ b/roles/sram-metadata/tasks/http.yml @@ -0,0 +1,48 @@ +--- +- name: "Install index page" + template: + src: "index.html.j2" + dest: 
"{{metadata.basedir}}/web/index.html" + mode: "0644" + +- name: "Install legacy link" + file: + src: "." + dest: "{{metadata.basedir}}/web/metadata" + state: "link" + +- name: "Install logos" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/web" + mode: "0644" + with_items: + - "surf.svg" + - "surf.png" + - "surf_bimi.svg" + +- name: "Create the metadata-server container" + community.docker.docker_container: + name: "sram-metadata-server" + image: "{{ metadata.image_server }}" + restart_policy: "always" + state: "started" + pull: true + mounts: + - source: "{{metadata.basedir}}/web" + target: "/var/www/html" + type: "bind" + read_only: true + networks: + - name: "loadbalancer" + labels: + traefik.http.routers.metadata.rule: "Host(`{{ metadata.hostname }}`)" + traefik.http.routers.metadata.tls: "true" + traefik.enable: "true" + healthcheck: + test: [ "CMD", "curl", "-fail", "http://localhost/" ] + interval: "10s" + timeout: "5s" + retries: 3 + start_period: "5s" + diff --git a/roles/sram-metadata/tasks/main.yml b/roles/sram-metadata/tasks/main.yml new file mode 100644 index 000000000..d6ac55f29 --- /dev/null +++ b/roles/sram-metadata/tasks/main.yml @@ -0,0 +1,49 @@ +--- +- name: "Create metadata group" + group: + name: "{{ metadata.group }}" + state: "present" + register: "result" + +- name: "Save metadata group gid" + set_fact: + metadata_group_gid: "{{ result.gid }}" + +- name: "Create metadata user" + user: + name: "{{ metadata.user }}" + group: "{{ metadata.group }}" + comment: "User to run metadata service" + shell: "/bin/false" + password: "!" + home: "{{ metadata.basedir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save metadata user uid" + set_fact: + metadata_user_uid: "{{ result.uid }}" + + +- name: "Create metadata directories" + file: + path: "{{ item.dir }}" + state: "directory" + mode: "{{ item.mode }}" + owner: "root" + group: "{{ metadata.group }}" + with_items: + - { dir: "{{metadata.basedir}}/web", mode: "0775" } + - { dir: "{{metadata.basedir}}/feeds", mode: "0755" } + - { dir: "{{metadata.basedir}}/src", mode: "0755" } + - { dir: "{{metadata.basedir}}/certs", mode: "0755" } + - { dir: "{{metadata.basedir}}/xslt", mode: "0755" } + notify: "Restart the pyFF container" + + +- name: "Start pyff container" + include_tasks: "pyff.yml" + +- name: "Start http container" + include_tasks: "http.yml" diff --git a/roles/sram-metadata/tasks/pyff.yml b/roles/sram-metadata/tasks/pyff.yml new file mode 100644 index 000000000..6c66a5696 --- /dev/null +++ b/roles/sram-metadata/tasks/pyff.yml @@ -0,0 +1,106 @@ +--- +- name: "create self-signed Metadata Signing SSL certs" + shell: + cmd: ' + openssl genrsa -out "{{ metadata.basedir }}/certs/signing.key" 2048; + openssl req -new -nodes -x509 -subj "/C=NL/CN=signing" + -days 3650 -key "{{ metadata.basedir }}/certs/signing.key" + -out "{{ metadata.basedir }}/certs/signing.crt" -extensions v3_ca; + chown {{metadata.user}}:{{metadata.group}} {{ metadata.basedir }}/certs/*; + ' + creates: "{{ metadata.basedir }}/certs/signing.crt" + when: "metadata.signing_cert is not defined" + notify: "Restart the pyFF container" + +- name: "Write fixed Metadata signing certificates" + copy: + dest: "{{ metadata.basedir }}/certs/{{ item.file }}" + content: "{{item.contents}}" + mode: "{{item.mode}}" + owner: "{{metadata.user}}" + group: "{{metadata.group}}" + with_items: + - { file: "signing.key", mode: "0640", contents: "{{metadata.signing_cert.priv}}" } + - { file: "signing.crt", mode: "0644", contents: 
"{{metadata.signing_cert.pub}}" } + when: "metadata.signing_cert is defined" + notify: "Restart the pyFF container" + +- name: "Copy source certificates" + copy: + src: "{{ item }}" + dest: "{{ metadata.basedir }}/certs" + mode: "0644" + with_items: + - "surfconext.crt" + notify: "Restart the pyFF container" + +- name: "Install IdP metadata" + copy: + content: "{{item.metadata}}" + dest: "{{ metadata.basedir }}/src/{{item.name}}.xml" + mode: "0644" + with_items: "{{ metadata.idps_files }}" + notify: "Restart the pyFF container" + +- name: "Copy pyFF xslt transformations" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/xslt" + mode: "0644" + with_items: + - "transform_proxy.xslt" + - "transform.xslt" + notify: "Restart the pyFF container" + +- name: "Copy pyFF feeds" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/feeds" + mode: "0644" + with_items: + - "01_idps.fd" + - "02_backend.fd" + - "03_frontend.fd" + notify: "Restart the pyFF container" + +- name: "Create the pyFF container" + community.docker.docker_container: + name: "sram-metadata-pyff" + image: "{{ metadata.image_pyff }}" + restart_policy: "always" + state: "started" + pull: true + init: true + env: + USER: "{{ metadata_user_uid | string }}" + GROUP: "{{ metadata_group_gid | string }}" + mounts: + - source: "{{ metadata.basedir }}/web" + target: "/opt/pyff/web" + type: "bind" + - source: "{{ metadata.basedir }}/feeds" + target: "/opt/pyff/feeds" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/src" + target: "/opt/pyff/src" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/certs" + target: "/opt/pyff/certs" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/xslt" + target: "/opt/pyff/xslt" + type: "bind" + read_only: true + healthcheck: + test: + - "CMD" + - "bash" + - "-c" + - "[[ $(($(date +%s)-$(date -r /opt/pyff/web/idps.xml +%s))) -lt 400 ]]" + interval: "10s" + timeout: "5s" + retries: 3 + start_period: "5s" diff --git a/roles/sram-metadata/templates/index.html.j2 b/roles/sram-metadata/templates/index.html.j2 new file mode 100644 index 000000000..f0e40b22b --- /dev/null +++ b/roles/sram-metadata/templates/index.html.j2 @@ -0,0 +1,11 @@ + +SRAM + + +

+SRAM metadata
+SRAM IdP proxy metadata
+(for use by Service Providers)
+SRAM SP proxy metadata
+(for use by Identity Providers)

+ + diff --git a/roles/sram-metadata/templates/pyff-metadata.service.j2 b/roles/sram-metadata/templates/pyff-metadata.service.j2 new file mode 100644 index 000000000..3df9cc6e8 --- /dev/null +++ b/roles/sram-metadata/templates/pyff-metadata.service.j2 @@ -0,0 +1,12 @@ +[Unit] +Description=pyFF Metadata processing +After=syslog.target network.target + +[Service] +Type=oneshot +WorkingDirectory={{metadata.basedir}} +ExecStart=echo "pyff-metadata" +SyslogIdentifier=pyff-metadata + +[Install] +WantedBy=multi-user.target diff --git a/roles/sram-metadata/templates/pyff-metadata.timer.j2 b/roles/sram-metadata/templates/pyff-metadata.timer.j2 new file mode 100644 index 000000000..b1231af1f --- /dev/null +++ b/roles/sram-metadata/templates/pyff-metadata.timer.j2 @@ -0,0 +1,8 @@ +[Unit] +Description=Create Metadata timer + +[Timer] +OnCalendar=*:00 + +[Install] +WantedBy=multi-user.target diff --git a/roles/sram-metadata/templates/vhosts.conf.j2 b/roles/sram-metadata/templates/vhosts.conf.j2 new file mode 100644 index 000000000..be3733827 --- /dev/null +++ b/roles/sram-metadata/templates/vhosts.conf.j2 @@ -0,0 +1,15 @@ + + ServerName sram-metadata-server + DocumentRoot /var/www/html + Header always set Referrer-Policy "strict-origin-when-cross-origin" + Header always set X-Content-Type-Options "nosniff" + Header always set X-XSS-Protection "1; mode=block" + + Require all granted + + + Require all granted + Options FollowSymLinks + Options -MultiViews + + diff --git a/roles/plsc/vars/main.yml b/roles/sram-metadata/vars/main.yml similarity index 100% rename from roles/plsc/vars/main.yml rename to roles/sram-metadata/vars/main.yml diff --git a/roles/sram-midproxy/defaults/main.yml b/roles/sram-midproxy/defaults/main.yml new file mode 100644 index 000000000..3522fcb47 --- /dev/null +++ b/roles/sram-midproxy/defaults/main.yml @@ -0,0 +1,8 @@ +--- +midproxy: + satosa_version: 8 + state_encryption_key: 'secret' + issuer: 'issuer' + client_id: 'client' + client_secret: 'secret' + sp_metadata: 'eb-metadata.xml' diff --git a/roles/sram-midproxy/files/internal_attributes.yaml b/roles/sram-midproxy/files/internal_attributes.yaml new file mode 100644 index 000000000..eb3dcd66e --- /dev/null +++ b/roles/sram-midproxy/files/internal_attributes.yaml @@ -0,0 +1,22 @@ +attributes: + displayname: + openid: [name] + saml: [displayName] + givenname: + openid: [given_name] + saml: [givenName] + mail: + openid: [email] + saml: [mail] + name: + openid: [name] + saml: [cn] + surname: + openid: [family_name] + saml: [sn, surname] + uid: + openid: [sub] + saml: [uid] + schachomeorganization: + openid: [schac_home_organization] + saml: [schacHomeOrganization] diff --git a/roles/sram-midproxy/files/plugins/attribute-maps/basic.py b/roles/sram-midproxy/files/plugins/attribute-maps/basic.py new file mode 100644 index 000000000..f98466df5 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/attribute-maps/basic.py @@ -0,0 +1,51 @@ +DEF = "urn:mace:dir:attribute-def:" +TERENA = "urn:mace:terena.org:attribute-def:" + +MAP = { + "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic", + "fro": { + f"{TERENA}schacHomeOrganization": "schacHomeOrganization", + f"{DEF}cn": "cn", + f"{DEF}displayName": "displayName", + f"{DEF}eduPersonAffiliation": "eduPersonAffiliation", + f"{DEF}eduPersonEntitlement": "eduPersonEntitlement", + f"{DEF}eduPersonPrincipalName": "eduPersonPrincipalName", + f"{DEF}eduPersonScopedAffiliation": "eduPersonScopedAffiliation", + f"{DEF}eduPersonTargetedID": "eduPersonTargetedID", + 
f"{DEF}eduPersonAssurance": "eduPersonAssurance", + f"{DEF}email": "email", + f"{DEF}emailAddress": "emailAddress", + f"{DEF}givenName": "givenName", + f"{DEF}gn": "gn", + f"{DEF}isMemberOf": "isMemberOf", + f"{DEF}mail": "mail", + f"{DEF}member": "member", + f"{DEF}name": "name", + f"{DEF}sn": "sn", + f"{DEF}surname": "surname", + f"{DEF}uid": "uid", + }, + "to": { + "schacHomeOrganization": f"{TERENA}schacHomeOrganization", + "cn": f"{DEF}cn", + "displayName": f"{DEF}displayName", + "eduPersonAffiliation": f"{DEF}eduPersonAffiliation", + "eduPersonEntitlement": f"{DEF}eduPersonEntitlement", + "eduPersonPrincipalName": f"{DEF}eduPersonPrincipalName", + "eduPersonScopedAffiliation": f"{DEF}eduPersonScopedAffiliation", + "eduPersonTargetedID": f"{DEF}eduPersonTargetedID", + "eduPersonAssurance": f"{DEF}eduPersonAssurance", + "eduPersonOrcid": f"{DEF}eduPersonOrcid", + "email": f"{DEF}email", + "emailAddress": f"{DEF}emailAddress", + "givenName": f"{DEF}givenName", + "gn": f"{DEF}gn", + "isMemberOf": f"{DEF}isMemberOf", + "mail": f"{DEF}mail", + "member": f"{DEF}member", + "name": f"{DEF}name", + "sn": f"{DEF}sn", + "surname": f"{DEF}surname", + "uid": f"{DEF}uid", + }, +} diff --git a/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml b/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml new file mode 100644 index 000000000..cb78fcccd --- /dev/null +++ b/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml @@ -0,0 +1,14 @@ +module: satosa.backends.openid_connect.OpenIDConnectBackend +name: myaccessid +config: + provider_metadata: + issuer: !ENV SATOSA_ISSUER + client: + verify_ssl: yes + auth_req_params: + response_type: code + scope: [openid, profile, email, schac_home_organization] + client_metadata: + client_id: !ENV SATOSA_CLIENT_ID + client_secret: !ENV SATOSA_CLIENT_SECRET + redirect_uris: [/] diff --git a/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml b/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml new file mode 100644 index 000000000..ed97d539c --- /dev/null +++ b/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml @@ -0,0 +1 @@ +--- diff --git a/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml b/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml new file mode 100644 index 000000000..c09b218b6 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml @@ -0,0 +1,3 @@ +module: satosa.frontends.ping.PingFrontend +name: ping +config: null diff --git a/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml b/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml new file mode 100644 index 000000000..1f8029b66 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml @@ -0,0 +1,63 @@ +module: satosa.frontends.saml2.SAMLFrontend +name: idp +config: + #acr_mapping: + # "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + # "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" + + endpoints: + single_sign_on_service: + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect + + # If configured and not false or empty the common domain cookie _saml_idp will be set + # with or have appended the IdP used for authentication. The default is not to set the + # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie + # will be set to the value for the 'domain' key. 
If no 'domain' is set then the domain + # from the BASE defined for the proxy will be used. + #common_domain_cookie: + # domain: .example.com + + entityid_endpoint: true + enable_metadata_reload: no + + idp_config: + organization: {display_name: SURF, name: SURF, url: 'https://www.surf.nl/'} + contact_person: + - {contact_type: technical, email_address: 'mailto:sram-beheer@surf.nl', given_name: Technical} + - {contact_type: support, email_address: 'mailto:sram-beheer@surf.nl', given_name: Support} + - {contact_type: other, email_address: 'mailto:sram-beheer@surf.nl', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}} + key_file: frontend.key + cert_file: frontend.crt + metadata: + # remote: + # - url: https://engine.test2.surfconext.nl/authentication/sp/metadata + # cert: null + local: [!ENV SATOSA_SP_METADATA] + entityid: //proxy.xml + accepted_time_diff: 60 + attribute_map_dir: plugins/attribute-maps + service: + idp: + endpoints: + single_sign_on_service: [] + name: Proxy IdP + ui_info: + display_name: + - lang: en + text: "MyAccessID proxy" + description: + - lang: en + text: "MyAccessID proxy" + keywords: + - lang: en + text: ["MyAccessID", "proxy"] + name_id_format: ['urn:oasis:names:tc:SAML:2.0:nameid-format:persistent', 'urn:oasis:names:tc:SAML:2.0:nameid-format:transient'] + policy: + default: + fail_on_missing_requested: false + name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:basic + attribute_restrictions: null + lifetime: {minutes: 15} + encrypt_assertion: false + encrypted_advice_attributes: false diff --git a/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml b/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml new file mode 100644 index 000000000..86ac4e1f1 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml @@ -0,0 +1,8 @@ +module: satosa.micro_services.attribute_generation.AddSyntheticAttributes +name: AddSyntheticAttributes +config: + synthetic_attributes: + default: + default: + schachomeorganization: >- + {{ uid.scope }} diff --git a/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml b/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml new file mode 100644 index 000000000..e820311e7 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml @@ -0,0 +1,10 @@ +module: satosa.micro_services.attribute_processor.AttributeProcessor +name: RegexAttributeProcessor +config: + process: + - attribute: uid + processors: + - name: RegexSubProcessor + module: satosa.micro_services.processors.regex_sub_processor + regex_sub_match_pattern: ^(.+)@.+$ + regex_sub_replace_pattern: \1 diff --git a/roles/sram-midproxy/files/proxy_conf.yaml b/roles/sram-midproxy/files/proxy_conf.yaml new file mode 100644 index 000000000..136268e61 --- /dev/null +++ b/roles/sram-midproxy/files/proxy_conf.yaml @@ -0,0 +1,74 @@ +# BASE: https://example.com +BASE: !ENV SATOSA_BASE + +COOKIE_STATE_NAME: "SATOSA_STATE" +CONTEXT_STATE_DELETE: yes +#STATE_ENCRYPTION_KEY: "asdASD123" + +cookies_samesite_compat: + - ["SATOSA_STATE", "SATOSA_STATE_LEGACY"] + +INTERNAL_ATTRIBUTES: "internal_attributes.yaml" + +BACKEND_MODULES: + - "plugins/backends/openid_backend.yaml" + +FRONTEND_MODULES: + - "plugins/frontends/saml2_frontend.yaml" + - "plugins/frontends/ping_frontend.yaml" + +MICRO_SERVICES: + - "plugins/microservices/generate_attributes.yaml" + - 
"plugins/microservices/regex_attributes.yaml" + +LOGGING: + version: 1 + formatters: + simple: + format: "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s" + handlers: + stdout: + class: logging.StreamHandler + stream: "ext://sys.stdout" + level: INFO + formatter: simple + syslog: + class: logging.handlers.SysLogHandler + address: "/dev/log" + level: INFO + formatter: simple + debug_file: + class: logging.FileHandler + filename: satosa-debug.log + encoding: utf8 + level: INFO + formatter: simple + error_file: + class: logging.FileHandler + filename: satosa-error.log + encoding: utf8 + level: ERROR + formatter: simple + info_file: + class: logging.handlers.RotatingFileHandler + filename: satosa-info.log + encoding: utf8 + maxBytes: 10485760 # 10MB + backupCount: 20 + level: INFO + formatter: simple + loggers: + satosa: + level: INFO + saml2: + level: INFO + oidcendpoint: + level: INFO + pyop: + level: INFO + oic: + level: INFO + root: + level: INFO + handlers: + - stdout diff --git a/roles/sram-midproxy/tasks/main.yml b/roles/sram-midproxy/tasks/main.yml new file mode 100644 index 000000000..2962a6f08 --- /dev/null +++ b/roles/sram-midproxy/tasks/main.yml @@ -0,0 +1,59 @@ +--- +- name: Create directory to keep configfile + ansible.builtin.file: + dest: "/opt/sram/midproxy" + state: directory + owner: 1000 + group: 1000 + mode: "0770" + +- name: Copy EB SP metadata + ansible.builtin.copy: + src: "{{ inventory_dir }}/files/midproxy/{{ midproxy.sp_metadata }}" + dest: "/opt/sram/midproxy/{{ midproxy.sp_metadata }}" + owner: 1000 + group: 1000 + mode: "0740" + +- name: Copy SATOSA conf files + ansible.builtin.copy: + src: "{{ item }}" + dest: "/opt/sram/midproxy/{{ item }}" + owner: 1000 + group: 1000 + with_items: + - internal_attributes.yaml + - proxy_conf.yaml + - plugins/ + +- name: Create the SATOSA container + community.docker.docker_container: + name: midproxy + image: satosa:{{ midproxy.satosa_version }} + pull: true + restart_policy: "always" + state: started + restart: true + networks: + - name: "loadbalancer" + env: + SATOSA_BASE: 'https://midproxy.{{ openconextaccess_base_domain }}' + SATOSA_STATE_ENCRYPTION_KEY: '{{ midproxy_state_encryption_key }}' + SATOSA_SP_METADATA: '{{ midproxy.sp_metadata }}' + SATOSA_ISSUER: '{{ midproxy.issuer }}' + SATOSA_CLIENT_ID: '{{ midproxy_client_id }}' + SATOSA_CLIENT_SECRET: '{{ midproxy_client_secret }}' + volumes: + - /opt/sram/midproxy:/etc/satosa + labels: + traefik.http.routers.midproxy.rule: "Host(`midproxy.{{ openconextaccess_base_domain }}`)" + traefik.http.routers.midproxy.tls: "true" + traefik.enable: "true" + # curl is not availavble in the minimized satosa image + # so this healthcheck won't work + # healthcheck: + # test: ["CMD", "curl", "--fail" , "http://localhost" ] + # interval: 10s + # timeout: 10s + # retries: 3 + # start_period: 10s diff --git a/roles/plsc/defaults/main.yml b/roles/sram-plsc/defaults/main.yml similarity index 100% rename from roles/plsc/defaults/main.yml rename to roles/sram-plsc/defaults/main.yml diff --git a/roles/plsc/handlers/main.yml b/roles/sram-plsc/handlers/main.yml similarity index 94% rename from roles/plsc/handlers/main.yml rename to roles/sram-plsc/handlers/main.yml index 9ce03e899..70cbb3672 100644 --- a/roles/plsc/handlers/main.yml +++ b/roles/sram-plsc/handlers/main.yml @@ -13,6 +13,6 @@ - name: Restart the plsc container community.docker.docker_container: - name: "plsc" + name: sram-plsc restart: true state: started diff --git a/roles/plsc/tasks/main.yml 
b/roles/sram-plsc/tasks/main.yml similarity index 96% rename from roles/plsc/tasks/main.yml rename to roles/sram-plsc/tasks/main.yml index 14e7b40b7..0d3900bd9 100644 --- a/roles/plsc/tasks/main.yml +++ b/roles/sram-plsc/tasks/main.yml @@ -15,7 +15,7 @@ - name: Create the plsc container community.docker.docker_container: - name: "plsc" + name: "sram-plsc" image: "{{ plsc.image }}" restart_policy: "always" state: started diff --git a/roles/plsc/templates/plsc.yml.j2 b/roles/sram-plsc/templates/plsc.yml.j2 similarity index 100% rename from roles/plsc/templates/plsc.yml.j2 rename to roles/sram-plsc/templates/plsc.yml.j2 diff --git a/roles/redis/vars/main.yml b/roles/sram-plsc/vars/main.yml similarity index 100% rename from roles/redis/vars/main.yml rename to roles/sram-plsc/vars/main.yml diff --git a/roles/redis/defaults/main.yml b/roles/sram-redis/defaults/main.yml similarity index 67% rename from roles/redis/defaults/main.yml rename to roles/sram-redis/defaults/main.yml index d4eb4b182..31d44935e 100644 --- a/roles/redis/defaults/main.yml +++ b/roles/sram-redis/defaults/main.yml @@ -2,8 +2,8 @@ redis: "{{ redis_defaults | combine(redis_overrides, recursive=true) }}" redis_defaults: image: "docker.io/library/redis:7" - conf_dir: "{{ current_release_appdir }}/redis" - data_dir: "{{ current_release_appdir }}/redis/data" + conf_dir: "{{ current_release_appdir }}/sram/redis" + data_dir: "{{ current_release_appdir }}/sram/redis/data" user: redis group: redis redis_user: default diff --git a/roles/redis/handlers/main.yml b/roles/sram-redis/handlers/main.yml similarity index 83% rename from roles/redis/handlers/main.yml rename to roles/sram-redis/handlers/main.yml index 5ed78e133..b08f0b62b 100644 --- a/roles/redis/handlers/main.yml +++ b/roles/sram-redis/handlers/main.yml @@ -1,6 +1,6 @@ --- - name: Restart redis container community.docker.docker_container: - name: redis + name: sram-redis state: started restart: true diff --git a/roles/redis/tasks/main.yml b/roles/sram-redis/tasks/main.yml similarity index 98% rename from roles/redis/tasks/main.yml rename to roles/sram-redis/tasks/main.yml index 65e7392ef..72789b08f 100644 --- a/roles/redis/tasks/main.yml +++ b/roles/sram-redis/tasks/main.yml @@ -47,7 +47,7 @@ - name: "Create redis container" community.docker.docker_container: - name: "redis" + name: "sram-redis" image: "{{ redis.image }}" restart_policy: "always" state: "started" diff --git a/roles/redis/templates/redis.conf.j2 b/roles/sram-redis/templates/redis.conf.j2 similarity index 100% rename from roles/redis/templates/redis.conf.j2 rename to roles/sram-redis/templates/redis.conf.j2 diff --git a/roles/sbs/vars/main.yml b/roles/sram-redis/vars/main.yml similarity index 100% rename from roles/sbs/vars/main.yml rename to roles/sram-redis/vars/main.yml diff --git a/roles/sbs/defaults/main.yml b/roles/sram-sbs/defaults/main.yml similarity index 99% rename from roles/sbs/defaults/main.yml rename to roles/sram-sbs/defaults/main.yml index aedcee959..8cbe8d109 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sram-sbs/defaults/main.yml @@ -36,7 +36,7 @@ sbs_defaults: secret_key_suffix: suffix encryption_key: encryption_key - redis_host: redis + redis_host: sram-redis redis_port: 6379 redis_ssl: false redis_user: default diff --git a/roles/sbs/files/yarn.gpg b/roles/sram-sbs/files/yarn.gpg similarity index 100% rename from roles/sbs/files/yarn.gpg rename to roles/sram-sbs/files/yarn.gpg diff --git a/roles/sbs/handlers/main.yml b/roles/sram-sbs/handlers/main.yml similarity index 76% rename from 
roles/sbs/handlers/main.yml rename to roles/sram-sbs/handlers/main.yml index 2d7710d43..bc8be505b 100644 --- a/roles/sbs/handlers/main.yml +++ b/roles/sram-sbs/handlers/main.yml @@ -5,5 +5,5 @@ state: started restart: true loop: - - sbs-client - - sbs-server + - sram-sbs-client + - sram-sbs-server diff --git a/roles/sbs/tasks/main.yml b/roles/sram-sbs/tasks/main.yml similarity index 96% rename from roles/sbs/tasks/main.yml rename to roles/sram-sbs/tasks/main.yml index 560191f8a..6881736ec 100644 --- a/roles/sbs/tasks/main.yml +++ b/roles/sram-sbs/tasks/main.yml @@ -60,9 +60,9 @@ # Create dummy file in certs dir to pacify container pre-init script # https://github.com/SURFscz/SBS/pull/2312 - name: "Touch file in {{ sbs.cert_dir }}" - ansible.builtin.file: - path: "{{sbs.cert_dir}}/dummy" - state: file + ansible.builtin.copy: + content: "" + dest: "{{sbs.cert_dir}}/dummy" - name: "Create SBS config files" template: @@ -104,7 +104,7 @@ - name: "Run SBS migrations" throttle: 1 community.docker.docker_container: - name: "sbs-migration" + name: "sram-sbs-migration" image: "{{ sbs.server_image }}" pull: "never" state: "started" @@ -131,14 +131,14 @@ # will not save the output in result - name: "Remove migration container" community.docker.docker_container: - name: "sbs-migration" + name: "sram-sbs-migration" state: "absent" # TODO: fix this by only running this if "sbs_image is changed" changed_when: false - name: "Start sbs client container" community.docker.docker_container: - name: "sbs-client" + name: "sram-sbs-client" image: "{{ sbs.client_image }}" pull: "never" restart_policy: "always" @@ -155,7 +155,7 @@ - name: "Start SBS server container" community.docker.docker_container: - name: "sbs-server" + name: "sram-sbs-server" image: "{{ sbs.server_image }}" restart_policy: "always" state: "started" diff --git a/roles/sbs/templates/alembic.ini.j2 b/roles/sram-sbs/templates/alembic.ini.j2 similarity index 100% rename from roles/sbs/templates/alembic.ini.j2 rename to roles/sram-sbs/templates/alembic.ini.j2 diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sram-sbs/templates/config.yml.j2 similarity index 100% rename from roles/sbs/templates/config.yml.j2 rename to roles/sram-sbs/templates/config.yml.j2 diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sram-sbs/templates/disclaimer.css.j2 similarity index 100% rename from roles/sbs/templates/disclaimer.css.j2 rename to roles/sram-sbs/templates/disclaimer.css.j2 diff --git a/roles/sbs/templates/saml_advanced_settings.json.j2 b/roles/sram-sbs/templates/saml_advanced_settings.json.j2 similarity index 100% rename from roles/sbs/templates/saml_advanced_settings.json.j2 rename to roles/sram-sbs/templates/saml_advanced_settings.json.j2 diff --git a/roles/sbs/templates/saml_settings.json.j2 b/roles/sram-sbs/templates/saml_settings.json.j2 similarity index 100% rename from roles/sbs/templates/saml_settings.json.j2 rename to roles/sram-sbs/templates/saml_settings.json.j2 diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sram-sbs/templates/sbs-apache.conf.j2 similarity index 88% rename from roles/sbs/templates/sbs-apache.conf.j2 rename to roles/sram-sbs/templates/sbs-apache.conf.j2 index 0743c2ddb..f0140a845 100644 --- a/roles/sbs/templates/sbs-apache.conf.j2 +++ b/roles/sram-sbs/templates/sbs-apache.conf.j2 @@ -12,10 +12,10 @@ RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f RewriteRule ^/(.*)$ /index.html [L] ProxyRequests off -ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sbs-server:8080/ 
-ProxyPassReverse / http://sbs-server:8080/ -ProxyPass /socket.io/ ws://sbs-server:8080/socket.io/ -ProxyPassReverse /socket.io/ ws://sbs-server:8080/socket.io/ +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sram-sbs-server:8080/ +ProxyPassReverse / http://sram-sbs-server:8080/ +ProxyPass /socket.io/ ws://sram-sbs-server:8080/socket.io/ +ProxyPassReverse /socket.io/ ws://sram-sbs-server:8080/socket.io/ Header set Cache-Control: "public, max-age=31536000, immutable" diff --git a/roles/sbs/templates/sbs.service.j2 b/roles/sram-sbs/templates/sbs.service.j2 similarity index 100% rename from roles/sbs/templates/sbs.service.j2 rename to roles/sram-sbs/templates/sbs.service.j2 diff --git a/roles/sram-sbs/vars/main.yml b/roles/sram-sbs/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/sram-sbs/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext From b3f85f18fba824da555af93767d8a913d326d7d2 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Fri, 27 Mar 2026 16:45:19 +0100 Subject: [PATCH 64/73] Fix haproxy_backend.cfg.j2 --- roles/haproxy/templates/haproxy_backend.cfg.j2 | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/roles/haproxy/templates/haproxy_backend.cfg.j2 b/roles/haproxy/templates/haproxy_backend.cfg.j2 index ea52ac5b0..8ef005da4 100644 --- a/roles/haproxy/templates/haproxy_backend.cfg.j2 +++ b/roles/haproxy/templates/haproxy_backend.cfg.j2 @@ -81,5 +81,4 @@ backend ldap_servers {% for server in haproxy_ldap_servers -%} server {{server.label}} {{server.ip}}:{{server.port}} ssl verify none check weight 10 {% if loop.index==1 %}on-marked-up shutdown-backup-sessions{% else %}backup{% endif %} {% endfor %} -{% endfor %} -{% endig %} +{% endif %} From bf52be3fc473bd7056f28ff48a3461767121193c Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 9 Apr 2026 17:20:03 +0200 Subject: [PATCH 65/73] WIP --- environments/template/group_vars/all.yml | 2 + roles/mailpit/defaults/main.yml | 11 +- roles/mailpit/tasks/main.yml | 10 +- roles/sram-ldap/defaults/main.yml | 41 ---- roles/sram-ldap/vars/main.yml | 1 - roles/sram-metadata/vars/main.yml | 1 - roles/sram-plsc/defaults/main.yml | 15 -- roles/sram-plsc/templates/plsc.yml.j2 | 25 --- roles/sram-plsc/vars/main.yml | 1 - roles/sram-redis/defaults/main.yml | 11 -- roles/sram-redis/templates/redis.conf.j2 | 3 - roles/sram-redis/vars/main.yml | 1 - roles/sram-sbs/defaults/main.yml | 168 ---------------- roles/sram-sbs/templates/disclaimer.css.j2 | 6 - roles/sram-sbs/vars/main.yml | 1 - roles/sram_ldap/defaults/main.yml | 38 ++++ .../files/eduMember.ldif | 0 .../files/eduPerson.ldif | 0 .../files/groupOfMembers.ldif | 0 roles/{sram-ldap => sram_ldap}/files/ldap-add | 0 .../files/ldapPublicKey.ldif | 0 .../files/logrotate_slapd | 0 .../files/rsyslog_slapd.conf | 0 .../files/sczGroup.ldif | 0 .../files/sramPerson.ldif | 0 .../files/voPerson.ldif | 0 .../handlers/main.yml | 0 .../{sram-ldap => sram_ldap}/tasks/admins.yml | 38 ++-- roles/{sram-ldap => sram_ldap}/tasks/main.yml | 104 +++++----- .../templates/ldap-backup.sh.j2 | 0 .../templates/ldap.conf.j2 | 0 .../templates/slapd.service.j2 | 2 +- .../defaults/main.yml | 69 ++++--- .../files/01_idps.fd | 0 .../files/02_backend.fd | 0 .../files/03_frontend.fd | 0 .../files/surf.png | Bin .../files/surf.svg | 0 .../files/surf_bimi.svg | 0 .../files/surfconext.crt | 0 .../files/transform.xslt | 0 .../files/transform_proxy.xslt | 0 .../handlers/main.yml | 0 .../tasks/http.yml | 12 +- 
.../tasks/main.yml | 20 +- .../tasks/pyff.yml | 46 ++--- .../templates/index.html.j2 | 0 .../templates/pyff-metadata.service.j2 | 0 .../templates/pyff-metadata.timer.j2 | 0 .../templates/vhosts.conf.j2 | 0 .../defaults/main.yml | 0 .../files/internal_attributes.yaml | 0 .../files/plugins/attribute-maps/basic.py | 0 .../plugins/backends/openid_backend.yaml | 0 .../files/plugins/backends/saml2_backend.yaml | 0 .../plugins/frontends/ping_frontend.yaml | 0 .../plugins/frontends/saml2_frontend.yaml | 0 .../microservices/generate_attributes.yaml | 0 .../microservices/regex_attributes.yaml | 0 .../files/proxy_conf.yaml | 0 .../tasks/main.yml | 0 roles/sram_plsc/defaults/main.yml | 12 ++ .../handlers/main.yml | 0 roles/{sram-plsc => sram_plsc}/tasks/main.yml | 11 +- roles/sram_plsc/templates/plsc.yml.j2 | 25 +++ roles/sram_redis/defaults/main.yml | 9 + .../handlers/main.yml | 0 .../{sram-redis => sram_redis}/tasks/main.yml | 29 +-- roles/sram_redis/templates/redis.conf.j2 | 3 + roles/sram_sbs/defaults/main.yml | 166 ++++++++++++++++ roles/{sram-sbs => sram_sbs}/files/yarn.gpg | 0 .../{sram-sbs => sram_sbs}/handlers/main.yml | 0 roles/{sram-sbs => sram_sbs}/tasks/main.yml | 70 ++++--- .../templates/alembic.ini.j2 | 2 +- .../templates/config.yml.j2 | 180 +++++++++--------- roles/sram_sbs/templates/disclaimer.css.j2 | 6 + .../templates/saml_advanced_settings.json.j2 | 0 .../templates/saml_settings.json.j2 | 0 .../templates/sbs-apache.conf.j2 | 2 +- .../templates/sbs.service.j2 | 0 80 files changed, 566 insertions(+), 575 deletions(-) delete mode 100644 roles/sram-ldap/defaults/main.yml delete mode 100644 roles/sram-ldap/vars/main.yml delete mode 100644 roles/sram-metadata/vars/main.yml delete mode 100644 roles/sram-plsc/defaults/main.yml delete mode 100644 roles/sram-plsc/templates/plsc.yml.j2 delete mode 100644 roles/sram-plsc/vars/main.yml delete mode 100644 roles/sram-redis/defaults/main.yml delete mode 100644 roles/sram-redis/templates/redis.conf.j2 delete mode 100644 roles/sram-redis/vars/main.yml delete mode 100644 roles/sram-sbs/defaults/main.yml delete mode 100644 roles/sram-sbs/templates/disclaimer.css.j2 delete mode 100644 roles/sram-sbs/vars/main.yml create mode 100644 roles/sram_ldap/defaults/main.yml rename roles/{sram-ldap => sram_ldap}/files/eduMember.ldif (100%) rename roles/{sram-ldap => sram_ldap}/files/eduPerson.ldif (100%) rename roles/{sram-ldap => sram_ldap}/files/groupOfMembers.ldif (100%) rename roles/{sram-ldap => sram_ldap}/files/ldap-add (100%) rename roles/{sram-ldap => sram_ldap}/files/ldapPublicKey.ldif (100%) rename roles/{sram-ldap => sram_ldap}/files/logrotate_slapd (100%) rename roles/{sram-ldap => sram_ldap}/files/rsyslog_slapd.conf (100%) rename roles/{sram-ldap => sram_ldap}/files/sczGroup.ldif (100%) rename roles/{sram-ldap => sram_ldap}/files/sramPerson.ldif (100%) rename roles/{sram-ldap => sram_ldap}/files/voPerson.ldif (100%) rename roles/{sram-ldap => sram_ldap}/handlers/main.yml (100%) rename roles/{sram-ldap => sram_ldap}/tasks/admins.yml (67%) rename roles/{sram-ldap => sram_ldap}/tasks/main.yml (78%) rename roles/{sram-ldap => sram_ldap}/templates/ldap-backup.sh.j2 (100%) rename roles/{sram-ldap => sram_ldap}/templates/ldap.conf.j2 (100%) rename roles/{sram-ldap => sram_ldap}/templates/slapd.service.j2 (92%) rename roles/{sram-metadata => sram_metadata}/defaults/main.yml (53%) rename roles/{sram-metadata => sram_metadata}/files/01_idps.fd (100%) rename roles/{sram-metadata => sram_metadata}/files/02_backend.fd (100%) rename roles/{sram-metadata => 
sram_metadata}/files/03_frontend.fd (100%) rename roles/{sram-metadata => sram_metadata}/files/surf.png (100%) rename roles/{sram-metadata => sram_metadata}/files/surf.svg (100%) rename roles/{sram-metadata => sram_metadata}/files/surf_bimi.svg (100%) rename roles/{sram-metadata => sram_metadata}/files/surfconext.crt (100%) rename roles/{sram-metadata => sram_metadata}/files/transform.xslt (100%) rename roles/{sram-metadata => sram_metadata}/files/transform_proxy.xslt (100%) rename roles/{sram-metadata => sram_metadata}/handlers/main.yml (100%) rename roles/{sram-metadata => sram_metadata}/tasks/http.yml (72%) rename roles/{sram-metadata => sram_metadata}/tasks/main.yml (60%) rename roles/{sram-metadata => sram_metadata}/tasks/pyff.yml (58%) rename roles/{sram-metadata => sram_metadata}/templates/index.html.j2 (100%) rename roles/{sram-metadata => sram_metadata}/templates/pyff-metadata.service.j2 (100%) rename roles/{sram-metadata => sram_metadata}/templates/pyff-metadata.timer.j2 (100%) rename roles/{sram-metadata => sram_metadata}/templates/vhosts.conf.j2 (100%) rename roles/{sram-midproxy => sram_midproxy}/defaults/main.yml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/internal_attributes.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/plugins/attribute-maps/basic.py (100%) rename roles/{sram-midproxy => sram_midproxy}/files/plugins/backends/openid_backend.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/plugins/backends/saml2_backend.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/plugins/frontends/ping_frontend.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/plugins/frontends/saml2_frontend.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/plugins/microservices/generate_attributes.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/plugins/microservices/regex_attributes.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/files/proxy_conf.yaml (100%) rename roles/{sram-midproxy => sram_midproxy}/tasks/main.yml (100%) create mode 100644 roles/sram_plsc/defaults/main.yml rename roles/{sram-plsc => sram_plsc}/handlers/main.yml (100%) rename roles/{sram-plsc => sram_plsc}/tasks/main.yml (68%) create mode 100644 roles/sram_plsc/templates/plsc.yml.j2 create mode 100644 roles/sram_redis/defaults/main.yml rename roles/{sram-redis => sram_redis}/handlers/main.yml (100%) rename roles/{sram-redis => sram_redis}/tasks/main.yml (64%) create mode 100644 roles/sram_redis/templates/redis.conf.j2 create mode 100644 roles/sram_sbs/defaults/main.yml rename roles/{sram-sbs => sram_sbs}/files/yarn.gpg (100%) rename roles/{sram-sbs => sram_sbs}/handlers/main.yml (100%) rename roles/{sram-sbs => sram_sbs}/tasks/main.yml (71%) rename roles/{sram-sbs => sram_sbs}/templates/alembic.ini.j2 (96%) rename roles/{sram-sbs => sram_sbs}/templates/config.yml.j2 (58%) create mode 100644 roles/sram_sbs/templates/disclaimer.css.j2 rename roles/{sram-sbs => sram_sbs}/templates/saml_advanced_settings.json.j2 (100%) rename roles/{sram-sbs => sram_sbs}/templates/saml_settings.json.j2 (100%) rename roles/{sram-sbs => sram_sbs}/templates/sbs-apache.conf.j2 (98%) rename roles/{sram-sbs => sram_sbs}/templates/sbs.service.j2 (100%) diff --git a/environments/template/group_vars/all.yml b/environments/template/group_vars/all.yml index 1d8bd6f84..a314ae546 100644 --- a/environments/template/group_vars/all.yml +++ b/environments/template/group_vars/all.yml @@ -30,6 +30,8 @@ admin_email: "openconext-admin@example.edu" 
environment_shortname: "" environment_ribbon_colour: "" +current_release_appdir: /opt/openconext + httpd_csp: lenient: "default-src 'self'; object-src 'none'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none'; base-uri 'none'" lenient_with_static_img: "default-src 'self'; object-src 'none'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' https://{{ static_vhost }} http://localhost:* data:; form-action 'self'; frame-ancestors 'none'; base-uri 'none'" diff --git a/roles/mailpit/defaults/main.yml b/roles/mailpit/defaults/main.yml index 7647de9dc..f400a8d2e 100644 --- a/roles/mailpit/defaults/main.yml +++ b/roles/mailpit/defaults/main.yml @@ -1,8 +1,5 @@ --- -mailpit: "{{ mailpit_defaults | combine(mailpit_overrides, recursive=true) }}" - -mailpit_defaults: - image: "axllent/mailpit" - hostname: "mailpit.{{ base_domain }}" - user: "mailpit" - group: "mailpit" +mailpit_image: "axllent/mailpit" +mailpit_hostname: "mailpit.{{ base_domain }}" +mailpit_user: "mailpit" +mailpit_group: "mailpit" diff --git a/roles/mailpit/tasks/main.yml b/roles/mailpit/tasks/main.yml index 7fb32d8a7..5d89f4a7c 100644 --- a/roles/mailpit/tasks/main.yml +++ b/roles/mailpit/tasks/main.yml @@ -1,14 +1,14 @@ --- - name: "Create mailpit group" group: - name: "{{ mailpit.group }}" + name: "{{ mailpit_group }}" state: "present" register: "result" - name: "Create mailpit user" user: - name: "{{ mailpit.user }}" - group: "{{ mailpit.group }}" + name: "{{ mailpit_user }}" + group: "{{ mailpit_group }}" comment: "User to run Mailpit service" shell: "/bin/false" password: "!" @@ -23,7 +23,7 @@ - name: "Create mailpit container" docker_container: name: "mailpit" - image: "{{ mailpit.image }}" + image: "{{ mailpit_image }}" restart_policy: "always" state: "started" user: "{{ mailpit_user_uid }}" @@ -32,6 +32,6 @@ - name: "loadbalancer" labels: traefik.enable: "true" - traefik.http.routers.mailpit.rule: "Host(`{{ mailpit.hostname }}`)" + traefik.http.routers.mailpit.rule: "Host(`{{ mailpit_hostname }}`)" traefik.http.routers.mailpit.tls: "true" traefik.http.services.mailpit.loadbalancer.server.port: 8025 diff --git a/roles/sram-ldap/defaults/main.yml b/roles/sram-ldap/defaults/main.yml deleted file mode 100644 index e20f4553d..000000000 --- a/roles/sram-ldap/defaults/main.yml +++ /dev/null @@ -1,41 +0,0 @@ ---- -ldap: "{{ ldap_defaults | combine(ldap_overrides, recursive=true) }}" - -ldap_defaults: - image: "ghcr.io/surfscz/sram-ldap:main" - conf_dir: "{{ current_release_appdir }}/sram/ldap" - ldif_dir: "{{ ldap_defaults.conf_dir }}/schema" - certs_dir: "{{ ldap_defaults.conf_dir }}/certs" - backup_dir: "{{ ldap_defaults.conf_dir }}/ldap" - data_dir: "{{ ldap_defaults.conf_dir}}/data" - uri: "ldap://localhost/" - - user: "openldap" - group: "openldap" - - # admin_group: "ldap_admin" - admins: - - name: Admin - uid: admin - pw_hash: - sshkey: "" - - loglevel: "stats stats2 filter" - - services_password: secret - monitor_password: secret - ldap_monitor_password: secret - - uri: "ldap://localhost/" - rid_prefix: "ldap://" - - base_domain: "{{ base_domain }}" - base_dn: >- - {{ ((ldap_defaults.base_domain.split('.')|length)*['dc=']) | - zip(ldap_defaults.base_domain.split('.')) | list | map('join', '') | list | join(',') }} - services_dn: - basedn: "dc=services,{{ ldap_defaults.base_dn }}" - o: "Services" - binddn: "cn=admin,{{ ldap_defaults.base_dn }}" - - hosts: {} diff --git a/roles/sram-ldap/vars/main.yml 
b/roles/sram-ldap/vars/main.yml deleted file mode 100644 index 761942f7b..000000000 --- a/roles/sram-ldap/vars/main.yml +++ /dev/null @@ -1 +0,0 @@ -current_release_appdir: /opt/openconext diff --git a/roles/sram-metadata/vars/main.yml b/roles/sram-metadata/vars/main.yml deleted file mode 100644 index 761942f7b..000000000 --- a/roles/sram-metadata/vars/main.yml +++ /dev/null @@ -1 +0,0 @@ -current_release_appdir: /opt/openconext diff --git a/roles/sram-plsc/defaults/main.yml b/roles/sram-plsc/defaults/main.yml deleted file mode 100644 index 2a3711b0c..000000000 --- a/roles/sram-plsc/defaults/main.yml +++ /dev/null @@ -1,15 +0,0 @@ ---- -plsc: "{{ plsc_defaults | combine(plsc_overrides, recursive=true) }}" - -plsc_defaults: - image: "ghcr.io/surfscz/sram-plsc:main" - conf_dir: "{{current_release_appdir}}/sram/plsc" - ansible_nolog: false - ldap_uri: "ldap://ldap:389/" - ldap_basedn: "dc=services,dc=vnet" - ldap_binddn: "cn=admin,dc=vnet" - ldap_password: "secret" - sbs_host: "http://sbs-server:8080" - sbs_user: "sysread" - sbs_password: "secret" - retry: 3 diff --git a/roles/sram-plsc/templates/plsc.yml.j2 b/roles/sram-plsc/templates/plsc.yml.j2 deleted file mode 100644 index a42c00807..000000000 --- a/roles/sram-plsc/templates/plsc.yml.j2 +++ /dev/null @@ -1,25 +0,0 @@ ---- -ldap: - src: - uri: "{{ plsc.ldap_uri }}" - basedn: "{{ plsc.ldap_basedn }}" - binddn: "{{ plsc.ldap_binddn }}" - passwd: "{{ plsc.ldap_password }}" - sizelimit: 500 - dst: - uri: "{{ plsc.ldap_uri }}" - basedn: "{{ plsc.ldap_basedn }}" - binddn: "{{ plsc.ldap_binddn }}" - passwd: "{{ plsc.ldap_password }}" - sizelimit: 500 -sbs: - src: - host: "{{ plsc.sbs_host }}" - user: "{{ plsc.sbs_user }}" - passwd: "{{ plsc.sbs_password }}" - verify_ssl: {{ false if env=='vm' else true }} - timeout: 60 - retry: {{ plsc.retry }} -pwd: "{CRYPT}!" 
-uid: 1000 -gid: 1000 diff --git a/roles/sram-plsc/vars/main.yml b/roles/sram-plsc/vars/main.yml deleted file mode 100644 index 761942f7b..000000000 --- a/roles/sram-plsc/vars/main.yml +++ /dev/null @@ -1 +0,0 @@ -current_release_appdir: /opt/openconext diff --git a/roles/sram-redis/defaults/main.yml b/roles/sram-redis/defaults/main.yml deleted file mode 100644 index 31d44935e..000000000 --- a/roles/sram-redis/defaults/main.yml +++ /dev/null @@ -1,11 +0,0 @@ ---- -redis: "{{ redis_defaults | combine(redis_overrides, recursive=true) }}" -redis_defaults: - image: "docker.io/library/redis:7" - conf_dir: "{{ current_release_appdir }}/sram/redis" - data_dir: "{{ current_release_appdir }}/sram/redis/data" - user: redis - group: redis - redis_user: default - redis_password: changethispassword - max_memory: 100mb diff --git a/roles/sram-redis/templates/redis.conf.j2 b/roles/sram-redis/templates/redis.conf.j2 deleted file mode 100644 index ba231dc58..000000000 --- a/roles/sram-redis/templates/redis.conf.j2 +++ /dev/null @@ -1,3 +0,0 @@ -user {{redis.redis_user}} on +@all ~* &* >{{redis.redis_password}} -maxmemory {{ redis.max_memory }} -maxmemory-policy allkeys-lru diff --git a/roles/sram-redis/vars/main.yml b/roles/sram-redis/vars/main.yml deleted file mode 100644 index 761942f7b..000000000 --- a/roles/sram-redis/vars/main.yml +++ /dev/null @@ -1 +0,0 @@ -current_release_appdir: /opt/openconext diff --git a/roles/sram-sbs/defaults/main.yml b/roles/sram-sbs/defaults/main.yml deleted file mode 100644 index 8cbe8d109..000000000 --- a/roles/sram-sbs/defaults/main.yml +++ /dev/null @@ -1,168 +0,0 @@ ---- -sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}" - -sbs_defaults: - base_domain: "test2.sram.surf.nl" - ansible_nolog: true - base_url: "https://{{ sbs_defaults.base_domain }}" - server_image: "ghcr.io/surfscz/sram-sbs-server:main" - client_image: "ghcr.io/surfscz/sram-sbs-client:main" - - openidc_timeout: 86400 - sram_conf_dir: "{{ current_release_appdir }}/sram" - - work_dir: "{{ sbs_defaults.sram_conf_dir }}/sbs" - git_dir: "{{ sbs_defaults.work_dir }}/sbs" - env_dir: "{{ sbs_defaults.work_dir }}/sbs-env" - conf_dir: "{{ sbs_defaults.work_dir }}/config" - log_dir: "{{ sbs_defaults.work_dir }}/log" - cert_dir: "{{ sbs_defaults.work_dir }}/cert" - apache_conf: "{{ sbs_defaults.work_dir }}/sbs_defaults.conf" - nginx_conf: "{{ sbs_defaults.work_dir }}/nginx.conf" - - db_name: "sbs" - db_user: "sbsrw" - # dbbackup_user: "sbs_backupper" - migration_user: "sbsmigrate" - - db_connection: "\ - mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_defaults.db_name }}\ - ?ssl=true&charset=utf8mb4" - db_connection_sbs: "{{ sbs_defaults.db_connection | format(sbs_defaults.db_user, mysql_passwords.sbs) }}" - db_connection_migration: "\ - {{ sbs_defaults.db_connection | format(sbs_defaults.migration_user, mysql_passwords.sbsmigrate) }}" - - db_secret: secret - secret_key_suffix: suffix - encryption_key: encryption_key - - redis_host: sram-redis - redis_port: 6379 - redis_ssl: false - redis_user: default - - mail_host: "host.docker.internal" - mail_port: 25 - - user: "sbs" - group: "sbs" - - session_lifetime: 1440 - secret_key_suffix: "" - - oidc_crypto_password: "CHANGEME" - uid_attribute: "sub" - - disclaimer_color: "#a29c13" - disclaimer_label: wsgi - - urn_namespace: "urn:example:sbs" - eppn_scope: "sbs.example.edu" - restricted_co_default_org: "example.org" - - mail_sender_name: "SURF" - mail_sender_email: "no-reply@localhost" - exceptions_mail: "root@localhost" - - support_email: 
"sram-support@localhost" - admin_email: "sram-beheer@localhost" - ticket_email: "sram-support@surf.nl" - eduteams_email: "eduteams@localhost" - - suppress_mails: False - - wiki_link: "https://www.example.org/wiki" - - cron_hour_of_day: 4 - seed_allowed: True - api_keys_enabled: True - feedback_enabled: True - audit_trail_notifications_enabled: True - send_exceptions: False - send_js_exceptions: False - second_factor_authentication_required: True - totp_token_name: "SRAM-example" - notifications_enabled: True - invitation_reminders_enabled: True - invitation_expirations_enabled: True - open_requests_enabled: True - scim_sweep: False - impersonation_allowed: True - admin_platform_backdoor_totp: True - past_dates_allowed: True - mock_scim_enabled: True - log_to_stdout: True - - delete_orphaned: True - suspension_inactive_days: 365 - suspension_reminder_days: 14 - suspension_notify_admin: False - - oidc_config_url: "http://localhost/.well-known/openid-configuration" - oidc_authz_endpoint: "http://localhost/OIDC/authorization" - oidc_token_endpoint: "http://localhost/OIDC/token" - oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" - oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" - oidc_redirect_uri: "https://{{sbs_defaults.base_domain}}/api/users/resume-session" - mfa_idp_allowed: false - eduteams_continue_endpoint: "https://localhost/continue" - eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" - oidc_jwt_audience: "https://localhost" - continue_eduteams_redirect_uri: "https://localhost/continue" - oidc_verify_peer: False - oidc_scopes: - - openid - - manage_base_enabled: False - manage_base_url: "https://manage.{{base_domain}}" - manage_sram_rp_entity_id: "sbs.{{sbs_defaults.base_domain}}" - manage_verify_peer: False - - idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " - # backup_dir: "{{backup_base}}/sbs" - - swagger_enabled: true - - ssid_identity_providers: [] - surf_secure_id: - environment: "unknown.example.org" - sp_entity_id: "https://sbs.{{sbs_defaults.base_domain}}" - acs_url: "https://{{sbs_defaults.base_domain}}/api/users/acs" - sa_gw_environment: "sa-gw.unknown.example.org" - sa_idp_certificate: | - -----BEGIN CERTIFICATE----- - 12345 - -----END CERTIFICATE----- - priv: | - -----BEGIN RSA PRIVATE KEY----- - abcde - -----END RSA PRIVATE KEY----- - pub: | - -----BEGIN CERTIFICATE----- - 12345 - -----END CERTIFICATE----- - - ssid_authncontext: "\ - http://{{ sbs_defaults.surf_secure_id.environment }}/assurance/sfo-level2" - ssid_entityid: "\ - https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" - ssid_sso_endpoint: "\ - https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" - - mfa_sso_minutes: 10 - mfa_fallback_enabled: true - - ldap_url: "ldap://ldap.example.com/dc=example,dc=com" - ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl" - - csp_style_hashes: - - 'sha256-0+ANsgYUJdh56RK8gGvTF2vnriYqvFHfWqtA8xXa+bA=' - - 'sha256-3SnfHQolDHbZMbDAPmhrZf1keHiXfj/KJyh2phhFAAY=' - - 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' - - 'sha256-Ng6y+QCkPChG4Q49SIfXB5ToIDcDhITtQNFkDBPpCTw=' - - 'sha256-orBPipbqpMvkNi+Z+m6qEn0XS6ymmAQE6+FwCNs1FbQ=' - - 'sha256-vFt3L2qLqpJmRpcXGbYr2UVSmgSp9VCUzz2lnqWIATw=' - - 'sha256-SU3XCwbQ/8qgzoGOWCYdkwIr3xRrl5rsvdFcpw8NSiE=' # on /new-service-request - - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request - - engine_block_api_token: secret diff --git 
a/roles/sram-sbs/templates/disclaimer.css.j2 b/roles/sram-sbs/templates/disclaimer.css.j2 deleted file mode 100644 index 7922f5e5b..000000000 --- a/roles/sram-sbs/templates/disclaimer.css.j2 +++ /dev/null @@ -1,6 +0,0 @@ -{% if env!="prd" -%} -body::after { - background: {{ sbs.disclaimer_color }}; - content: "{{ sbs.disclaimer_label }}"; -} -{% endif %} diff --git a/roles/sram-sbs/vars/main.yml b/roles/sram-sbs/vars/main.yml deleted file mode 100644 index 761942f7b..000000000 --- a/roles/sram-sbs/vars/main.yml +++ /dev/null @@ -1 +0,0 @@ -current_release_appdir: /opt/openconext diff --git a/roles/sram_ldap/defaults/main.yml b/roles/sram_ldap/defaults/main.yml new file mode 100644 index 000000000..cb2bc981a --- /dev/null +++ b/roles/sram_ldap/defaults/main.yml @@ -0,0 +1,38 @@ +--- +ldap_image: "ghcr.io/surfscz/sram-ldap:main" +ldap_conf_dir: "{{ current_release_appdir }}/sram/ldap" +ldap_ldif_dir: "{{ ldap_conf_dir }}/schema" +ldap_certs_dir: "{{ ldap_conf_dir }}/certs" +ldap_backup_dir: "{{ ldap_conf_dir }}/ldap" +ldap_data_dir: "{{ ldap_conf_dir}}/data" +ldap_uri: "ldap://localhost/" + +ldap_user: "openldap" +ldap_group: "openldap" + +# admin_group: "ldap_admin" +ldap_admins: + - name: Admin + uid: admin + pw_hash: + sshkey: "" + +ldap_loglevel: "stats stats2 filter" + +ldap_services_password: secret +ldap_monitor_password: secret +ldap_ldap_monitor_password: secret + +ldap_uri: "ldap://localhost/" +ldap_rid_prefix: "ldap://" + +ldap_base_domain: "{{ base_domain }}" +ldap_base_dn: >- + {{ ((ldap_base_domain.split('.')|length)*['dc=']) | + zip(ldap_base_domain.split('.')) | list | map('join', '') | list | join(',') }} +ldap_services_dn: + basedn: "dc=services,{{ ldap_base_dn }}" + o: "Services" + binddn: "cn=admin,{{ ldap_base_dn }}" + +ldap_hosts: {} diff --git a/roles/sram-ldap/files/eduMember.ldif b/roles/sram_ldap/files/eduMember.ldif similarity index 100% rename from roles/sram-ldap/files/eduMember.ldif rename to roles/sram_ldap/files/eduMember.ldif diff --git a/roles/sram-ldap/files/eduPerson.ldif b/roles/sram_ldap/files/eduPerson.ldif similarity index 100% rename from roles/sram-ldap/files/eduPerson.ldif rename to roles/sram_ldap/files/eduPerson.ldif diff --git a/roles/sram-ldap/files/groupOfMembers.ldif b/roles/sram_ldap/files/groupOfMembers.ldif similarity index 100% rename from roles/sram-ldap/files/groupOfMembers.ldif rename to roles/sram_ldap/files/groupOfMembers.ldif diff --git a/roles/sram-ldap/files/ldap-add b/roles/sram_ldap/files/ldap-add similarity index 100% rename from roles/sram-ldap/files/ldap-add rename to roles/sram_ldap/files/ldap-add diff --git a/roles/sram-ldap/files/ldapPublicKey.ldif b/roles/sram_ldap/files/ldapPublicKey.ldif similarity index 100% rename from roles/sram-ldap/files/ldapPublicKey.ldif rename to roles/sram_ldap/files/ldapPublicKey.ldif diff --git a/roles/sram-ldap/files/logrotate_slapd b/roles/sram_ldap/files/logrotate_slapd similarity index 100% rename from roles/sram-ldap/files/logrotate_slapd rename to roles/sram_ldap/files/logrotate_slapd diff --git a/roles/sram-ldap/files/rsyslog_slapd.conf b/roles/sram_ldap/files/rsyslog_slapd.conf similarity index 100% rename from roles/sram-ldap/files/rsyslog_slapd.conf rename to roles/sram_ldap/files/rsyslog_slapd.conf diff --git a/roles/sram-ldap/files/sczGroup.ldif b/roles/sram_ldap/files/sczGroup.ldif similarity index 100% rename from roles/sram-ldap/files/sczGroup.ldif rename to roles/sram_ldap/files/sczGroup.ldif diff --git a/roles/sram-ldap/files/sramPerson.ldif 
b/roles/sram_ldap/files/sramPerson.ldif similarity index 100% rename from roles/sram-ldap/files/sramPerson.ldif rename to roles/sram_ldap/files/sramPerson.ldif diff --git a/roles/sram-ldap/files/voPerson.ldif b/roles/sram_ldap/files/voPerson.ldif similarity index 100% rename from roles/sram-ldap/files/voPerson.ldif rename to roles/sram_ldap/files/voPerson.ldif diff --git a/roles/sram-ldap/handlers/main.yml b/roles/sram_ldap/handlers/main.yml similarity index 100% rename from roles/sram-ldap/handlers/main.yml rename to roles/sram_ldap/handlers/main.yml diff --git a/roles/sram-ldap/tasks/admins.yml b/roles/sram_ldap/tasks/admins.yml similarity index 67% rename from roles/sram-ldap/tasks/admins.yml rename to roles/sram_ldap/tasks/admins.yml index e00115c04..dfba23d6e 100644 --- a/roles/sram-ldap/tasks/admins.yml +++ b/roles/sram_ldap/tasks/admins.yml @@ -1,10 +1,10 @@ --- # - name: Initialize DIT admin # community.general.ldap_entry: -# dn: "{{ services_ldap.binddn }}" +# dn: "{{ ldap_services_dn.binddn }}" # objectClass: organizationalRole # attributes: -# cn: "{{ services_ldap.binddn | regex_replace('^cn=([^,]+).*', '\\1') }}" +# cn: "{{ ldap_services_dn.binddn | regex_replace('^cn=([^,]+).*', '\\1') }}" # determine which users need to be admin # check for each role of each user if it leads to membership of group {{ldap_admin_group}} @@ -16,19 +16,19 @@ - name: determine ldap admins set_fact: - ldap_admins: "{{ ldap.admins }}" + ldap_admins: "{{ ldap_admins }}" # Find existing ldap admins - name: Initialize admins (I) community.general.ldap_search: - dn: "{{ ldap.services_dn.basedn }}" + dn: "{{ ldap_services_dn.basedn }}" scope: "onelevel" filter: "(objectClass=organizationalRole)" attrs: - "cn" - bind_dn: "{{ ldap.services_dn.binddn }}" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ldap.uri }}" + bind_dn: "{{ ldap_services_dn.binddn }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ldap_uri }}" register: "existing_ldap_admins_result" # ansible sucks like this: we need to extract the results from the result @@ -44,11 +44,11 @@ # Remove LDAP non-admins - name: Initialize admins (II) community.general.ldap_entry: - dn: "cn={{ item.cn }},{{ services_ldap.basedn }}" + dn: "cn={{ item.cn }},{{ ldap_services_dn.basedn }}" state: absent - bind_dn: "{{ ldap.services_dn.binddn }}" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ldap.uri }}" + bind_dn: "{{ ldap_services_dn.binddn }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ldap_uri }}" when: > item.cn not in ldap_admins | map(attribute='uid') and item.cn != 'admin' @@ -57,26 +57,26 @@ # Insert LDAP admins - name: Initialize admins (III) community.general.ldap_entry: - dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + dn: "cn={{ item.uid }},{{ ldap_services_dn.basedn }}" objectClass: - simpleSecurityObject - organizationalRole attributes: description: An LDAP administrator userPassword: "{{ item.pw_hash }}" - bind_dn: "{{ ldap.services_dn.binddn }}" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ldap.uri }}" + bind_dn: "{{ ldap_services_dn.binddn }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ldap_uri }}" loop: "{{ ldap_admins }}" # Make sure passwords are updated for existing admins - name: Initialize admins (IV) community.general.ldap_attrs: - dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + dn: "cn={{ item.uid }},{{ ldap_services_dn.basedn }}" attributes: userPassword: "{{ item.pw_hash }}" - bind_dn: "{{ ldap.services_dn.binddn }}" - bind_pw: "{{ 
ldap.services_password }}" - server_uri: "{{ldap.uri }}" + bind_dn: "{{ ldap_services_dn.binddn }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ldap_uri }}" loop: "{{ ldap_admins }}" diff --git a/roles/sram-ldap/tasks/main.yml b/roles/sram_ldap/tasks/main.yml similarity index 78% rename from roles/sram-ldap/tasks/main.yml rename to roles/sram_ldap/tasks/main.yml index 97977e06a..a4877722d 100644 --- a/roles/sram-ldap/tasks/main.yml +++ b/roles/sram_ldap/tasks/main.yml @@ -11,19 +11,19 @@ file: path: "{{ item.path }}" state: "directory" - # owner: "{{ ldap.user }}" - # group: "{{ ldap.group }}" + # owner: "{{ ldap_user }}" + # group: "{{ ldap_group }}" mode: "{{ item.mode }}" with_items: - - { path: "{{ldap.ldif_dir}}", mode: "0755" } - - { path: "{{ldap.certs_dir}}", mode: "0755" } - - { path: "{{ldap.data_dir}}", mode: "0777" } + - { path: "{{ldap_ldif_dir}}", mode: "0755" } + - { path: "{{ldap_certs_dir}}", mode: "0755" } + - { path: "{{ldap_data_dir}}", mode: "0777" } notify: Restart the ldap container - name: Copy schemas copy: src: "{{ item }}" - dest: "{{ ldap.ldif_dir }}/{{ item }}" + dest: "{{ ldap_ldif_dir }}/{{ item }}" mode: "0644" with_items: - sczGroup.ldif @@ -38,7 +38,7 @@ - name: Copying ldap-add script copy: src: "{{ item }}" - dest: "{{ ldap.conf_dir }}/{{ item }}" + dest: "{{ ldap_conf_dir }}/{{ item }}" mode: "0755" with_items: - ldap-add @@ -49,7 +49,7 @@ # copy: # src: "/etc/ssl/certs/sram-https.pem" # was installed here by update-ca-certificates # remote_src: true -# dest: "{{ldap.certs_dir}}/frontend.crt" +# dest: "{{ldap_certs_dir}}/frontend.crt" # mode: "0644" # when: "is_dev" # notify: Restart the ldap container @@ -57,19 +57,19 @@ - name: Setup ldap hosts vars: host: - key: "%s.{{ ldap.base_domain }}" + key: "%s.{{ ldap_base_domain }}" value: "%s" etc_hosts: {} set_fact: etc_hosts: >- {{ etc_hosts | combine({ host.key | format(item.key): host.value | format(item.value) }) }} - with_dict: "{{ ldap.hosts }}" + with_dict: "{{ ldap_hosts }}" - name: Create the ldap container community.docker.docker_container: name: "sram-ldap" - image: "{{ ldap.image }}" + image: "{{ ldap_image }}" restart_policy: "always" state: started pull: true @@ -77,12 +77,12 @@ - 0.0.0.0:389:389 env: LDAP_ORGANISATION: "{{ env }}" - LDAP_DOMAIN: "{{ ldap.base_domain }}" - LDAP_ROOTPASS: "{{ ldap.services_password }}" + LDAP_DOMAIN: "{{ ldap_base_domain }}" + LDAP_ROOTPASS: "{{ ldap_services_password }}" etc_hosts: "{{ etc_hosts }}" volumes: # For now the target side /opt/ldap is hard-coded - - "{{ ldap.conf_dir }}:/opt/ldap" + - "{{ ldap_conf_dir }}:/opt/ldap" networks: - name: "loadbalancer" labels: @@ -133,8 +133,8 @@ olcDbIndex: "{{item}}" state: "present" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" with_items: - "entryUUID eq" - "o eq" @@ -148,21 +148,21 @@ attributes: olcSizeLimit: "unlimited" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" - name: Set config community.general.ldap_attrs: dn: "cn=config" state: "present" attributes: - olcServerID: "{{ ldap.server_id }}" + olcServerID: "{{ ldap_server_id }}" olcSizeLimit: "unlimited" - olcLogLevel: "{{ ldap.loglevel }}" + olcLogLevel: "{{ ldap_loglevel }}" olcAttributeOptions: "time-" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: 
"{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" # # cert is used for communication between ldap for sync # # is generated in roles/certificates/tasks/main.yml @@ -173,8 +173,8 @@ # attributes: # olcTLSCACertificateFile: "/opt/ldap/certs/frontend.crt" # bind_dn: "cn=admin,cn=config" -# bind_pw: "{{ ldap.services_password }}" -# server_uri: "{{ ldap.uri }}" +# bind_pw: "{{ ldap_services_password }}" +# server_uri: "{{ ldap_uri }}" - name: Setup Modules community.general.ldap_attrs: @@ -184,8 +184,8 @@ - syncprov - dynlist.so bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" - name: Setup Dynlist community.general.ldap_entry: @@ -196,8 +196,8 @@ attributes: olcDlAttrSet: "voPerson labeledURI member+memberOf@groupOfMembers" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" - name: Setup Syncprov community.general.ldap_entry: @@ -209,8 +209,8 @@ olcSpCheckpoint: 100 10 olcSpSessionLog: 100 bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" # Leave this here in case we do want to build our own # root database from scratch instead of relying on the @@ -220,8 +220,8 @@ # community.general.ldap_attrs: # dn: olcDatabase={1}mdb,cn=config # attributes: -# olcSuffix: "{{ services_ldap.basedn }}" -# olcRootDN: "{{ services_ldap.binddn }}" +# olcSuffix: "{{ ldap_services_dn.basedn }}" +# olcRootDN: "{{ ldap_services_dn.binddn }}" # olcRootPW: "{{ '%s' | format(services_ldap_password) | slapd_hash }}" # state: exact # @@ -235,11 +235,11 @@ # {% if environment_name=="vm" %} # by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage # {% endif %} -# by dn.exact="{{ services_ldap.binddn }}" manage +# by dn.exact="{{ ldap_services_dn.binddn }}" manage # by * break # state: exact # bind_dn: "cn=admin,cn=config" -# bind_pw: "{{ services_ldap_password }}" +# bind_pw: "{{ ldap_services_password }}" # server_uri: "{{ ldap_uri }}" # # @@ -261,15 +261,15 @@ attributes: olcAccess: - >- - to dn.regex="(([^,]+),{{ ldap.services_dn.basedn }})$" - by dn.exact="{{ ldap.services_dn.binddn }}" write + to dn.regex="(([^,]+),{{ ldap_services_dn.basedn }})$" + by dn.exact="{{ ldap_services_dn.binddn }}" write by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth write by dn.exact,expand="cn=admin,$1" read by * break - >- to * by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage - by dn.regex="cn=[^,]+,{{ ldap.services_dn.basedn }}" read + by dn.regex="cn=[^,]+,{{ ldap_services_dn.basedn }}" read {% if env=="vm" %} by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage {% endif %} @@ -282,8 +282,8 @@ state: exact ordered: true bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" # ldap_rids: # 101: ldaps://ldap1.scz-vm.net/ @@ -294,14 +294,14 @@ start: 101 rid: key: "%d" - value: "{{ ldap.rid_prefix }}%s.{{ ldap.base_domain }}/" + value: "{{ ldap_rid_prefix }}%s.{{ ldap_base_domain }}/" ldap_rids: {} set_fact: ldap_rids: >- {{ ldap_rids | combine({ rid.key | format(start|int): rid.value | format(item.key) }) }} 
start: "{{ start|int + 1 }}" - with_dict: "{{ ldap.hosts | dict2items | sort(attribute='key') }}" + with_dict: "{{ ldap_hosts | dict2items | sort(attribute='key') }}" # Voor toekomstige Claude gebruikers: onderstaande construct levert aan het eind # een string representatie van de dict op, die niet meer gebruikt kan worden @@ -321,11 +321,11 @@ rid: >- rid={} provider="{}" - searchbase="{{ ldap.services_dn.basedn }}" + searchbase="{{ ldap_services_dn.basedn }}" type=refreshAndPersist bindmethod=simple - binddn="{{ ldap.services_dn.binddn }}" - credentials={{ ldap.services_password }} + binddn="{{ ldap_services_dn.binddn }}" + credentials={{ ldap_services_password }} retry="30 +" timeout=30 network-timeout=5 @@ -341,24 +341,24 @@ olcSyncrepl: "{{ rids }}" olcMultiProvider: "TRUE" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" # We now have Syncrepl in place, so only write to primary - name: Initialize DIT community.general.ldap_entry: - dn: "{{ ldap.services_dn.basedn }}" + dn: "{{ ldap_services_dn.basedn }}" state: "present" objectClass: - "top" - "dcObject" - "organization" attributes: - dc: "{{ ldap.services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}" - o: "{{ ldap.services_dn.o }}" - bind_dn: "{{ ldap.services_dn.binddn }}" - bind_pw: "{{ ldap.services_password }}" - server_uri: "{{ ldap.uri }}" + dc: "{{ ldap_services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}" + o: "{{ ldap_services_dn.o }}" + bind_dn: "{{ ldap_services_dn.binddn }}" + bind_pw: "{{ ldap_services_password }}" + server_uri: "{{ ldap_uri }}" when: > inventory_hostname in groups['ldap_primary'] diff --git a/roles/sram-ldap/templates/ldap-backup.sh.j2 b/roles/sram_ldap/templates/ldap-backup.sh.j2 similarity index 100% rename from roles/sram-ldap/templates/ldap-backup.sh.j2 rename to roles/sram_ldap/templates/ldap-backup.sh.j2 diff --git a/roles/sram-ldap/templates/ldap.conf.j2 b/roles/sram_ldap/templates/ldap.conf.j2 similarity index 100% rename from roles/sram-ldap/templates/ldap.conf.j2 rename to roles/sram_ldap/templates/ldap.conf.j2 diff --git a/roles/sram-ldap/templates/slapd.service.j2 b/roles/sram_ldap/templates/slapd.service.j2 similarity index 92% rename from roles/sram-ldap/templates/slapd.service.j2 rename to roles/sram_ldap/templates/slapd.service.j2 index 7e0f79397..299b3af0c 100644 --- a/roles/sram-ldap/templates/slapd.service.j2 +++ b/roles/sram_ldap/templates/slapd.service.j2 @@ -6,7 +6,7 @@ Type = forking User = root SupplementaryGroups = ssl-cert ExecStartPre=-/bin/mkdir -p /var/run/slapd -ExecStartPre=-/bin/chown openldap. 
/var/run/slapd +ExecStartPre=-/bin/chown openldap: /var/run/slapd ExecStart = /usr/sbin/slapd -F /etc/ldap/slapd.d -u openldap -g openldap -h 'ldapi:/// ldap://localhost/ ldaps://{{inventory_hostname}}/' Restart = always RestartSec = 30 diff --git a/roles/sram-metadata/defaults/main.yml b/roles/sram_metadata/defaults/main.yml similarity index 53% rename from roles/sram-metadata/defaults/main.yml rename to roles/sram_metadata/defaults/main.yml index 5355989bc..7982c7f9d 100644 --- a/roles/sram-metadata/defaults/main.yml +++ b/roles/sram_metadata/defaults/main.yml @@ -1,16 +1,13 @@ --- -metadata: "{{ metadata_defaults | combine(metadata_overrides, recursive=true) }}" +sram_metadata_image_server: "ghcr.io/openconext/openconext-basecontainers/apache2:latest" +sram_metadata_image_pyff: "ghcr.io/surfscz/sram-pyff:main" +sram_metadata_hostname: "meta.{{ base_domain }}" +sram_metadata_basedir: "{{current_release_appdir}}/sram/metadata" -metadata_defaults: - image_server: "ghcr.io/openconext/openconext-basecontainers/apache2:latest" - image_pyff: "ghcr.io/surfscz/sram-pyff:main" - hostname: "meta.{{ base_domain }}" - basedir: "{{current_release_appdir}}/sram/metadata" +# server_name: "metadata-server" - # server_name: "metadata-server" - - user: "sram-metadata" - group: "sram-metadata" +sram_metadata_user: "sram-metadata" +sram_metadata_group: "sram-metadata" # idps_source: "https://metadata.surfconext.nl/idps-metadata.xml" # idps_cert: | @@ -39,33 +36,33 @@ metadata_defaults: # 9bFiAimF5LLk/LnMfplK9w0vvxWVcdQkDgVPYvEGNtttj0QC7/jM4ZeihGb6Oyzy # DZA6aeg73/ygOATQ13A= # -----END CERTIFICATE----- - idps_filters: [] +sram_metadata_idps_filters: [] - idps_files: - - name: "dummy-idp" - metadata: | - - - - - - - SRAM VM Dummy IdP - SRAM VM Dummy IdP - https://test-idp.sram.example.org/ - - - Administrator - mailto:sinterklaas@example.nl - - +sram_metadata_idps_files: +- name: "dummy-idp" + metadata: | + + + + + + + SRAM VM Dummy IdP + SRAM VM Dummy IdP + https://test-idp.sram.example.org/ + + + Administrator + mailto:sinterklaas@example.nl + + # idps_xrd: "{{metadata_defaults.basedir}}/certs/surfconext.xrd" # idps_source_dir: "/opt/metadata-src" diff --git a/roles/sram-metadata/files/01_idps.fd b/roles/sram_metadata/files/01_idps.fd similarity index 100% rename from roles/sram-metadata/files/01_idps.fd rename to roles/sram_metadata/files/01_idps.fd diff --git a/roles/sram-metadata/files/02_backend.fd b/roles/sram_metadata/files/02_backend.fd similarity index 100% rename from roles/sram-metadata/files/02_backend.fd rename to roles/sram_metadata/files/02_backend.fd diff --git a/roles/sram-metadata/files/03_frontend.fd b/roles/sram_metadata/files/03_frontend.fd similarity index 100% rename from roles/sram-metadata/files/03_frontend.fd rename to roles/sram_metadata/files/03_frontend.fd diff --git a/roles/sram-metadata/files/surf.png b/roles/sram_metadata/files/surf.png similarity index 100% rename from roles/sram-metadata/files/surf.png rename to roles/sram_metadata/files/surf.png diff --git a/roles/sram-metadata/files/surf.svg b/roles/sram_metadata/files/surf.svg similarity index 100% rename from roles/sram-metadata/files/surf.svg rename to roles/sram_metadata/files/surf.svg diff --git a/roles/sram-metadata/files/surf_bimi.svg b/roles/sram_metadata/files/surf_bimi.svg similarity index 100% rename from roles/sram-metadata/files/surf_bimi.svg rename to roles/sram_metadata/files/surf_bimi.svg diff --git a/roles/sram-metadata/files/surfconext.crt b/roles/sram_metadata/files/surfconext.crt similarity index 100% 
rename from roles/sram-metadata/files/surfconext.crt rename to roles/sram_metadata/files/surfconext.crt diff --git a/roles/sram-metadata/files/transform.xslt b/roles/sram_metadata/files/transform.xslt similarity index 100% rename from roles/sram-metadata/files/transform.xslt rename to roles/sram_metadata/files/transform.xslt diff --git a/roles/sram-metadata/files/transform_proxy.xslt b/roles/sram_metadata/files/transform_proxy.xslt similarity index 100% rename from roles/sram-metadata/files/transform_proxy.xslt rename to roles/sram_metadata/files/transform_proxy.xslt diff --git a/roles/sram-metadata/handlers/main.yml b/roles/sram_metadata/handlers/main.yml similarity index 100% rename from roles/sram-metadata/handlers/main.yml rename to roles/sram_metadata/handlers/main.yml diff --git a/roles/sram-metadata/tasks/http.yml b/roles/sram_metadata/tasks/http.yml similarity index 72% rename from roles/sram-metadata/tasks/http.yml rename to roles/sram_metadata/tasks/http.yml index befcc1d18..561201096 100644 --- a/roles/sram-metadata/tasks/http.yml +++ b/roles/sram_metadata/tasks/http.yml @@ -2,19 +2,19 @@ - name: "Install index page" template: src: "index.html.j2" - dest: "{{metadata.basedir}}/web/index.html" + dest: "{{sram_metadata_basedir}}/web/index.html" mode: "0644" - name: "Install legacy link" file: src: "." - dest: "{{metadata.basedir}}/web/metadata" + dest: "{{sram_metadata_basedir}}/web/metadata" state: "link" - name: "Install logos" copy: src: "{{item}}" - dest: "{{metadata.basedir}}/web" + dest: "{{sram_metadata_basedir}}/web" mode: "0644" with_items: - "surf.svg" @@ -24,19 +24,19 @@ - name: "Create the metadata-server container" community.docker.docker_container: name: "sram-metadata-server" - image: "{{ metadata.image_server }}" + image: "{{ sram_metadata_image_server }}" restart_policy: "always" state: "started" pull: true mounts: - - source: "{{metadata.basedir}}/web" + - source: "{{sram_metadata_basedir}}/web" target: "/var/www/html" type: "bind" read_only: true networks: - name: "loadbalancer" labels: - traefik.http.routers.metadata.rule: "Host(`{{ metadata.hostname }}`)" + traefik.http.routers.metadata.rule: "Host(`{{ sram_metadata_hostname }}`)" traefik.http.routers.metadata.tls: "true" traefik.enable: "true" healthcheck: diff --git a/roles/sram-metadata/tasks/main.yml b/roles/sram_metadata/tasks/main.yml similarity index 60% rename from roles/sram-metadata/tasks/main.yml rename to roles/sram_metadata/tasks/main.yml index d6ac55f29..89c0ce9e9 100644 --- a/roles/sram-metadata/tasks/main.yml +++ b/roles/sram_metadata/tasks/main.yml @@ -1,7 +1,7 @@ --- - name: "Create metadata group" group: - name: "{{ metadata.group }}" + name: "{{ sram_metadata_group }}" state: "present" register: "result" @@ -11,12 +11,12 @@ - name: "Create metadata user" user: - name: "{{ metadata.user }}" - group: "{{ metadata.group }}" + name: "{{ sram_metadata_user }}" + group: "{{ sram_metadata_group }}" comment: "User to run metadata service" shell: "/bin/false" password: "!" 
- home: "{{ metadata.basedir }}" + home: "{{ sram_metadata_basedir }}" create_home: false state: "present" register: "result" @@ -32,13 +32,13 @@ state: "directory" mode: "{{ item.mode }}" owner: "root" - group: "{{ metadata.group }}" + group: "{{ sram_metadata_group }}" with_items: - - { dir: "{{metadata.basedir}}/web", mode: "0775" } - - { dir: "{{metadata.basedir}}/feeds", mode: "0755" } - - { dir: "{{metadata.basedir}}/src", mode: "0755" } - - { dir: "{{metadata.basedir}}/certs", mode: "0755" } - - { dir: "{{metadata.basedir}}/xslt", mode: "0755" } + - { dir: "{{sram_metadata_basedir}}/web", mode: "0775" } + - { dir: "{{sram_metadata_basedir}}/feeds", mode: "0755" } + - { dir: "{{sram_metadata_basedir}}/src", mode: "0755" } + - { dir: "{{sram_metadata_basedir}}/certs", mode: "0755" } + - { dir: "{{sram_metadata_basedir}}/xslt", mode: "0755" } notify: "Restart the pyFF container" diff --git a/roles/sram-metadata/tasks/pyff.yml b/roles/sram_metadata/tasks/pyff.yml similarity index 58% rename from roles/sram-metadata/tasks/pyff.yml rename to roles/sram_metadata/tasks/pyff.yml index 6c66a5696..4e9b960d3 100644 --- a/roles/sram-metadata/tasks/pyff.yml +++ b/roles/sram_metadata/tasks/pyff.yml @@ -2,33 +2,33 @@ - name: "create self-signed Metadata Signing SSL certs" shell: cmd: ' - openssl genrsa -out "{{ metadata.basedir }}/certs/signing.key" 2048; + openssl genrsa -out "{{ sram_metadata_basedir }}/certs/signing.key" 2048; openssl req -new -nodes -x509 -subj "/C=NL/CN=signing" - -days 3650 -key "{{ metadata.basedir }}/certs/signing.key" - -out "{{ metadata.basedir }}/certs/signing.crt" -extensions v3_ca; - chown {{metadata.user}}:{{metadata.group}} {{ metadata.basedir }}/certs/*; + -days 3650 -key "{{ sram_metadata_basedir }}/certs/signing.key" + -out "{{ sram_metadata_basedir }}/certs/signing.crt" -extensions v3_ca; + chown {{sram_metadata_user}}:{{sram_metadata_group}} {{ sram_metadata_basedir }}/certs/*; ' - creates: "{{ metadata.basedir }}/certs/signing.crt" - when: "metadata.signing_cert is not defined" + creates: "{{ sram_metadata_basedir }}/certs/signing.crt" + when: "sram_metadata_signing_cert is not defined" notify: "Restart the pyFF container" - name: "Write fixed Metadata signing certificates" copy: - dest: "{{ metadata.basedir }}/certs/{{ item.file }}" + dest: "{{ sram_metadata_basedir }}/certs/{{ item.file }}" content: "{{item.contents}}" mode: "{{item.mode}}" - owner: "{{metadata.user}}" - group: "{{metadata.group}}" + owner: "{{sram_metadata_user}}" + group: "{{sram_metadata_group}}" with_items: - - { file: "signing.key", mode: "0640", contents: "{{metadata.signing_cert.priv}}" } - - { file: "signing.crt", mode: "0644", contents: "{{metadata.signing_cert.pub}}" } - when: "metadata.signing_cert is defined" + - { file: "signing.key", mode: "0640", contents: "{{sram_metadata_signing_cert.priv}}" } + - { file: "signing.crt", mode: "0644", contents: "{{sram_metadata_signing_cert.pub}}" } + when: "sram_metadata_signing_cert is defined" notify: "Restart the pyFF container" - name: "Copy source certificates" copy: src: "{{ item }}" - dest: "{{ metadata.basedir }}/certs" + dest: "{{ sram_metadata_basedir }}/certs" mode: "0644" with_items: - "surfconext.crt" @@ -37,15 +37,15 @@ - name: "Install IdP metadata" copy: content: "{{item.metadata}}" - dest: "{{ metadata.basedir }}/src/{{item.name}}.xml" + dest: "{{ sram_metadata_basedir }}/src/{{item.name}}.xml" mode: "0644" - with_items: "{{ metadata.idps_files }}" + with_items: "{{ sram_metadata_idps_files }}" notify: "Restart the pyFF 
container" - name: "Copy pyFF xslt transformations" copy: src: "{{item}}" - dest: "{{metadata.basedir}}/xslt" + dest: "{{sram_metadata_basedir}}/xslt" mode: "0644" with_items: - "transform_proxy.xslt" @@ -55,7 +55,7 @@ - name: "Copy pyFF feeds" copy: src: "{{item}}" - dest: "{{metadata.basedir}}/feeds" + dest: "{{sram_metadata_basedir}}/feeds" mode: "0644" with_items: - "01_idps.fd" @@ -66,7 +66,7 @@ - name: "Create the pyFF container" community.docker.docker_container: name: "sram-metadata-pyff" - image: "{{ metadata.image_pyff }}" + image: "{{ sram_metadata_image_pyff }}" restart_policy: "always" state: "started" pull: true @@ -75,22 +75,22 @@ USER: "{{ metadata_user_uid | string }}" GROUP: "{{ metadata_group_gid | string }}" mounts: - - source: "{{ metadata.basedir }}/web" + - source: "{{ sram_metadata_basedir }}/web" target: "/opt/pyff/web" type: "bind" - - source: "{{ metadata.basedir }}/feeds" + - source: "{{ sram_metadata_basedir }}/feeds" target: "/opt/pyff/feeds" type: "bind" read_only: true - - source: "{{ metadata.basedir }}/src" + - source: "{{ sram_metadata_basedir }}/src" target: "/opt/pyff/src" type: "bind" read_only: true - - source: "{{ metadata.basedir }}/certs" + - source: "{{ sram_metadata_basedir }}/certs" target: "/opt/pyff/certs" type: "bind" read_only: true - - source: "{{ metadata.basedir }}/xslt" + - source: "{{ sram_metadata_basedir }}/xslt" target: "/opt/pyff/xslt" type: "bind" read_only: true diff --git a/roles/sram-metadata/templates/index.html.j2 b/roles/sram_metadata/templates/index.html.j2 similarity index 100% rename from roles/sram-metadata/templates/index.html.j2 rename to roles/sram_metadata/templates/index.html.j2 diff --git a/roles/sram-metadata/templates/pyff-metadata.service.j2 b/roles/sram_metadata/templates/pyff-metadata.service.j2 similarity index 100% rename from roles/sram-metadata/templates/pyff-metadata.service.j2 rename to roles/sram_metadata/templates/pyff-metadata.service.j2 diff --git a/roles/sram-metadata/templates/pyff-metadata.timer.j2 b/roles/sram_metadata/templates/pyff-metadata.timer.j2 similarity index 100% rename from roles/sram-metadata/templates/pyff-metadata.timer.j2 rename to roles/sram_metadata/templates/pyff-metadata.timer.j2 diff --git a/roles/sram-metadata/templates/vhosts.conf.j2 b/roles/sram_metadata/templates/vhosts.conf.j2 similarity index 100% rename from roles/sram-metadata/templates/vhosts.conf.j2 rename to roles/sram_metadata/templates/vhosts.conf.j2 diff --git a/roles/sram-midproxy/defaults/main.yml b/roles/sram_midproxy/defaults/main.yml similarity index 100% rename from roles/sram-midproxy/defaults/main.yml rename to roles/sram_midproxy/defaults/main.yml diff --git a/roles/sram-midproxy/files/internal_attributes.yaml b/roles/sram_midproxy/files/internal_attributes.yaml similarity index 100% rename from roles/sram-midproxy/files/internal_attributes.yaml rename to roles/sram_midproxy/files/internal_attributes.yaml diff --git a/roles/sram-midproxy/files/plugins/attribute-maps/basic.py b/roles/sram_midproxy/files/plugins/attribute-maps/basic.py similarity index 100% rename from roles/sram-midproxy/files/plugins/attribute-maps/basic.py rename to roles/sram_midproxy/files/plugins/attribute-maps/basic.py diff --git a/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml b/roles/sram_midproxy/files/plugins/backends/openid_backend.yaml similarity index 100% rename from roles/sram-midproxy/files/plugins/backends/openid_backend.yaml rename to roles/sram_midproxy/files/plugins/backends/openid_backend.yaml diff 
--git a/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml b/roles/sram_midproxy/files/plugins/backends/saml2_backend.yaml similarity index 100% rename from roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml rename to roles/sram_midproxy/files/plugins/backends/saml2_backend.yaml diff --git a/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml b/roles/sram_midproxy/files/plugins/frontends/ping_frontend.yaml similarity index 100% rename from roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml rename to roles/sram_midproxy/files/plugins/frontends/ping_frontend.yaml diff --git a/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml b/roles/sram_midproxy/files/plugins/frontends/saml2_frontend.yaml similarity index 100% rename from roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml rename to roles/sram_midproxy/files/plugins/frontends/saml2_frontend.yaml diff --git a/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml b/roles/sram_midproxy/files/plugins/microservices/generate_attributes.yaml similarity index 100% rename from roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml rename to roles/sram_midproxy/files/plugins/microservices/generate_attributes.yaml diff --git a/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml b/roles/sram_midproxy/files/plugins/microservices/regex_attributes.yaml similarity index 100% rename from roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml rename to roles/sram_midproxy/files/plugins/microservices/regex_attributes.yaml diff --git a/roles/sram-midproxy/files/proxy_conf.yaml b/roles/sram_midproxy/files/proxy_conf.yaml similarity index 100% rename from roles/sram-midproxy/files/proxy_conf.yaml rename to roles/sram_midproxy/files/proxy_conf.yaml diff --git a/roles/sram-midproxy/tasks/main.yml b/roles/sram_midproxy/tasks/main.yml similarity index 100% rename from roles/sram-midproxy/tasks/main.yml rename to roles/sram_midproxy/tasks/main.yml diff --git a/roles/sram_plsc/defaults/main.yml b/roles/sram_plsc/defaults/main.yml new file mode 100644 index 000000000..f3b60a23e --- /dev/null +++ b/roles/sram_plsc/defaults/main.yml @@ -0,0 +1,12 @@ +--- +plsc_image: "ghcr.io/surfscz/sram-plsc:main" +plsc_conf_dir: "{{current_release_appdir}}/sram/plsc" +plsc_ansible_nolog: false +plsc_ldap_uri: "ldap://ldap:389/" +plsc_ldap_basedn: "dc=services,dc=vnet" +plsc_ldap_binddn: "cn=admin,dc=vnet" +plsc_ldap_password: "secret" +plsc_sbs_host: "http://sbs-server:8080" +plsc_sbs_user: "sysread" +plsc_sbs_password: "secret" +plsc_retry: 3 diff --git a/roles/sram-plsc/handlers/main.yml b/roles/sram_plsc/handlers/main.yml similarity index 100% rename from roles/sram-plsc/handlers/main.yml rename to roles/sram_plsc/handlers/main.yml diff --git a/roles/sram-plsc/tasks/main.yml b/roles/sram_plsc/tasks/main.yml similarity index 68% rename from roles/sram-plsc/tasks/main.yml rename to roles/sram_plsc/tasks/main.yml index 0d3900bd9..0523ce118 100644 --- a/roles/sram-plsc/tasks/main.yml +++ b/roles/sram_plsc/tasks/main.yml @@ -1,28 +1,29 @@ --- - name: Make sure clients sync directory exists file: - path: "{{ plsc.conf_dir }}" + path: "{{ plsc_conf_dir }}" state: directory mode: "0755" - name: "Create plsc.yml source if it doesn't exist" template: src: "plsc.yml.j2" - dest: "{{ plsc.conf_dir }}/plsc.yml" + dest: "{{ plsc_conf_dir }}/plsc.yml" mode: "0640" - no_log: "{{plsc.ansible_nolog}}" + no_log: "{{plsc_ansible_nolog}}" notify: "Restart the plsc 
container" - name: Create the plsc container community.docker.docker_container: name: "sram-plsc" - image: "{{ plsc.image }}" + image: "{{ plsc_image }}" restart_policy: "always" state: started pull: true mounts: - type: bind - source: "{{ plsc.conf_dir }}/plsc.yml" + source: "{{ plsc_conf_dir }}/plsc.yml" target: "/opt/plsc/plsc.yml" networks: + # TODO: Should this not be parametrized? - name: "loadbalancer" diff --git a/roles/sram_plsc/templates/plsc.yml.j2 b/roles/sram_plsc/templates/plsc.yml.j2 new file mode 100644 index 000000000..f1e7d4c6c --- /dev/null +++ b/roles/sram_plsc/templates/plsc.yml.j2 @@ -0,0 +1,25 @@ +--- +ldap: + src: + uri: "{{ plsc_ldap_uri }}" + basedn: "{{ plsc_ldap_basedn }}" + binddn: "{{ plsc_ldap_binddn }}" + passwd: "{{ plsc_ldap_password }}" + sizelimit: 500 + dst: + uri: "{{ plsc_ldap_uri }}" + basedn: "{{ plsc_ldap_basedn }}" + binddn: "{{ plsc_ldap_binddn }}" + passwd: "{{ plsc_ldap_password }}" + sizelimit: 500 +sbs: + src: + host: "{{ plsc_sbs_host }}" + user: "{{ plsc_sbs_user }}" + passwd: "{{ plsc_sbs_password }}" + verify_ssl: {{ false if env=='vm' else true }} + timeout: 60 + retry: {{ plsc_retry }} +pwd: "{CRYPT}!" +uid: 1000 +gid: 1000 diff --git a/roles/sram_redis/defaults/main.yml b/roles/sram_redis/defaults/main.yml new file mode 100644 index 000000000..857311145 --- /dev/null +++ b/roles/sram_redis/defaults/main.yml @@ -0,0 +1,9 @@ +--- +redis_image: "docker.io/library/redis:7" +redis_conf_dir: "{{ current_release_appdir }}/sram/redis" +redis_data_dir: "{{ current_release_appdir }}/sram/redis/data" +redis_user: redis +redis_group: redis +redis_redis_user: default +redis_redis_password: changethispassword +redis_max_memory: 100mb diff --git a/roles/sram-redis/handlers/main.yml b/roles/sram_redis/handlers/main.yml similarity index 100% rename from roles/sram-redis/handlers/main.yml rename to roles/sram_redis/handlers/main.yml diff --git a/roles/sram-redis/tasks/main.yml b/roles/sram_redis/tasks/main.yml similarity index 64% rename from roles/sram-redis/tasks/main.yml rename to roles/sram_redis/tasks/main.yml index 72789b08f..0212fe0b0 100644 --- a/roles/sram-redis/tasks/main.yml +++ b/roles/sram_redis/tasks/main.yml @@ -1,7 +1,7 @@ --- - name: "Create redis group" group: - name: "{{ redis.group }}" + name: "{{ redis_group }}" state: "present" register: "result" @@ -11,12 +11,12 @@ - name: "Create redis user" user: - name: "{{ redis.user }}" - group: "{{ redis.group }}" + name: "{{ redis_user }}" + group: "{{ redis_group }}" comment: "User to run SRAM Redis service" shell: "/bin/false" password: "!" 
- home: "{{ redis.conf_dir }}" + home: "{{ redis_conf_dir }}" create_home: false state: "present" register: "result" @@ -29,33 +29,34 @@ file: path: "{{item.path}}" state: "directory" - owner: "{{ redis.user }}" - group: "{{ redis.group }}" + owner: "{{ redis_user }}" + group: "{{ redis_group }}" mode: "{{item.mode}}" with_items: - - { path: "{{redis.conf_dir}}", mode: "0755" } - - { path: "{{redis.data_dir}}", mode: "0755" } + - { path: "{{redis_conf_dir}}", mode: "0755" } + - { path: "{{redis_data_dir}}", mode: "0755" } - name: "Create redis config" template: src: "redis.conf.j2" - dest: "{{ redis.conf_dir }}/redis.conf" - owner: "{{ redis.user }}" - group: "{{ redis.group }}" + dest: "{{ redis_conf_dir }}/redis.conf" + owner: "{{ redis_user }}" + group: "{{ redis_group }}" mode: "0644" notify: "Restart redis container" - name: "Create redis container" community.docker.docker_container: name: "sram-redis" - image: "{{ redis.image }}" + image: "{{ redis_image }}" restart_policy: "always" state: "started" user: "{{ redis_user_uid }}:{{ redis_group_gid }}" command: | redis-server /usr/local/etc/redis/redis.conf volumes: - - "{{ redis.conf_dir }}:/usr/local/etc/redis" - - "{{ redis.data_dir }}:/data" + - "{{ redis_conf_dir }}:/usr/local/etc/redis" + - "{{ redis_data_dir }}:/data" networks: + # TODO: Should this not be parametrized? - name: loadbalancer diff --git a/roles/sram_redis/templates/redis.conf.j2 b/roles/sram_redis/templates/redis.conf.j2 new file mode 100644 index 000000000..159ea9599 --- /dev/null +++ b/roles/sram_redis/templates/redis.conf.j2 @@ -0,0 +1,3 @@ +user {{redis_redis_user}} on +@all ~* &* >{{redis_redis_password}} +maxmemory {{ redis_max_memory }} +maxmemory-policy allkeys-lru diff --git a/roles/sram_sbs/defaults/main.yml b/roles/sram_sbs/defaults/main.yml new file mode 100644 index 000000000..b564e0779 --- /dev/null +++ b/roles/sram_sbs/defaults/main.yml @@ -0,0 +1,166 @@ +--- +sbs_base_domain: "test2.sram.surf.nl" +sbs_ansible_nolog: true +sbs_base_url: "https://{{ sbs_base_domain }}" +sbs_server_image: "ghcr.io/surfscz/sram-sbs-server:main" +sbs_client_image: "ghcr.io/surfscz/sram-sbs-client:main" + +sbs_openidc_timeout: 86400 +sbs_sram_conf_dir: "{{ current_release_appdir }}/sram" + +sbs_work_dir: "{{ sbs_sram_conf_dir }}/sbs" +sbs_git_dir: "{{ sbs_work_dir }}/sbs" +sbs_env_dir: "{{ sbs_work_dir }}/sbs-env" +sbs_conf_dir: "{{ sbs_work_dir }}/config" +sbs_log_dir: "{{ sbs_work_dir }}/log" +sbs_cert_dir: "{{ sbs_work_dir }}/cert" +sbs_apache_conf: "{{ sbs_work_dir }}/sbs.conf" +sbs_nginx_conf: "{{ sbs_work_dir }}/nginx.conf" + +sbs_db_name: "sbs" +sbs_db_user: "sbsrw" +# dbbackup_user: "sbs_backupper" +sbs_migration_user: "sbsmigrate" + +sbs_db_connection: "\ + mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_db_name }}\ + ?ssl=true&charset=utf8mb4" +sbs_db_connection_sbs: "{{ sbs_db_connection | format(sbs_db_user, mysql_passwords.sbs) }}" +sbs_db_connection_migration: "\ + {{ sbs_db_connection | format(sbs_migration_user, mysql_passwords.sbsmigrate) }}" + +sbs_db_secret: secret +sbs_secret_key_suffix: suffix +sbs_encryption_key: encryption_key + +sbs_redis_host: sram-redis +sbs_redis_port: 6379 +sbs_redis_ssl: false +sbs_redis_user: default + +sbs_mail_host: "host.docker.internal" +sbs_mail_port: 25 + +sbs_user: "sbs" +sbs_group: "sbs" + +sbs_session_lifetime: 1440 +sbs_secret_key_suffix: "" + +sbs_oidc_crypto_password: "CHANGEME" +sbs_uid_attribute: "sub" + +sbs_disclaimer_color: "#a29c13" +sbs_disclaimer_label: wsgi + +sbs_urn_namespace: "urn:example:sbs" 
+sbs_eppn_scope: "sbs.example.edu" +sbs_restricted_co_default_org: "example.org" + +sbs_mail_sender_name: "SURF" +sbs_mail_sender_email: "no-reply@localhost" +sbs_exceptions_mail: "root@localhost" + +sbs_support_email: "sram-support@localhost" +sbs_admin_email: "sram-beheer@localhost" +sbs_ticket_email: "sram-support@surf.nl" +sbs_eduteams_email: "eduteams@localhost" + +sbs_suppress_mails: False + +sbs_wiki_link: "https://www.example.org/wiki" + +sbs_cron_hour_of_day: 4 +sbs_seed_allowed: True +sbs_api_keys_enabled: True +sbs_feedback_enabled: True +sbs_audit_trail_notifications_enabled: True +sbs_send_exceptions: False +sbs_send_js_exceptions: False +sbs_second_factor_authentication_required: True +sbs_totp_token_name: "SRAM-example" +sbs_notifications_enabled: True +sbs_invitation_reminders_enabled: True +sbs_invitation_expirations_enabled: True +sbs_open_requests_enabled: True +sbs_scim_sweep: False +sbs_impersonation_allowed: True +sbs_admin_platform_backdoor_totp: True +sbs_past_dates_allowed: True +sbs_mock_scim_enabled: True +sbs_log_to_stdout: True + +sbs_delete_orphaned: True +sbs_suspension_inactive_days: 365 +sbs_suspension_reminder_days: 14 +sbs_suspension_notify_admin: False + +sbs_oidc_config_url: "http://localhost/.well-known/openid-configuration" +sbs_oidc_authz_endpoint: "http://localhost/OIDC/authorization" +sbs_oidc_token_endpoint: "http://localhost/OIDC/token" +sbs_oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" +sbs_oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" +sbs_oidc_redirect_uri: "https://{{sbs_base_domain}}/api/users/resume-session" +sbs_oidc_jwt_audience: "https://localhost" +sbs_continue_eduteams_redirect_uri: "https://localhost/continue" +sbs_oidc_verify_peer: False +sbs_oidc_scopes: + - openid + +sbs_mfa_idp_allowed: false +sbs_eduteams_continue_endpoint: "https://localhost/continue" +sbs_eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" + +sbs_manage_base_enabled: False +sbs_manage_base_url: "https://manage.{{base_domain}}" +sbs_manage_sram_rp_entity_id: "sbs.{{sbs_base_domain}}" +sbs_manage_verify_peer: False + +sbs_idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " +# backup_dir: "{{backup_base}}/sbs" + +sbs_swagger_enabled: true + +sbs_ssid_identity_providers: [] +sbs_surf_secure_id: + environment: "unknown.example.org" + sp_entity_id: "https://sbs.{{sbs_base_domain}}" + acs_url: "https://{{sbs_base_domain}}/api/users/acs" + sa_gw_environment: "sa-gw.unknown.example.org" + sa_idp_certificate: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + priv: | + -----BEGIN RSA PRIVATE KEY----- + abcde + -----END RSA PRIVATE KEY----- + pub: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + +sbs_ssid_authncontext: "\ + http://{{ sbs_surf_secure_id.environment }}/assurance/sfo-level2" +sbs_ssid_entityid: "\ + https://{{ sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" +sbs_ssid_sso_endpoint: "\ + https://{{ sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" + +sbs_mfa_sso_minutes: 10 +sbs_mfa_fallback_enabled: true + +sbs_ldap_url: "ldap://ldap.example.com/dc=example,dc=com" +sbs_ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl" + +sbs_csp_style_hashes: + - 'sha256-0+ANsgYUJdh56RK8gGvTF2vnriYqvFHfWqtA8xXa+bA=' + - 'sha256-3SnfHQolDHbZMbDAPmhrZf1keHiXfj/KJyh2phhFAAY=' + - 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' + - 'sha256-Ng6y+QCkPChG4Q49SIfXB5ToIDcDhITtQNFkDBPpCTw=' + - 
'sha256-orBPipbqpMvkNi+Z+m6qEn0XS6ymmAQE6+FwCNs1FbQ=' + - 'sha256-vFt3L2qLqpJmRpcXGbYr2UVSmgSp9VCUzz2lnqWIATw=' + - 'sha256-SU3XCwbQ/8qgzoGOWCYdkwIr3xRrl5rsvdFcpw8NSiE=' # on /new-service-request + - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request + +sbs_engine_block_api_token: secret diff --git a/roles/sram-sbs/files/yarn.gpg b/roles/sram_sbs/files/yarn.gpg similarity index 100% rename from roles/sram-sbs/files/yarn.gpg rename to roles/sram_sbs/files/yarn.gpg diff --git a/roles/sram-sbs/handlers/main.yml b/roles/sram_sbs/handlers/main.yml similarity index 100% rename from roles/sram-sbs/handlers/main.yml rename to roles/sram_sbs/handlers/main.yml diff --git a/roles/sram-sbs/tasks/main.yml b/roles/sram_sbs/tasks/main.yml similarity index 71% rename from roles/sram-sbs/tasks/main.yml rename to roles/sram_sbs/tasks/main.yml index 6881736ec..58ad32841 100644 --- a/roles/sram-sbs/tasks/main.yml +++ b/roles/sram_sbs/tasks/main.yml @@ -6,7 +6,7 @@ - name: "Create SBS group" group: - name: "{{ sbs.group }}" + name: "{{ sbs_group }}" state: "present" register: "result" @@ -16,12 +16,12 @@ - name: "Create SBS user" user: - name: "{{ sbs.user }}" - group: "{{ sbs.group }}" + name: "{{ sbs_user }}" + group: "{{ sbs_group }}" comment: "User to run SBS service" shell: "/bin/false" password: "!" - home: "{{ sbs.conf_dir }}" + home: "{{ sbs_conf_dir }}" create_home: false state: "present" register: "result" @@ -38,15 +38,15 @@ group: "{{sbs_group_gid}}" mode: "{{item.mode}}" with_items: - - { path: "{{sbs.work_dir}}", mode: "0755" } - - { path: "{{sbs.conf_dir}}", mode: "0755" } - - { path: "{{sbs.conf_dir}}/saml", mode: "0755" } - - { path: "{{sbs.log_dir}}", mode: "0775" } - - { path: "{{sbs.cert_dir}}", mode: "0755" } + - { path: "{{sbs_work_dir}}", mode: "0755" } + - { path: "{{sbs_conf_dir}}", mode: "0755" } + - { path: "{{sbs_conf_dir}}/saml", mode: "0755" } + - { path: "{{sbs_log_dir}}", mode: "0775" } + - { path: "{{sbs_cert_dir}}", mode: "0755" } - name: "Fix file permissions" file: - path: "{{sbs.log_dir}}/{{item}}" + path: "{{sbs_log_dir}}/{{item}}" owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "0664" @@ -59,15 +59,15 @@ # Create dummy file in certs dir to pacify container pre-init script # https://github.com/SURFscz/SBS/pull/2312 -- name: "Touch file in {{ sbs.cert_dir }}" +- name: "Touch file in {{ sbs_cert_dir }}" ansible.builtin.copy: content: "" - dest: "{{sbs.cert_dir}}/dummy" + dest: "{{sbs_cert_dir}}/dummy" - name: "Create SBS config files" template: src: "{{item.name}}.j2" - dest: "{{ sbs.conf_dir }}/{{item.name}}" + dest: "{{ sbs_conf_dir }}/{{item.name}}" owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "{{item.mode}}" @@ -76,15 +76,15 @@ - { name: "alembic.ini", mode: "0644" } - { name: "disclaimer.css", mode: "0644" } - { name: "sbs-apache.conf", mode: "0644" } - no_log: "{{sbs.ansible_nolog}}" + no_log: "{{sbs_ansible_nolog}}" notify: "Restart sbs containers" - name: "Pull sbs image" community.docker.docker_image_pull: name: "{{ item }}" with_items: - - "{{ sbs.client_image }}" - - "{{ sbs.server_image }}" + - "{{ sbs_client_image }}" + - "{{ sbs_server_image }}" register: "sbs_image" # We need to remove sram-static so it gets repopulated @@ -98,14 +98,13 @@ with_items: - "sbs-client" - "sbs-server" - when: "sbs_image is changed" - name: "Run SBS migrations" throttle: 1 community.docker.docker_container: name: "sram-sbs-migration" - image: "{{ sbs.server_image }}" + image: "{{ sbs_server_image }}" pull: "never" state: 
"started" restart_policy: "no" @@ -117,16 +116,25 @@ # don't actually run the server command: "/bin/true" volumes: - - "{{ sbs.conf_dir }}:/sbs-config" - - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" - - "{{ sbs.log_dir }}:/opt/sbs/log" + - "{{ sbs_conf_dir }}:/sbs-config" + - "{{ sbs_cert_dir }}:/sbs-config/cert:ro" + - "{{ sbs_log_dir }}:/opt/sbs/log" networks: + # TODO: Should we parametrize this? - name: "loadbalancer" register: "result" - failed_when: "'container' not in result or result.container.State.ExitCode != 0" - changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" + # failed_when: "'container' not in result or result.container.State.ExitCode != 0" + # changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" notify: "Restart sbs containers" +# What's with result.container? +- name: debug result + ansible.builtin.debug: + msg: "{{ result }}" + +# - name: Stop +# meta: end_play + # Remove the migration container; we can do that with auto_remove, because if we use that, ansible # will not save the output in result - name: "Remove migration container" @@ -139,24 +147,24 @@ - name: "Start sbs client container" community.docker.docker_container: name: "sram-sbs-client" - image: "{{ sbs.client_image }}" + image: "{{ sbs_client_image }}" pull: "never" restart_policy: "always" state: "started" volumes: - - "{{ sbs.conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" - - "{{ sbs.conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro" + - "{{ sbs_conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" + - "{{ sbs_conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro" networks: - name: "loadbalancer" labels: - traefik.http.routers.sbsclient.rule: "Host(`{{ sbs.base_domain }}`)" + traefik.http.routers.sbsclient.rule: "Host(`{{ sbs_base_domain }}`)" traefik.http.routers.sbsclient.tls: "true" traefik.enable: "true" - name: "Start SBS server container" community.docker.docker_container: name: "sram-sbs-server" - image: "{{ sbs.server_image }}" + image: "{{ sbs_server_image }}" restart_policy: "always" state: "started" env: @@ -167,9 +175,9 @@ RUN_MIGRATIONS: "0" pull: "never" volumes: - - "{{ sbs.conf_dir }}:/sbs-config" - - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" - - "{{ sbs.log_dir }}:/opt/sbs/log" + - "{{ sbs_conf_dir }}:/sbs-config" + - "{{ sbs_cert_dir }}:/sbs-config/cert:ro" + - "{{ sbs_log_dir }}:/opt/sbs/log" - "/tmp/ci-runner:/tmp/ci-runner" networks: - name: "loadbalancer" diff --git a/roles/sram-sbs/templates/alembic.ini.j2 b/roles/sram_sbs/templates/alembic.ini.j2 similarity index 96% rename from roles/sram-sbs/templates/alembic.ini.j2 rename to roles/sram_sbs/templates/alembic.ini.j2 index 9ccd51979..7849e4f89 100644 --- a/roles/sram-sbs/templates/alembic.ini.j2 +++ b/roles/sram_sbs/templates/alembic.ini.j2 @@ -35,7 +35,7 @@ script_location = migrations # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = {{ sbs.db_connection_migration }} +sqlalchemy.url = {{ sbs_db_connection_migration }} # Logging configuration [loggers] diff --git a/roles/sram-sbs/templates/config.yml.j2 b/roles/sram_sbs/templates/config.yml.j2 similarity index 58% rename from roles/sram-sbs/templates/config.yml.j2 rename to roles/sram_sbs/templates/config.yml.j2 index 7d4c92bf4..9f597debe 100644 --- a/roles/sram-sbs/templates/config.yml.j2 +++ b/roles/sram_sbs/templates/config.yml.j2 @@ -1,110 +1,110 @@ --- database: - uri: {{ sbs.db_connection_sbs }} + uri: {{ 
sbs_db_connection_sbs }} redis: -{% if env == 'test2' %} - uri: "redis://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{sbs.redis_host}}/" +{% if environment_shortname == 'test2' %} + uri: "redis://{{ sbs_redis_user }}:{{ sbs_redis_password }}@{{sbs_redis_host}}/" {% else %} - uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" + uri: "redis{% if sbs_redis_ssl %}s{% endif %}://{{ sbs_redis_user }}:{{ sbs_redis_password }}@{{ sbs_redis_host }}:{{ sbs_redis_port }}/" {% endif %} # add a per-release suffix here to invalidate sessions on new releases -secret_key: {{ sbs.db_secret }}{{sbs.secret_key_suffix}} +secret_key: {{ sbs_db_secret }}{{sbs_secret_key_suffix}} # Must be a base64 encoded key of 128, 192, or 256 bits. # Generate: base64.b64encode(os.urandom(256 // 8)).decode() -encryption_key: {{ sbs.encryption_key }} +encryption_key: {{ sbs_encryption_key }} # Lifetime of session in minutes (one day is 60 * 24) -permanent_session_lifetime: {{ sbs.session_lifetime }} +permanent_session_lifetime: {{ sbs_session_lifetime }} logging: - log_to_stdout: {{ sbs.log_to_stdout }} + log_to_stdout: {{ sbs_log_to_stdout }} # Valid scopes are "READ" and "WRITE" api_users: -{% for name, user in sbs.api_users.items() %} +{% for name, user in sbs_api_users.items() %} - name: "{{ name }}" password: "{{ user.password }}" scopes: "[ {{ user.scopes | join(', ') }} ]" {% endfor %} oidc: - client_id: "{{ sbs.oidc_client_id }}" - client_secret: "{{ sbs.oidc_client_secret }}" - audience: "{{ sbs.oidc_jwt_audience }}" - verify_peer: {{ sbs.oidc_verify_peer }} - authorization_endpoint: "{{ sbs.oidc_authz_endpoint}}" - token_endpoint: "{{ sbs.oidc_token_endpoint }}" - userinfo_endpoint: "{{ sbs.oidc_userinfo_endpoint }}" - jwks_endpoint: "{{ sbs.oidc_jwks_endpoint }}" + client_id: "{{ sbs_oidc_client_id }}" + client_secret: "{{ sbs_oidc_client_secret }}" + audience: "{{ sbs_oidc_jwt_audience }}" + verify_peer: {{ sbs_oidc_verify_peer }} + authorization_endpoint: "{{ sbs_oidc_authz_endpoint}}" + token_endpoint: "{{ sbs_oidc_token_endpoint }}" + userinfo_endpoint: "{{ sbs_oidc_userinfo_endpoint }}" + jwks_endpoint: "{{ sbs_oidc_jwks_endpoint }}" #Note that the paths for these uri's is hardcoded and only domain and port differ per environment - redirect_uri: "{{ sbs.oidc_redirect_uri }}" - continue_eduteams_redirect_uri: "{{ sbs.eduteams_continue_endpoint }}" - continue_eb_redirect_uri: "{{ sbs.eb_continue_endpoint }}" - second_factor_authentication_required: {{ sbs.second_factor_authentication_required }} - totp_token_name: "{{ sbs.totp_token_name }}" + redirect_uri: "{{ sbs_oidc_redirect_uri }}" + continue_eduteams_redirect_uri: "{{ sbs_eduteams_continue_endpoint }}" + continue_eb_redirect_uri: "{{ sbs_eb_continue_endpoint }}" + second_factor_authentication_required: {{ sbs_second_factor_authentication_required }} + totp_token_name: "{{ sbs_totp_token_name }}" # The service_id in the proxy_authz endpoint when logging into SBS. 
Most likely to equal the oidc.client_id - sram_service_entity_id: "{{ sbs.oidc_client_id }}" - scopes: {{ sbs.oidc_scopes }} + sram_service_entity_id: "{{ sbs_oidc_client_id }}" + scopes: {{ sbs_oidc_scopes }} base_scope: "{{ base_domain }}" -entitlement_group_namespace: "{{ sbs.urn_namespace }}" -eppn_scope: " {{ sbs.eppn_scope }}" +entitlement_group_namespace: "{{ sbs_urn_namespace }}" +eppn_scope: " {{ sbs_eppn_scope }}" scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" {% if env == "prd" %} environment_disclaimer: "" {% else %} -environment_disclaimer: "{{ sbs.disclaimer_label }}" +environment_disclaimer: "{{ sbs_disclaimer_label }}" {% endif %} # All services in the white list can be requested in the create-restricted-co API # The default organisation is a fallback for when the administrator has no schac_home_org restricted_co: services_white_list: [ "https://cloud" ] - default_organisation: "{{ sbs.restricted_co_default_org }}" + default_organisation: "{{ sbs_restricted_co_default_org }}" mail: - host: {{ sbs.mail_host }} - port: {{ sbs.mail_port }} - sender_name: {{ sbs.mail_sender_name }} - sender_email: {{ sbs.mail_sender_email }} - suppress_sending_mails: {{ sbs.suppress_mails }} - info_email: {{ sbs.support_email }} - beheer_email: {{ sbs.admin_email }} - ticket_email: {{ sbs.ticket_email }} - eduteams_email: {{ sbs.eduteams_email }} + host: {{ sbs_mail_host }} + port: {{ sbs_mail_port }} + sender_name: {{ sbs_mail_sender_name }} + sender_email: {{ sbs_mail_sender_email }} + suppress_sending_mails: {{ sbs_suppress_mails }} + info_email: {{ sbs_support_email }} + beheer_email: {{ sbs_admin_email }} + ticket_email: {{ sbs_ticket_email }} + eduteams_email: {{ sbs_eduteams_email }} # Do we mail a summary of new Organizations and Services to the beheer_email? 
- audit_trail_notifications_enabled: {{ sbs.audit_trail_notifications_enabled }} + audit_trail_notifications_enabled: {{ sbs_audit_trail_notifications_enabled }} account_deletion_notifications_enabled: True - send_exceptions: {{ sbs.send_exceptions }} - send_js_exceptions: {{ sbs.send_js_exceptions }} - send_exceptions_recipients: [ "{{ sbs.exceptions_mail }}" ] + send_exceptions: {{ sbs_send_exceptions }} + send_js_exceptions: {{ sbs_send_js_exceptions }} + send_exceptions_recipients: [ "{{ sbs_exceptions_mail }}" ] environment: "{{ base_domain }}" manage: - enabled: {{ sbs.manage_base_enabled }} + enabled: {{ sbs_manage_base_enabled }} # The entity_id of the SRAM RP in Manage for API retrieval, e.g "sbs.test2.sram.surf.nl" - sram_rp_entity_id: "{{ sbs.manage_sram_rp_entity_id }}" - base_url: "{{ sbs.manage_base_url }}" - user: "{{ sbs.manage_user }}" - password: "{{ sbs.manage_password }}" - verify_peer: {{ sbs.manage_verify_peer }} + sram_rp_entity_id: "{{ sbs_manage_sram_rp_entity_id }}" + base_url: "{{ sbs_manage_base_url }}" + user: "{{ sbs_manage_user }}" + password: "{{ sbs_manage_password }}" + verify_peer: {{ sbs_manage_verify_peer }} aup: version: 1 url_aup_en: "https://edu.nl/6wb63" url_aup_nl: "https://edu.nl/6wb63" -base_url: {{ sbs.base_url }} -socket_url: {{ sbs.base_url }} -base_server_url: {{ sbs.base_url }} -wiki_link: {{ sbs.wiki_link }} +base_url: {{ sbs_base_url }} +socket_url: {{ sbs_base_url }} +base_server_url: {{ sbs_base_url }} +wiki_link: {{ sbs_wiki_link }} admin_users: -{% for admin_user in sbs.admin_users %} +{% for admin_user in sbs_admin_users %} - uid: "{{ admin_user.uid }}" {% endfor %} @@ -117,17 +117,17 @@ organisation_categories: - "SURF" feature: - seed_allowed: {{ sbs.seed_allowed }} - api_keys_enabled: {{ sbs.api_keys_enabled }} - feedback_enabled: {{ sbs.feedback_enabled }} - impersonation_allowed: {{ sbs.impersonation_allowed }} - sbs_swagger_enabled: {{ sbs.swagger_enabled }} - admin_platform_backdoor_totp: {{ sbs.admin_platform_backdoor_totp }} - past_dates_allowed: {{ sbs.past_dates_allowed }} - mock_scim_enabled: {{ sbs.mock_scim_enabled }} + seed_allowed: {{ sbs_seed_allowed }} + api_keys_enabled: {{ sbs_api_keys_enabled }} + feedback_enabled: {{ sbs_feedback_enabled }} + impersonation_allowed: {{ sbs_impersonation_allowed }} + sbs_swagger_enabled: {{ sbs_swagger_enabled }} + admin_platform_backdoor_totp: {{ sbs_admin_platform_backdoor_totp }} + past_dates_allowed: {{ sbs_past_dates_allowed }} + mock_scim_enabled: {{ sbs_mock_scim_enabled }} metadata: - idp_url: "{{sbs.idp_metadata_url}}" + idp_url: "{{sbs_idp_metadata_url}}" parse_at_startup: True # No need for environment specific values scope_override: @@ -136,7 +136,7 @@ metadata: platform_admin_notifications: # Do we daily check for CO join_requests and CO requests and send a summary mail to beheer_email? enabled: False - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # How long before we include open join_requests in the summary outstanding_join_request_days_threshold: 7 # How long before we include open CO requests in the summary @@ -144,8 +144,8 @@ platform_admin_notifications: user_requests_retention: # Do we daily check for CO join_requests and CO requests and delete approved and denied? 
- enabled: {{ sbs.notifications_enabled }} - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + enabled: {{ sbs_notifications_enabled }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # How long before we delete approved / denied join_requests outstanding_join_request_days_threshold: 90 # How long before we delete approved / denied CO requests @@ -153,24 +153,24 @@ user_requests_retention: # The retention config determines how long users may be inactive, how long the reminder email is valid and when do we resent the magic link retention: - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # how many days of inactivity before a user is suspended # 0 allows for any last_login_date in the past to trigger suspension notification - allowed_inactive_period_days: {{ sbs.suspension_inactive_days }} + allowed_inactive_period_days: {{ sbs_suspension_inactive_days }} # how many days before suspension do we send a warning # -1 will suspend notified users on second suspension cron - reminder_suspend_period_days: {{ sbs.suspension_reminder_days }} + reminder_suspend_period_days: {{ sbs_suspension_reminder_days }} # how many days after suspension do we delete the account remove_suspended_users_period_days: 90 # how many days before deletion do we send a reminder reminder_expiry_period_days: 7 # whether to send a notification of the result of the retention process to the beheer_email - admin_notification_mail: {{ sbs.suspension_notify_admin }} + admin_notification_mail: {{ sbs_suspension_notify_admin }} collaboration_expiration: # Do we daily check for CO's that will be deleted because they have been expired? - enabled: {{ sbs.notifications_enabled }} - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + enabled: {{ sbs_notifications_enabled }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # How long after expiration do we actually delete expired collaborations expired_collaborations_days_threshold: 90 # How many days before actual expiration do we mail the organisation members @@ -178,8 +178,8 @@ collaboration_expiration: collaboration_suspension: # Do we daily check for CO's that will be suspended because of inactivity? - enabled: {{ sbs.notifications_enabled }} - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + enabled: {{ sbs_notifications_enabled }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # After how many days of inactivity do we suspend collaborations collaboration_inactivity_days_threshold: 365 # How many days before actual suspension do we mail the organisation members @@ -189,8 +189,8 @@ collaboration_suspension: membership_expiration: # Do we daily check for memberships that will be deleted because they have been expired? - enabled: {{ sbs.notifications_enabled }} - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + enabled: {{ sbs_notifications_enabled }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # How long after expiration do we actually delete expired memberships expired_memberships_days_threshold: 90 # How many days before actual expiration do we mail the co admin and member @@ -198,15 +198,15 @@ membership_expiration: invitation_reminders: # Do we daily check for invitations that need a reminder? - enabled: {{ sbs.invitation_reminders_enabled }} - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + enabled: {{ sbs_invitation_reminders_enabled }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # How many days before expiration of an invitation do we remind the user? 
invitation_reminders_threshold: 5 invitation_expirations: # Do we daily check for invitations that are expired / accepted and are eligible for deletion ? - enabled: {{ sbs.invitation_expirations_enabled }} - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + enabled: {{ sbs_invitation_expirations_enabled }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # How long after expiration of an invitation do we delete the invitation? nbr_days_remove_expired_invitations: 10 # How long after expiration of an API created invitation do we delete the invitation? @@ -214,38 +214,38 @@ invitation_expirations: orphan_users: # Do we daily check for users that are orphans soo they can be deleted? - enabled: {{ sbs.delete_orphaned }} - cron_hour_of_day: {{ sbs.cron_hour_of_day }} + enabled: {{ sbs_delete_orphaned }} + cron_hour_of_day: {{ sbs_cron_hour_of_day }} # How long after created do we delete orphan users delete_days_threshold: 14 open_requests: # Do we weekly check for all open requests? - enabled: {{ sbs.open_requests_enabled }} + enabled: {{ sbs_open_requests_enabled }} cron_day_of_week: 1 scim_sweep: # Do we enable scim sweeps? - enabled: {{ sbs.scim_sweep }} + enabled: {{ sbs_scim_sweep }} # How often do we check if scim sweeps are needed per service cron_minutes_expression: "*/15" ldap: - url: "{{ sbs.ldap_url }}" - bind_account: "{{ sbs.ldap_bind_account }}" + url: "{{ sbs_ldap_url }}" + bind_account: "{{ sbs_ldap_bind_account }}" # A MFA login in a different flow is valid for X minutes -mfa_sso_time_in_minutes: {{sbs.mfa_sso_minutes}} +mfa_sso_time_in_minutes: {{sbs_mfa_sso_minutes}} # whether to fall back to TOTP MFA -mfa_fallback_enabled: {{sbs.mfa_fallback_enabled}} +mfa_fallback_enabled: {{sbs_mfa_fallback_enabled}} # Lower case entity ID's and schac_home allowed skipping MFA. 
# Note that for a login directly into SRAM only schac_home can be used as the entity_idp of the IdP is unknown -mfa_idp_allowed: {{sbs.mfa_idp_allowed}} +mfa_idp_allowed: {{sbs_mfa_idp_allowed}} # Lower case schachome organisations / entity ID's where SURFSecure ID is used for step-up -ssid_identity_providers: {{sbs.ssid_identity_providers}} +ssid_identity_providers: {{sbs_ssid_identity_providers}} ssid_config_folder: saml @@ -256,9 +256,9 @@ rate_limit_totp_guesses_per_30_seconds: 10 # The uid's of user that will never be suspended or deleted excluded_user_accounts: -{% for excluded_user in sbs.excluded_users %} +{% for excluded_user in sbs_excluded_users %} - uid: "{{ excluded_user.uid }}" {% endfor %} engine_block: - api_token: {{ sbs.engine_block_api_token }} + api_token: {{ sbs_engine_block_api_token }} diff --git a/roles/sram_sbs/templates/disclaimer.css.j2 b/roles/sram_sbs/templates/disclaimer.css.j2 new file mode 100644 index 000000000..0211d17d8 --- /dev/null +++ b/roles/sram_sbs/templates/disclaimer.css.j2 @@ -0,0 +1,6 @@ +{% if env!="prd" -%} +body::after { + background: {{ sbs_disclaimer_color }}; + content: "{{ sbs_disclaimer_label }}"; +} +{% endif %} diff --git a/roles/sram-sbs/templates/saml_advanced_settings.json.j2 b/roles/sram_sbs/templates/saml_advanced_settings.json.j2 similarity index 100% rename from roles/sram-sbs/templates/saml_advanced_settings.json.j2 rename to roles/sram_sbs/templates/saml_advanced_settings.json.j2 diff --git a/roles/sram-sbs/templates/saml_settings.json.j2 b/roles/sram_sbs/templates/saml_settings.json.j2 similarity index 100% rename from roles/sram-sbs/templates/saml_settings.json.j2 rename to roles/sram_sbs/templates/saml_settings.json.j2 diff --git a/roles/sram-sbs/templates/sbs-apache.conf.j2 b/roles/sram_sbs/templates/sbs-apache.conf.j2 similarity index 98% rename from roles/sram-sbs/templates/sbs-apache.conf.j2 rename to roles/sram_sbs/templates/sbs-apache.conf.j2 index f0140a845..af8c32ce7 100644 --- a/roles/sram-sbs/templates/sbs-apache.conf.j2 +++ b/roles/sram_sbs/templates/sbs-apache.conf.j2 @@ -1,4 +1,4 @@ -ServerName {{ sbs.base_domain }} +ServerName {{ sbs_base_domain }} #ErrorLog /proc/self/fd/2 #CustomLog /proc/self/fd/1 common DocumentRoot /opt/sbs/client/dist diff --git a/roles/sram-sbs/templates/sbs.service.j2 b/roles/sram_sbs/templates/sbs.service.j2 similarity index 100% rename from roles/sram-sbs/templates/sbs.service.j2 rename to roles/sram_sbs/templates/sbs.service.j2 From 1e8614e65c3d171542b77861253c7adb7932b28f Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 9 Apr 2026 18:31:49 +0200 Subject: [PATCH 66/73] WIP --- roles/sram_sbs/tasks/main.yml | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/roles/sram_sbs/tasks/main.yml b/roles/sram_sbs/tasks/main.yml index 58ad32841..f5f16e117 100644 --- a/roles/sram_sbs/tasks/main.yml +++ b/roles/sram_sbs/tasks/main.yml @@ -1,9 +1,4 @@ --- -# - name: "Initialize database" -# throttle: 1 -# import_tasks: "database_init.yml" -# when: "is_dev" - - name: "Create SBS group" group: name: "{{ sbs_group }}" @@ -123,18 +118,10 @@ # TODO: Should we parametrize this? 
- name: "loadbalancer" register: "result" - # failed_when: "'container' not in result or result.container.State.ExitCode != 0" - # changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" + failed_when: "'container' not in result or result.container.State.ExitCode != 0" + changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output | default('')" notify: "Restart sbs containers" -# What's with result.container? -- name: debug result - ansible.builtin.debug: - msg: "{{ result }}" - -# - name: Stop -# meta: end_play - # Remove the migration container; we can do that with auto_remove, because if we use that, ansible # will not save the output in result - name: "Remove migration container" From 2d131ea6312e562d5aa0f9df4f026920f6c897b2 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 16 Apr 2026 13:38:58 +0200 Subject: [PATCH 67/73] Remove SBS cert dir --- roles/sram_sbs/defaults/main.yml | 1 - roles/sram_sbs/tasks/main.yml | 89 +++++++++++++------------------- 2 files changed, 35 insertions(+), 55 deletions(-) diff --git a/roles/sram_sbs/defaults/main.yml b/roles/sram_sbs/defaults/main.yml index b564e0779..fe8696869 100644 --- a/roles/sram_sbs/defaults/main.yml +++ b/roles/sram_sbs/defaults/main.yml @@ -13,7 +13,6 @@ sbs_git_dir: "{{ sbs_work_dir }}/sbs" sbs_env_dir: "{{ sbs_work_dir }}/sbs-env" sbs_conf_dir: "{{ sbs_work_dir }}/config" sbs_log_dir: "{{ sbs_work_dir }}/log" -sbs_cert_dir: "{{ sbs_work_dir }}/cert" sbs_apache_conf: "{{ sbs_work_dir }}/sbs.conf" sbs_nginx_conf: "{{ sbs_work_dir }}/nginx.conf" diff --git a/roles/sram_sbs/tasks/main.yml b/roles/sram_sbs/tasks/main.yml index f5f16e117..c2cae754f 100644 --- a/roles/sram_sbs/tasks/main.yml +++ b/roles/sram_sbs/tasks/main.yml @@ -37,7 +37,6 @@ - { path: "{{sbs_conf_dir}}", mode: "0755" } - { path: "{{sbs_conf_dir}}/saml", mode: "0755" } - { path: "{{sbs_log_dir}}", mode: "0775" } - - { path: "{{sbs_cert_dir}}", mode: "0755" } - name: "Fix file permissions" file: @@ -52,13 +51,6 @@ - "sbs.log" - "sbs.debug.log" -# Create dummy file in certs dir to pacify container pre-init script -# https://github.com/SURFscz/SBS/pull/2312 -- name: "Touch file in {{ sbs_cert_dir }}" - ansible.builtin.copy: - content: "" - dest: "{{sbs_cert_dir}}/dummy" - - name: "Create SBS config files" template: src: "{{item.name}}.j2" @@ -82,54 +74,44 @@ - "{{ sbs_server_image }}" register: "sbs_image" -# We need to remove sram-static so it gets repopulated -# with new SBS image static content -- name: "Clean up old containers" +- name: "Migration" + # For some reason --check breaks this block + when: "sbs_image is changed and not ansible_check_mode" block: - - name: "Stop and remove sbs and sbs-server containers" + - name: "Run SBS migrations" + throttle: 1 community.docker.docker_container: - name: "{{ item }}" - state: "absent" - with_items: - - "sbs-client" - - "sbs-server" - when: "sbs_image is changed" - -- name: "Run SBS migrations" - throttle: 1 - community.docker.docker_container: - name: "sram-sbs-migration" - image: "{{ sbs_server_image }}" - pull: "never" - state: "started" - restart_policy: "no" - detach: false - env: - RUNAS_UID: "{{ sbs_user_uid | string }}" - RUNAS_GID: "{{ sbs_group_gid | string }}" - MIGRATIONS_ONLY: "1" - # don't actually run the server - command: "/bin/true" - volumes: - - "{{ sbs_conf_dir }}:/sbs-config" - - "{{ sbs_cert_dir }}:/sbs-config/cert:ro" - - "{{ sbs_log_dir }}:/opt/sbs/log" - networks: - # TODO: Should we parametrize this? 
- - name: "loadbalancer" - register: "result" - failed_when: "'container' not in result or result.container.State.ExitCode != 0" - changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output | default('')" - notify: "Restart sbs containers" + name: "sram-sbs-migration" + image: "{{ sbs_server_image }}" + pull: "never" + state: "started" + restart_policy: "no" + detach: false + env: + RUNAS_UID: "{{ sbs_user_uid | string }}" + RUNAS_GID: "{{ sbs_group_gid | string }}" + MIGRATIONS_ONLY: "1" + # don't actually run the server + command: "/bin/true" + volumes: + - "{{ sbs_conf_dir }}:/sbs-config" + - "{{ sbs_log_dir }}:/opt/sbs/log" + networks: + # TODO: Should we parametrize this? + - name: "loadbalancer" + register: "result" + failed_when: "'container' not in result or result.container.State.ExitCode != 0" + changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output | default('')" + notify: "Restart sbs containers" -# Remove the migration container; we can do that with auto_remove, because if we use that, ansible -# will not save the output in result -- name: "Remove migration container" - community.docker.docker_container: - name: "sram-sbs-migration" - state: "absent" - # TODO: fix this by only running this if "sbs_image is changed" - changed_when: false + # Remove the migration container; we can do that with auto_remove, because if we use that, ansible + # will not save the output in result + - name: "Remove migration container" + community.docker.docker_container: + name: "sram-sbs-migration" + state: "absent" + # TODO: fix this by only running this if "sbs_image is changed" + changed_when: false - name: "Start sbs client container" community.docker.docker_container: @@ -163,7 +145,6 @@ pull: "never" volumes: - "{{ sbs_conf_dir }}:/sbs-config" - - "{{ sbs_cert_dir }}:/sbs-config/cert:ro" - "{{ sbs_log_dir }}:/opt/sbs/log" - "/tmp/ci-runner:/tmp/ci-runner" networks: From 26d52db24e747da03bbff98eab015ff2303c084b Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Tue, 21 Apr 2026 14:51:59 +0200 Subject: [PATCH 68/73] Add Engine-SBS integration --- roles/engine/defaults/main.yml | 1 - roles/engine/tasks/main.yml | 2 +- roles/engine/templates/parameters.yml.j2 | 4 ++-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/roles/engine/defaults/main.yml b/roles/engine/defaults/main.yml index 3721e2f54..ca58ea135 100644 --- a/roles/engine/defaults/main.yml +++ b/roles/engine/defaults/main.yml @@ -78,7 +78,6 @@ engine_stepup_gateway_sfo_entity_id: "https://{{ engine_stepup_gateway_domain }} engine_stepup_gateway_sfo_sso_location: "https://{{ engine_stepup_gateway_domain }}/second-factor-only/single-sign-on" # SBS interrupt settings -engine_sbs_base_url: "sbs.{{ base_domain }}" engine_sbs_attributes_allowed: - 'urn:mace:dir:attribute-def:eduPersonEntitlement' - 'urn:mace:dir:attribute-def:uid' diff --git a/roles/engine/tasks/main.yml b/roles/engine/tasks/main.yml index c75ece132..2776c4b4c 100644 --- a/roles/engine/tasks/main.yml +++ b/roles/engine/tasks/main.yml @@ -208,7 +208,7 @@ PHP_MEMORY_LIMIT: "{{ engine_php_memory }}" APP_ENV: "prod" APP_SECRET: "{{ engine_parameters_secret }}" - APP_DEBUG: "{{ engine_debug | bool | int }}" + APP_DEBUG: "{{ engine_debug | bool | int | string }}" etc_hosts: host.docker.internal: host-gateway mounts: diff --git a/roles/engine/templates/parameters.yml.j2 b/roles/engine/templates/parameters.yml.j2 index 526104405..0c0d077f5 100644 --- a/roles/engine/templates/parameters.yml.j2 +++ 
b/roles/engine/templates/parameters.yml.j2 @@ -316,8 +316,8 @@ parameters: ########################################################################################## ## SBS external authorization/attribute enrichtment ########################################################################################## - sram.api_token: "{{ engine_sbs_api_token | default('') }}" - sram.base_url: "https://{{ engine_sbs_base_url }}/api/users/" + sram.api_token: "{{ sbs_engine_block_api_token | default('') }}" + sram.base_url: "https://{{ sbs_base_domain | default('sbs.example.org') }}/api/users/" sram.authz_location: "authz_eb" sram.attributes_location: "attributes_eb" sram.interrupt_location: "interrupt" From 82f50f7bafe38168d63f9d9879f6ff1f837bd196 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 30 Apr 2026 15:12:22 +0200 Subject: [PATCH 69/73] Remove rsyslog dust --- roles/sram_ldap/files/logrotate_slapd | 13 ------------- roles/sram_ldap/files/rsyslog_slapd.conf | 2 -- roles/sram_ldap/handlers/main.yml | 18 ------------------ 3 files changed, 33 deletions(-) delete mode 100644 roles/sram_ldap/files/logrotate_slapd delete mode 100644 roles/sram_ldap/files/rsyslog_slapd.conf diff --git a/roles/sram_ldap/files/logrotate_slapd b/roles/sram_ldap/files/logrotate_slapd deleted file mode 100644 index f225a935f..000000000 --- a/roles/sram_ldap/files/logrotate_slapd +++ /dev/null @@ -1,13 +0,0 @@ -/var/log/slapd.log -{ - rotate 7 - daily - missingok - notifempty - delaycompress - compress - postrotate - invoke-rc.d rsyslog rotate > /dev/null - endscript -} - diff --git a/roles/sram_ldap/files/rsyslog_slapd.conf b/roles/sram_ldap/files/rsyslog_slapd.conf deleted file mode 100644 index a3435617f..000000000 --- a/roles/sram_ldap/files/rsyslog_slapd.conf +++ /dev/null @@ -1,2 +0,0 @@ -if $programname == 'slapd' then /var/log/slapd.log -if $programname == 'slapd' then ~ diff --git a/roles/sram_ldap/handlers/main.yml b/roles/sram_ldap/handlers/main.yml index 0510176a6..737cbb41f 100644 --- a/roles/sram_ldap/handlers/main.yml +++ b/roles/sram_ldap/handlers/main.yml @@ -1,22 +1,4 @@ --- -- name: restart rsyslog - service: - name: rsyslog - state: restarted - listen: "restart rsyslog" - -- name: systemd daemon-reload - systemd: - name: slapd - daemon_reload: yes - -- name: restart LDAP - systemd: - name: slapd - state: restarted - enabled: true - daemon-reload: true - - name: Restart the ldap container community.docker.docker_container: name: "{{ containers.ldap }}" From 9a3c379b094347ef0f690e154a4d8800af0b60b8 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Mon, 4 May 2026 16:38:31 +0200 Subject: [PATCH 70/73] WIP --- roles/sram_ldap/defaults/main.yml | 50 ++-- roles/sram_ldap/handlers/main.yml | 2 +- roles/sram_ldap/tasks/admins.yml | 56 ++-- roles/sram_ldap/tasks/main.yml | 174 ++++-------- roles/sram_ldap/templates/slapd.service.j2 | 20 -- roles/sram_metadata/defaults/main.yml | 78 ------ roles/sram_metadata/files/01_idps.fd | 23 -- roles/sram_metadata/files/02_backend.fd | 14 - roles/sram_metadata/files/03_frontend.fd | 14 - roles/sram_metadata/files/surf.png | Bin 16016 -> 0 bytes roles/sram_metadata/files/surf.svg | 24 -- roles/sram_metadata/files/surf_bimi.svg | 15 - roles/sram_metadata/files/surfconext.crt | 3 - roles/sram_metadata/files/transform.xslt | 47 ---- .../sram_metadata/files/transform_proxy.xslt | 50 ---- roles/sram_metadata/handlers/main.yml | 19 -- roles/sram_metadata/tasks/http.yml | 48 ---- roles/sram_metadata/tasks/main.yml | 49 ---- roles/sram_metadata/tasks/pyff.yml | 106 -------- 
roles/sram_metadata/templates/index.html.j2 | 11 - .../templates/pyff-metadata.service.j2 | 12 - .../templates/pyff-metadata.timer.j2 | 8 - roles/sram_metadata/templates/vhosts.conf.j2 | 15 - roles/sram_midproxy/defaults/main.yml | 13 +- roles/sram_midproxy/tasks/main.yml | 18 +- roles/sram_plsc/defaults/main.yml | 22 +- roles/sram_plsc/handlers/main.yml | 12 - roles/sram_plsc/tasks/main.yml | 10 +- roles/sram_plsc/templates/plsc.yml.j2 | 24 +- roles/sram_redis/defaults/main.yml | 16 +- roles/sram_redis/tasks/main.yml | 28 +- roles/sram_redis/templates/redis.conf.j2 | 4 +- roles/sram_sbs/defaults/main.yml | 257 +++++++++--------- roles/sram_sbs/tasks/main.yml | 56 ++-- roles/sram_sbs/templates/alembic.ini.j2 | 2 +- roles/sram_sbs/templates/config.yml.j2 | 178 ++++++------ roles/sram_sbs/templates/disclaimer.css.j2 | 4 +- .../templates/saml_advanced_settings.json.j2 | 2 +- .../sram_sbs/templates/saml_settings.json.j2 | 14 +- roles/sram_sbs/templates/sbs-apache.conf.j2 | 2 +- roles/sram_sbs/templates/sbs.service.j2 | 32 --- 41 files changed, 420 insertions(+), 1112 deletions(-) delete mode 100644 roles/sram_ldap/templates/slapd.service.j2 delete mode 100644 roles/sram_metadata/defaults/main.yml delete mode 100644 roles/sram_metadata/files/01_idps.fd delete mode 100644 roles/sram_metadata/files/02_backend.fd delete mode 100644 roles/sram_metadata/files/03_frontend.fd delete mode 100644 roles/sram_metadata/files/surf.png delete mode 100644 roles/sram_metadata/files/surf.svg delete mode 100644 roles/sram_metadata/files/surf_bimi.svg delete mode 100644 roles/sram_metadata/files/surfconext.crt delete mode 100644 roles/sram_metadata/files/transform.xslt delete mode 100644 roles/sram_metadata/files/transform_proxy.xslt delete mode 100644 roles/sram_metadata/handlers/main.yml delete mode 100644 roles/sram_metadata/tasks/http.yml delete mode 100644 roles/sram_metadata/tasks/main.yml delete mode 100644 roles/sram_metadata/tasks/pyff.yml delete mode 100644 roles/sram_metadata/templates/index.html.j2 delete mode 100644 roles/sram_metadata/templates/pyff-metadata.service.j2 delete mode 100644 roles/sram_metadata/templates/pyff-metadata.timer.j2 delete mode 100644 roles/sram_metadata/templates/vhosts.conf.j2 delete mode 100644 roles/sram_sbs/templates/sbs.service.j2 diff --git a/roles/sram_ldap/defaults/main.yml b/roles/sram_ldap/defaults/main.yml index cb2bc981a..35d5029f3 100644 --- a/roles/sram_ldap/defaults/main.yml +++ b/roles/sram_ldap/defaults/main.yml @@ -1,38 +1,38 @@ --- -ldap_image: "ghcr.io/surfscz/sram-ldap:main" -ldap_conf_dir: "{{ current_release_appdir }}/sram/ldap" -ldap_ldif_dir: "{{ ldap_conf_dir }}/schema" -ldap_certs_dir: "{{ ldap_conf_dir }}/certs" -ldap_backup_dir: "{{ ldap_conf_dir }}/ldap" -ldap_data_dir: "{{ ldap_conf_dir}}/data" -ldap_uri: "ldap://localhost/" +sram_ldap_image: "ghcr.io/surfscz/sram-ldap:main" +sram_ldap_conf_dir: "{{ current_release_appdir }}/sram/ldap" +sram_ldap_ldif_dir: "{{ sram_ldap_conf_dir }}/schema" +sram_ldap_certs_dir: "{{ sram_ldap_conf_dir }}/certs" +sram_ldap_backup_dir: "{{ sram_ldap_conf_dir }}/ldap" +sram_ldap_data_dir: "{{ sram_ldap_conf_dir}}/data" +sram_ldap_uri: "ldap://localhost/" -ldap_user: "openldap" -ldap_group: "openldap" +sram_ldap_user: "openldap" +sram_ldap_group: "openldap" # admin_group: "ldap_admin" -ldap_admins: +sram_ldap_admins: - name: Admin uid: admin - pw_hash: + pw_hash: "!" 
sshkey: "" -ldap_loglevel: "stats stats2 filter" +sram_ldap_loglevel: "stats stats2 filter" -ldap_services_password: secret -ldap_monitor_password: secret -ldap_ldap_monitor_password: secret +sram_ldap_services_password: secret +sram_ldap_monitor_password: secret +sram_ldap_ldap_monitor_password: secret -ldap_uri: "ldap://localhost/" -ldap_rid_prefix: "ldap://" +sram_ldap_uri: "ldap://localhost/" +sram_ldap_rid_prefix: "ldap://" -ldap_base_domain: "{{ base_domain }}" -ldap_base_dn: >- - {{ ((ldap_base_domain.split('.')|length)*['dc=']) | - zip(ldap_base_domain.split('.')) | list | map('join', '') | list | join(',') }} -ldap_services_dn: - basedn: "dc=services,{{ ldap_base_dn }}" +sram_ldap_base_domain: "{{ base_domain }}" +sram_ldap_base_dn: >- + {{ ((sram_ldap_base_domain.split('.')|length)*['dc=']) | + zip(sram_ldap_base_domain.split('.')) | list | map('join', '') | list | join(',') }} +sram_ldap_services_dn: + basedn: "dc=services,{{ sram_ldap_base_dn }}" o: "Services" - binddn: "cn=admin,{{ ldap_base_dn }}" + binddn: "cn=admin,{{ sram_ldap_base_dn }}" -ldap_hosts: {} +sram_ldap_hosts: {} diff --git a/roles/sram_ldap/handlers/main.yml b/roles/sram_ldap/handlers/main.yml index 737cbb41f..f6136cfeb 100644 --- a/roles/sram_ldap/handlers/main.yml +++ b/roles/sram_ldap/handlers/main.yml @@ -1,6 +1,6 @@ --- - name: Restart the ldap container community.docker.docker_container: - name: "{{ containers.ldap }}" + name: "sram-ldap" restart: true state: started diff --git a/roles/sram_ldap/tasks/admins.yml b/roles/sram_ldap/tasks/admins.yml index dfba23d6e..f20648460 100644 --- a/roles/sram_ldap/tasks/admins.yml +++ b/roles/sram_ldap/tasks/admins.yml @@ -1,54 +1,34 @@ --- -# - name: Initialize DIT admin -# community.general.ldap_entry: -# dn: "{{ ldap_services_dn.binddn }}" -# objectClass: organizationalRole -# attributes: -# cn: "{{ ldap_services_dn.binddn | regex_replace('^cn=([^,]+).*', '\\1') }}" - -# determine which users need to be admin -# check for each role of each user if it leads to membership of group {{ldap_admin_group}} -# - name: determine ldap admins -# set_fact: -# ldap_admins: "{{ ldap_admins | default([]) + [item.0] }}" -# when: ldap_admin_group in role_to_groups[item.1] or ldap_admin_group in item.0.groups -# loop: "{{ users | subelements('roles') }}" - - name: determine ldap admins set_fact: - ldap_admins: "{{ ldap_admins }}" + ldap_admins: "{{ sram_ldap_admins }}" # Find existing ldap admins - name: Initialize admins (I) community.general.ldap_search: - dn: "{{ ldap_services_dn.basedn }}" + dn: "{{ sram_ldap_services_dn.basedn }}" scope: "onelevel" filter: "(objectClass=organizationalRole)" attrs: - "cn" - bind_dn: "{{ ldap_services_dn.binddn }}" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ldap_uri }}" + bind_dn: "{{ sram_ldap_services_dn.binddn }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{sram_ldap_uri }}" register: "existing_ldap_admins_result" -# ansible sucks like this: we need to extract the results from the result +# ansible trips over stuff like this: we need to extract the results from the result - name: Initialize admins (Ia) set_fact: existing_ldap_admins: "{{ existing_ldap_admins_result.results }}" -- debug: - var: "existing_ldap_admins" -- debug: - var: "ldap_admins" - # Remove LDAP non-admins - name: Initialize admins (II) community.general.ldap_entry: - dn: "cn={{ item.cn }},{{ ldap_services_dn.basedn }}" + dn: "cn={{ item.cn }},{{ sram_ldap_services_dn.basedn }}" state: absent - bind_dn: "{{ ldap_services_dn.binddn }}" - 
bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ldap_uri }}" + bind_dn: "{{ sram_ldap_services_dn.binddn }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" when: > item.cn not in ldap_admins | map(attribute='uid') and item.cn != 'admin' @@ -57,26 +37,26 @@ # Insert LDAP admins - name: Initialize admins (III) community.general.ldap_entry: - dn: "cn={{ item.uid }},{{ ldap_services_dn.basedn }}" + dn: "cn={{ item.uid }},{{ sram_ldap_services_dn.basedn }}" objectClass: - simpleSecurityObject - organizationalRole attributes: description: An LDAP administrator userPassword: "{{ item.pw_hash }}" - bind_dn: "{{ ldap_services_dn.binddn }}" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ldap_uri }}" + bind_dn: "{{ sram_ldap_services_dn.binddn }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" loop: "{{ ldap_admins }}" # Make sure passwords are updated for existing admins - name: Initialize admins (IV) community.general.ldap_attrs: - dn: "cn={{ item.uid }},{{ ldap_services_dn.basedn }}" + dn: "cn={{ item.uid }},{{ sram_ldap_services_dn.basedn }}" attributes: userPassword: "{{ item.pw_hash }}" - bind_dn: "{{ ldap_services_dn.binddn }}" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ldap_uri }}" + bind_dn: "{{ sram_ldap_services_dn.binddn }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" loop: "{{ ldap_admins }}" diff --git a/roles/sram_ldap/tasks/main.yml b/roles/sram_ldap/tasks/main.yml index a4877722d..318e3b340 100644 --- a/roles/sram_ldap/tasks/main.yml +++ b/roles/sram_ldap/tasks/main.yml @@ -11,19 +11,17 @@ file: path: "{{ item.path }}" state: "directory" - # owner: "{{ ldap_user }}" - # group: "{{ ldap_group }}" mode: "{{ item.mode }}" with_items: - - { path: "{{ldap_ldif_dir}}", mode: "0755" } - - { path: "{{ldap_certs_dir}}", mode: "0755" } - - { path: "{{ldap_data_dir}}", mode: "0777" } + - { path: "{{sram_ldap_ldif_dir}}", mode: "0755" } + - { path: "{{sram_ldap_certs_dir}}", mode: "0755" } + - { path: "{{sram_ldap_data_dir}}", mode: "0777" } notify: Restart the ldap container - name: Copy schemas copy: src: "{{ item }}" - dest: "{{ ldap_ldif_dir }}/{{ item }}" + dest: "{{ sram_ldap_ldif_dir }}/{{ item }}" mode: "0644" with_items: - sczGroup.ldif @@ -38,51 +36,39 @@ - name: Copying ldap-add script copy: src: "{{ item }}" - dest: "{{ ldap_conf_dir }}/{{ item }}" + dest: "{{ sram_ldap_conf_dir }}/{{ item }}" mode: "0755" with_items: - ldap-add -# # cert is used for communication between ldap for sync -# # is generated in roles/certificates/tasks/main.yml -# - name: Copy wildcard frontend cert -# copy: -# src: "/etc/ssl/certs/sram-https.pem" # was installed here by update-ca-certificates -# remote_src: true -# dest: "{{ldap_certs_dir}}/frontend.crt" -# mode: "0644" -# when: "is_dev" -# notify: Restart the ldap container - - name: Setup ldap hosts vars: host: - key: "%s.{{ ldap_base_domain }}" + key: "%s.{{ sram_ldap_base_domain }}" value: "%s" etc_hosts: {} set_fact: etc_hosts: >- {{ etc_hosts | combine({ host.key | format(item.key): host.value | format(item.value) }) }} - with_dict: "{{ ldap_hosts }}" + with_dict: "{{ sram_ldap_hosts }}" - name: Create the ldap container community.docker.docker_container: name: "sram-ldap" - image: "{{ ldap_image }}" + image: "{{ sram_ldap_image }}" restart_policy: "always" state: started - pull: true + # pull: true ports: - 0.0.0.0:389:389 env: LDAP_ORGANISATION: "{{ env }}" - LDAP_DOMAIN: "{{ ldap_base_domain }}" - 
LDAP_ROOTPASS: "{{ ldap_services_password }}" + LDAP_DOMAIN: "{{ sram_ldap_base_domain }}" + LDAP_ROOTPASS: "{{ sram_ldap_services_password }}" etc_hosts: "{{ etc_hosts }}" volumes: - # For now the target side /opt/ldap is hard-coded - - "{{ ldap_conf_dir }}:/opt/ldap" + - "{{ sram_ldap_conf_dir }}:/opt/ldap" networks: - name: "loadbalancer" labels: @@ -92,11 +78,11 @@ traefik.tcp.routers.ldap.tls: "true" traefik.tcp.services.ldap.loadbalancer.server.port: "389" healthcheck: - test: - - "CMD" - - "bash" - - "-c" - - "[[ -S /var/run/slapd/ldapi ]]" + test: "-S /var/run/slapd/ldapi" + # - "CMD" + # - "bash" + # - "-c" + # - "[[ -S /var/run/slapd/ldapi ]]" register: "ldap_container" - name: Wait for LDAP initialization @@ -111,7 +97,6 @@ - name: Ensure the schemas are added to LDAP ansible.builtin.shell: - # For now the target side /opt/ldap is hard-coded cmd: "docker exec sram-ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" register: "result" failed_when: "result.rc not in [0,80]" @@ -133,8 +118,8 @@ olcDbIndex: "{{item}}" state: "present" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" with_items: - "entryUUID eq" - "o eq" @@ -148,33 +133,21 @@ attributes: olcSizeLimit: "unlimited" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" - name: Set config community.general.ldap_attrs: dn: "cn=config" state: "present" attributes: - olcServerID: "{{ ldap_server_id }}" + olcServerID: "{{ sram_ldap_server_id }}" olcSizeLimit: "unlimited" - olcLogLevel: "{{ ldap_loglevel }}" + olcLogLevel: "{{ sram_ldap_loglevel }}" olcAttributeOptions: "time-" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" - -# # cert is used for communication between ldap for sync -# # is generated in roles/certificates/tasks/main.yml -# - name: Set TLS config -# community.general.ldap_attrs: -# dn: "cn=config" -# state: "exact" -# attributes: -# olcTLSCACertificateFile: "/opt/ldap/certs/frontend.crt" -# bind_dn: "cn=admin,cn=config" -# bind_pw: "{{ ldap_services_password }}" -# server_uri: "{{ ldap_uri }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" - name: Setup Modules community.general.ldap_attrs: @@ -184,8 +157,8 @@ - syncprov - dynlist.so bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" - name: Setup Dynlist community.general.ldap_entry: @@ -196,8 +169,8 @@ attributes: olcDlAttrSet: "voPerson labeledURI member+memberOf@groupOfMembers" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" - name: Setup Syncprov community.general.ldap_entry: @@ -209,51 +182,8 @@ olcSpCheckpoint: 100 10 olcSpSessionLog: 100 bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" - -# Leave this here in case we do want to build our own -# root database from scratch instead of relying on the -# domain based Debian slapd package setup -# -# - name: Setup main database -# community.general.ldap_attrs: -# dn: olcDatabase={1}mdb,cn=config -# attributes: -# olcSuffix: "{{ 
ldap_services_dn.basedn }}" -# olcRootDN: "{{ ldap_services_dn.binddn }}" -# olcRootPW: "{{ '%s' | format(services_ldap_password) | slapd_hash }}" -# state: exact -# -# - name: Set root credentials -# community.general.ldap_attrs: -# dn: olcDatabase={0}config,cn=config -# attributes: -# olcAccess: >- -# {0}to * -# by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage -# {% if environment_name=="vm" %} -# by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage -# {% endif %} -# by dn.exact="{{ ldap_services_dn.binddn }}" manage -# by * break -# state: exact -# bind_dn: "cn=admin,cn=config" -# bind_pw: "{{ ldap_services_password }}" -# server_uri: "{{ ldap_uri }}" -# -# -# - name: Get uid of openldap user -# ansible.builtin.getent: -# database: "passwd" -# key: "openldap" -# -# # store it in a nice var (so line below doesn't get too long) -# - name: Save gid of openldap user -# ansible.builtin.set_fact: -# openldap_auth: "gidNumber={{ansible_facts.getent_passwd['openldap'][2]}}+\ -# uidNumber={{ansible_facts.getent_passwd['openldap'][1]}}" -# + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" - name: Set ACLs community.general.ldap_attrs: @@ -261,15 +191,15 @@ attributes: olcAccess: - >- - to dn.regex="(([^,]+),{{ ldap_services_dn.basedn }})$" - by dn.exact="{{ ldap_services_dn.binddn }}" write + to dn.regex="(([^,]+),{{ sram_ldap_services_dn.basedn }})$" + by dn.exact="{{ sram_ldap_services_dn.binddn }}" write by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth write by dn.exact,expand="cn=admin,$1" read by * break - >- to * by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage - by dn.regex="cn=[^,]+,{{ ldap_services_dn.basedn }}" read + by dn.regex="cn=[^,]+,{{ sram_ldap_services_dn.basedn }}" read {% if env=="vm" %} by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage {% endif %} @@ -282,10 +212,10 @@ state: exact ordered: true bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" -# ldap_rids: +# sram_ldap_rids: # 101: ldaps://ldap1.scz-vm.net/ # 102: ldaps://ldap2.scz-vm.net/ @@ -294,14 +224,14 @@ start: 101 rid: key: "%d" - value: "{{ ldap_rid_prefix }}%s.{{ ldap_base_domain }}/" + value: "{{ sram_ldap_rid_prefix }}%s.{{ sram_ldap_base_domain }}/" ldap_rids: {} set_fact: ldap_rids: >- {{ ldap_rids | combine({ rid.key | format(start|int): rid.value | format(item.key) }) }} start: "{{ start|int + 1 }}" - with_dict: "{{ ldap_hosts | dict2items | sort(attribute='key') }}" + with_dict: "{{ sram_ldap_hosts | dict2items | sort(attribute='key') }}" # Voor toekomstige Claude gebruikers: onderstaande construct levert aan het eind # een string representatie van de dict op, die niet meer gebruikt kan worden @@ -321,11 +251,11 @@ rid: >- rid={} provider="{}" - searchbase="{{ ldap_services_dn.basedn }}" + searchbase="{{ sram_ldap_services_dn.basedn }}" type=refreshAndPersist bindmethod=simple - binddn="{{ ldap_services_dn.binddn }}" - credentials={{ ldap_services_password }} + binddn="{{ sram_ldap_services_dn.binddn }}" + credentials={{ sram_ldap_services_password }} retry="30 +" timeout=30 network-timeout=5 @@ -341,29 +271,29 @@ olcSyncrepl: "{{ rids }}" olcMultiProvider: "TRUE" bind_dn: "cn=admin,cn=config" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" + bind_pw: "{{ sram_ldap_services_password }}" + 
server_uri: "{{ sram_ldap_uri }}" # We now have Syncrepl in place, so only write to primary - name: Initialize DIT community.general.ldap_entry: - dn: "{{ ldap_services_dn.basedn }}" + dn: "{{ sram_ldap_services_dn.basedn }}" state: "present" objectClass: - "top" - "dcObject" - "organization" attributes: - dc: "{{ ldap_services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}" - o: "{{ ldap_services_dn.o }}" - bind_dn: "{{ ldap_services_dn.binddn }}" - bind_pw: "{{ ldap_services_password }}" - server_uri: "{{ ldap_uri }}" + dc: "{{ sram_ldap_services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}" + o: "{{ sram_ldap_services_dn.o }}" + bind_dn: "{{ sram_ldap_services_dn.binddn }}" + bind_pw: "{{ sram_ldap_services_password }}" + server_uri: "{{ sram_ldap_uri }}" when: > - inventory_hostname in groups['ldap_primary'] + inventory_hostname in groups['sram_ldap_primary'] # We now have Syncrepl in place, so only write to primary - name: Add ldap admins include_tasks: "admins.yml" when: > - inventory_hostname in groups['ldap_primary'] + inventory_hostname in groups['sram_ldap_primary'] diff --git a/roles/sram_ldap/templates/slapd.service.j2 b/roles/sram_ldap/templates/slapd.service.j2 deleted file mode 100644 index 299b3af0c..000000000 --- a/roles/sram_ldap/templates/slapd.service.j2 +++ /dev/null @@ -1,20 +0,0 @@ -[Unit] -Description = LDAP server - -[Service] -Type = forking -User = root -SupplementaryGroups = ssl-cert -ExecStartPre=-/bin/mkdir -p /var/run/slapd -ExecStartPre=-/bin/chown openldap: /var/run/slapd -ExecStart = /usr/sbin/slapd -F /etc/ldap/slapd.d -u openldap -g openldap -h 'ldapi:/// ldap://localhost/ ldaps://{{inventory_hostname}}/' -Restart = always -RestartSec = 30 -PIDFile = /run/slapd/slapd.pid -# defaults are 1024:524288 which is too small for slapd -# see https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=378261 and -# https://www.openldap.org/lists/openldap-software/200802/msg00186.html -LimitNOFILE=4096:524288 - -[Install] -WantedBy = multi-user.target diff --git a/roles/sram_metadata/defaults/main.yml b/roles/sram_metadata/defaults/main.yml deleted file mode 100644 index 7982c7f9d..000000000 --- a/roles/sram_metadata/defaults/main.yml +++ /dev/null @@ -1,78 +0,0 @@ ---- -sram_metadata_image_server: "ghcr.io/openconext/openconext-basecontainers/apache2:latest" -sram_metadata_image_pyff: "ghcr.io/surfscz/sram-pyff:main" -sram_metadata_hostname: "meta.{{ base_domain }}" -sram_metadata_basedir: "{{current_release_appdir}}/sram/metadata" - -# server_name: "metadata-server" - -sram_metadata_user: "sram-metadata" -sram_metadata_group: "sram-metadata" - -# idps_source: "https://metadata.surfconext.nl/idps-metadata.xml" -# idps_cert: | -# -----BEGIN CERTIFICATE----- -# MIIEKjCCAhICEG12w6QqayYAWntxDN59dU0wDQYJKoZIhvcNAQELBQAwPDELMAkG -# A1UEBhMCTkwxEDAOBgNVBAoMB1NVUkZuZXQxGzAZBgNVBAMMElNVUkZjb25leHQg -# Um9vdCBDQTAeFw0xOTAxMTQxNjM5MDVaFw0yNDAxMTgxNjM5MDVaMGsxCzAJBgNV -# BAYTAk5MMRAwDgYDVQQIDAdVdHJlY2h0MRAwDgYDVQQKDAdTVVJGbmV0MRMwEQYD -# VQQLDApTVVJGY29uZXh0MSMwIQYDVQQDDBpTVVJGY29uZXh0IG1ldGFkYXRhIHNp -# Z25lcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMckFyqXzW7dbMt4 -# wDdSLaAjFAbNziUgQaivu4dl9Uf/cZ4f36a9DfQBUSraNoIR76ruwK3TPfFalemp -# xmWTsoVSQpb3AOsWbU+i0YKS1cmcqMUC1fef2j1IbuK4B4nEu9S5saGNVGNvUJ+Y -# jDUpC5vyyp7boW9E1md2jIBI6Mw+ZhlmkPucqaphxurWnm0KbxTZrYLOBZ1IXj6r -# yrRoFwwtjEH+CW8cRn8OATK0q4yb0BVr2gY2tp/lTpASHZ3WVWBK0prwK0KkusY6 -# ck+/vvlk46IdEr803NB0Dm3ECh3i65mfCaWzVTtd/md874paK+65f1JeVyd5I5al -# M2KEpvkCAwEAATANBgkqhkiG9w0BAQsFAAOCAgEAjvJXXkxOqh3K0k2NdDG5EOTy -# 
bA+koRbAqhdY/qJoSnqTzwBxJc6aPs+L4q2PIoLo0gNJj1Nm1taLusaaK+CBx3ar -# 1kxEika5FM0dqFjD3i7Y5U0FMeDB5cReo8TNdo31VGoY7CbRjtqHLRTuKzNmIfEm -# ahLnHIBtarE82b7Mpg0aLxjrRR+t8wSCriy+e9AEPzC5bWxtPJA+OhU8U9hMuOs5 -# SzKmHwYue4WY3q1rRaDpK3fqgXRDRfznNn9/RDDbBos7CRMSAPEmAO28qLKBW/1z -# a2TKQLddZ3uoCurFNbToSTueKYVEnveQNO2P5X6uy4rcYkjeSiwbmHo7jYuHAxx4 -# uGzHMpoqoGNx+2iYjtUo3dJUXzcZai3X+RuuMKXXvqGzrxJsoKayNVAE1dWoUHJl -# RouPhDLTdZq/pblORhFS8r10rKhSScgrNuN9LTTV7EPFeVr8trocNwl8IruH+eNL -# 6/7b5Y7fb7rvpxeHjWrTz8a9BXAIAv+bgyrg4OHGRcNIQb0XF438HD9r8Zb92B6Z -# VCR3aVS5496+1td+8aN/Blzo59LhKPiHyGZCPHFV/oBqG7nxp603kcWmJOcG+AgB -# 9bFiAimF5LLk/LnMfplK9w0vvxWVcdQkDgVPYvEGNtttj0QC7/jM4ZeihGb6Oyzy -# DZA6aeg73/ygOATQ13A= -# -----END CERTIFICATE----- -sram_metadata_idps_filters: [] - -sram_metadata_idps_files: -- name: "dummy-idp" - metadata: | - - - - - - - SRAM VM Dummy IdP - SRAM VM Dummy IdP - https://test-idp.sram.example.org/ - - - Administrator - mailto:sinterklaas@example.nl - - - -# idps_xrd: "{{metadata_defaults.basedir}}/certs/surfconext.xrd" -# idps_source_dir: "/opt/metadata-src" -# idps_feed: "{{ metadata_defaults.basedir }}/idps_feed.fd" -# idps_file: "idps.xml" -## -# proxy_frontend_source: "https://satosa.local/frontend.xml" -# proxy_frontend_feed: "{{ metadata_defaults.basedir }}/frontend_feed.fd" -# proxy_frontend_file: "proxy_idp.xml" -# -# proxy_backend_source: "https://satosa.local/metadata/backend.xml" -# proxy_backend_feed: "{{ metadata_defaults.basedir }}/backend_feed.fd" -# proxy_backend_file: "proxy_sp.xml" diff --git a/roles/sram_metadata/files/01_idps.fd b/roles/sram_metadata/files/01_idps.fd deleted file mode 100644 index 46d58b663..000000000 --- a/roles/sram_metadata/files/01_idps.fd +++ /dev/null @@ -1,23 +0,0 @@ -- load fail_on_error True: - #- "https://metadata.test.surfconext.nl/idps-metadata.xml verify certs/surfconext.crt" - - "https://metadata.test.surfconext.nl/idps-metadata.xml" - - "src/" -- select: - - "https://idp.diy.surfconext.nl/saml2/idp/metadata.php" - - "http://mock-idp" - - "https://login.test.eduid.nl" - - "https://idp-acc.surfnet.nl" - - "https://login.uaccess-a.leidenuniv.nl/nidp/saml2/metadata" - - "test-idp.lab.surf.nl" - - "https://test-idp.sram.surf.nl/saml/saml2/idp/metadata.php" - - "https://idp.ci-runner.sram.surf.nl/saml/saml2/idp/metadata.php" -- xslt: - stylesheet: "xslt/transform.xslt" -- finalize: - cacheDuration: P7D - validUntil: P14D -- sign: - key: "certs/signing.key" - cert: "certs/signing.crt" -- publish: "out/idps.xml.new" -- stats diff --git a/roles/sram_metadata/files/02_backend.fd b/roles/sram_metadata/files/02_backend.fd deleted file mode 100644 index 698d615a4..000000000 --- a/roles/sram_metadata/files/02_backend.fd +++ /dev/null @@ -1,14 +0,0 @@ -- load fail_on_error True: - - "https://proxy.acc.sram.eduteams.org/metadata/backend.xml" -- select -- xslt: - stylesheet: "xslt/transform_proxy.xslt" -- finalize: - cacheDuration: P7D - validUntil: P14D - name: "SURF Research Access Management" -- sign: - key: "certs/signing.key" - cert: "certs/signing.crt" -- publish: "out/proxy_sp.xml.new" -- stats diff --git a/roles/sram_metadata/files/03_frontend.fd b/roles/sram_metadata/files/03_frontend.fd deleted file mode 100644 index 252206d42..000000000 --- a/roles/sram_metadata/files/03_frontend.fd +++ /dev/null @@ -1,14 +0,0 @@ -- load fail_on_error True: - - "https://proxy.acc.sram.eduteams.org/metadata/frontend.xml" -- select -- xslt: - stylesheet: "xslt/transform_proxy.xslt" -- finalize: - cacheDuration: P7D - validUntil: P14D - name: "SURF Research 
Access Management" -- sign: - key: "certs/signing.key" - cert: "certs/signing.crt" -- publish: "out/proxy_idp.xml.new" -- stats diff --git a/roles/sram_metadata/files/surf.png b/roles/sram_metadata/files/surf.png deleted file mode 100644 index e2bc4a3c2b61cabcd3babac9f9b67ef8c860dfbc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16016 zcmeHu2UJs8*XX4Ps5CzdA_6Lih%^Cd0fM41i~>3c7!j}}0VFhogiah$9BB#yhHga! zMT2w#2+~PFkP?!BfHZ*wX_5ZUW!}vH&3p5|nQ#67dh4yXzU5+h?>>9)b9OoV?7R2L zFQ+ZdL^kc(1VNC9`47i`f}jmd_)mx*Kw=HTIN(3t%O}l_Lu=tN%@yEnqt_28JOmX* z!hbx?^QCG4DClEuaYC?vvw*1FmeeWVb_m)7nIAuT)~{!RHF;giF6niDc-|uN@D_2E z4aPqFVRGWTdHRL;PwY|THQBQBZsloy0Xg-Z#7`Ib`Amrmd+7;i^I@~?ZwMXNUmY1w z(acQdbqZnSzjfu@Lv%h1XW2xP#6MC$K-*ae$ih6;%o>adI|mt5F$X( z?*JGT2K=9Ce`ofe(ynv=ZQkFR!PERXExfYd=KXWp-e(_&0?3X9fJOO;}fdPW#tF{DJnbt@yh}{&^w)6)XM? zA^zaTe`<+;QHcK^T~Q8%vEOv_*GBqtZ~23w|JlHQ!$|-3wEu>9@$aDb+d24Obwx!+ zMPx>H!IFEdgSPBipOhSrU&hDuwUIpy>eX8{c|$Z&Bcp&$GbBp4Pb>(0%_O?U2rl%|!RoXV--tD>m+S|xC7wQHcW;|A@g zCPD#Lp$gY6djd+GQXV|@iUM!~wU=xXijN2pMfIp!*7nX&f3I}YN?YenB2B;LQx9Sf z)2%R&>^IW9lgLIiX<0Az^7K1qPKLPRg^gpnb@_z2nh2p>g>eu80$n0HJy=ABKIX3iu%cL8zs5Q-yFtRVmy;qUvK4oHi$ zpLH9e$^zYmp`dt%4VdLlM+nvq0rhz&_$zd>ZnP}NY}wt(Ztz))!wG&a^_aGPzQo!Z z2d!SNS;@%B=kp?DqRtTnSJ5>q7k#a;ojp87;f&=6r0aywa);Q0WV)5~vQQ;o3T7-t za*r)j3|+o#b4JY|nZbN}8yX5(Cx zw`a#SW(9nESUmlz3?AbWzAA@b-)MF?FoW4iu4~G;XjLkL zZb)kMg`$TQ;s|ZhVF*IKZ;Uw>O?u%%)p0Q=h;<)I`LeMtsfXfe~%7@ru z+P#RA0>tSHXbe?azv1o@9~Ce-QF1L#$>+g#t{`eMRD&P7J9kpSmUXl!4#&M>_2Y(& zipH_=bTeKVSyZ#dAr+?asO-b)N_5G$X6QNFB87d)&5h2 z(H)1_TrEvrXqn0z7C5$>ccZc4Yxl$(mTzr_Wl|LQj2$fVXsy1Iqe1}hvz8?E^_p9$ zun^biyuO71@#_W55cRq^dxacGR_BM5v|8A|93*;i?$?w`nLruGbWnafxBC2?Y|TNJ zuEp$yP9H3MaZKl&cST(B7U=XVMmKw-!^N5v(at4V5gy{pm87>v2rp8$a}jSWd59Gl zqL*54&*6K}S&ovMC;6rqN8fE5^gU2Oh&zDt6wHvyDF!gz@vYGJn$i(fyfe6`;wl%7 z4o5)J*OOk!?R|92mRZpll?C~9x!ZP58W$02dH^ax6l_9=af zvJYWgl;}}fahM+p_^e=`i~-@Hd65rV9R?V@zRNtV683UXzbca_BghRrII1C-JfmWz zU^+XTJ3lmghf4yFZf^YGbHKQ0X@dZ?%H|QnU&b>sSG#nf(fBpQVZIl)wsWm;Xar<9 zIDb~0eSLLWgBJ>@#Y&2^L;Qh>f{9+-rp-{&n?1vrJRpBmd|-_+>OUn3E!Gul zoQ?>Ud@t*3)tq5IBDp*nT1bt04{hUeJ*RrN85h}MW@y4PTgL|F;dnhIyEenX&Ge`J z?4V5_DzR?YUBdA?EKRI>^~Vb9&vHF+ROzrlhXPYCj*k@P-kDnNs%OZZrEd}kD;iXj zao6mJv@A7gii45miIqv3sda7z$0%{w1HRQhcbjQgo6IG!JBgcon za}K|>$0TMy5e-r;jX5dA3QC*^Omp_Akq#^M;v_jsI#0(_#@(|niEyqk$N(n^bQ1EaQ6P7)Z5~rQ+X)XbazXDrn?^2wXpKTtS7ULnhy!N1PH( zns5?W*3aH&JyL#j>D#`;*B#`IT9$dnH1!%cG&=OHDDD+0MA?)U1m1Uz;FQE)Bjwj8 zr!C*EXg*eS(@TQw;FaL2AaCr+91#oYfEqG zQq9hpDInW9^rgs_yQojrI7msLHWI|yRg_g1HP5+gaW;FiIU^|KKn0~fTSmU;JMmB> zm-SA*PgZ}#=0IUxP~iKEJvR+GRlqpcO#=mcf>$HTz7$p^qNM!=zq)qD1j$}LfAoa| zdaiztv^0(~UHBD$Eg6$=E0<%mx>R1<^zzJz9jhg82iO+l1I1aPdQD|N=QmxqHDlgb zoO#k0p@Tb`nlH;nyhlEw)Vd-C=w{@C~8RXSxY>C~e9LwR>e! z!hkQKJ6E=Q*66ASK!B zh_zXGIkcx~S=lwB%)piVE-}I~{hCJO%X`R`8P~~X9q@oV2CRiPr@(lTA^WoOGCSnLn zT@Nw!J~KEqPZC57dWdnHoZ*>xBd4_8Ra$tbqhM_nDcAKGap;hz@#dm#%xa%CdZM&( zQM+s=%6HYh?~=1@lS!XB0U*XS2Y##NevsHTCQ9_m#3ma0`*Gjc9f)o%^a=G4f~rzq zBam%W_qvPJRCZFSy5XCsEdXd@KRbF_^T4H6%t@V#vY%)|P(SUGA;+d!H2D44ESK9R zE?R#V;DC^EksR|DLTjqsol0Th3VT7CCpt<_9BB|DEMB@urJQIg)8;oW!v8QR6GCG! 
zzgM+y?PM~@^_xwgyWiC0M#CG9uH|3%r7O^}OEu4`_f+3>Fx4%OTHT3Gu&Zr-<1p-h zMHo!`MihU6sW6VqTdmdSjszaJ(ri?`8L4@7a!xt8f2^aX*vqp`J6rjhn>avjt$lT1 zNhv$f{Zuj4=5=h@kJs*>sg0_rjgQipZB%cW?%cWS4NXBRk3S6IAk4)i6o-5=D{8cH zJWyP(8z(H~Z-b~S{8ou}t5s|mttr(7oDh!1o`UjE|kgP<@{z6QlqCr z_`986i*CDF#Q{+i$EaG;`5zbwA4R8QihP1Z1S*`Y=hdQqA+o z3nSN8{AkD-!N~U?H!bAw_v3p7I2dF z&HaJIRPJoG4C?Cn^m;8s_7<`+g;V?n+jVQkYLX8+a*xmAWv@q*?lOU=90j29o1J42#P zZ*K-eVvL&;9u*PxAESNNs=8fx^qXb4G=!%C^M*oOFuk#kqJAED|0@Ag=tNTCwW{Xs zi#%b7B5|Su3(G#R4GaY`mjx{R+pFYAw`<|K#1c&*5mfOz0pp^XaEn1%Z`_O1xFUuU z9}voRrbRdP3+e4K6J$CyvlNd3UcT&l-`a#|niB=85j5GPK4r~%C1-)nA}%2{!7eX= zQqSjm+s-PE*UC=v2Y*_|YWKtH>3kj^qhGJD=1#g{W-kTufmAWySHWLUouc7kmqU;7 zh_laP+Xw1ou=o3F5C&D9LZC7i%ma0fK#;+u{)_g-N&`($ZuwvjMLA zF{xjFSYxZ_DbMsRU*;l$s=MJimofgILWA%9!s1NXP}h#V=7L~jH`(=GA)^v~n2+AV;w8Yn&0{MdPxh5=BEXST?IFG){+d+F!tYHfjKACW$ z{H6I0`jW6}kcsxYKI@r?D@%B#Y`fFSDwy_~5>6HfMnsuwn%RcRTtq5G|ie+7@b7z&AgOswd zGs>GoI#JtUa4|U6d;)<&-__?KQQn1P$d<`e#m@hU95w7|sqcDfAwV%qP zDm5{`Pk)rmBo$uUNSVRMq90ybE&4`?e-jHgYzC8+G?S+FZW9G_z-_`3>$z-?McI=d29 z<88Y@zjXiXmwia*_G!7+&e~-MO-?6ImGoH1?snXCvqbDb>JTQxbJF*d6KpGp=b2|kj%Cj#%=z|% z@EtIyjl?85424ENLj+?eD@GLum(zfgcYuH~6uSBhv4WO6RmmTY#VJt;d*WamT9_HQ#3rF+=ly(PY+MgD@Fv`O~1BT0E+Md%UrX;&~4qx4%VXQ;t zkvA3~DpdC|zs%JIIoWMZQwyfweS73*TXOK_y}~O>smaS{E9Xh({ryJD zQ*K=oZs(USxhU$TGI~F313NUR9js%AS&xyXssZy=(M*dkY3LR%^xZ9jGN{z9P3Y$u z83oR@E*QB6<(m|5vU~fIdzIT~-PGKJ zOUK$dgz|5)my%sW$eq3Q@&|4MAM$f4m%YjfY`?_xxf8v4X;Et>Ag2h))EF!~a$6ur z({};XHvIg`D;h&J3@fRZEy4ra_nz<3zu2BzcXz2;qy{V3cOzoiB^F3l7mzF=nZKL! zQ-E~QiMofY%Zh+GJqzV$(JS^bNSy2p`Ht9}lSc+bUr zT<}d~W3!OV&WUVgjfRSu#`B0)$u$k$AAjMdlN%pDbM}qi)q9}K)o+RVE@S)qHqW8S zWN+1L9dBE*CX?F>Wyt(O1Q!M)C-3S|Wc+hALG7rD@}^_aOM#u=O(pwR&*jl>Ej`Dl zZ>hZ0tz>e>L(_HtY*O^c>w~#(9#A!?6`}xTz8Hj-u`?w)(rg`--5XKIQd0B9*`2ns zEk*>O&s)!z-hKD-yh5df>Dr%F&om+r!%5(29TomBuGO-hjQ6^wF8yatYd89gkVS#1 zoIVB>=>#{8m)vbx{O+yDEWL&jto){Db9*04JLL1__MYxy!{#PAnavAjx8~-13L0~y zid%QSdqdBTaBcz}wXr*G)rP2iFS!xrY&k>c*_>x{^Nhyw@uXs}U4>qDCnQ?%f}cZh z@sCcI4mkCf#K#2WOlnLTEMDzs=k)rP-v#mr5ft0re7Ebid=Nc7v&ASe-JY({-sctC zUPWzM7x&<&qVINojGq!LXO=6|8!(3g$#E7jvIZ5np(p3@g5vDcDKoa(9$N0Kwz|}j zX<~X!UT48bPQbEs84i<|cmyQHqBSJjZ%T~K?KSV)R>;rt`^&?ZUV(uyM6g%qv7$on zwD0b7VYLzR#hLy?E>vkE7KG7EqrSU8^*jo8|A0oM_m|v~+q;%j^=8PLc6gj}ANOj|A?5q6QkAQl&zF4B zr9Q5Sh(>pfM`?s!2*}^U2E^iQpI%oiDP$y}*s8XzZscA_3~W0VP2gx zebQ2D`Hu6#{_M_{2B*bGZj+_ez)kus!zn?P_`_x-=hVI#182d2_{@c8`NF*g^DjNc z?!Ab^O_T5YA*oHa+@ZDjNu7|1Smrw)^ou+=36h>0ms@QX(`-?w7{ZKab6Uu*hZPG| z%BO31%5UFnCO0&hIUG+N8cOXW_$x0l7%S{IaF#gsG*N{4Rj*Nbd^s&qWvJ>z2$kHC zfx}x(S-6Q^UOZp67TXk z$h7D0+nrVI4HJiM7IcM5sHEJSQHeW6{{H>Q_e=vLs_v6W(eGbZ9}2pM5_IiB6mLJA zsUrVrPu!_kK&w42n9dWLWNu2@jTZ51pRTKD{0c4AB3lZ)vE~KzkmZTa^yRhvH-$Cz zTNf7-a|%NjSa02j+CI@WK>lOtig)|4C+*;KlaF<4U;5oey{Z~4cKAvk zFi+TEP?fFaZuq<@2?2JiW}5W=IDBjVY^iSZ#sd)?qu!TlzMYJUZ?HCqb@-R3!lNQT z6Z=EP6Po=TzZO;q5?%#e;7o%^sec*AulMB%(XlXOD{id7e@Kcgm3X}=A*42S1hxXv z`Vd>Tdc|w8d^Pr`g|iSOvmL$*;JU(P>;N7GGS3K{RoiF(8G<)!T)KDj(yNN`Mi6N|&1Mfz z=foV!n7PT~y^vSXyGGin0^mKGW4ebI<}-9YBwUQwDQB^L&UDc}K5b8tRE0lRqZJH# z)5awzNvpN0;|RC$SP$Ggp!t&O{JvAw*ycHO-8L3COkKXgbi3_VKoG1RB|KQ8^LU-D9rx=~D%oRgr#l?aH zup`r^Z@xIOz<w$}shb-;HO4-?sv3`$h5TQnKN#*}GD3zJw~?V>fA*=5eSaA! zqUWD1n_V$@@xE-t$=6ufch`$lESesVw6f6!@K-^qOVG#ut=NPO!(X~6jYT~!bZo5M ziE^?+SCSaFjFoKKPFdMJqmcdq_XPepmMv>|dNWj=KpH-ABA2T_sFdzf+W4J#CJ;H! 
zi)?dH-nFv$Nz1w4@e8Nz02WAE_4r$OGa{c|-uR*<9AXV;HkNNHZJx1y&vC^a5@}4X zy%rdJFXH9!aGB^3y-br2uKrv%m8Nsbo8FD4EU(&Kxl>iA7hffA1|@oJ*8 zabE4Mx2Q^x!Ka2^VWY*qw5S14)XWYV%5IK`X($ayLkR$>9d_KxeHXe&VSd=(IxCd; zvoi`gJzhbJ5e&?i5IN3Ol?y+*W2`IE>J(3-a}A}?V|w2cWhKNAI@LVn>#2fJFuS5h z-I8iVgj?NB=c z$qj=k*aY+2C(07s6SmiOxY|k@e*Fm(^sec>lZL-nftBcTygqgK3`ZO{eG_@)Rse~# z9n>L@kS1~img(u(*l#!a9=n%b$y{{8*$uY<_lOLMBLpWtH`}hiJ zVTr2ozSrjTYM=Ta^uTsuBIwE=<^q> zS@{LMmgzJkzOIa~Ut>m)>TmacCJIhsvkx;W4u>I4E~lKSH7j=q1yS2nC+D?NlZvIc zjW54$4OqRFFhuEkT}aw-FOF+}9XUTq=T@}p))vUH1CJJnvq7s*qpw0cESG;-{fXJ6 zOnHSvCw*3fE|$v4&dK|F)-n~FFlMN#8*zNq1Nr_5WW z^*{bv)oplm##9pf&^vpLrazuTNnNc;Z$DGb2?;7v|H@9W(xlGH+JzBN*fZTHnoeN* z7B(K5w2Ucrkay1;?XJ0Gm+GtnIsjd0%GyV9_GjhlNR4_$SNXw0deE}$`_&a>_MkA5 zwt5NQi%quZH1t0(s0_;K(;gocl5v~Mae6KCPg+9A1@vt}kLE7fv!=4~>=uH(&T_h! zAZRr#i9fg*Bbkm|^9x0ccFC^22o-AhAo5kTX6B zmtx&9Tm0{%tvk-o`3rPVu&dL3nK!v`=VjlT+fHaU);aT-h=JaAD8SR1A5@30T>_1Z zwpvgm>%zFekDcp}7k}+xBzociESAq7^fj=vpM;^&%&52yfQz15{Ui>JIu}Xt2ysgy zBO#(=&n_Mz0=Q%`1Zos8EDFG`1u6+~d#YG`={%!Z`3!5Iry-`$f6%Styavs$#v;S* z#E=pV(DS%187QPMSuCHz7ht@7>?C6e0(i&lJMJ21$vU026N6}d4{GfZ|fCDJlY3SfUBDS#cils`Ai1e$v^ z-b}F&TOX&Z@)_(}|zTupE1KccHa3pZ-gM=&Gvxo=7JnRDKuVf58?NhMGyeE7}0 zfmI>Aa|vJTNjsWPxZIn8RmJ2L`NCST_LY<@8=%cLX{d3;`;^zYf<}Y3Q8cJ*&$ekv zjfaBpw3t5>BnbtKX>T2AB6{UEO?-hpz`3L%4!BV^`5v%fTfPt2psZip+a-L&|LrZ> zM-Unlt#Vi&lllBXuI2nM+!gTAnh{PTH;;bU4Vrc6;?PTA+E&*D$&+wgrcWFwJ|X}G zjEDNK@`rF-+M=l+kZ8*!m6ZRt3h& zKF>%VqD)D>@)ZDZDB}ayFF@Hv5Q2wWzLk?lq=}BTOY-1y20-^Xw;>~!rqT4_%zY5L z$jsc7nRQWPT}?ZC;TT-%RNI4qf_>7m@_{<@QIlQs6yO0Z8VMXg(8RUr-Ngq{@JZ&i zG9CO?l=7fHbiC9}D1)G?LF?`%gdwie*>-fa`;sugO7J$7bXNSlh5ZSg*N2YF-|hzm zjA#u;YBbi1`TY6AqiJoq=%V+@VnkC1;N-=J&$!JsO2 z@p7*)By%;6mA+Oe-^+UXk4#y$OSdKE!O8O$!3XY>&8iM_OLO9I& z%z~Rfqv>LVU9a4&o|5mZc^!)(O0c2UrZ6YxxG28BEY;5kqPsz1v$cy@$ZO?5)1ZA? 
zgoC3|k0FX;Kl1A6X`6B@jr>K>v^Fj`5us^PKb%PdW!J?d&LK1NpW>TN?cRG3G_=zT zmM{q(4$t;UkbZ%~V{ksxKG3X84h(G?Uui2Ryu8abe?9<*RiryNqU^B1OYekR@i6Lu z^wlY4eE5k?1W257y1f0AW+EG&L5hP9?9Hn6AJ(SAQsmBsq`6(m_(vis@OT_p;p%zz z2+cq9ADJlhW!4H2zV(~2yFHEyT+evB$BgxGCs7Rai=b`Jl=Pw6oiD2V6KEto@k4Cr zbjr7e;SVK<>qRu(Lxm^nZ0PebgRhH}6vADJFFw8YB#FdoS*Juuv= z3e&xt3;CKN`SN06AOlAESbHEjQ=-;*!)r}iZIV|64BIfg)OFiS3bNl=#tyw`j%SXlCWjj!;5*T0kRu@rHdUD ze_NnHrHHshz%!>rrv=D!)*0UO+gdhiTI3CgLLH>EWt`A94u~`*I*stSYhJqFW2G7d zwWs~8)D@VWmOe_4o(dL_#M+8?xxJkLy-2VjT=lvie}roaFW@TbdZvIK$^M{67(AE+ z2B+L=ZCSEZn4!Cmh7SK-=gh#Zusm}qz+Er+q2bLe^QF{Q!cW}N`UB^dP;)A3I6>J)ZcXWi4f#lM8egRmaZ4=t~!WTw& zz`93M=2p#2GF&WuLc!G-s8!Fm#Jc3x76T$YhArddirn}ag|9%uW9CgMyLdO`4FkU> z$~R1*uL4F&Zc<2ggRLYqs?5MB(ofiGH!&k%TIJ*|igX)o+k3YV&^>v%%%;=to^4@9 zywZSLFO?n`B0#Ew6tBkApI}}FQMdN%i`C0`2>6TzS%K!f9laOMp++HB_ z6zN+7Y47Q$`CGt7{A_J4m+^yd@n-Ni;}v>U;DO7p_zz6>9&C?VN5NkKC~k$BU`v0t zj*GK4$uLI9fqS!+5@R0x+v+E?VE$FzLW-@OLLhQY75~!CJb3Sw1`G}?k>aT-*5Cz^ zA;=ZSzs+U*f+YOBg>9Me=!2IIJg}_7idw%&1F|tVRRiS#z@}BKIu36FK+pqEH2*fN z8!U!6`$3D`LwJ(yS3v#*kBusW*bW(sfNT&TDE762jFk@>k^*Tb0m=w6Sw{m`Ydowr z0Q^stC~#);tml1D#zs*8hnWC>hmm=0LFZQq&taGjV}G>jh%pRH7dr&zy{pL7vi zL;0USLV)Kn?3Z7ke#3;fAV38rlYhDi4dQWt z1z;CY$WD3C&s_HmLt8!Xjo^SGDxDj9dgtU1>kPQ4v8Vhdzdar=0TsLEK_WjHdDK(n+B2HJs12 zt3pLS;4~z9{mgOH-?h$ctLkpB1@@$T{x|`y@IaNRQSj$NVie4#?}LzTY!>YDpw3g0 z7m0hY1z@lMh6qn$B+UQx)Di3Tg&OjG&64zT8L?bP2^Q%^YVwN)7rVedT2%3Ni`T`i zoEat^6bT?AQiNN0yYl@7?4>8osVHwj$msA9UeS7yk1$8}HqO2) zH06pU0)dKJaNM=S{Gy-t9U7nHUIrWeJ4ozBuUzHlqT!dHj}UqY2~l(NfpGK;dY^=t z%&S;g1tAFvy+Td(MR~4=&B{Jut29Yol_aV6d?0>7<(Erk>om{0hR>j7hSwOSPX<4{ zQWvgHZzGOEwMx+Si~8zWl|B~>c;hzNjuBt&dHVBdUVEZs+l@lrp?x}Zk{`~PSGdKx zDs=8^b7*u5tmrFeyN^CsTMrc(h?&bcq1n#|Vw5!GNMN1&Fj!41l7j3C+i}AqliyGX=h*VTjf)W^6 z)#_*!AFaHCC9f#@f_Fj=!%5_ZTvXo63nvYuw*udgjvHtY*Zq-H>jnp&(Z0t;I{eY< zR_+$VF18qmC^n@oA3h_5)`8b7tKJRb^iARR?{+W2OqR|+1s&m^x5c`a!r>?L2;MxN zk{68m^{r4*M&yQ}78xFL-gJ1379LDJTR^DK)_^M06ebBKpZVx)t1Q+vV224;n<$YR z1cOGkCcA{{@DHfLv9-vrX+C0ay)V5N0nd3KTO)fgs{c95*}4Iq=0chI+bo&L zeykzJMz~yf&=oUht}bNh7PGmlkw^AOQt#$w8X8tz;(inpI!|<1)D( zu&;=IBgWOunt8iHQb}aY1ay@tJDl4-33JpuS1`WpDlC!RtZZFj_Ya^n^XY)XAe8^u z%76hnDGLqj5y{AJ%9$=adV%PTC6$B$AAx!cT)RNF&RtQyO?pl{P6mMl;t3_aH!@E( zH{Eds_+aHD9&nSfbYh~CkaH^aj6jz4W>|x*&m2md^c#lzo)UIdBwjK#kJWz?`ICJ( zSSDm#5ptOKpJF;d|2?Mvd0hT2$ipYgder~z>{tf?0oOz7@3g-G{%}D2f%dNth(FN& zjl<=yh4{n41iS@As{iGOcJMU+f)MMdzs>tQGgw!Ds(`;C#6Q#iz7SYfe@^??Li~aD zudVpkLi~$X{2M~Ry83 - - - - - - - - - - - - - diff --git a/roles/sram_metadata/files/surf_bimi.svg b/roles/sram_metadata/files/surf_bimi.svg deleted file mode 100644 index f49b7a035..000000000 --- a/roles/sram_metadata/files/surf_bimi.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - surf - - - - - - - - - - - - diff --git a/roles/sram_metadata/files/surfconext.crt b/roles/sram_metadata/files/surfconext.crt deleted file mode 100644 index 0e8f074e8..000000000 --- a/roles/sram_metadata/files/surfconext.crt +++ /dev/null @@ -1,3 +0,0 @@ ------BEGIN CERTIFICATE----- 
-MIIFbjCCA1agAwIBAgIQagXJvtKqIRRO8zD41OktRjANBgkqhkiG9w0BAQsFADB8MQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MSAwHgYDVQQDDBdTVVJGY29uZXh0IENBIDIwMjMgVEVTVDAeFw0yMzA2MDcxMTQxNDRaFw0yNTA2MDYxMTQxNDRaMIGVMQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MTkwNwYDVQQDDDBTVVJGY29uZXh0IHRlc3QgZW52aXJvbm1lbnQgbWV0YWRhdGEgc2lnbmVyIDIwMjMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC1Wj1MYwzY646Wa9td4zUZb5W27+cbARhNbIZsteUIPV6unxoO6tHCLJhRxC4pBTQsdrhfhh3+s5rwm8mjhJs2rciQkCdPiTl860jqihhWi5bFXyGX5o1U5mZgomUT+o7+nUj0et1l/kbFJ0GqIKtf0uPj7R/zqTpqeT0c6VFxchU6LA8GOI9w5XIISEGi/IWlDKyM69I3DmbCip/rm8u6kIQ0qqXh58lNNOsZw8WYokCXP0IPFQWpPkKC1VGYtivwKLzzvNxSGcuvp39ui+37hrdjqiTxK68Z48vJ6l+KsJP+jpDXYBYE/NsSVYez3vbVTB/l664yvBfKyGIHHDdTq5akLCQDgYQzjeNOU1oSZbcsub0k+osp7MFGkslYRhLb0V9tX0Xu+7jXzGthPUWicN0XdlHS0JOlSgOBftPn8kcqYNMF0IZVe6V/AVgfj4/4iDk3OKl9FRctFp3kSa8GzLIbjqmYXpGGIEse6U2gfqHS9WHu4odfKH7rhD3hZssCAwEAAaNSMFAwHQYDVR0OBBYEFNclSgPTrGp4QJQZGjFu6VEBTX4PMB8GA1UdIwQYMBaAFI5kmzwW92s2rRY2B5NNjSYI2oj1MA4GA1UdDwEB/wQEAwIHgDANBgkqhkiG9w0BAQsFAAOCAgEAORNL7FGBkeq6u/rmcNf+jZZz27vw86COPOiN6ygTyxaBq5fmJ4JZlDnlfO4C/4iek2QjKdgPlpvATGUUMXJdO6a7A3/vXNuoIGu3Ug9GW4vpTVPulaYZedPHC8zBsxwRKwxpSTda7ubWDxH3vUxHz/zDOD2O71O6KFj6Ph8JXwa3TLH0xRN5CXa0UMKX0S+ck8MahCYnMtd99EBL/uOr0+D4q2HwxDRDpL4I9yRwyWxCafoR+6OfzO/vc/SGcjEk/9s0DrMKDkDTJlE9eZbaaWFFCkAkg3LHHLMYjykcTvjDEV75OohYcEC5/6uKHcB/ZQjHwkPBqv9pUF897yZ7sxS66GEJmqqVIC+ayWRvC8N+UmvMGWAdohrY7r7CPeTE+iVHaeB7xGTSI9BhTEv3yMNHhqzqIOvgr8h5iCv7B5hQL+V7MRqD7e7X9uRR7wbyGmwT4p4VFbz5VqthCOFobsMxam9Axt+saebRyH6Mg3Ro9D5WgGoZmTP1yyiMrmEHQdf9+iblbfTbRW0irlaX5t58fWB1u4QZqcamlhVcl65Fub0g+QkSyGDMD9G57z3CKOluNy6TxFZOxMynY6CEtaozDaiETm7NaNC1lkhi+SOHKRX5+q0KqJdnEC7GOX69hSDsCT905dpVnr8JgFKoUfXWSmbwTMj45190dw7RMzk= ------END CERTIFICATE----- diff --git a/roles/sram_metadata/files/transform.xslt b/roles/sram_metadata/files/transform.xslt deleted file mode 100644 index 80673b688..000000000 --- a/roles/sram_metadata/files/transform.xslt +++ /dev/null @@ -1,47 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/roles/sram_metadata/files/transform_proxy.xslt b/roles/sram_metadata/files/transform_proxy.xslt deleted file mode 100644 index 6e6f55d09..000000000 --- a/roles/sram_metadata/files/transform_proxy.xslt +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - Security Response Team - mailto:securityincident@surf.nl - - - - - - - - - - - https://refeds.org/sirtfi2 - https://refeds.org/sirtfi - - - - - - diff --git a/roles/sram_metadata/handlers/main.yml b/roles/sram_metadata/handlers/main.yml deleted file mode 100644 index 2787eba12..000000000 --- a/roles/sram_metadata/handlers/main.yml +++ /dev/null @@ -1,19 +0,0 @@ ---- -# - name: "enable pyff-metadata job" -# systemd: -# name: "pyff-metadata.timer" -# enabled: true -# state: "started" -# daemon_reload: true - -# - name: "run pyff-metadata job" -# systemd: -# name: "pyff-metadata.service" -# state: "started" -# daemon_reload: true - -- name: Restart the pyFF container - community.docker.docker_container: - name: "sram-metadata-pyff" - restart: true - state: started diff --git a/roles/sram_metadata/tasks/http.yml b/roles/sram_metadata/tasks/http.yml deleted file mode 100644 index 561201096..000000000 --- a/roles/sram_metadata/tasks/http.yml +++ /dev/null @@ -1,48 +0,0 @@ ---- -- name: "Install index page" - template: - src: "index.html.j2" - 
dest: "{{sram_metadata_basedir}}/web/index.html" - mode: "0644" - -- name: "Install legacy link" - file: - src: "." - dest: "{{sram_metadata_basedir}}/web/metadata" - state: "link" - -- name: "Install logos" - copy: - src: "{{item}}" - dest: "{{sram_metadata_basedir}}/web" - mode: "0644" - with_items: - - "surf.svg" - - "surf.png" - - "surf_bimi.svg" - -- name: "Create the metadata-server container" - community.docker.docker_container: - name: "sram-metadata-server" - image: "{{ sram_metadata_image_server }}" - restart_policy: "always" - state: "started" - pull: true - mounts: - - source: "{{sram_metadata_basedir}}/web" - target: "/var/www/html" - type: "bind" - read_only: true - networks: - - name: "loadbalancer" - labels: - traefik.http.routers.metadata.rule: "Host(`{{ sram_metadata_hostname }}`)" - traefik.http.routers.metadata.tls: "true" - traefik.enable: "true" - healthcheck: - test: [ "CMD", "curl", "-fail", "http://localhost/" ] - interval: "10s" - timeout: "5s" - retries: 3 - start_period: "5s" - diff --git a/roles/sram_metadata/tasks/main.yml b/roles/sram_metadata/tasks/main.yml deleted file mode 100644 index 89c0ce9e9..000000000 --- a/roles/sram_metadata/tasks/main.yml +++ /dev/null @@ -1,49 +0,0 @@ ---- -- name: "Create metadata group" - group: - name: "{{ sram_metadata_group }}" - state: "present" - register: "result" - -- name: "Save metadata group gid" - set_fact: - metadata_group_gid: "{{ result.gid }}" - -- name: "Create metadata user" - user: - name: "{{ sram_metadata_user }}" - group: "{{ sram_metadata_group }}" - comment: "User to run metadata service" - shell: "/bin/false" - password: "!" - home: "{{ sram_metadata_basedir }}" - create_home: false - state: "present" - register: "result" - -- name: "Save metadata user uid" - set_fact: - metadata_user_uid: "{{ result.uid }}" - - -- name: "Create metadata directories" - file: - path: "{{ item.dir }}" - state: "directory" - mode: "{{ item.mode }}" - owner: "root" - group: "{{ sram_metadata_group }}" - with_items: - - { dir: "{{sram_metadata_basedir}}/web", mode: "0775" } - - { dir: "{{sram_metadata_basedir}}/feeds", mode: "0755" } - - { dir: "{{sram_metadata_basedir}}/src", mode: "0755" } - - { dir: "{{sram_metadata_basedir}}/certs", mode: "0755" } - - { dir: "{{sram_metadata_basedir}}/xslt", mode: "0755" } - notify: "Restart the pyFF container" - - -- name: "Start pyff container" - include_tasks: "pyff.yml" - -- name: "Start http container" - include_tasks: "http.yml" diff --git a/roles/sram_metadata/tasks/pyff.yml b/roles/sram_metadata/tasks/pyff.yml deleted file mode 100644 index 4e9b960d3..000000000 --- a/roles/sram_metadata/tasks/pyff.yml +++ /dev/null @@ -1,106 +0,0 @@ ---- -- name: "create self-signed Metadata Signing SSL certs" - shell: - cmd: ' - openssl genrsa -out "{{ sram_metadata_basedir }}/certs/signing.key" 2048; - openssl req -new -nodes -x509 -subj "/C=NL/CN=signing" - -days 3650 -key "{{ sram_metadata_basedir }}/certs/signing.key" - -out "{{ sram_metadata_basedir }}/certs/signing.crt" -extensions v3_ca; - chown {{sram_metadata_user}}:{{sram_metadata_group}} {{ sram_metadata_basedir }}/certs/*; - ' - creates: "{{ sram_metadata_basedir }}/certs/signing.crt" - when: "sram_metadata_signing_cert is not defined" - notify: "Restart the pyFF container" - -- name: "Write fixed Metadata signing certificates" - copy: - dest: "{{ sram_metadata_basedir }}/certs/{{ item.file }}" - content: "{{item.contents}}" - mode: "{{item.mode}}" - owner: "{{sram_metadata_user}}" - group: "{{sram_metadata_group}}" - with_items: - - { 
file: "signing.key", mode: "0640", contents: "{{sram_metadata_signing_cert.priv}}" } - - { file: "signing.crt", mode: "0644", contents: "{{sram_metadata_signing_cert.pub}}" } - when: "sram_metadata_signing_cert is defined" - notify: "Restart the pyFF container" - -- name: "Copy source certificates" - copy: - src: "{{ item }}" - dest: "{{ sram_metadata_basedir }}/certs" - mode: "0644" - with_items: - - "surfconext.crt" - notify: "Restart the pyFF container" - -- name: "Install IdP metadata" - copy: - content: "{{item.metadata}}" - dest: "{{ sram_metadata_basedir }}/src/{{item.name}}.xml" - mode: "0644" - with_items: "{{ sram_metadata_idps_files }}" - notify: "Restart the pyFF container" - -- name: "Copy pyFF xslt transformations" - copy: - src: "{{item}}" - dest: "{{sram_metadata_basedir}}/xslt" - mode: "0644" - with_items: - - "transform_proxy.xslt" - - "transform.xslt" - notify: "Restart the pyFF container" - -- name: "Copy pyFF feeds" - copy: - src: "{{item}}" - dest: "{{sram_metadata_basedir}}/feeds" - mode: "0644" - with_items: - - "01_idps.fd" - - "02_backend.fd" - - "03_frontend.fd" - notify: "Restart the pyFF container" - -- name: "Create the pyFF container" - community.docker.docker_container: - name: "sram-metadata-pyff" - image: "{{ sram_metadata_image_pyff }}" - restart_policy: "always" - state: "started" - pull: true - init: true - env: - USER: "{{ metadata_user_uid | string }}" - GROUP: "{{ metadata_group_gid | string }}" - mounts: - - source: "{{ sram_metadata_basedir }}/web" - target: "/opt/pyff/web" - type: "bind" - - source: "{{ sram_metadata_basedir }}/feeds" - target: "/opt/pyff/feeds" - type: "bind" - read_only: true - - source: "{{ sram_metadata_basedir }}/src" - target: "/opt/pyff/src" - type: "bind" - read_only: true - - source: "{{ sram_metadata_basedir }}/certs" - target: "/opt/pyff/certs" - type: "bind" - read_only: true - - source: "{{ sram_metadata_basedir }}/xslt" - target: "/opt/pyff/xslt" - type: "bind" - read_only: true - healthcheck: - test: - - "CMD" - - "bash" - - "-c" - - "[[ $(($(date +%s)-$(date -r /opt/pyff/web/idps.xml +%s))) -lt 400 ]]" - interval: "10s" - timeout: "5s" - retries: 3 - start_period: "5s" diff --git a/roles/sram_metadata/templates/index.html.j2 b/roles/sram_metadata/templates/index.html.j2 deleted file mode 100644 index f0e40b22b..000000000 --- a/roles/sram_metadata/templates/index.html.j2 +++ /dev/null @@ -1,11 +0,0 @@ - -SRAM - - -

-SRAM metadata
-SRAM IdP proxy metadata
-(for use by Service Providers)
-SRAM SP proxy metadata
-(for use by Identity Providers)

- - diff --git a/roles/sram_metadata/templates/pyff-metadata.service.j2 b/roles/sram_metadata/templates/pyff-metadata.service.j2 deleted file mode 100644 index 3df9cc6e8..000000000 --- a/roles/sram_metadata/templates/pyff-metadata.service.j2 +++ /dev/null @@ -1,12 +0,0 @@ -[Unit] -Description=pyFF Metadata processing -After=syslog.target network.target - -[Service] -Type=oneshot -WorkingDirectory={{metadata.basedir}} -ExecStart=echo "pyff-metadata" -SyslogIdentifier=pyff-metadata - -[Install] -WantedBy=multi-user.target diff --git a/roles/sram_metadata/templates/pyff-metadata.timer.j2 b/roles/sram_metadata/templates/pyff-metadata.timer.j2 deleted file mode 100644 index b1231af1f..000000000 --- a/roles/sram_metadata/templates/pyff-metadata.timer.j2 +++ /dev/null @@ -1,8 +0,0 @@ -[Unit] -Description=Create Metadata timer - -[Timer] -OnCalendar=*:00 - -[Install] -WantedBy=multi-user.target diff --git a/roles/sram_metadata/templates/vhosts.conf.j2 b/roles/sram_metadata/templates/vhosts.conf.j2 deleted file mode 100644 index be3733827..000000000 --- a/roles/sram_metadata/templates/vhosts.conf.j2 +++ /dev/null @@ -1,15 +0,0 @@ - - ServerName sram-metadata-server - DocumentRoot /var/www/html - Header always set Referrer-Policy "strict-origin-when-cross-origin" - Header always set X-Content-Type-Options "nosniff" - Header always set X-XSS-Protection "1; mode=block" - - Require all granted - - - Require all granted - Options FollowSymLinks - Options -MultiViews - - diff --git a/roles/sram_midproxy/defaults/main.yml b/roles/sram_midproxy/defaults/main.yml index 3522fcb47..aeac841ea 100644 --- a/roles/sram_midproxy/defaults/main.yml +++ b/roles/sram_midproxy/defaults/main.yml @@ -1,8 +1,7 @@ --- -midproxy: - satosa_version: 8 - state_encryption_key: 'secret' - issuer: 'issuer' - client_id: 'client' - client_secret: 'secret' - sp_metadata: 'eb-metadata.xml' +sram_midproxy_satosa_version: 8 +sram_midproxy_state_encryption_key: 'secret' +sram_midproxy_issuer: 'issuer' +sram_midproxy_client_id: 'client' +sram_midproxy_client_secret: 'secret' +sram_midproxy_sp_metadata: 'eb-metadata.xml' diff --git a/roles/sram_midproxy/tasks/main.yml b/roles/sram_midproxy/tasks/main.yml index 2962a6f08..5270aab51 100644 --- a/roles/sram_midproxy/tasks/main.yml +++ b/roles/sram_midproxy/tasks/main.yml @@ -9,8 +9,8 @@ - name: Copy EB SP metadata ansible.builtin.copy: - src: "{{ inventory_dir }}/files/midproxy/{{ midproxy.sp_metadata }}" - dest: "/opt/sram/midproxy/{{ midproxy.sp_metadata }}" + src: "{{ inventory_dir }}/files/midproxy/{{ sram_midproxy_sp_metadata }}" + dest: "/opt/sram/midproxy/{{ sram_midproxy_sp_metadata }}" owner: 1000 group: 1000 mode: "0740" @@ -28,8 +28,8 @@ - name: Create the SATOSA container community.docker.docker_container: - name: midproxy - image: satosa:{{ midproxy.satosa_version }} + name: sram-midproxy + image: docker.io/satosa:{{ sram_midproxy_satosa_version }} pull: true restart_policy: "always" state: started @@ -38,11 +38,11 @@ - name: "loadbalancer" env: SATOSA_BASE: 'https://midproxy.{{ openconextaccess_base_domain }}' - SATOSA_STATE_ENCRYPTION_KEY: '{{ midproxy_state_encryption_key }}' - SATOSA_SP_METADATA: '{{ midproxy.sp_metadata }}' - SATOSA_ISSUER: '{{ midproxy.issuer }}' - SATOSA_CLIENT_ID: '{{ midproxy_client_id }}' - SATOSA_CLIENT_SECRET: '{{ midproxy_client_secret }}' + SATOSA_STATE_ENCRYPTION_KEY: '{{ sram_midproxy_state_encryption_key }}' + SATOSA_SP_METADATA: '{{ sram_midproxy_sp_metadata }}' + SATOSA_ISSUER: '{{ sram_midproxy_issuer }}' + SATOSA_CLIENT_ID: '{{ 
sram_midproxy_client_id }}' + SATOSA_CLIENT_SECRET: '{{ sram_midproxy_client_secret }}' volumes: - /opt/sram/midproxy:/etc/satosa labels: diff --git a/roles/sram_plsc/defaults/main.yml b/roles/sram_plsc/defaults/main.yml index f3b60a23e..6dd2780a1 100644 --- a/roles/sram_plsc/defaults/main.yml +++ b/roles/sram_plsc/defaults/main.yml @@ -1,12 +1,12 @@ --- -plsc_image: "ghcr.io/surfscz/sram-plsc:main" -plsc_conf_dir: "{{current_release_appdir}}/sram/plsc" -plsc_ansible_nolog: false -plsc_ldap_uri: "ldap://ldap:389/" -plsc_ldap_basedn: "dc=services,dc=vnet" -plsc_ldap_binddn: "cn=admin,dc=vnet" -plsc_ldap_password: "secret" -plsc_sbs_host: "http://sbs-server:8080" -plsc_sbs_user: "sysread" -plsc_sbs_password: "secret" -plsc_retry: 3 +sram_plsc_image: "ghcr.io/surfscz/sram-plsc:main" +sram_plsc_conf_dir: "{{current_release_appdir}}/sram/plsc" +sram_plsc_ansible_nolog: false +sram_plsc_ldap_uri: "ldap://ldap:389/" +sram_plsc_ldap_basedn: "dc=services,dc=vnet" +sram_plsc_ldap_binddn: "cn=admin,dc=vnet" +sram_plsc_ldap_password: "secret" +sram_plsc_sbs_host: "http://sbs-server:8080" +sram_plsc_sbs_user: "sysread" +sram_plsc_sbs_password: "secret" +sram_plsc_retry: 3 diff --git a/roles/sram_plsc/handlers/main.yml b/roles/sram_plsc/handlers/main.yml index 70cbb3672..a0dee373a 100644 --- a/roles/sram_plsc/handlers/main.yml +++ b/roles/sram_plsc/handlers/main.yml @@ -1,16 +1,4 @@ --- -# - name: enable plsc job -# systemd: -# name: "plsc.timer" -# enabled: true -# state: "restarted" -# daemon_reload: true - -# - name: "restart zabbix-agent" -# systemd: -# name: "zabbix-agent2.service" -# state: "restarted" - - name: Restart the plsc container community.docker.docker_container: name: sram-plsc diff --git a/roles/sram_plsc/tasks/main.yml b/roles/sram_plsc/tasks/main.yml index 0523ce118..2111b8646 100644 --- a/roles/sram_plsc/tasks/main.yml +++ b/roles/sram_plsc/tasks/main.yml @@ -1,28 +1,28 @@ --- - name: Make sure clients sync directory exists file: - path: "{{ plsc_conf_dir }}" + path: "{{ sram_plsc_conf_dir }}" state: directory mode: "0755" - name: "Create plsc.yml source if it doesn't exist" template: src: "plsc.yml.j2" - dest: "{{ plsc_conf_dir }}/plsc.yml" + dest: "{{ sram_plsc_conf_dir }}/plsc.yml" mode: "0640" - no_log: "{{plsc_ansible_nolog}}" + no_log: "{{ sram_plsc_ansible_nolog }}" notify: "Restart the plsc container" - name: Create the plsc container community.docker.docker_container: name: "sram-plsc" - image: "{{ plsc_image }}" + image: "{{ sram_plsc_image }}" restart_policy: "always" state: started pull: true mounts: - type: bind - source: "{{ plsc_conf_dir }}/plsc.yml" + source: "{{ sram_plsc_conf_dir }}/plsc.yml" target: "/opt/plsc/plsc.yml" networks: # TODO: Should this not be parametrized? 
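Since the plsc role defaults above are now prefixed with sram_plsc_, inventories that still set the old plsc_* names need to switch to the prefixed names as well. A minimal group_vars sketch using the renamed variables; the hostnames, DNs and vault lookups below are illustrative placeholders, not values taken from this patch:

# group_vars sketch — overriding the renamed sram_plsc_* defaults (illustrative values only)
sram_plsc_ldap_uri: "ldap://sram-ldap:389/"
sram_plsc_ldap_basedn: "dc=services,dc=sram,dc=example,dc=org"
sram_plsc_ldap_binddn: "cn=admin,dc=sram,dc=example,dc=org"
sram_plsc_ldap_password: "{{ vault_sram_plsc_ldap_password }}"
sram_plsc_sbs_host: "https://sbs.sram.example.org"
sram_plsc_sbs_user: "sysread"
sram_plsc_sbs_password: "{{ vault_sram_plsc_sbs_password }}"
sram_plsc_retry: 3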
diff --git a/roles/sram_plsc/templates/plsc.yml.j2 b/roles/sram_plsc/templates/plsc.yml.j2 index f1e7d4c6c..069f14d8b 100644 --- a/roles/sram_plsc/templates/plsc.yml.j2 +++ b/roles/sram_plsc/templates/plsc.yml.j2 @@ -1,25 +1,25 @@ --- ldap: src: - uri: "{{ plsc_ldap_uri }}" - basedn: "{{ plsc_ldap_basedn }}" - binddn: "{{ plsc_ldap_binddn }}" - passwd: "{{ plsc_ldap_password }}" + uri: "{{ sram_plsc_ldap_uri }}" + basedn: "{{ sram_plsc_ldap_basedn }}" + binddn: "{{ sram_plsc_ldap_binddn }}" + passwd: "{{ sram_plsc_ldap_password }}" sizelimit: 500 dst: - uri: "{{ plsc_ldap_uri }}" - basedn: "{{ plsc_ldap_basedn }}" - binddn: "{{ plsc_ldap_binddn }}" - passwd: "{{ plsc_ldap_password }}" + uri: "{{ sram_plsc_ldap_uri }}" + basedn: "{{ sram_plsc_ldap_basedn }}" + binddn: "{{ sram_plsc_ldap_binddn }}" + passwd: "{{ sram_plsc_ldap_password }}" sizelimit: 500 sbs: src: - host: "{{ plsc_sbs_host }}" - user: "{{ plsc_sbs_user }}" - passwd: "{{ plsc_sbs_password }}" + host: "{{ sram_plsc_sbs_host }}" + user: "{{ sram_plsc_sbs_user }}" + passwd: "{{ sram_plsc_sbs_password }}" verify_ssl: {{ false if env=='vm' else true }} timeout: 60 - retry: {{ plsc_retry }} + retry: {{ sram_plsc_retry }} pwd: "{CRYPT}!" uid: 1000 gid: 1000 diff --git a/roles/sram_redis/defaults/main.yml b/roles/sram_redis/defaults/main.yml index 857311145..28022a1af 100644 --- a/roles/sram_redis/defaults/main.yml +++ b/roles/sram_redis/defaults/main.yml @@ -1,9 +1,9 @@ --- -redis_image: "docker.io/library/redis:7" -redis_conf_dir: "{{ current_release_appdir }}/sram/redis" -redis_data_dir: "{{ current_release_appdir }}/sram/redis/data" -redis_user: redis -redis_group: redis -redis_redis_user: default -redis_redis_password: changethispassword -redis_max_memory: 100mb +sram_redis_image: "docker.io/library/redis:7" +sram_redis_conf_dir: "{{ current_release_appdir }}/sram/redis" +sram_redis_data_dir: "{{ current_release_appdir }}/sram/redis/data" +sram_redis_user: redis +sram_redis_group: redis +sram_redis_redis_user: default +sram_redis_redis_password: changethispassword +sram_redis_max_memory: 100mb diff --git a/roles/sram_redis/tasks/main.yml b/roles/sram_redis/tasks/main.yml index 0212fe0b0..b6fb8a694 100644 --- a/roles/sram_redis/tasks/main.yml +++ b/roles/sram_redis/tasks/main.yml @@ -1,7 +1,7 @@ --- - name: "Create redis group" group: - name: "{{ redis_group }}" + name: "{{ sram_redis_group }}" state: "present" register: "result" @@ -11,12 +11,12 @@ - name: "Create redis user" user: - name: "{{ redis_user }}" - group: "{{ redis_group }}" + name: "{{ sram_redis_user }}" + group: "{{ sram_redis_group }}" comment: "User to run SRAM Redis service" shell: "/bin/false" password: "!" 
- home: "{{ redis_conf_dir }}" + home: "{{ sram_redis_conf_dir }}" create_home: false state: "present" register: "result" @@ -29,34 +29,34 @@ file: path: "{{item.path}}" state: "directory" - owner: "{{ redis_user }}" - group: "{{ redis_group }}" + owner: "{{ sram_redis_user }}" + group: "{{ sram_redis_group }}" mode: "{{item.mode}}" with_items: - - { path: "{{redis_conf_dir}}", mode: "0755" } - - { path: "{{redis_data_dir}}", mode: "0755" } + - { path: "{{sram_redis_conf_dir}}", mode: "0755" } + - { path: "{{sram_redis_data_dir}}", mode: "0755" } - name: "Create redis config" template: src: "redis.conf.j2" - dest: "{{ redis_conf_dir }}/redis.conf" - owner: "{{ redis_user }}" - group: "{{ redis_group }}" + dest: "{{ sram_redis_conf_dir }}/redis.conf" + owner: "{{ sram_redis_user }}" + group: "{{ sram_redis_group }}" mode: "0644" notify: "Restart redis container" - name: "Create redis container" community.docker.docker_container: name: "sram-redis" - image: "{{ redis_image }}" + image: "{{ sram_redis_image }}" restart_policy: "always" state: "started" user: "{{ redis_user_uid }}:{{ redis_group_gid }}" command: | redis-server /usr/local/etc/redis/redis.conf volumes: - - "{{ redis_conf_dir }}:/usr/local/etc/redis" - - "{{ redis_data_dir }}:/data" + - "{{ sram_redis_conf_dir }}:/usr/local/etc/redis" + - "{{ sram_redis_data_dir }}:/data" networks: # TODO: Should this not be parametrized? - name: loadbalancer diff --git a/roles/sram_redis/templates/redis.conf.j2 b/roles/sram_redis/templates/redis.conf.j2 index 159ea9599..14d3ef177 100644 --- a/roles/sram_redis/templates/redis.conf.j2 +++ b/roles/sram_redis/templates/redis.conf.j2 @@ -1,3 +1,3 @@ -user {{redis_redis_user}} on +@all ~* &* >{{redis_redis_password}} -maxmemory {{ redis_max_memory }} +user {{ sram_redis_redis_user }} on +@all ~* &* >{{ sram_redis_redis_password }} +maxmemory {{ sram_redis_max_memory }} maxmemory-policy allkeys-lru diff --git a/roles/sram_sbs/defaults/main.yml b/roles/sram_sbs/defaults/main.yml index fe8696869..64bc443e4 100644 --- a/roles/sram_sbs/defaults/main.yml +++ b/roles/sram_sbs/defaults/main.yml @@ -1,130 +1,129 @@ --- -sbs_base_domain: "test2.sram.surf.nl" -sbs_ansible_nolog: true -sbs_base_url: "https://{{ sbs_base_domain }}" -sbs_server_image: "ghcr.io/surfscz/sram-sbs-server:main" -sbs_client_image: "ghcr.io/surfscz/sram-sbs-client:main" - -sbs_openidc_timeout: 86400 -sbs_sram_conf_dir: "{{ current_release_appdir }}/sram" - -sbs_work_dir: "{{ sbs_sram_conf_dir }}/sbs" -sbs_git_dir: "{{ sbs_work_dir }}/sbs" -sbs_env_dir: "{{ sbs_work_dir }}/sbs-env" -sbs_conf_dir: "{{ sbs_work_dir }}/config" -sbs_log_dir: "{{ sbs_work_dir }}/log" -sbs_apache_conf: "{{ sbs_work_dir }}/sbs.conf" -sbs_nginx_conf: "{{ sbs_work_dir }}/nginx.conf" - -sbs_db_name: "sbs" -sbs_db_user: "sbsrw" -# dbbackup_user: "sbs_backupper" -sbs_migration_user: "sbsmigrate" - -sbs_db_connection: "\ - mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_db_name }}\ +sram_sbs_base_domain: "test2.sram.surf.nl" +sram_sbs_ansible_nolog: true +sram_sbs_base_url: "https://{{ sram_sbs_base_domain }}" +sram_sbs_server_image: "ghcr.io/surfscz/sram-sbs-server:main" +sram_sbs_client_image: "ghcr.io/surfscz/sram-sbs-client:main" + +sram_sbs_openidc_timeout: 86400 +sram_sbs_sram_conf_dir: "{{ current_release_appdir }}/sram" + +sram_sbs_work_dir: "{{ sram_sbs_sram_conf_dir }}/sbs" +sram_sbs_git_dir: "{{ sram_sbs_work_dir }}/sbs" +sram_sbs_env_dir: "{{ sram_sbs_work_dir }}/sbs-env" +sram_sbs_conf_dir: "{{ sram_sbs_work_dir }}/config" +sram_sbs_log_dir: "{{ 
sram_sbs_work_dir }}/log" +sram_sbs_apache_conf: "{{ sram_sbs_work_dir }}/sbs.conf" +sram_sbs_nginx_conf: "{{ sram_sbs_work_dir }}/nginx.conf" + +sram_sbs_db_name: "sbs" +sram_sbs_db_user: "sbsrw" +sram_sbs_migration_user: "sbsmigrate" + +sram_sbs_db_connection: "\ + mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sram_sbs_db_name }}\ ?ssl=true&charset=utf8mb4" -sbs_db_connection_sbs: "{{ sbs_db_connection | format(sbs_db_user, mysql_passwords.sbs) }}" -sbs_db_connection_migration: "\ - {{ sbs_db_connection | format(sbs_migration_user, mysql_passwords.sbsmigrate) }}" - -sbs_db_secret: secret -sbs_secret_key_suffix: suffix -sbs_encryption_key: encryption_key - -sbs_redis_host: sram-redis -sbs_redis_port: 6379 -sbs_redis_ssl: false -sbs_redis_user: default - -sbs_mail_host: "host.docker.internal" -sbs_mail_port: 25 - -sbs_user: "sbs" -sbs_group: "sbs" - -sbs_session_lifetime: 1440 -sbs_secret_key_suffix: "" - -sbs_oidc_crypto_password: "CHANGEME" -sbs_uid_attribute: "sub" - -sbs_disclaimer_color: "#a29c13" -sbs_disclaimer_label: wsgi - -sbs_urn_namespace: "urn:example:sbs" -sbs_eppn_scope: "sbs.example.edu" -sbs_restricted_co_default_org: "example.org" - -sbs_mail_sender_name: "SURF" -sbs_mail_sender_email: "no-reply@localhost" -sbs_exceptions_mail: "root@localhost" - -sbs_support_email: "sram-support@localhost" -sbs_admin_email: "sram-beheer@localhost" -sbs_ticket_email: "sram-support@surf.nl" -sbs_eduteams_email: "eduteams@localhost" - -sbs_suppress_mails: False - -sbs_wiki_link: "https://www.example.org/wiki" - -sbs_cron_hour_of_day: 4 -sbs_seed_allowed: True -sbs_api_keys_enabled: True -sbs_feedback_enabled: True -sbs_audit_trail_notifications_enabled: True -sbs_send_exceptions: False -sbs_send_js_exceptions: False -sbs_second_factor_authentication_required: True -sbs_totp_token_name: "SRAM-example" -sbs_notifications_enabled: True -sbs_invitation_reminders_enabled: True -sbs_invitation_expirations_enabled: True -sbs_open_requests_enabled: True -sbs_scim_sweep: False -sbs_impersonation_allowed: True -sbs_admin_platform_backdoor_totp: True -sbs_past_dates_allowed: True -sbs_mock_scim_enabled: True -sbs_log_to_stdout: True - -sbs_delete_orphaned: True -sbs_suspension_inactive_days: 365 -sbs_suspension_reminder_days: 14 -sbs_suspension_notify_admin: False - -sbs_oidc_config_url: "http://localhost/.well-known/openid-configuration" -sbs_oidc_authz_endpoint: "http://localhost/OIDC/authorization" -sbs_oidc_token_endpoint: "http://localhost/OIDC/token" -sbs_oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" -sbs_oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" -sbs_oidc_redirect_uri: "https://{{sbs_base_domain}}/api/users/resume-session" -sbs_oidc_jwt_audience: "https://localhost" -sbs_continue_eduteams_redirect_uri: "https://localhost/continue" -sbs_oidc_verify_peer: False -sbs_oidc_scopes: +sram_sbs_db_connection_sbs: "{{ sram_sbs_db_connection | format(sram_sbs_db_user, mysql_passwords.sbs) }}" +sram_sbs_db_connection_migration: "\ + {{ sram_sbs_db_connection | format(sram_sbs_migration_user, mysql_passwords.sbsmigrate) }}" + +sram_sbs_db_secret: secret +sram_sbs_secret_key_suffix: suffix +sram_sbs_encryption_key: encryption_key + +sram_sbs_redis_host: sram-redis +sram_sbs_redis_port: 6379 +sram_sbs_redis_ssl: false +sram_sbs_redis_user: default + +sram_sbs_mail_host: "host.docker.internal" +sram_sbs_mail_port: 25 + +sram_sbs_user: "sbs" +sram_sbs_group: "sbs" + +sram_sbs_session_lifetime: 1440 +sram_sbs_secret_key_suffix: "" + +sram_sbs_oidc_crypto_password: "CHANGEME" 
+sram_sbs_uid_attribute: "sub" + +sram_sbs_disclaimer_color: "#a29c13" +sram_sbs_disclaimer_label: wsgi + +sram_sbs_urn_namespace: "urn:example:sbs" +sram_sbs_eppn_scope: "sbs.example.edu" +sram_sbs_restricted_co_default_org: "example.org" + +sram_sbs_mail_sender_name: "SURF" +sram_sbs_mail_sender_email: "no-reply@localhost" +sram_sbs_exceptions_mail: "root@localhost" + +sram_sbs_support_email: "sram-support@localhost" +sram_sbs_admin_email: "sram-beheer@localhost" +sram_sbs_ticket_email: "sram-support@surf.nl" +sram_sbs_eduteams_email: "eduteams@localhost" + +sram_sbs_suppress_mails: False + +sram_sbs_wiki_link: "https://www.example.org/wiki" + +sram_sbs_cron_hour_of_day: 4 +sram_sbs_seed_allowed: True +sram_sbs_api_keys_enabled: True +sram_sbs_feedback_enabled: True +sram_sbs_audit_trail_notifications_enabled: True +sram_sbs_send_exceptions: False +sram_sbs_send_js_exceptions: False +sram_sbs_second_factor_authentication_required: True +sram_sbs_totp_token_name: "SRAM-example" +sram_sbs_notifications_enabled: True +sram_sbs_invitation_reminders_enabled: True +sram_sbs_invitation_expirations_enabled: True +sram_sbs_open_requests_enabled: True +sram_sbs_scim_sweep: False +sram_sbs_impersonation_allowed: True +sram_sbs_admin_platform_backdoor_totp: True +sram_sbs_past_dates_allowed: True +sram_sbs_mock_scim_enabled: True +sram_sbs_log_to_stdout: True + +sram_sbs_delete_orphaned: True +sram_sbs_suspension_inactive_days: 365 +sram_sbs_suspension_reminder_days: 14 +sram_sbs_suspension_notify_admin: False + +sram_sbs_oidc_config_url: "http://localhost/.well-known/openid-configuration" +sram_sbs_oidc_authz_endpoint: "http://localhost/OIDC/authorization" +sram_sbs_oidc_token_endpoint: "http://localhost/OIDC/token" +sram_sbs_oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" +sram_sbs_oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" +sram_sbs_oidc_redirect_uri: "https://{{sram_sbs_base_domain}}/api/users/resume-session" +sram_sbs_oidc_jwt_audience: "https://localhost" +sram_sbs_continue_eduteams_redirect_uri: "https://localhost/continue" +sram_sbs_oidc_verify_peer: False +sram_sbs_oidc_scopes: - openid -sbs_mfa_idp_allowed: false -sbs_eduteams_continue_endpoint: "https://localhost/continue" -sbs_eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" +sram_sbs_mfa_idp_allowed: false +sram_sbs_eduteams_continue_endpoint: "https://localhost/continue" +sram_sbs_eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" -sbs_manage_base_enabled: False -sbs_manage_base_url: "https://manage.{{base_domain}}" -sbs_manage_sram_rp_entity_id: "sbs.{{sbs_base_domain}}" -sbs_manage_verify_peer: False +sram_sbs_manage_base_enabled: False +sram_sbs_manage_base_url: "https://manage.{{base_domain}}" +sram_sbs_manage_sram_rp_entity_id: "sbs.{{sram_sbs_base_domain}}" +sram_sbs_manage_verify_peer: False -sbs_idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " +sram_sbs_idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " # backup_dir: "{{backup_base}}/sbs" -sbs_swagger_enabled: true +sram_sbs_swagger_enabled: true -sbs_ssid_identity_providers: [] -sbs_surf_secure_id: +sram_sbs_ssid_identity_providers: [] +sram_sbs_surf_secure_id: environment: "unknown.example.org" - sp_entity_id: "https://sbs.{{sbs_base_domain}}" - acs_url: "https://{{sbs_base_domain}}/api/users/acs" + sp_entity_id: "https://sbs.{{sram_sbs_base_domain}}" + acs_url: "https://{{sram_sbs_base_domain}}/api/users/acs" sa_gw_environment: 
"sa-gw.unknown.example.org" sa_idp_certificate: | -----BEGIN CERTIFICATE----- @@ -139,20 +138,20 @@ sbs_surf_secure_id: 12345 -----END CERTIFICATE----- -sbs_ssid_authncontext: "\ - http://{{ sbs_surf_secure_id.environment }}/assurance/sfo-level2" -sbs_ssid_entityid: "\ - https://{{ sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" -sbs_ssid_sso_endpoint: "\ - https://{{ sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" +sram_sbs_ssid_authncontext: "\ + http://{{ sram_sbs_surf_secure_id.environment }}/assurance/sfo-level2" +sram_sbs_ssid_entityid: "\ + https://{{ sram_sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" +sram_sbs_ssid_sso_endpoint: "\ + https://{{ sram_sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" -sbs_mfa_sso_minutes: 10 -sbs_mfa_fallback_enabled: true +sram_sbs_mfa_sso_minutes: 10 +sram_sbs_mfa_fallback_enabled: true -sbs_ldap_url: "ldap://ldap.example.com/dc=example,dc=com" -sbs_ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl" +sram_sbs_ldap_url: "ldap://ldap.example.com/dc=example,dc=com" +sram_sbs_ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl" -sbs_csp_style_hashes: +sram_sbs_csp_style_hashes: - 'sha256-0+ANsgYUJdh56RK8gGvTF2vnriYqvFHfWqtA8xXa+bA=' - 'sha256-3SnfHQolDHbZMbDAPmhrZf1keHiXfj/KJyh2phhFAAY=' - 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' @@ -162,4 +161,4 @@ sbs_csp_style_hashes: - 'sha256-SU3XCwbQ/8qgzoGOWCYdkwIr3xRrl5rsvdFcpw8NSiE=' # on /new-service-request - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request -sbs_engine_block_api_token: secret +sram_sbs_engine_block_api_token: secret diff --git a/roles/sram_sbs/tasks/main.yml b/roles/sram_sbs/tasks/main.yml index c2cae754f..a9378fa36 100644 --- a/roles/sram_sbs/tasks/main.yml +++ b/roles/sram_sbs/tasks/main.yml @@ -1,7 +1,7 @@ --- - name: "Create SBS group" group: - name: "{{ sbs_group }}" + name: "{{ sram_sbs_group }}" state: "present" register: "result" @@ -11,12 +11,12 @@ - name: "Create SBS user" user: - name: "{{ sbs_user }}" - group: "{{ sbs_group }}" + name: "{{ sram_sbs_user }}" + group: "{{ sram_sbs_group }}" comment: "User to run SBS service" shell: "/bin/false" password: "!" 
- home: "{{ sbs_conf_dir }}" + home: "{{ sram_sbs_conf_dir }}" create_home: false state: "present" register: "result" @@ -33,14 +33,14 @@ group: "{{sbs_group_gid}}" mode: "{{item.mode}}" with_items: - - { path: "{{sbs_work_dir}}", mode: "0755" } - - { path: "{{sbs_conf_dir}}", mode: "0755" } - - { path: "{{sbs_conf_dir}}/saml", mode: "0755" } - - { path: "{{sbs_log_dir}}", mode: "0775" } + - { path: "{{sram_sbs_work_dir}}", mode: "0755" } + - { path: "{{sram_sbs_conf_dir}}", mode: "0755" } + - { path: "{{sram_sbs_conf_dir}}/saml", mode: "0755" } + - { path: "{{sram_sbs_log_dir}}", mode: "0775" } - name: "Fix file permissions" file: - path: "{{sbs_log_dir}}/{{item}}" + path: "{{sram_sbs_log_dir}}/{{item}}" owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "0664" @@ -54,7 +54,7 @@ - name: "Create SBS config files" template: src: "{{item.name}}.j2" - dest: "{{ sbs_conf_dir }}/{{item.name}}" + dest: "{{ sram_sbs_conf_dir }}/{{item.name}}" owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "{{item.mode}}" @@ -63,26 +63,26 @@ - { name: "alembic.ini", mode: "0644" } - { name: "disclaimer.css", mode: "0644" } - { name: "sbs-apache.conf", mode: "0644" } - no_log: "{{sbs_ansible_nolog}}" + no_log: "{{ sram_sbs_ansible_nolog }}" notify: "Restart sbs containers" - name: "Pull sbs image" community.docker.docker_image_pull: name: "{{ item }}" with_items: - - "{{ sbs_client_image }}" - - "{{ sbs_server_image }}" - register: "sbs_image" + - "{{ sram_sbs_client_image }}" + - "{{ sram_sbs_server_image }}" + register: "sram_sbs_image" - name: "Migration" # For some reason --check breaks this block - when: "sbs_image is changed and not ansible_check_mode" + when: "sram_sbs_image is changed and not ansible_check_mode" block: - name: "Run SBS migrations" throttle: 1 community.docker.docker_container: name: "sram-sbs-migration" - image: "{{ sbs_server_image }}" + image: "{{ sram_sbs_server_image }}" pull: "never" state: "started" restart_policy: "no" @@ -94,8 +94,8 @@ # don't actually run the server command: "/bin/true" volumes: - - "{{ sbs_conf_dir }}:/sbs-config" - - "{{ sbs_log_dir }}:/opt/sbs/log" + - "{{ sram_sbs_conf_dir }}:/sbs-config" + - "{{ sram_sbs_log_dir }}:/opt/sbs/log" networks: # TODO: Should we parametrize this? 
- name: "loadbalancer" @@ -104,36 +104,36 @@ changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output | default('')" notify: "Restart sbs containers" - # Remove the migration container; we can do that with auto_remove, because if we use that, ansible - # will not save the output in result + # Remove the migration container; we can't do that with auto_remove, + # because if we use that, ansible will not save the output in result - name: "Remove migration container" community.docker.docker_container: name: "sram-sbs-migration" state: "absent" - # TODO: fix this by only running this if "sbs_image is changed" + # TODO: fix this by only running this if "sram_sbs_image is changed" changed_when: false - name: "Start sbs client container" community.docker.docker_container: name: "sram-sbs-client" - image: "{{ sbs_client_image }}" + image: "{{ sram_sbs_client_image }}" pull: "never" restart_policy: "always" state: "started" volumes: - - "{{ sbs_conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" - - "{{ sbs_conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro" + - "{{ sram_sbs_conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" + - "{{ sram_sbs_conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro" networks: - name: "loadbalancer" labels: - traefik.http.routers.sbsclient.rule: "Host(`{{ sbs_base_domain }}`)" + traefik.http.routers.sbsclient.rule: "Host(`{{ sram_sbs_base_domain }}`)" traefik.http.routers.sbsclient.tls: "true" traefik.enable: "true" - name: "Start SBS server container" community.docker.docker_container: name: "sram-sbs-server" - image: "{{ sbs_server_image }}" + image: "{{ sram_sbs_server_image }}" restart_policy: "always" state: "started" env: @@ -144,8 +144,8 @@ RUN_MIGRATIONS: "0" pull: "never" volumes: - - "{{ sbs_conf_dir }}:/sbs-config" - - "{{ sbs_log_dir }}:/opt/sbs/log" + - "{{ sram_sbs_conf_dir }}:/sbs-config" + - "{{ sram_sbs_log_dir }}:/opt/sbs/log" - "/tmp/ci-runner:/tmp/ci-runner" networks: - name: "loadbalancer" diff --git a/roles/sram_sbs/templates/alembic.ini.j2 b/roles/sram_sbs/templates/alembic.ini.j2 index 7849e4f89..e6049eebb 100644 --- a/roles/sram_sbs/templates/alembic.ini.j2 +++ b/roles/sram_sbs/templates/alembic.ini.j2 @@ -35,7 +35,7 @@ script_location = migrations # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = {{ sbs_db_connection_migration }} +sqlalchemy.url = {{ sram_sbs_db_connection_migration }} # Logging configuration [loggers] diff --git a/roles/sram_sbs/templates/config.yml.j2 b/roles/sram_sbs/templates/config.yml.j2 index 9f597debe..0325a043f 100644 --- a/roles/sram_sbs/templates/config.yml.j2 +++ b/roles/sram_sbs/templates/config.yml.j2 @@ -1,110 +1,110 @@ --- database: - uri: {{ sbs_db_connection_sbs }} + uri: {{ sram_sbs_db_connection_sbs }} redis: {% if environment_shortname == 'test2' %} - uri: "redis://{{ sbs_redis_user }}:{{ sbs_redis_password }}@{{sbs_redis_host}}/" + uri: "redis://{{ sram_sbs_redis_user }}:{{ sram_sbs_redis_password }}@{{ sram_sbs_redis_host }}/" {% else %} - uri: "redis{% if sbs_redis_ssl %}s{% endif %}://{{ sbs_redis_user }}:{{ sbs_redis_password }}@{{ sbs_redis_host }}:{{ sbs_redis_port }}/" + uri: "redis{% if sram_sbs_redis_ssl %}s{% endif %}://{{ sram_sbs_redis_user }}:{{ sram_sbs_redis_password }}@{{ sram_sbs_redis_host }}:{{ sram_sbs_redis_port }}/" {% endif %} # add a per-release suffix here to invalidate sessions on new releases -secret_key: {{ sbs_db_secret }}{{sbs_secret_key_suffix}} +secret_key: 
{{ sram_sbs_db_secret }}{{ sram_sbs_secret_key_suffix }} # Must be a base64 encoded key of 128, 192, or 256 bits. # Generate: base64.b64encode(os.urandom(256 // 8)).decode() -encryption_key: {{ sbs_encryption_key }} +encryption_key: {{ sram_sbs_encryption_key }} # Lifetime of session in minutes (one day is 60 * 24) -permanent_session_lifetime: {{ sbs_session_lifetime }} +permanent_session_lifetime: {{ sram_sbs_session_lifetime }} logging: - log_to_stdout: {{ sbs_log_to_stdout }} + log_to_stdout: {{ sram_sbs_log_to_stdout }} # Valid scopes are "READ" and "WRITE" api_users: -{% for name, user in sbs_api_users.items() %} +{% for name, user in sram_sbs_api_users.items() %} - name: "{{ name }}" password: "{{ user.password }}" scopes: "[ {{ user.scopes | join(', ') }} ]" {% endfor %} oidc: - client_id: "{{ sbs_oidc_client_id }}" - client_secret: "{{ sbs_oidc_client_secret }}" - audience: "{{ sbs_oidc_jwt_audience }}" - verify_peer: {{ sbs_oidc_verify_peer }} - authorization_endpoint: "{{ sbs_oidc_authz_endpoint}}" - token_endpoint: "{{ sbs_oidc_token_endpoint }}" - userinfo_endpoint: "{{ sbs_oidc_userinfo_endpoint }}" - jwks_endpoint: "{{ sbs_oidc_jwks_endpoint }}" + client_id: "{{ sram_sbs_oidc_client_id }}" + client_secret: "{{ sram_sbs_oidc_client_secret }}" + audience: "{{ sram_sbs_oidc_jwt_audience }}" + verify_peer: {{ sram_sbs_oidc_verify_peer }} + authorization_endpoint: "{{ sram_sbs_oidc_authz_endpoint}}" + token_endpoint: "{{ sram_sbs_oidc_token_endpoint }}" + userinfo_endpoint: "{{ sram_sbs_oidc_userinfo_endpoint }}" + jwks_endpoint: "{{ sram_sbs_oidc_jwks_endpoint }}" #Note that the paths for these uri's is hardcoded and only domain and port differ per environment - redirect_uri: "{{ sbs_oidc_redirect_uri }}" - continue_eduteams_redirect_uri: "{{ sbs_eduteams_continue_endpoint }}" - continue_eb_redirect_uri: "{{ sbs_eb_continue_endpoint }}" - second_factor_authentication_required: {{ sbs_second_factor_authentication_required }} - totp_token_name: "{{ sbs_totp_token_name }}" + redirect_uri: "{{ sram_sbs_oidc_redirect_uri }}" + continue_eduteams_redirect_uri: "{{ sram_sbs_eduteams_continue_endpoint }}" + continue_eb_redirect_uri: "{{ sram_sbs_eb_continue_endpoint }}" + second_factor_authentication_required: {{ sram_sbs_second_factor_authentication_required }} + totp_token_name: "{{ sram_sbs_totp_token_name }}" # The service_id in the proxy_authz endpoint when logging into SBS. 
Most likely to equal the oidc.client_id - sram_service_entity_id: "{{ sbs_oidc_client_id }}" - scopes: {{ sbs_oidc_scopes }} + sram_service_entity_id: "{{ sram_sbs_oidc_client_id }}" + scopes: {{ sram_sbs_oidc_scopes }} base_scope: "{{ base_domain }}" -entitlement_group_namespace: "{{ sbs_urn_namespace }}" -eppn_scope: " {{ sbs_eppn_scope }}" +entitlement_group_namespace: "{{ sram_sbs_urn_namespace }}" +eppn_scope: " {{ sram_sbs_eppn_scope }}" scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" {% if env == "prd" %} environment_disclaimer: "" {% else %} -environment_disclaimer: "{{ sbs_disclaimer_label }}" +environment_disclaimer: "{{ sram_sbs_disclaimer_label }}" {% endif %} # All services in the white list can be requested in the create-restricted-co API # The default organisation is a fallback for when the administrator has no schac_home_org restricted_co: services_white_list: [ "https://cloud" ] - default_organisation: "{{ sbs_restricted_co_default_org }}" + default_organisation: "{{ sram_sbs_restricted_co_default_org }}" mail: - host: {{ sbs_mail_host }} - port: {{ sbs_mail_port }} - sender_name: {{ sbs_mail_sender_name }} - sender_email: {{ sbs_mail_sender_email }} - suppress_sending_mails: {{ sbs_suppress_mails }} - info_email: {{ sbs_support_email }} - beheer_email: {{ sbs_admin_email }} - ticket_email: {{ sbs_ticket_email }} - eduteams_email: {{ sbs_eduteams_email }} + host: {{ sram_sbs_mail_host }} + port: {{ sram_sbs_mail_port }} + sender_name: {{ sram_sbs_mail_sender_name }} + sender_email: {{ sram_sbs_mail_sender_email }} + suppress_sending_mails: {{ sram_sbs_suppress_mails }} + info_email: {{ sram_sbs_support_email }} + beheer_email: {{ sram_sbs_admin_email }} + ticket_email: {{ sram_sbs_ticket_email }} + eduteams_email: {{ sram_sbs_eduteams_email }} # Do we mail a summary of new Organizations and Services to the beheer_email? 
- audit_trail_notifications_enabled: {{ sbs_audit_trail_notifications_enabled }} + audit_trail_notifications_enabled: {{ sram_sbs_audit_trail_notifications_enabled }} account_deletion_notifications_enabled: True - send_exceptions: {{ sbs_send_exceptions }} - send_js_exceptions: {{ sbs_send_js_exceptions }} - send_exceptions_recipients: [ "{{ sbs_exceptions_mail }}" ] + send_exceptions: {{ sram_sbs_send_exceptions }} + send_js_exceptions: {{ sram_sbs_send_js_exceptions }} + send_exceptions_recipients: [ "{{ sram_sbs_exceptions_mail }}" ] environment: "{{ base_domain }}" manage: - enabled: {{ sbs_manage_base_enabled }} + enabled: {{ sram_sbs_manage_base_enabled }} # The entity_id of the SRAM RP in Manage for API retrieval, e.g "sbs.test2.sram.surf.nl" - sram_rp_entity_id: "{{ sbs_manage_sram_rp_entity_id }}" - base_url: "{{ sbs_manage_base_url }}" - user: "{{ sbs_manage_user }}" - password: "{{ sbs_manage_password }}" - verify_peer: {{ sbs_manage_verify_peer }} + sram_rp_entity_id: "{{ sram_sbs_manage_sram_rp_entity_id }}" + base_url: "{{ sram_sbs_manage_base_url }}" + user: "{{ sram_sbs_manage_user }}" + password: "{{ sram_sbs_manage_password }}" + verify_peer: {{ sram_sbs_manage_verify_peer }} aup: version: 1 url_aup_en: "https://edu.nl/6wb63" url_aup_nl: "https://edu.nl/6wb63" -base_url: {{ sbs_base_url }} -socket_url: {{ sbs_base_url }} -base_server_url: {{ sbs_base_url }} -wiki_link: {{ sbs_wiki_link }} +base_url: {{ sram_sbs_base_url }} +socket_url: {{ sram_sbs_base_url }} +base_server_url: {{ sram_sbs_base_url }} +wiki_link: {{ sram_sbs_wiki_link }} admin_users: -{% for admin_user in sbs_admin_users %} +{% for admin_user in sram_sbs_admin_users %} - uid: "{{ admin_user.uid }}" {% endfor %} @@ -117,17 +117,17 @@ organisation_categories: - "SURF" feature: - seed_allowed: {{ sbs_seed_allowed }} - api_keys_enabled: {{ sbs_api_keys_enabled }} - feedback_enabled: {{ sbs_feedback_enabled }} - impersonation_allowed: {{ sbs_impersonation_allowed }} - sbs_swagger_enabled: {{ sbs_swagger_enabled }} - admin_platform_backdoor_totp: {{ sbs_admin_platform_backdoor_totp }} - past_dates_allowed: {{ sbs_past_dates_allowed }} - mock_scim_enabled: {{ sbs_mock_scim_enabled }} + seed_allowed: {{ sram_sbs_seed_allowed }} + api_keys_enabled: {{ sram_sbs_api_keys_enabled }} + feedback_enabled: {{ sram_sbs_feedback_enabled }} + impersonation_allowed: {{ sram_sbs_impersonation_allowed }} + sbs_swagger_enabled: {{ sram_sbs_swagger_enabled }} + admin_platform_backdoor_totp: {{ sram_sbs_admin_platform_backdoor_totp }} + past_dates_allowed: {{ sram_sbs_past_dates_allowed }} + mock_scim_enabled: {{ sram_sbs_mock_scim_enabled }} metadata: - idp_url: "{{sbs_idp_metadata_url}}" + idp_url: "{{ sram_sbs_idp_metadata_url }}" parse_at_startup: True # No need for environment specific values scope_override: @@ -136,7 +136,7 @@ metadata: platform_admin_notifications: # Do we daily check for CO join_requests and CO requests and send a summary mail to beheer_email? enabled: False - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # How long before we include open join_requests in the summary outstanding_join_request_days_threshold: 7 # How long before we include open CO requests in the summary @@ -144,8 +144,8 @@ platform_admin_notifications: user_requests_retention: # Do we daily check for CO join_requests and CO requests and delete approved and denied? 
- enabled: {{ sbs_notifications_enabled }} - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + enabled: {{ sram_sbs_notifications_enabled }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # How long before we delete approved / denied join_requests outstanding_join_request_days_threshold: 90 # How long before we delete approved / denied CO requests @@ -153,24 +153,24 @@ user_requests_retention: # The retention config determines how long users may be inactive, how long the reminder email is valid and when do we resent the magic link retention: - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # how many days of inactivity before a user is suspended # 0 allows for any last_login_date in the past to trigger suspension notification - allowed_inactive_period_days: {{ sbs_suspension_inactive_days }} + allowed_inactive_period_days: {{ sram_sbs_suspension_inactive_days }} # how many days before suspension do we send a warning # -1 will suspend notified users on second suspension cron - reminder_suspend_period_days: {{ sbs_suspension_reminder_days }} + reminder_suspend_period_days: {{ sram_sbs_suspension_reminder_days }} # how many days after suspension do we delete the account remove_suspended_users_period_days: 90 # how many days before deletion do we send a reminder reminder_expiry_period_days: 7 # whether to send a notification of the result of the retention process to the beheer_email - admin_notification_mail: {{ sbs_suspension_notify_admin }} + admin_notification_mail: {{ sram_sbs_suspension_notify_admin }} collaboration_expiration: # Do we daily check for CO's that will be deleted because they have been expired? - enabled: {{ sbs_notifications_enabled }} - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + enabled: {{ sram_sbs_notifications_enabled }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # How long after expiration do we actually delete expired collaborations expired_collaborations_days_threshold: 90 # How many days before actual expiration do we mail the organisation members @@ -178,8 +178,8 @@ collaboration_expiration: collaboration_suspension: # Do we daily check for CO's that will be suspended because of inactivity? - enabled: {{ sbs_notifications_enabled }} - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + enabled: {{ sram_sbs_notifications_enabled }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # After how many days of inactivity do we suspend collaborations collaboration_inactivity_days_threshold: 365 # How many days before actual suspension do we mail the organisation members @@ -189,8 +189,8 @@ collaboration_suspension: membership_expiration: # Do we daily check for memberships that will be deleted because they have been expired? - enabled: {{ sbs_notifications_enabled }} - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + enabled: {{ sram_sbs_notifications_enabled }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # How long after expiration do we actually delete expired memberships expired_memberships_days_threshold: 90 # How many days before actual expiration do we mail the co admin and member @@ -198,15 +198,15 @@ membership_expiration: invitation_reminders: # Do we daily check for invitations that need a reminder? - enabled: {{ sbs_invitation_reminders_enabled }} - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + enabled: {{ sram_sbs_invitation_reminders_enabled }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # How many days before expiration of an invitation do we remind the user? 
invitation_reminders_threshold: 5 invitation_expirations: # Do we daily check for invitations that are expired / accepted and are eligible for deletion ? - enabled: {{ sbs_invitation_expirations_enabled }} - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + enabled: {{ sram_sbs_invitation_expirations_enabled }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # How long after expiration of an invitation do we delete the invitation? nbr_days_remove_expired_invitations: 10 # How long after expiration of an API created invitation do we delete the invitation? @@ -214,38 +214,38 @@ invitation_expirations: orphan_users: # Do we daily check for users that are orphans soo they can be deleted? - enabled: {{ sbs_delete_orphaned }} - cron_hour_of_day: {{ sbs_cron_hour_of_day }} + enabled: {{ sram_sbs_delete_orphaned }} + cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }} # How long after created do we delete orphan users delete_days_threshold: 14 open_requests: # Do we weekly check for all open requests? - enabled: {{ sbs_open_requests_enabled }} + enabled: {{ sram_sbs_open_requests_enabled }} cron_day_of_week: 1 scim_sweep: # Do we enable scim sweeps? - enabled: {{ sbs_scim_sweep }} + enabled: {{ sram_sbs_scim_sweep }} # How often do we check if scim sweeps are needed per service cron_minutes_expression: "*/15" ldap: - url: "{{ sbs_ldap_url }}" - bind_account: "{{ sbs_ldap_bind_account }}" + url: "{{ sram_sbs_ldap_url }}" + bind_account: "{{ sram_sbs_ldap_bind_account }}" # A MFA login in a different flow is valid for X minutes -mfa_sso_time_in_minutes: {{sbs_mfa_sso_minutes}} +mfa_sso_time_in_minutes: {{ sram_sbs_mfa_sso_minutes }} # whether to fall back to TOTP MFA -mfa_fallback_enabled: {{sbs_mfa_fallback_enabled}} +mfa_fallback_enabled: {{ sram_sbs_mfa_fallback_enabled }} # Lower case entity ID's and schac_home allowed skipping MFA. 
# Note that for a login directly into SRAM only schac_home can be used as the entity_idp of the IdP is unknown -mfa_idp_allowed: {{sbs_mfa_idp_allowed}} +mfa_idp_allowed: {{ sram_sbs_mfa_idp_allowed }} # Lower case schachome organisations / entity ID's where SURFSecure ID is used for step-up -ssid_identity_providers: {{sbs_ssid_identity_providers}} +ssid_identity_providers: {{ sram_sbs_ssid_identity_providers }} ssid_config_folder: saml @@ -256,9 +256,9 @@ rate_limit_totp_guesses_per_30_seconds: 10 # The uid's of user that will never be suspended or deleted excluded_user_accounts: -{% for excluded_user in sbs_excluded_users %} +{% for excluded_user in sram_sbs_excluded_users %} - uid: "{{ excluded_user.uid }}" {% endfor %} engine_block: - api_token: {{ sbs_engine_block_api_token }} + api_token: {{ sram_sbs_engine_block_api_token }} diff --git a/roles/sram_sbs/templates/disclaimer.css.j2 b/roles/sram_sbs/templates/disclaimer.css.j2 index 0211d17d8..455cb97cb 100644 --- a/roles/sram_sbs/templates/disclaimer.css.j2 +++ b/roles/sram_sbs/templates/disclaimer.css.j2 @@ -1,6 +1,6 @@ {% if env!="prd" -%} body::after { - background: {{ sbs_disclaimer_color }}; - content: "{{ sbs_disclaimer_label }}"; + background: {{ sram_sbs_disclaimer_color }}; + content: "{{ sram_sbs_disclaimer_label }}"; } {% endif %} diff --git a/roles/sram_sbs/templates/saml_advanced_settings.json.j2 b/roles/sram_sbs/templates/saml_advanced_settings.json.j2 index bdde32050..0d03c63d7 100644 --- a/roles/sram_sbs/templates/saml_advanced_settings.json.j2 +++ b/roles/sram_sbs/templates/saml_advanced_settings.json.j2 @@ -11,7 +11,7 @@ "wantNameIdEncrypted": false, "wantAttributeStatement": false, "wantAssertionsEncrypted": false, - "requestedAuthnContext": ["{{sbs_ssid_authncontext}}"], + "requestedAuthnContext": ["{{ sram_sbs_ssid_authncontext }}"], "requestedAuthnContextComparison": "minimum", "failOnAuthnContextMismatch": false, "allowSingleLabelDomains": false, diff --git a/roles/sram_sbs/templates/saml_settings.json.j2 b/roles/sram_sbs/templates/saml_settings.json.j2 index bb5788e97..073651110 100644 --- a/roles/sram_sbs/templates/saml_settings.json.j2 +++ b/roles/sram_sbs/templates/saml_settings.json.j2 @@ -2,21 +2,21 @@ "strict": true, "debug": true, "sp": { - "entityId": "{{ sbs_surf_secure_id.sp_entity_id }}", + "entityId": "{{ sram_sbs_surf_secure_id.sp_entity_id }}", "assertionConsumerService": { - "url": "{{ sbs_surf_secure_id.acs_url }}", + "url": "{{ sram_sbs_surf_secure_id.acs_url }}", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified", - "x509cert": "{{ sbs_surf_secure_id.pub | barepem }}", - "privateKey": "{{ sbs_surf_secure_id.priv | barepem }}" + "x509cert": "{{ sram_sbs_surf_secure_id.pub | barepem }}", + "privateKey": "{{ sram_sbs_surf_secure_id.priv | barepem }}" }, "idp": { - "entityId": "{{ sbs_ssid_entityid }}", + "entityId": "{{ sram_sbs_ssid_entityid }}", "singleSignOnService": { - "url": "{{ sbs_ssid_sso_endpoint }}", + "url": "{{ sram_sbs_ssid_sso_endpoint }}", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, - "x509cert": "{{ sbs_surf_secure_id.sa_idp_certificate | barepem }}" + "x509cert": "{{ sram_sbs_surf_secure_id.sa_idp_certificate | barepem }}" } } diff --git a/roles/sram_sbs/templates/sbs-apache.conf.j2 b/roles/sram_sbs/templates/sbs-apache.conf.j2 index af8c32ce7..ebf19db99 100644 --- a/roles/sram_sbs/templates/sbs-apache.conf.j2 +++ b/roles/sram_sbs/templates/sbs-apache.conf.j2 @@ -1,4 +1,4 
@@ -ServerName {{ sbs_base_domain }} +ServerName {{ sram_sbs_base_domain }} #ErrorLog /proc/self/fd/2 #CustomLog /proc/self/fd/1 common DocumentRoot /opt/sbs/client/dist diff --git a/roles/sram_sbs/templates/sbs.service.j2 b/roles/sram_sbs/templates/sbs.service.j2 deleted file mode 100644 index 2920ddc8d..000000000 --- a/roles/sram_sbs/templates/sbs.service.j2 +++ /dev/null @@ -1,32 +0,0 @@ -[Unit] -Description=SBS -After=network.target - -[Service] -DynamicUser=true -User=_sram_sbs -Group=_sram_sbs -SupplementaryGroups={{sbs_group}} - -WorkingDirectory={{sbs_git_dir}} -ReadWritePaths={{sbs_log_dir}} -NoNewPrivileges=true -PrivateTmp=true - -Environment="CONFIG=config/config.yml" -Environment="PROFILE=log_to_stdout" -# the python requests module uses the CAs provided by the certifi package by default -# we'll just take the OS-provided CAs, thankyouverymuch -Environment="REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt" - -Type=notify -ExecStart={{sbs_env_dir}}/bin/gunicorn --worker-class eventlet --workers {{sbs_num_workers}} --bind 127.0.0.1:8080 server.__main__:app - -Restart=on-failure -RestartSec=10 - -KillMode=mixed -TimeoutStopSec=5 - -[Install] -WantedBy=multi-user.target From 77ea11762ffbad7ffbf8cc581d8dd9d93cbcffb1 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Tue, 5 May 2026 09:28:22 +0200 Subject: [PATCH 71/73] Fix ldap test --- roles/sram_ldap/tasks/main.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/roles/sram_ldap/tasks/main.yml b/roles/sram_ldap/tasks/main.yml index 318e3b340..c1322739e 100644 --- a/roles/sram_ldap/tasks/main.yml +++ b/roles/sram_ldap/tasks/main.yml @@ -78,11 +78,11 @@ traefik.tcp.routers.ldap.tls: "true" traefik.tcp.services.ldap.loadbalancer.server.port: "389" healthcheck: - test: "-S /var/run/slapd/ldapi" - # - "CMD" - # - "bash" - # - "-c" - # - "[[ -S /var/run/slapd/ldapi ]]" + test: + - "CMD" + - "bash" + - "-c" + - "[[ -S /var/run/slapd/ldapi ]]" register: "ldap_container" - name: Wait for LDAP initialization From 4793fe7d0afb052b7d086968f97902c2ed92bc74 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Tue, 5 May 2026 10:03:25 +0200 Subject: [PATCH 72/73] Parametrize satosa image --- roles/sram_midproxy/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/sram_midproxy/tasks/main.yml b/roles/sram_midproxy/tasks/main.yml index 5270aab51..d90d93e01 100644 --- a/roles/sram_midproxy/tasks/main.yml +++ b/roles/sram_midproxy/tasks/main.yml @@ -29,7 +29,7 @@ - name: Create the SATOSA container community.docker.docker_container: name: sram-midproxy - image: docker.io/satosa:{{ sram_midproxy_satosa_version }} + image: "{{ sram_midproxy_satosa_image }}" pull: true restart_policy: "always" state: started From 027c1615a9034ab6f129e22d0a31bde3b49e1329 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Wed, 6 May 2026 11:46:50 +0200 Subject: [PATCH 73/73] Add CRM to AA scopes --- roles/invite/templates/serverapplication.yml.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/roles/invite/templates/serverapplication.yml.j2 b/roles/invite/templates/serverapplication.yml.j2 index 81e7dae5d..bf59520da 100644 --- a/roles/invite/templates/serverapplication.yml.j2 +++ b/roles/invite/templates/serverapplication.yml.j2 @@ -161,6 +161,7 @@ external-api-configuration: password: "{{ invite_attribute_aggregation_secret }}" scopes: - attribute_aggregation + - crm - username: {{ invite.lifecycle_user }} password: "{{ invite.lifecycle_secret }}" scopes: