diff --git a/environments/template/group_vars/all.yml b/environments/template/group_vars/all.yml
index 1d8bd6f84..a314ae546 100644
--- a/environments/template/group_vars/all.yml
+++ b/environments/template/group_vars/all.yml
@@ -30,6 +30,8 @@ admin_email: "openconext-admin@example.edu"
environment_shortname: ""
environment_ribbon_colour: ""
+current_release_appdir: /opt/openconext
+
httpd_csp:
lenient: "default-src 'self'; object-src 'none'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none'; base-uri 'none'"
lenient_with_static_img: "default-src 'self'; object-src 'none'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' https://{{ static_vhost }} http://localhost:* data:; form-action 'self'; frame-ancestors 'none'; base-uri 'none'"
diff --git a/environments/template/group_vars/template.yml b/environments/template/group_vars/template.yml
index 0a4308714..dc2642d3b 100644
--- a/environments/template/group_vars/template.yml
+++ b/environments/template/group_vars/template.yml
@@ -156,15 +156,15 @@ voot:
- { name: "voot", level: "DEBUG" }
externalGroupProviders:
- {
- type: "teams",
- url: "https://teams.{{ base_domain }}/api/voot",
- credentials: {
- username: "{{ teams.voot_api_user }}",
- secret: "{{ external_group_provider_secrets.teams }}"
- },
- schacHomeOrganization: "{{ base_domain}}",
- name: "SURFteams",
- timeoutMillis: 15000
+ type: "invite",
+ url: "https://invite.{{ base_domain }}/api/external/v1/voot",
+ credentials: {
+ username: "{{ invite.vootuser }}",
+ secret: "{{ invite.vootsecret }}"
+ },
+ schacHomeOrganization: "N/A",
+ name: "Invite",
+ timeoutMillis: 3000
}
oidc_playground:
@@ -338,6 +338,7 @@ manage:
features: push, validation, push_preview, orphans, find_my_data, edugain, auto_refresh
environment: template
super_user_team_names: "urn:collab:group:test.surfteams.nl:nl:surfnet:diensten:surfconext_tpm_core"
+ sram_rp_entity_id: "sbs.test.sram.surf.nl"
apiUsers:
- {
name: "dashboard",
diff --git a/roles/dashboard/templates/serverapplication.yml.j2 b/roles/dashboard/templates/serverapplication.yml.j2
index 410d97f70..45109b554 100644
--- a/roles/dashboard/templates/serverapplication.yml.j2
+++ b/roles/dashboard/templates/serverapplication.yml.j2
@@ -80,6 +80,7 @@ dashboard.feature.consent={{ dashboard.feature_consent }}
# Valid choices are 'MOCK', 'PDP' or 'MANAGE', 'MOCK' is for local development
dashboard.feature.pdpSource={{ dashboard.pdp_source }}
dashboard.feature.statistics=true
+dashboard.feature.statisticsDown={{ dashboard.feature_statsdown }}
dashboard.feature.mail={{ dashboard.feature_mail }}
dashboard.feature.oidc={{ dashboard.feature_oidc }}
dashboard.feature.stepup={{ dashboard.feature_stepup }}
diff --git a/roles/docker/defaults/main.yml b/roles/docker/defaults/main.yml
index 9e9b32ef9..e8b651210 100644
--- a/roles/docker/defaults/main.yml
+++ b/roles/docker/defaults/main.yml
@@ -8,5 +8,6 @@ docker_apt_gpg_key_checksum: "sha256:1500c1f56fa9e26b9b8f42452a553675796ade0807c
docker_apt_filename: "docker"
docker_install_traefik: true
docker_traefik_ldaps: false
+docker_traefik_version: 3.6.10
docker_traefik_ports:
- 0.0.0.0:443:443
diff --git a/roles/docker/tasks/main.yml b/roles/docker/tasks/main.yml
index a9067a5b1..fe0a22829 100644
--- a/roles/docker/tasks/main.yml
+++ b/roles/docker/tasks/main.yml
@@ -82,7 +82,7 @@
- name: Create the Traefik loadbalancer
community.docker.docker_container:
name: loadbalancer
- image: traefik:latest
+ image: traefik:{{ docker_traefik_version }}
published_ports: "{{ docker_traefik_ports }}"
pull: true
restart_policy: "always"
diff --git a/roles/engine/defaults/main.yml b/roles/engine/defaults/main.yml
index cdc888cd5..ca58ea135 100644
--- a/roles/engine/defaults/main.yml
+++ b/roles/engine/defaults/main.yml
@@ -17,6 +17,7 @@ engine_api_feature_consent_remove: 0
engine_api_feature_metadata_api: 1
engine_api_feature_deprovision: 1
engine_feature_send_user_attributes: 0
+engine_feature_enable_sbs_interrupt: 0
# Cutoff point for showing unfiltered IdPs on the WAYF
engine_wayf_cutoff_point_for_showing_unfiltered_idps: 50
@@ -76,6 +77,13 @@ engine_stepup_gateway_sfo_entity_id: "https://{{ engine_stepup_gateway_domain }}
# The single sign-on endpoint used for Stepup Gateway SFO callouts
engine_stepup_gateway_sfo_sso_location: "https://{{ engine_stepup_gateway_domain }}/second-factor-only/single-sign-on"
+# SBS interrupt settings
+engine_sbs_attributes_allowed:
+ - 'urn:mace:dir:attribute-def:eduPersonEntitlement'
+ - 'urn:mace:dir:attribute-def:uid'
+ - 'urn:mace:dir:attribute-def:eduPersonPrincipalName'
+ - 'urn:oid:1.3.6.1.4.1.24552.500.1.1.1.13'
+
## The minimum priority of messages that will be logged
engine_logging_passthru_level: NOTICE
diff --git a/roles/engine/tasks/main.yml b/roles/engine/tasks/main.yml
index c75ece132..2776c4b4c 100644
--- a/roles/engine/tasks/main.yml
+++ b/roles/engine/tasks/main.yml
@@ -208,7 +208,7 @@
PHP_MEMORY_LIMIT: "{{ engine_php_memory }}"
APP_ENV: "prod"
APP_SECRET: "{{ engine_parameters_secret }}"
- APP_DEBUG: "{{ engine_debug | bool | int }}"
+ APP_DEBUG: "{{ engine_debug | bool | int | string }}"
etc_hosts:
host.docker.internal: host-gateway
mounts:
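The extra `| string` on APP_DEBUG is not cosmetic: `community.docker.docker_container` rejects non-string values in `env`, and `engine_debug | bool | int` alone yields an integer. A standalone sketch (stock Ansible, nothing assumed) showing what the filter chain renders:

    # demo.yml - run with: ansible-playbook demo.yml
    - hosts: localhost
      gather_facts: false
      tasks:
        - name: Show what the APP_DEBUG filter chain produces
          ansible.builtin.debug:
            msg: "{{ item | bool | int | string }}"  # true -> "1", false -> "0"
          loop: [true, false, "yes", "no"]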
diff --git a/roles/engine/templates/parameters.yml.j2 b/roles/engine/templates/parameters.yml.j2
index 77903de1e..0c0d077f5 100644
--- a/roles/engine/templates/parameters.yml.j2
+++ b/roles/engine/templates/parameters.yml.j2
@@ -228,6 +228,7 @@ parameters:
feature_stepup_sfo_override_engine_entityid: {{ engine_feature_stepup_override_entityid | bool | to_json }}
feature_enable_idp_initiated_flow: {{ engine_feature_idp_initiated_flow | bool | to_json }}
feature_stepup_send_user_attributes: {{ engine_feature_send_user_attributes | bool | to_json }}
+ feature_enable_sram_interrupt: {{ engine_feature_enable_sbs_interrupt | bool | to_json }}
##########################################################################################
## PROFILE SETTINGS
##########################################################################################
@@ -310,3 +311,15 @@ parameters:
# used in the authentication log record. The attributeName will be searched in the response attributes and if present
# the log data will be enriched. The values of the response attributes are the final values after ARP and Attribute Manipulation.
auth.log.attributes: {{ engine_log_attributes }}
+
+
+ ##########################################################################################
+ ## SBS external authorization/attribute enrichment
+ ##########################################################################################
+ sram.api_token: "{{ sbs_engine_block_api_token | default('') }}"
+ sram.base_url: "https://{{ sbs_base_domain | default('sbs.example.org') }}/api/users/"
+ sram.authz_location: "authz_eb"
+ sram.attributes_location: "attributes_eb"
+ sram.interrupt_location: "interrupt"
+ sram.verify_peer: true
+ sram.allowed_attributes: {{ engine_sbs_attributes_allowed }}
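For reference, with the role defaults above and `sbs_base_domain` left unset (so the `default('sbs.example.org')` fallback applies), the new block renders roughly as:

    sram.api_token: ""
    sram.base_url: "https://sbs.example.org/api/users/"
    sram.authz_location: "authz_eb"
    sram.attributes_location: "attributes_eb"
    sram.interrupt_location: "interrupt"
    sram.verify_peer: true
    sram.allowed_attributes: ['urn:mace:dir:attribute-def:eduPersonEntitlement', 'urn:mace:dir:attribute-def:uid', 'urn:mace:dir:attribute-def:eduPersonPrincipalName', 'urn:oid:1.3.6.1.4.1.24552.500.1.1.1.13']

Jinja prints the list in Python flow style, which YAML accepts as a flow sequence.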
diff --git a/roles/haproxy/tasks/main.yml b/roles/haproxy/tasks/main.yml
index 55f38f8f1..d765af2bc 100644
--- a/roles/haproxy/tasks/main.yml
+++ b/roles/haproxy/tasks/main.yml
@@ -16,7 +16,7 @@
- name: Install haproxy and socat
ansible.builtin.apt:
name:
- - "haproxy=3.0.*"
+ - "haproxy"
- "socat"
- "git"
state: "present"
@@ -88,17 +88,6 @@
group: haproxy
mode: "0770"
-- name: Create combined key and certificate file for HAproxy
- ansible.builtin.copy:
- content: >
- {{ item.key_content }}{{ lookup('file', '{{ inventory_dir }}/files/certs/{{ item.crt_name }}') }}
- dest: "/etc/haproxy/certs/{{ item.name }}_haproxy.pem"
- mode: "0600"
- with_items: "{{ haproxy_sni_ip.certs }}"
- when: haproxy_sni_ip.certs is defined
- notify:
- - "reload haproxy"
-
- name: Create backend CA directory
ansible.builtin.file:
path: "{{ tls_backend_ca | dirname }}"
diff --git a/roles/haproxy/templates/certlist.lst.j2 b/roles/haproxy/templates/certlist.lst.j2
index 3e8bb226d..800a79b39 100644
--- a/roles/haproxy/templates/certlist.lst.j2
+++ b/roles/haproxy/templates/certlist.lst.j2
@@ -3,11 +3,6 @@
/etc/haproxy/certs/{{ host }}.pem [ocsp-update on]
{% endfor %}
{% endif %}
-{% if haproxy_sni_ip.certs is defined %}
-{% for cert in haproxy_sni_ip.certs %}
-/etc/haproxy/certs/{{ cert.name }}_haproxy.pem [ocsp-update on]
-{% endfor %}
-{% endif %}
{% if haproxy_extra_certs is defined %}
{% for cert in haproxy_extra_certs %}
{{ cert }} [ocsp-update on]
diff --git a/roles/haproxy/templates/haproxy_backend.cfg.j2 b/roles/haproxy/templates/haproxy_backend.cfg.j2
index d2387c033..8ef005da4 100644
--- a/roles/haproxy/templates/haproxy_backend.cfg.j2
+++ b/roles/haproxy/templates/haproxy_backend.cfg.j2
@@ -67,3 +67,18 @@
{% endfor %}
{% endif %}
{% endfor %}
+
+{% if haproxy_ldap_servers is defined %}
+#---------------------------------------------------------------------
+# ldap backend
+#---------------------------------------------------------------------
+backend ldap_servers
+ mode tcp
+ option tcpka
+
+ option ldap-check
+
+ {% for server in haproxy_ldap_servers -%}
+ server {{ server.label }} {{ server.ip }}:{{ server.port }} ssl verify none check weight 10 {% if loop.index == 1 %}on-marked-up shutdown-backup-sessions{% else %}backup{% endif %}
+ {% endfor %}
+{% endif %}
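To make the loop concrete: with a hypothetical inventory variable such as

    haproxy_ldap_servers:
      - { label: "ldap1", ip: "10.0.1.10", port: 636 }
      - { label: "ldap2", ip: "10.0.1.11", port: 636 }

the template renders roughly (labels and addresses invented):

    backend ldap_servers
        mode tcp
        option tcpka
        option ldap-check
        server ldap1 10.0.1.10:636 ssl verify none check weight 10 on-marked-up shutdown-backup-sessions
        server ldap2 10.0.1.11:636 ssl verify none check weight 10 backup

Only the first server takes traffic; the rest are backups, and `on-marked-up shutdown-backup-sessions` moves sessions back to the primary as soon as it recovers.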
diff --git a/roles/haproxy/templates/haproxy_frontend.cfg.j2 b/roles/haproxy/templates/haproxy_frontend.cfg.j2
index 6082e9c03..4909a0074 100644
--- a/roles/haproxy/templates/haproxy_frontend.cfg.j2
+++ b/roles/haproxy/templates/haproxy_frontend.cfg.j2
@@ -12,8 +12,8 @@ frontend stats
# -------------------------------------------------------------------
frontend internet_ip
- bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
- bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
+ bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
+ bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
bind {{ haproxy_sni_ip.ipv4 }}:80 transparent
bind {{ haproxy_sni_ip.ipv6 }}:80 transparent
# Logging is done in the local_ip backend, otherwise all requests are logged twice
@@ -30,7 +30,7 @@ frontend internet_ip
http-request redirect scheme https code 301 if !{ ssl_fc }
# Log the user agent in the httplogs
capture request header User-agent len 256
- # Put the useragent header in a variable, shared between request and response.
+ # Put the useragent header in a variable, shared between request and response.
http-request set-var(txn.useragent) req.fhdr(User-Agent)
# The ACL below makes sure only supported http methods are allowed
acl valid_method method {{ haproxy_supported_http_methods }}
@@ -51,7 +51,7 @@ frontend internet_ip
http-response replace-header Set-Cookie (?i)(^(?!.*samesite).*$) \1;\ SameSite=None if !no_same_site_uas
# Remove an already present SameSite cookie attribute for unsupported browsers
http-response replace-value Set-Cookie (^.*)(?i);\ *SameSite=(Lax|Strict|None)(.*$) \1\3 if no_same_site_uas
- # Log whether the no_same_site_uas ACL has been hit
+ # Log whether the no_same_site_uas ACL has been hit
http-request set-header samesitesupport samesite_notsupported if no_same_site_uas
http-request set-header samesitesupport samesite_supported if !no_same_site_uas
# We need a dummy backend in order to be able to rewrite the loadbalancer cookies
@@ -66,7 +66,7 @@ frontend local_ip
acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsunrestricted.acl
acl staging req.cook(staging) -m str true
acl staging src -f /etc/haproxy/acls/stagingips.acl
- acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map
+ acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map
use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging
use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)]
option httplog
@@ -82,7 +82,7 @@ frontend local_ip
http-request capture sc_http_req_rate(0) len 4
# Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s
acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }}
- # Measure and log the request rate per path and ip
+ # Measure and log the request rate per path and ip
http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path
http-request capture sc_http_req_rate(1) len 4
# Some paths allow for a higher ratelimit. These are in a seperate mapfile
@@ -96,7 +96,7 @@ frontend local_ip
http-request deny if ! valid_vhost
# Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s
http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist
- # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m
+ # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m
http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist
# Create some http redirects
{% if haproxy_securitytxt_target_url is defined %}
@@ -111,8 +111,8 @@ frontend local_ip
## -------------------------------------------------------------------
frontend internet_restricted_ip
- bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
- bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
+ bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
+ bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent
bind {{ haproxy_sni_ip_restricted.ipv4 }}:80 transparent
bind {{ haproxy_sni_ip_restricted.ipv6 }}:80 transparent
# Logging is done in the local_ip_restriced backend, otherwise all requests are logged twice
@@ -128,8 +128,8 @@ frontend internet_restricted_ip
# We redirect all port 80 to port 443
http-request redirect scheme https code 301 if !{ ssl_fc }
# Log the user agent in the httplogs
- capture request header User-agent len 256
- # Put the useragent header in a variable, shared between request and response.
+ capture request header User-agent len 256
+ # Put the useragent header in a variable, shared between request and response.
http-request set-var(txn.useragent) req.fhdr(User-Agent)
# The ACL below makes sure only supported http methods are allowed
acl valid_method method {{ haproxy_supported_http_methods }}
@@ -155,12 +155,12 @@ frontend internet_restricted_ip
# frontend restricted ip addresses localhost
# traffic coming back from the dummy backend ends up here
# -------------------------------------------------------------------
-frontend localhost_restricted
+frontend localhost_restricted
bind 127.0.0.1:82 accept-proxy
acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsrestricted.acl
acl staging req.cook(staging) -m str true
acl staging src -f /etc/haproxy/acls/stagingips.acl
- acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map
+ acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map
use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging
use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)]
option httplog
@@ -177,7 +177,7 @@ frontend localhost_restricted
# Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s
acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }}
http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist
- # Measure and log the request rate per path and ip
+ # Measure and log the request rate per path and ip
http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path
http-request capture sc_http_req_rate(1) len 4
# Some paths allow for a higher ratelimit. These are in a seperate mapfile
@@ -191,7 +191,7 @@ frontend localhost_restricted
http-request deny if ! valid_vhost
# Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s
http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist
- # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m
+ # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m
http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist
# Create some http redirects
{% if haproxy_securitytxt_target_url is defined %}
@@ -201,3 +201,19 @@ frontend localhost_restricted
http-request redirect location %[base,map_reg(/etc/haproxy/maps/redirects.map)] if { base,map_reg(/etc/haproxy/maps/redirects.map) -m found }
{% endif %}
+
+{% if haproxy_ldap_servers is defined %}
+#--------------------------------------------------------------------
+# frontend public ips ldap
+# -------------------------------------------------------------------
+listen ldap
+ mode tcp
+ no option dontlognull
+ option tcplog
+ option logasap
+ timeout client 900s
+ timeout server 901s
+ bind {{ haproxy_sni_ip.ipv4 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent
+ bind {{ haproxy_sni_ip.ipv6 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent
+ use_backend ldap_servers
+{% endif %}
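Once this listen block is live, HAProxy terminates client TLS on port 636 and re-encrypts towards the backend (`ssl verify none` on the server lines above). A quick smoke test from a client, assuming standard OpenLDAP tooling, a hostname that is on the certificate list, and that anonymous reads are allowed (add -D/-w otherwise):

    ldapsearch -H ldaps://ldap.example.org:636 -x -b "dc=example,dc=org" -s base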
diff --git a/roles/haproxy/templates/update_ocsp.j2 b/roles/haproxy/templates/update_ocsp.j2
deleted file mode 100644
index 2ed61f528..000000000
--- a/roles/haproxy/templates/update_ocsp.j2
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-# Call hapos-upd to update OCSP stapling info foreach of our haproxy certificates
-
-# probably we want to continue even if one fails
-set -e
-
-{% for cert in haproxy_sni_ip.certs %}
-/usr/local/sbin/hapos-upd --partial-chain --good-only --socket /var/lib/haproxy/haproxy.stats \
- --VAfile /etc/pki/haproxy/{{ cert.name }}_haproxy.pem \
- --cert /etc/pki/haproxy/{{ cert.name }}_haproxy.pem
-{% endfor %}
diff --git a/roles/invite/templates/serverapplication.yml.j2 b/roles/invite/templates/serverapplication.yml.j2
index 26c97d432..bf59520da 100644
--- a/roles/invite/templates/serverapplication.yml.j2
+++ b/roles/invite/templates/serverapplication.yml.j2
@@ -7,6 +7,7 @@ logging:
org.springframework.security: WARN
com.zaxxer.hikari: ERROR
invite: DEBUG
+ net.javacrumbs.shedlock: DEBUG
server:
port: 8080
@@ -74,11 +75,13 @@ crypto:
private-key-location: file:///private_key_pkcs8.pem
cron:
- user-cleaner-expression: "0 0/30 * * * *"
+ user-cleaner-cron: "PT30M"
+ user-cleaner-cron-initial-delay: "PT10M"
user-cleaner-lock-at-least-for: "PT5M"
user-cleaner-lock-at-most-for: "PT28M"
last-activity-duration-days: 1000
- role-expiration-notifier-expression: "0 0/30 * * * *"
+ role-expiration-notifier-cron: "PT30M"
+ role-expiration-notifier-cron-initial-delay: "PT15M"
# Set to -1 to suppress role expiry notifications
role-expiration-notifier-duration-days: 5
role-expiration-notifier-lock-at-least-for: "PT5M"
@@ -87,7 +90,7 @@ cron:
metadata-resolver-fixed-rate-milliseconds: 86_400_000
metadata-resolver-url: "https://metadata.{{ base_domain }}/idps-metadata.xml"
# A value of 0 means no logs will be deleted
- purge-audit-log-days: 365
+ purge-audit-log-days: 0
# A value of 0 means no invitations will be deleted
purge-expired-invitations-days: 365
@@ -158,6 +161,7 @@ external-api-configuration:
password: "{{ invite_attribute_aggregation_secret }}"
scopes:
- attribute_aggregation
+ - crm
- username: {{ invite.lifecycle_user }}
password: "{{ invite.lifecycle_secret }}"
scopes:
diff --git a/roles/mailpit/defaults/main.yml b/roles/mailpit/defaults/main.yml
new file mode 100644
index 000000000..f400a8d2e
--- /dev/null
+++ b/roles/mailpit/defaults/main.yml
@@ -0,0 +1,5 @@
+---
+mailpit_image: "axllent/mailpit"
+mailpit_hostname: "mailpit.{{ base_domain }}"
+mailpit_user: "mailpit"
+mailpit_group: "mailpit"
diff --git a/roles/mailpit/tasks/main.yml b/roles/mailpit/tasks/main.yml
new file mode 100644
index 000000000..5d89f4a7c
--- /dev/null
+++ b/roles/mailpit/tasks/main.yml
@@ -0,0 +1,36 @@
+---
+- name: "Create mailpit group"
+ group:
+ name: "{{ mailpit_group }}"
+ state: "present"
+ register: "result"
+
+- name: "Create mailpit user"
+ user:
+ name: "{{ mailpit_user }}"
+ group: "{{ mailpit_group }}"
+ comment: "User to run Mailpit service"
+ shell: "/bin/false"
+ password: "!"
+ create_home: false
+ state: "present"
+ register: "result"
+
+- name: "Save mailpit user uid"
+ set_fact:
+ mailpit_user_uid: "{{ result.uid }}"
+
+- name: "Create mailpit container"
+ docker_container:
+ name: "mailpit"
+ image: "{{ mailpit_image }}"
+ restart_policy: "always"
+ state: "started"
+ user: "{{ mailpit_user_uid }}"
+ networks:
+ - name: "loadbalancer"
+ labels:
+ traefik.enable: "true"
+ traefik.http.routers.mailpit.rule: "Host(`{{ mailpit_hostname }}`)"
+ traefik.http.routers.mailpit.tls: "true"
+ traefik.http.services.mailpit.loadbalancer.server.port: 8025
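Nothing is published on the host here: the web UI is reachable only through Traefik on the hostname above (Mailpit serves it on 8025), and applications deliver mail over the shared `loadbalancer` network. A sketch of pointing an application at it, assuming Mailpit's default SMTP port of 1025 (verify for your image tag; `smtp_port` is a hypothetical variable, only `smtp_server` appears in this repo's templates):

    # environments/<env>/group_vars/<group>.yml (hypothetical override)
    smtp_server: "mailpit"   # the container name resolves on the loadbalancer network
    smtp_port: 1025          # Mailpit default SMTP port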
diff --git a/roles/manage/templates/application.yml.j2 b/roles/manage/templates/application.yml.j2
index 5790737b0..aec21cfdf 100644
--- a/roles/manage/templates/application.yml.j2
+++ b/roles/manage/templates/application.yml.j2
@@ -83,6 +83,9 @@ policies:
allowed_attributes: file://{{ manage_dir }}/policies/allowed_attributes.json
extra_saml_attributes: file://{{ manage_dir }}/policies/extra_saml_attributes.json
+sram:
+ sram_rp_entity_id: "{{ manage.sram_rp_entity_id }}"
+
spring:
mail:
host: {{ smtp_server }}
diff --git a/roles/myconext/templates/application.yml.j2 b/roles/myconext/templates/application.yml.j2
index f00d7b668..9e2af3722 100644
--- a/roles/myconext/templates/application.yml.j2
+++ b/roles/myconext/templates/application.yml.j2
@@ -104,6 +104,14 @@ mobile_app_redirect: eduid:///client/mobile
# For this RP we nudge the user to use the magic link
mobile_app_rp_entity_id: {{ myconext.mobile_app_rp_entity_id }}
+create-from-institution:
+ return-url-allowed-domains:
+{% for url in myconext.create_from_institution_return_url_allowed_domains | default([]) %}
+ - "{{ url }}"
+{% else %}
+ [] # empty list when there are no URLs
+{% endfor %}
+
# The host headers to identify the service the user is logged in
host_headers:
service_desk: servicedesk.{{ myconext_base_domain }}
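The `{% for %}…{% else %}` construct above renders a valid YAML value either way; both renderings below are derived from the template (domain values invented). With two allowed domains:

    create-from-institution:
      return-url-allowed-domains:
        - "eduid.nl"
        - "surf.nl"

and with the variable unset, so the `default([])` branch is taken:

    create-from-institution:
      return-url-allowed-domains:
        [] # empty list when there are no URLs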
diff --git a/roles/openaccess/defaults/main.yml b/roles/openaccess/defaults/main.yml
index ba813a4c8..888e97b36 100644
--- a/roles/openaccess/defaults/main.yml
+++ b/roles/openaccess/defaults/main.yml
@@ -1,3 +1,5 @@
---
openaccess_server_restart_policy: always
openaccess_server_restart_retries: 0
+openaccess_docker_networks:
+ - name: loadbalancer
diff --git a/roles/openaccess/tasks/main.yml b/roles/openaccess/tasks/main.yml
index c3cfb6e4a..5f92ead18 100644
--- a/roles/openaccess/tasks/main.yml
+++ b/roles/openaccess/tasks/main.yml
@@ -19,9 +19,15 @@
- serverapplication.yml
notify: restart accessserver
+
+- name: Debug mariadb_in_docker # Show with -vv
+ ansible.builtin.debug:
+ msg: "{{ mariadb_in_docker }}"
+ verbosity: 2
+
- name: Add the MariaDB docker network to the list of networks when MariaDB runs in Docker
ansible.builtin.set_fact:
- invite_docker_networks:
+ openaccess_docker_networks:
- name: loadbalancer
- name: openconext_mariadb
when: mariadb_in_docker | default(false) | bool
@@ -36,8 +42,7 @@
restart_policy: "{{ openaccess_server_restart_policy }}"
restart_retries: "{{ openaccess_server_restart_retries }}" # Only for restart policy on-failure
state: started
- networks:
- - name: "loadbalancer"
+ networks: "{{ openaccess_docker_networks }}"
mounts:
- source: /opt/openconext/openaccess/serverapplication.yml
target: /application.yml
@@ -95,4 +100,4 @@
S3_STORAGE_URL : "{{ openconextaccess.s3_storage.url }}"
S3_STORAGE_KEY : "{{ openconextaccess.s3_storage.key }}"
S3_STORAGE_SECRET : "{{ openconextaccess.s3_storage.secret }}"
- S3_STORAGE_BUCKET : "{{ openconextaccess.s3_storage.bucket }}"
\ No newline at end of file
+ S3_STORAGE_BUCKET : "{{ openconextaccess.s3_storage.bucket }}"
diff --git a/roles/openaccess/templates/serverapplication.yml.j2 b/roles/openaccess/templates/serverapplication.yml.j2
index c4cb82c89..80da77a94 100644
--- a/roles/openaccess/templates/serverapplication.yml.j2
+++ b/roles/openaccess/templates/serverapplication.yml.j2
@@ -20,6 +20,8 @@ spring:
jdbc:
cleanup-cron: "-"
initialize-schema: always
+ flush-mode: on_save
+ save-mode: on_set_attribute
store-type: jdbc
timeout: 8h
mvc:
@@ -29,17 +31,17 @@ spring:
client:
registration:
oidcng:
- client-id: {{ oidc_playground.client_id }}
- client-secret: {{ oidc_playground.secret }}
+ client-id: {{ openconextaccess.oidcng.client_id }}
+ client-secret: {{ openconextaccess.oidcng.secret }}
redirect-uri: "{baseUrl}/login/oauth2/code/{registrationId}"
authorization-grant-type: "authorization_code"
scope: openid
provider:
oidcng:
- authorization-uri: "https://connect.{{ base_domain }}/oidc/authorize"
- token-uri: "https://connect.{{ base_domain }}/oidc/token"
- user-info-uri: "https://connect.{{ base_domain }}/oidc/userinfo"
- jwk-set-uri: "https://connect.{{ base_domain }}/oidc/certs"
+ authorization-uri: {{ openconextaccess.oidcng.authorization_uri }}
+ token-uri: {{ openconextaccess.oidcng.token_uri }}
+ user-info-uri: {{ openconextaccess.oidcng.user_info_uri }}
+ jwk-set-uri: {{ openconextaccess.oidcng.jwk_set_uri }}
user-name-attribute: sub
user-info-authentication-method: client_secret_basic
jpa:
@@ -62,12 +64,22 @@ spring:
host: {{ smtp_server }}
oidcng:
- discovery-url: "https://connect.test2.surfconext.nl/oidc/.well-known/openid-configuration"
- introspect-url: "https://connect.test2.surfconext.nl/oidc/introspect"
+ discovery-url: {{ openconextaccess.oidcng.discovery_url }}
+ introspect-url: {{ openconextaccess.oidcng.introspect_url }}
resource-server-id: myconext.rs
resource-server-secret: secret
base-url: {{ openconextaccess_base_domain }}
+cron:
+ user-cleaner-cron: "PT60M"
+ user-cleaner-cron-initial-delay: "PT10M"
+ user-cleaner-lock-at-least-for: "PT5M"
+ user-cleaner-lock-at-most-for: "PT28M"
+ org-contact-reminder-days: 365
+ org-delete-after-days: 365
+ user-inactivity-warn-days: 365
+ user-inactivity-delete-days: 1095
+
lifecycle:
user: lifecycle
password: {{ openconextaccess_lifecycle_secret }}
@@ -90,7 +102,8 @@ config:
client-url: "https://{{ openconextaccess_base_domain }}"
base-url: "{{ base_domain }}"
edu_id_schac_home_organization: "eduid.nl"
- discovery: "https://connect.test2.surfconext.nl/oidc/.well-known/openid-configuration"
+ surf_schac_home_organization: "example.com"
+ discovery: "https://connect.surfconext.nl/oidc/.well-known/openid-configuration"
invite: "https://invite.{{ base_domain }}"
sram: "https://{{ env }}.sram.surf.nl/"
service_desk: "https://servicedesk.surf.nl/jira/plugins/servlet/desk/user/requests?reporter=all"
@@ -105,7 +118,7 @@ config:
entityid: "https://idp.diy.surfconext.nl"
descriptionEN: "Een test-IdP met fictieve gebruikersaccounts. De metadata vind je hier"
descriptionNL: "Een test-IdP met fictieve gebruikersaccounts. De metadata vind je hier"
- idp_proxy_meta_data: https://metadata.test2.surfconext.nl/idp-metadata.xml
+ idp_proxy_meta_data: {{ openconextaccess.idp_proxy_meta_data }}
minimal_stepup_acr_level: "http://{{ base_domain }}/assurance/loa2"
features:
- name: idp
@@ -121,7 +134,7 @@ config:
- "{{ loa }}"
{% endfor %}
-eduid-idp-entity-id: "https://login.{{ myconext_base_domain }}"
+eduid-idp-entity-id: {{ openconextaccess.eduid_idp_entity_id }}
super-admin:
users:
@@ -140,6 +153,7 @@ email:
contactEmail: "{{ support_email }}"
serviceDeskEmail: "{{ support_email }}"
supportEmail: "support@surfconext.nl"
+ jiraErrorEmail: "{{ support_email }}"
environment: "{{ environment_shortname }}"
manage:
@@ -166,19 +180,19 @@ invite:
user: {{ invite.access_user }}
password: "{{ invite.access_secret }}"
-# Todo relace with openconextaccess user
-statistics:
- enabled: True
- url: {{ dashboard.stats_url }}
- user: {{ dashboard.stats_user }}
- password: {{ stats_dashboard_api_password }}
-
s3storage:
url: {{ openconextaccess.s3_storage.url }}
key: {{ openconextaccess.s3_storage.key }}
secret: {{ openconextaccess.s3_storage.secret }}
bucket: {{ openconextaccess.s3_storage.bucket }}
+statistics:
+ enabled: {{ openconextaccess.statistics.enabled }}
+ url: {{ openconextaccess.statistics.url }}
+ user: {{ openconextaccess.statistics.user }}
+ password: {{ openconextaccess.statistics.password }}
+
+
management:
health:
mail:
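All of the oidcng, statistics, and related endpoints above now come from an `openconextaccess` dictionary rather than hard-coded test2 URLs. A sketch of the matching group_vars block — the key names are taken from the template, every value is a placeholder:

    openconextaccess:
      oidcng:
        client_id: "access.example.org"
        secret: "changeme"
        authorization_uri: "https://connect.example.org/oidc/authorize"
        token_uri: "https://connect.example.org/oidc/token"
        user_info_uri: "https://connect.example.org/oidc/userinfo"
        jwk_set_uri: "https://connect.example.org/oidc/certs"
        discovery_url: "https://connect.example.org/oidc/.well-known/openid-configuration"
        introspect_url: "https://connect.example.org/oidc/introspect"
      idp_proxy_meta_data: "https://metadata.example.org/idp-metadata.xml"
      eduid_idp_entity_id: "https://login.example.org"
      statistics:
        enabled: true
        url: "https://stats.example.org"
        user: "dashboard"
        password: "changeme"
      s3_storage:
        url: "https://s3.example.org"
        key: "s3key"
        secret: "s3secret"
        bucket: "access"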
diff --git a/roles/rsyslog/tasks/main.yml b/roles/rsyslog/tasks/main.yml
index 1fc0608dc..a531fd677 100644
--- a/roles/rsyslog/tasks/main.yml
+++ b/roles/rsyslog/tasks/main.yml
@@ -1,9 +1,10 @@
-- name: Install rsyslog
+- name: Install rsyslog and python modules
ansible.builtin.package:
name:
- rsyslog
- rsyslog-gnutls
- rsyslog-relp
+ - python3-dateutil
state: present
notify:
- "restart rsyslog"
diff --git a/roles/rsyslog/tasks/process_auth_logs.yml b/roles/rsyslog/tasks/process_auth_logs.yml
index e62027530..804bf629b 100644
--- a/roles/rsyslog/tasks/process_auth_logs.yml
+++ b/roles/rsyslog/tasks/process_auth_logs.yml
@@ -39,7 +39,7 @@
state: present
when: ansible_os_family == "Debian"
-- name: Create a python script that parses log_logins per environment
+- name: Create a python script that parses eb log_logins per environment
ansible.builtin.template:
src: parse_ebauth_to_mysql.py.j2
dest: /usr/local/sbin/parse_ebauth_to_mysql_{{ item.name }}.py
@@ -49,7 +49,17 @@
with_items: "{{ rsyslog_environments }}"
when: item.db_loglogins_name is defined
-- name: Put log_logins logrotate scripts
+- name: Create a python script that parses stepup log_logins per environment
+ ansible.builtin.template:
+ src: parse_stepupauth_to_mysql.py.j2
+ dest: /usr/local/sbin/parse_stepupauth_to_mysql_{{ item.name }}.py
+ mode: 0740
+ owner: root
+ group: root
+ with_items: "{{ rsyslog_environments }}"
+ when: item.db_loglogins_name is defined
+
+- name: Put log_logins logrotate scripts for eb
ansible.builtin.template:
src: logrotate_ebauth.j2
dest: /etc/logrotate.d/logrotate_ebauth_{{ item.name }}
@@ -59,6 +69,16 @@
with_items: "{{ rsyslog_environments }}"
when: item.db_loglogins_name is defined
+- name: Put log_logins logrotate scripts for stepup
+ ansible.builtin.template:
+ src: logrotate_stepupauth.j2
+ dest: /etc/logrotate.d/logrotate_stepupauth_{{ item.name }}
+ mode: 0644
+ owner: root
+ group: root
+ with_items: "{{ rsyslog_environments }}"
+ when: item.db_loglogins_name is defined
+
- name: Create logdirectory for log_logins cleanup script
ansible.builtin.file:
path: "{{ rsyslog_dir }}/apps/{{ item.name }}/loglogins_cleanup/"
diff --git a/roles/rsyslog/templates/logrotate_stepupauth.j2 b/roles/rsyslog/templates/logrotate_stepupauth.j2
new file mode 100644
index 000000000..be1a50652
--- /dev/null
+++ b/roles/rsyslog/templates/logrotate_stepupauth.j2
@@ -0,0 +1,16 @@
+{{ rsyslog_dir }}/log_logins/{{ item.name }}/stepup-authentication.log
+{
+ missingok
+ daily
+ rotate 180
+ sharedscripts
+ dateext
+ dateyesterday
+ compress
+ delaycompress
+ create 0640 root {{ rsyslog_read_group }}
+ postrotate
+ /usr/local/sbin/parse_stepupauth_to_mysql_{{ item.name }}.py > /dev/null
+ systemctl kill -s HUP rsyslog.service
+ endscript
+}
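To exercise the rotation path without waiting for the daily run, logrotate can be forced by hand (standard flag; the environment name is a placeholder):

    logrotate --force /etc/logrotate.d/logrotate_stepupauth_<env>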
diff --git a/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2
index b37f4720c..7e0bc7bcb 100644
--- a/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2
+++ b/roles/rsyslog/templates/parse_ebauth_to_mysql.py.j2
@@ -21,11 +21,17 @@ cursor = db.cursor()
def update_lastseen(user_id, date):
query = """
- REPLACE INTO last_login (userid, lastseen)
+ INSERT INTO last_login (userid, lastseen)
VALUES (%s, %s)
+ ON DUPLICATE KEY UPDATE
+ lastseen = GREATEST(lastseen, VALUES(lastseen))
"""
- cursor.execute(query, (user_id, date))
- db.commit()
+ try:
+ cursor.execute(query, (user_id, date))
+ db.commit()
+ except Exception as e:
+ db.rollback()
+ print(f"Error updating last_login for user {user_id}: {e}")
def load_in_mysql(a,b,c,d,e,f,g,h):
sql = """insert into log_logins(idpentityid,spentityid,loginstamp,userid,keyid,sessionid,requestid,trustedproxyentityid) values(%s,%s,%s,%s,%s,%s,%s,%s)"""
@@ -73,4 +79,3 @@ for filename in os.listdir(workdir):
cursor.close()
db.close()
-
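The switch from `REPLACE` to `INSERT … ON DUPLICATE KEY UPDATE` with `GREATEST()` makes the update monotonic: replaying an old log file can no longer move `lastseen` backwards. A worked sketch of the behaviour, assuming `userid` carries a unique key on `last_login`:

    -- existing row: ('urn:x', '2024-06-01')
    INSERT INTO last_login (userid, lastseen)
    VALUES ('urn:x', '2024-05-01')
    ON DUPLICATE KEY UPDATE lastseen = GREATEST(lastseen, VALUES(lastseen));
    -- the row keeps '2024-06-01': the replayed, older date loses the GREATEST()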
diff --git a/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2 b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2
new file mode 100644
index 000000000..843fe44bc
--- /dev/null
+++ b/roles/rsyslog/templates/parse_stepupauth_to_mysql.py.j2
@@ -0,0 +1,152 @@
+#!/usr/bin/python3
+# This script parses rotated stepup-authentication.log files produced by the Stepup-Gateway.
+# It filters for successful logins (authentication_result:OK) and inserts the data
+# into the log_logins and last_login MySQL tables.
+# This script is intended to be run separately during logrotate.
+
+import os
+import sys
+import json
+import MySQLdb
+from dateutil.parser import parse
+
+# Configuration variables (to be injected by Ansible/Jinja2)
+mysql_host="{{ item.db_loglogins_host }}"
+mysql_user="{{ item.db_loglogins_user }}"
+mysql_password="{{ item.db_loglogins_password }}"
+mysql_db="{{ item.db_loglogins_name }}"
+workdir="{{ rsyslog_dir }}/log_logins/{{ item.name }}/"
+
+# Establish database connection
+try:
+ db = MySQLdb.connect(mysql_host, mysql_user, mysql_password, mysql_db)
+ cursor = db.cursor()
+except Exception as e:
+ print(f"Error connecting to MySQL: {e}")
+ sys.exit(1)
+
+# --- Database Functions ---
+
+def update_lastseen(user_id, date):
+ """
+ Updates the last_login table.
+ Uses GREATEST() to ensure only newer dates overwrite the existing 'lastseen' value.
+ """
+ query = """
+ INSERT INTO last_login (userid, lastseen)
+ VALUES (%s, %s)
+ ON DUPLICATE KEY UPDATE
+ lastseen = GREATEST(lastseen, VALUES(lastseen))
+ """
+ try:
+ cursor.execute(query, (user_id, date))
+ db.commit()
+ except Exception as e:
+ db.rollback()
+ print(f"Error updating last_login for user {user_id}: {e}")
+
+def load_stepup_in_mysql(idp, sp, loginstamp, userid, requestid):
+ """
+ Inserts Step-up login data into the log_logins table.
+ Fills keyid, sessionid, and trustedproxyentityid with NULL.
+ """
+ # Columns in log_logins: idpentityid, spentityid, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid
+
+ keyid = None
+ sessionid = None
+ trustedproxyentityid = None
+
+ sql = """
+ INSERT INTO log_logins(idpentityid, spentityid, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid)
+ VALUES(%s, %s, %s, %s, %s, %s, %s, %s)
+ """
+ try:
+ cursor.execute(sql, (idp, sp, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid))
+ db.commit()
+ except Exception as e:
+ db.rollback()
+ print(f"Error inserting stepup data: {e}")
+ # Print the data that failed insertion
+ print((idp, sp, loginstamp, userid, keyid, sessionid, requestid, trustedproxyentityid))
+
+# --- Parsing Function ---
+
+def parse_stepup_lines(a):
+ """
+ Opens the stepup log file, parses each line, filters for successful logins,
+ and loads the data into MySQL.
+ """
+ input_file = open(a, 'r')
+ for line in input_file:
+ try:
+ # Assumes JSON data starts after the first ']:'
+ jsonline = line.split(']:',2)[1]
+ data = json.loads(jsonline)
+ except (IndexError, ValueError): # no ']:' prefix, or invalid JSON
+ continue
+
+ # 1. Filtering condition: Only parse logs having authentication_result:OK
+ # Only successful authentications are logged, so this check is not
+ # necessary. There is currently a bug in the Stepup-Gateway where
+ # FAILED is logged, even though the result is OK, making this check
+ # do the wrong thing now.
+ #
+ #if data.get("context", {}).get("authentication_result") != "OK":
+ # continue
+
+ # 2. Extract required fields
+ context = data.get("context")
+
+ if not isinstance(context, dict):
+ print("Skipping line: context is missing or invalid")
+ continue
+
+ user_id = context.get("identity_id")
+ timestamp = context.get("datetime")
+ request_id = context.get("request_id")
+ sp_entity_id = context.get("requesting_sp")
+ idp_entity_id = context.get("authenticating_idp")
+
+ # Basic data validation
+ if not user_id or not timestamp:
+ print(
+ "Skipping line: validation failed "
+ f"(user_id={user_id!r}, timestamp={timestamp!r}, request_id={request_id!r})"
+ )
+ continue
+
+ try:
+ # 3. Format date and time for MySQL
+ loginstamp = parse(timestamp).strftime("%Y-%m-%d %H:%M:%S")
+ last_login_date = parse(timestamp).strftime("%Y-%m-%d")
+ except Exception as e:
+ print(
+ "Skipping line: timestamp parsing failed "
+ f"(timestamp={timestamp!r}, user_id={user_id!r}, error={e})"
+ )
+ continue
+
+ # 4. Insert into MySQL
+ load_stepup_in_mysql(idp_entity_id, sp_entity_id, loginstamp, user_id, request_id)
+
+ # 5. Update last login date
+ update_lastseen(user_id, last_login_date)
+
+
+# --- Main Execution ---
+
+## Loop over the files and parse them one by one
+for filename in os.listdir(workdir):
+ filetoparse = os.path.join(workdir, filename)
+
+ # Check for Stepup files, ignore compressed files
+ if os.path.isfile(filetoparse) and filename.startswith("stepup-authentication.log-") and not filename.endswith(".gz"):
+ print(f"Parsing stepup log file: {filename}")
+ parse_stepup_lines(filetoparse)
+ else:
+ continue
+
+# Close database connection
+cursor.close()
+db.close()
+print("Stepup log parsing complete.")
diff --git a/roles/sram_ldap/defaults/main.yml b/roles/sram_ldap/defaults/main.yml
new file mode 100644
index 000000000..35d5029f3
--- /dev/null
+++ b/roles/sram_ldap/defaults/main.yml
@@ -0,0 +1,37 @@
+---
+sram_ldap_image: "ghcr.io/surfscz/sram-ldap:main"
+sram_ldap_conf_dir: "{{ current_release_appdir }}/sram/ldap"
+sram_ldap_ldif_dir: "{{ sram_ldap_conf_dir }}/schema"
+sram_ldap_certs_dir: "{{ sram_ldap_conf_dir }}/certs"
+sram_ldap_backup_dir: "{{ sram_ldap_conf_dir }}/ldap"
+sram_ldap_data_dir: "{{ sram_ldap_conf_dir }}/data"
+sram_ldap_uri: "ldap://localhost/"
+
+sram_ldap_user: "openldap"
+sram_ldap_group: "openldap"
+
+# admin_group: "ldap_admin"
+sram_ldap_admins:
+ - name: Admin
+ uid: admin
+ pw_hash: "!"
+ sshkey: ""
+
+sram_ldap_loglevel: "stats stats2 filter"
+
+sram_ldap_services_password: secret
+sram_ldap_monitor_password: secret
+sram_ldap_ldap_monitor_password: secret
+
+sram_ldap_rid_prefix: "ldap://"
+
+sram_ldap_base_domain: "{{ base_domain }}"
+sram_ldap_base_dn: >-
+ {{ ((sram_ldap_base_domain.split('.')|length)*['dc=']) |
+ zip(sram_ldap_base_domain.split('.')) | list | map('join', '') | list | join(',') }}
+sram_ldap_services_dn:
+ basedn: "dc=services,{{ sram_ldap_base_dn }}"
+ o: "Services"
+ binddn: "cn=admin,{{ sram_ldap_base_dn }}"
+
+sram_ldap_hosts: {}
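The `sram_ldap_base_dn` expression zips a `dc=` prefix onto each label of the base domain; derived directly from the expression above:

    sram_ldap_base_domain: "sram.example.org"
    # renders as:
    sram_ldap_base_dn: "dc=sram,dc=example,dc=org"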
diff --git a/roles/sram_ldap/files/eduMember.ldif b/roles/sram_ldap/files/eduMember.ldif
new file mode 100644
index 000000000..42894d596
--- /dev/null
+++ b/roles/sram_ldap/files/eduMember.ldif
@@ -0,0 +1,27 @@
+dn: cn=eduMember,cn=schema,cn=config
+objectClass: olcSchemaConfig
+cn: eduMember
+# Internet X.500 Schema for Ldappc
+# Includes the eduMember ObjectClass schema
+#
+#
+# An auxiliary object class, "eduMember," is a convenient container
+# for an extensible set of attributes concerning group memberships.
+# At this time, the only attributes specified as belonging to the
+# object class are "isMemberOf" and "hasMember."
+#
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.1
+ NAME 'isMemberOf'
+ DESC 'identifiers for groups to which containing entity belongs'
+ EQUALITY caseExactMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.2
+ NAME 'hasMember'
+ DESC 'identifiers for entities that are members of the group'
+ EQUALITY caseExactMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcObjectClasses: ( 1.3.6.1.4.1.5923.1.5.2.1
+ NAME 'eduMember'
+ AUXILIARY
+ MAY ( isMemberOf $ hasMember )
+ )
diff --git a/roles/sram_ldap/files/eduPerson.ldif b/roles/sram_ldap/files/eduPerson.ldif
new file mode 100644
index 000000000..e4f2c96a0
--- /dev/null
+++ b/roles/sram_ldap/files/eduPerson.ldif
@@ -0,0 +1,83 @@
+dn: cn=eduperson,cn=schema,cn=config
+objectClass: olcSchemaConfig
+cn: eduperson
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.1
+ NAME 'eduPersonAffiliation'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseIgnoreMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.7
+ NAME 'eduPersonEntitlement'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseExactMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.2
+ NAME 'eduPersonNickName'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseIgnoreMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.3
+ NAME 'eduPersonOrgDN'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY distinguishedNameMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.4
+ NAME 'eduPersonOrgUnitDN'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY distinguishedNameMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.5
+ NAME 'eduPersonPrimaryAffiliation'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseIgnoreMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.8
+ NAME 'eduPersonPrimaryOrgUnitDN'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY distinguishedNameMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.6
+ NAME 'eduPersonPrincipalName'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseIgnoreMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.12
+ NAME 'eduPersonPrincipalNamePrior'
+ DESC 'eduPersonPrincipalNamePrior per Internet2'
+ EQUALITY caseIgnoreMatch
+ SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.9
+ NAME 'eduPersonScopedAffiliation'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseIgnoreMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.10
+ NAME 'eduPersonTargetedID'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseExactMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.11
+ NAME 'eduPersonAssurance'
+ DESC 'eduPerson per Internet2 and EDUCAUSE'
+ EQUALITY caseExactMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.13
+ NAME 'eduPersonUniqueId'
+ DESC 'eduPersonUniqueId per Internet2'
+ EQUALITY caseIgnoreMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE )
+olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.16
+ NAME 'eduPersonOrcid'
+ DESC 'ORCID researcher identifiers belonging to the principal'
+ EQUALITY caseIgnoreMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
+olcObjectClasses: ( 1.3.6.1.4.1.5923.1.1.2
+ NAME 'eduPerson'
+ AUXILIARY
+ MAY (
+ eduPersonAffiliation $ eduPersonNickname $ eduPersonOrgDN $
+ eduPersonOrgUnitDN $ eduPersonPrimaryAffiliation $
+ eduPersonPrincipalName $ eduPersonEntitlement $ eduPersonPrimaryOrgUnitDN $
+ eduPersonScopedAffiliation $ eduPersonTargetedID $ eduPersonAssurance $
+ eduPersonPrincipalNamePrior $ eduPersonUniqueId $ eduPersonOrcid )
+ )
diff --git a/roles/sram_ldap/files/groupOfMembers.ldif b/roles/sram_ldap/files/groupOfMembers.ldif
new file mode 100644
index 000000000..aa10094d3
--- /dev/null
+++ b/roles/sram_ldap/files/groupOfMembers.ldif
@@ -0,0 +1,19 @@
+# Internet X.500 Schema for Ldappc
+# Includes the groupOfMembers ObjectClass schema
+#
+# Taken from RFC2307bis draft 2
+# https://tools.ietf.org/html/draft-howard-rfc2307bis-02
+#
+# A structural object class, "groupOfMembers" is a convenient container
+# for an extensible set of attributes concerning group memberships.
+#
+dn: cn=groupOfMembers,cn=schema,cn=config
+objectClass: olcSchemaConfig
+cn: groupOfMembers
+olcObjectClasses: ( 1.3.6.1.1.1.2.18 SUP top STRUCTURAL
+ NAME 'groupOfMembers'
+ DESC 'A group with members (DNs)'
+ MUST cn
+ MAY ( businessCategory $ seeAlso $ owner $ ou $ o $
+ description $ member )
+ )
diff --git a/roles/sram_ldap/files/ldap-add b/roles/sram_ldap/files/ldap-add
new file mode 100644
index 000000000..3d0c5e487
--- /dev/null
+++ b/roles/sram_ldap/files/ldap-add
@@ -0,0 +1,51 @@
+#!/usr/bin/env bash
+
+# Copyright (C) 2015-2019 Maciej Delmanowski
+# Copyright (C) 2015-2019 DebOps
+# SPDX-License-Identifier: GPL-3.0-only
+
+# Check if specified LDAP schema file is loaded in the local slapd cn=config
+# database. If not, try loading it in the server.
+
+
+set -o nounset -o pipefail -o errexit
+
+schema_file="${1}"
+
+if [ -z "${schema_file}" ] ; then
+ printf "Error: You need to specify schema file to load\\n" && exit 1
+fi
+
+if [ ! -e "${schema_file}" ] ; then
+ printf "Error: %s does not exist\\n" "${schema_file}" && exit 1
+fi
+
+if [ ! -r "${schema_file}" ] ; then
+ printf "Error: %s is unreadable\\n" "${schema_file}" && exit 1
+fi
+
+# The schema file is already converted, we can deal with it directly
+if [[ "${schema_file}" == *.ldif ]] ; then
+
+ # Get the DN of the schema
+ schema_dn="$(grep -E '^dn:\s' "${schema_file}")"
+
+ # Get list of already installed schemas from local LDAP server
+ schema_list() {
+ ldapsearch -Y EXTERNAL -H ldapi:/// -LLLQ -b 'cn=schema,cn=config' dn \
+ | sed -e '/^$/d' -e 's/{[0-9]\+}//'
+ }
+
+ if schema_list | grep -q "${schema_dn}" ; then
+
+ # Schema is already installed, do nothing
+ exit 80
+
+ else
+
+ # Try installing the schema in the database
+ ldapadd -Y EXTERNAL -H ldapi:/// -f "${schema_file}"
+
+ fi
+
+fi
diff --git a/roles/sram_ldap/files/ldapPublicKey.ldif b/roles/sram_ldap/files/ldapPublicKey.ldif
new file mode 100644
index 000000000..8968b6e96
--- /dev/null
+++ b/roles/sram_ldap/files/ldapPublicKey.ldif
@@ -0,0 +1,21 @@
+dn: cn=openssh-lpk-openldap,cn=schema,cn=config
+objectClass: olcSchemaConfig
+cn: openssh-lpk-openldap
+#
+# LDAP Public Key Patch schema for use with openssh-ldappubkey
+# useful with PKA-LDAP also
+#
+# Author: Eric AUGE
+#
+# Based on the proposal of : Mark Ruijter
+#
+# octetString SYNTAX
+olcAttributeTypes: ( 1.3.6.1.4.1.24552.500.1.1.1.13 NAME 'sshPublicKey'
+ DESC 'MANDATORY: OpenSSH Public key'
+ EQUALITY octetStringMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 )
+# printableString SYNTAX yes|no
+olcObjectClasses: ( 1.3.6.1.4.1.24552.500.1.1.2.0 NAME 'ldapPublicKey' SUP top AUXILIARY
+ DESC 'MANDATORY: OpenSSH LPK olcObjectClasses:'
+ MUST ( sshPublicKey $ uid )
+ )
diff --git a/roles/sram_ldap/files/sczGroup.ldif b/roles/sram_ldap/files/sczGroup.ldif
new file mode 100644
index 000000000..d1b5cb332
--- /dev/null
+++ b/roles/sram_ldap/files/sczGroup.ldif
@@ -0,0 +1,23 @@
+# Internet X.500 Schema for Ldappc
+# Includes the sczGroup ObjectClass schema
+#
+# An auxiliary object class, "sczGroup," is a convenient container
+# for an extensible set of attributes concerning group memberships.
+# At this time, the only attribute specified as belonging to the
+# object class is "sczMember."
+#
+# It is specifically configured to support the memberOf overlay.
+#
+dn: cn=sczGroup,cn=schema,cn=config
+objectClass: olcSchemaConfig
+cn: sczGroup
+olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.40.50.1.1
+ NAME 'sczMember'
+ DESC 'DN identifiers for entities that are members of the group'
+ EQUALITY distinguishedNameMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )
+olcObjectClasses: ( 1.3.6.1.4.1.1076.20.40.50.1
+ NAME 'sczGroup'
+ AUXILIARY
+ MAY ( sczMember )
+ )
diff --git a/roles/sram_ldap/files/sramPerson.ldif b/roles/sram_ldap/files/sramPerson.ldif
new file mode 100644
index 000000000..e194381d1
--- /dev/null
+++ b/roles/sram_ldap/files/sramPerson.ldif
@@ -0,0 +1,23 @@
+# Internet X.500 Schema for Ldappc
+# Includes the sramPerson ObjectClass schema
+#
+# An auxiliary object class, "sramPerson," is a convenient container
+# for an extensible set of attributes concerning sram persons.
+# At this time, the only attribute specified as belonging to the
+# object class is "sramInactiveDays".
+#
+dn: cn=sramPerson,cn=schema,cn=config
+objectClass: olcSchemaConfig
+cn: sramPerson
+olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.100.20.2.1 NAME 'sramInactiveDays'
+ DESC 'Number of days this entity was inactive'
+ EQUALITY integerMatch
+ ORDERING integerOrderingMatch
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.27
+ )
+olcObjectClasses: ( 1.3.6.1.4.1.1076.20.100.20.1.1 NAME 'sramPerson'
+ AUXILIARY
+ MAY (
+ sramInactiveDays
+ )
+ )
diff --git a/roles/sram_ldap/files/voPerson.ldif b/roles/sram_ldap/files/voPerson.ldif
new file mode 100644
index 000000000..bdce11ed8
--- /dev/null
+++ b/roles/sram_ldap/files/voPerson.ldif
@@ -0,0 +1,44 @@
+dn: cn=voperson,cn=schema,cn=config
+objectClass: olcSchemaConfig
+cn: voperson
+olcAttributeTypes: {0}( 1.3.6.1.4.1.34998.3.3.1.1 NAME 'voPersonApplicationUID
+ ' DESC 'voPerson Application-Specific User Identifier' EQUALITY caseIgnoreMat
+ ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )
+olcAttributeTypes: {1}( 1.3.6.1.4.1.34998.3.3.1.2 NAME 'voPersonAuthorName' DE
+ SC 'voPerson Author Name' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.1
+ 15.121.1.15' )
+olcAttributeTypes: {2}( 1.3.6.1.4.1.34998.3.3.1.3 NAME 'voPersonCertificateDN'
+ DESC 'voPerson Certificate Distinguished Name' EQUALITY distinguishedNameMat
+ ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )
+olcAttributeTypes: {3}( 1.3.6.1.4.1.34998.3.3.1.4 NAME 'voPersonCertificateIss
+ uerDN' DESC 'voPerson Certificate Issuer DN' EQUALITY distinguishedNameMatch
+ SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' )
+olcAttributeTypes: {4}( 1.3.6.1.4.1.34998.3.3.1.5 NAME 'voPersonExternalID' DE
+ SC 'voPerson Scoped External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3
+ .6.1.4.1.1466.115.121.1.15' )
+olcAttributeTypes: {5}( 1.3.6.1.4.1.34998.3.3.1.6 NAME 'voPersonID' DESC 'voPe
+ rson Unique Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115
+ .121.1.15' )
+olcAttributeTypes: {6}( 1.3.6.1.4.1.34998.3.3.1.7 NAME 'voPersonPolicyAgreemen
+ t' DESC 'voPerson Policy Agreement Indicator' EQUALITY caseIgnoreMatch SYNTAX
+ '1.3.6.1.4.1.1466.115.121.1.15' )
+olcAttributeTypes: {7}( 1.3.6.1.4.1.34998.3.3.1.8 NAME 'voPersonSoRID' DESC 'v
+ oPerson External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.146
+ 6.115.121.1.15' )
+olcAttributeTypes: {8}( 1.3.6.1.4.1.34998.3.3.1.9 NAME 'voPersonStatus' DESC '
+ voPerson Status' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1.
+ 15' )
+olcAttributeTypes: {9}( 1.3.6.1.4.1.34998.3.3.1.10 NAME 'voPersonAffiliation'
+ DESC 'voPerson Affiliation Within Local Scope' EQUALITY caseIgnoreMatch SYNTA
+ X '1.3.6.1.4.1.1466.115.121.1.15' )
+olcAttributeTypes: {10}( 1.3.6.1.4.1.34998.3.3.1.11 NAME 'voPersonExternalAffi
+ liation' DESC 'voPerson Scoped External Affiliation' EQUALITY caseIgnoreMatch
+ SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )
+olcAttributeTypes: {11}( 1.3.6.1.4.1.34998.3.3.1.12 NAME 'voPersonScopedAffili
+ ation' DESC 'voPerson Affiliation With Explicit Local Scope' EQUALITY caseIgn
+ oreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' )
+olcObjectClasses: {0}( 1.3.6.1.4.1.34998.3.3.1 NAME 'voPerson' AUXILIARY MAY (
+ voPersonAffiliation $ voPersonApplicationUID $ voPersonAuthorName $ voPerson
+ CertificateDN $ voPersonCertificateIssuerDN $ voPersonExternalAffiliation $ v
+ oPersonExternalID $ voPersonID $ voPersonPolicyAgreement $ voPersonScopedAffi
+ liation $ voPersonSoRID $ voPersonStatus ) )
diff --git a/roles/sram_ldap/handlers/main.yml b/roles/sram_ldap/handlers/main.yml
new file mode 100644
index 000000000..f6136cfeb
--- /dev/null
+++ b/roles/sram_ldap/handlers/main.yml
@@ -0,0 +1,6 @@
+---
+- name: Restart the ldap container
+ community.docker.docker_container:
+ name: "sram-ldap"
+ restart: true
+ state: started
diff --git a/roles/sram_ldap/tasks/admins.yml b/roles/sram_ldap/tasks/admins.yml
new file mode 100644
index 000000000..f20648460
--- /dev/null
+++ b/roles/sram_ldap/tasks/admins.yml
@@ -0,0 +1,62 @@
+---
+- name: determine ldap admins
+ set_fact:
+ ldap_admins: "{{ sram_ldap_admins }}"
+
+# Find existing ldap admins
+- name: Initialize admins (I)
+ community.general.ldap_search:
+ dn: "{{ sram_ldap_services_dn.basedn }}"
+ scope: "onelevel"
+ filter: "(objectClass=organizationalRole)"
+ attrs:
+ - "cn"
+ bind_dn: "{{ sram_ldap_services_dn.binddn }}"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{sram_ldap_uri }}"
+ register: "existing_ldap_admins_result"
+
+# ansible trips over stuff like this: we need to extract the results from the result
+- name: Initialize admins (Ia)
+ set_fact:
+ existing_ldap_admins: "{{ existing_ldap_admins_result.results }}"
+
+# Remove LDAP non-admins
+- name: Initialize admins (II)
+ community.general.ldap_entry:
+ dn: "cn={{ item.cn }},{{ sram_ldap_services_dn.basedn }}"
+ state: absent
+ bind_dn: "{{ sram_ldap_services_dn.binddn }}"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+ when: >
+ item.cn not in ldap_admins | map(attribute='uid')
+ and item.cn != 'admin'
+ loop: "{{existing_ldap_admins}}"
+
+# Insert LDAP admins
+- name: Initialize admins (III)
+ community.general.ldap_entry:
+ dn: "cn={{ item.uid }},{{ sram_ldap_services_dn.basedn }}"
+ objectClass:
+ - simpleSecurityObject
+ - organizationalRole
+ attributes:
+ description: An LDAP administrator
+ userPassword: "{{ item.pw_hash }}"
+ bind_dn: "{{ sram_ldap_services_dn.binddn }}"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+ loop: "{{ ldap_admins }}"
+
+# Make sure passwords are updated for existing admins
+- name: Initialize admins (IV)
+ community.general.ldap_attrs:
+ dn: "cn={{ item.uid }},{{ sram_ldap_services_dn.basedn }}"
+ attributes:
+ userPassword: "{{ item.pw_hash }}"
+ bind_dn: "{{ sram_ldap_services_dn.binddn }}"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+ loop: "{{ ldap_admins }}"
+
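The `pw_hash` values written into `userPassword` are expected to be pre-hashed (the role default of "!" is a locked password). One common way to produce a hash, assuming OpenLDAP's standard `slappasswd` utility is available:

    slappasswd -h {SSHA} -s 'correct horse battery staple'
    # -> {SSHA}<base64 digest>; paste the full string into pw_hash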
diff --git a/roles/sram_ldap/tasks/main.yml b/roles/sram_ldap/tasks/main.yml
new file mode 100644
index 000000000..c1322739e
--- /dev/null
+++ b/roles/sram_ldap/tasks/main.yml
@@ -0,0 +1,299 @@
+---
+# playbook to install and configure all components of the LDAP
+- name: Install LDAP utils
+ apt:
+ state: "present"
+ name:
+ - "python3-ldap" # for ansible ldap modules
+ install_recommends: false
+
+- name: Ensure that a number of directories exist
+ file:
+ path: "{{ item.path }}"
+ state: "directory"
+ mode: "{{ item.mode }}"
+ with_items:
+ - { path: "{{sram_ldap_ldif_dir}}", mode: "0755" }
+ - { path: "{{sram_ldap_certs_dir}}", mode: "0755" }
+ - { path: "{{sram_ldap_data_dir}}", mode: "0777" }
+ notify: Restart the ldap container
+
+- name: Copy schemas
+ copy:
+ src: "{{ item }}"
+ dest: "{{ sram_ldap_ldif_dir }}/{{ item }}"
+ mode: "0644"
+ with_items:
+ - sczGroup.ldif
+ - groupOfMembers.ldif
+ - eduPerson.ldif
+ - ldapPublicKey.ldif
+ - eduMember.ldif
+ - voPerson.ldif
+ - sramPerson.ldif
+ notify: Restart the ldap container
+
+- name: Copying ldap-add script
+ copy:
+ src: "{{ item }}"
+ dest: "{{ sram_ldap_conf_dir }}/{{ item }}"
+ mode: "0755"
+ with_items:
+ - ldap-add
+
+- name: Setup ldap hosts
+ vars:
+ host:
+ key: "%s.{{ sram_ldap_base_domain }}"
+ value: "%s"
+ etc_hosts: {}
+ set_fact:
+ etc_hosts: >-
+ {{ etc_hosts |
+ combine({ host.key | format(item.key): host.value | format(item.value) }) }}
+ with_dict: "{{ sram_ldap_hosts }}"
+
+- name: Create the ldap container
+ community.docker.docker_container:
+ name: "sram-ldap"
+ image: "{{ sram_ldap_image }}"
+ restart_policy: "always"
+ state: started
+ # pull: true
+ ports:
+ - 0.0.0.0:389:389
+ env:
+ LDAP_ORGANISATION: "{{ env }}"
+ LDAP_DOMAIN: "{{ sram_ldap_base_domain }}"
+ LDAP_ROOTPASS: "{{ sram_ldap_services_password }}"
+ etc_hosts: "{{ etc_hosts }}"
+ volumes:
+ - "{{ sram_ldap_conf_dir }}:/opt/ldap"
+ networks:
+ - name: "loadbalancer"
+ labels:
+ traefik.enable: "true"
+ traefik.tcp.routers.ldap.entrypoints: "ldaps"
+ traefik.tcp.routers.ldap.rule: "HostSNI(`*`)"
+ traefik.tcp.routers.ldap.tls: "true"
+ traefik.tcp.services.ldap.loadbalancer.server.port: "389"
+ healthcheck:
+ test:
+ - "CMD"
+ - "bash"
+ - "-c"
+ - "[[ -S /var/run/slapd/ldapi ]]"
+ register: "ldap_container"
+
+- name: Wait for LDAP initialization
+ ansible.builtin.wait_for:
+ port: 389
+ delay: 5
+
+- name: Wait for 5 seconds
+ ansible.builtin.wait_for:
+ timeout: 5
+ when: "ldap_container is changed"
+
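+# rc 80 from the ldap-add helper indicates the schema is already present, so
+# it counts as success without change and reruns stay idempotent.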
+- name: Ensure the schemas are added to LDAP
+ ansible.builtin.shell:
+ cmd: "docker exec sram-ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}"
+ register: "result"
+ failed_when: "result.rc not in [0,80]"
+ changed_when: "result.rc != 80"
+ become: true
+ loop:
+ - "sczGroup.ldif"
+ - "groupOfMembers.ldif"
+ - "eduPerson.ldif"
+ - "ldapPublicKey.ldif"
+ - "eduMember.ldif"
+ - "voPerson.ldif"
+ - "sramPerson.ldif"
+
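+# Equality indices: entryUUID and entryCSN are the attributes syncrepl uses to
+# locate and order changes; o and dc cover common base lookups.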
+- name: Set indices
+ community.general.ldap_attrs:
+ dn: "olcDatabase={1}mdb,cn=config"
+ attributes:
+ olcDbIndex: "{{item}}"
+ state: "present"
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+ with_items:
+ - "entryUUID eq"
+ - "o eq"
+ - "dc eq"
+ - "entryCSN eq"
+
+- name: Set olcDatabase={-1}frontend olcSizeLimit
+ community.general.ldap_attrs:
+ dn: "olcDatabase={-1}frontend,cn=config"
+ state: "exact"
+ attributes:
+ olcSizeLimit: "unlimited"
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+
+- name: Set config
+ community.general.ldap_attrs:
+ dn: "cn=config"
+ state: "present"
+ attributes:
+ olcServerID: "{{ sram_ldap_server_id }}"
+ olcSizeLimit: "unlimited"
+ olcLogLevel: "{{ sram_ldap_loglevel }}"
+ olcAttributeOptions: "time-"
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+
+- name: Setup Modules
+ community.general.ldap_attrs:
+ dn: cn=module{0},cn=config
+ attributes:
+ olcModuleLoad:
+ - syncprov
+ - dynlist.so
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+
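+# The dynlist overlay computes group membership on the fly: for voPerson
+# entries, the LDAP URL in labeledURI is expanded into member values, with
+# memberOf maintained against static groupOfMembers groups.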
+- name: Setup Dynlist
+ community.general.ldap_entry:
+ dn: olcOverlay=dynlist,olcDatabase={1}mdb,cn=config
+ objectClass:
+ - olcOverlayConfig
+ - olcDynamicList
+ attributes:
+ olcDlAttrSet: "voPerson labeledURI member+memberOf@groupOfMembers"
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+
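+# syncprov is the sync-provider overlay that syncrepl consumers replicate
+# from; checkpoint "100 10" = every 100 ops or 10 minutes, session log of 100.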
+- name: Setup Syncprov
+ community.general.ldap_entry:
+ dn: olcOverlay=syncprov,olcDatabase={1}mdb,cn=config
+ objectClass:
+ - olcOverlayConfig
+ - olcSyncProvConfig
+ attributes:
+ olcSpCheckpoint: 100 10
+ olcSpSessionLog: 100
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+
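+# Ordered ACLs: (1) entries inside a service subtree are writable by the
+# services bind DN and local root (peercred), and readable by that subtree's
+# own cn=admin; (2) root may manage everything, DNs directly under the basedn
+# may read (plus a uid-1000 peercred manage rule on VMs); (3) userPassword is
+# writable by self and available for anonymous auth binds.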
+- name: Set ACLs
+ community.general.ldap_attrs:
+ dn: "olcDatabase={1}mdb,cn=config"
+ attributes:
+ olcAccess:
+ - >-
+ to dn.regex="(([^,]+),{{ sram_ldap_services_dn.basedn }})$"
+ by dn.exact="{{ sram_ldap_services_dn.binddn }}" write
+ by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth write
+ by dn.exact,expand="cn=admin,$1" read
+ by * break
+ - >-
+ to *
+ by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage
+ by dn.regex="cn=[^,]+,{{ sram_ldap_services_dn.basedn }}" read
+ {% if env=="vm" %}
+ by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage
+ {% endif %}
+ by * break
+ - >-
+ to attrs=userPassword
+ by self write
+ by anonymous auth
+ by * break
+ state: exact
+ ordered: true
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+
+# sram_ldap_rids:
+# 101: ldaps://ldap1.scz-vm.net/
+# 102: ldaps://ldap2.scz-vm.net/
+
+- name: Set rids
+ vars:
+ start: 101
+ rid:
+ key: "%d"
+ value: "{{ sram_ldap_rid_prefix }}%s.{{ sram_ldap_base_domain }}/"
+ ldap_rids: {}
+ set_fact:
+ ldap_rids: >-
+ {{ ldap_rids | combine({ rid.key | format(start|int):
+ rid.value | format(item.key) }) }}
+ start: "{{ start|int + 1 }}"
+ with_dict: "{{ sram_ldap_hosts | dict2items | sort(attribute='key') }}"
+
+# For future Claude users: the construct below ends up producing a string
+# representation of the dict, which can no longer be used in the subsequent
+# 'Setup rids' task...
+# - name: Set rids
+# set_fact:
+# ldap_rids: >-
+# {%- set result = {} %}
+# {%- for host in (ldap_hosts | dict2items | sort(attribute='key')) %}
+# {%- set _ = result.update({(101 + loop.index0)|string: \
+# ldap_rid_prefix ~ host.key ~ '.' ~ base_domain ~ '/'}) %}
+# {%- endfor %}
+# {{ result }}
+
+- name: Setup rids
+ vars:
+ rid: >-
+ rid={}
+ provider="{}"
+ searchbase="{{ sram_ldap_services_dn.basedn }}"
+ type=refreshAndPersist
+ bindmethod=simple
+ binddn="{{ sram_ldap_services_dn.binddn }}"
+ credentials={{ sram_ldap_services_password }}
+ retry="30 +"
+ timeout=30
+ network-timeout=5
+ rids: []
+ set_fact:
+ rids: "{{ rids + [ rid.format(item.key, item.value) ] }}"
+ with_dict: "{{ dict(ldap_rids) }}"
+
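+# olcMultiProvider TRUE enables multi-provider (multi-master) replication:
+# every node consumes changes from all peers in the rids list built above.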
+- name: Setup Syncrepl
+ community.general.ldap_attrs:
+ dn: olcDatabase={1}mdb,cn=config
+ attributes:
+ olcSyncrepl: "{{ rids }}"
+ olcMultiProvider: "TRUE"
+ bind_dn: "cn=admin,cn=config"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+
+# We now have Syncrepl in place, so only write to primary
+- name: Initialize DIT
+ community.general.ldap_entry:
+ dn: "{{ sram_ldap_services_dn.basedn }}"
+ state: "present"
+ objectClass:
+ - "top"
+ - "dcObject"
+ - "organization"
+ attributes:
+ dc: "{{ sram_ldap_services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}"
+ o: "{{ sram_ldap_services_dn.o }}"
+ bind_dn: "{{ sram_ldap_services_dn.binddn }}"
+ bind_pw: "{{ sram_ldap_services_password }}"
+ server_uri: "{{ sram_ldap_uri }}"
+ when: >
+ inventory_hostname in groups['sram_ldap_primary']
+
+# We now have Syncrepl in place, so only write to primary
+- name: Add ldap admins
+ include_tasks: "admins.yml"
+ when: >
+ inventory_hostname in groups['sram_ldap_primary']
diff --git a/roles/sram_ldap/templates/ldap-backup.sh.j2 b/roles/sram_ldap/templates/ldap-backup.sh.j2
new file mode 100644
index 000000000..2c6aa9201
--- /dev/null
+++ b/roles/sram_ldap/templates/ldap-backup.sh.j2
@@ -0,0 +1,19 @@
+#!/bin/bash
+# vim:ft=sh
+set -e
+
+if [ $UID -ne 0 ]
+then
+ echo "Sorry, this script must run as root"
+ exit 1
+fi
+
+BACKUP_DIR="{{ ldap_backup_dir }}"
+BACKUP_FILE="$BACKUP_DIR/ldap_$(/bin/date +%Y-%m-%d_%H:%M)"
+
+mkdir -p -m 0755 "$BACKUP_DIR"
+
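+# slapcat -n0 dumps the cn=config database, -n1 the first data (mdb) database;
+# ldif-wrap=no keeps attribute values on single lines.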
+/usr/sbin/slapcat -o ldif-wrap=no -n0 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db0.ldif.bz2"
+/usr/sbin/slapcat -o ldif-wrap=no -n1 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db1.ldif.bz2"
+
+exit 0
diff --git a/roles/sram_ldap/templates/ldap.conf.j2 b/roles/sram_ldap/templates/ldap.conf.j2
new file mode 100644
index 000000000..d7fa7c227
--- /dev/null
+++ b/roles/sram_ldap/templates/ldap.conf.j2
@@ -0,0 +1,16 @@
+#
+# LDAP Defaults
+#
+
+# See ldap.conf(5) for details
+# This file should be world readable but not world writable.
+
+#BASE dc=example,dc=com
+#URI ldap://ldap.example.com ldap://ldap-master.example.com:666
+
+#SIZELIMIT 12
+#TIMELIMIT 15
+#DEREF never
+
+# TLS certificates (needed for GnuTLS)
+TLS_CACERT {{ ssl_certs_dir }}/{{ internal_base_domain }}.crt
diff --git a/roles/sram_midproxy/defaults/main.yml b/roles/sram_midproxy/defaults/main.yml
new file mode 100644
index 000000000..aeac841ea
--- /dev/null
+++ b/roles/sram_midproxy/defaults/main.yml
@@ -0,0 +1,7 @@
+---
+sram_midproxy_satosa_version: 8
+sram_midproxy_state_encryption_key: 'secret'
+sram_midproxy_issuer: 'issuer'
+sram_midproxy_client_id: 'client'
+sram_midproxy_client_secret: 'secret'
+sram_midproxy_sp_metadata: 'eb-metadata.xml'
diff --git a/roles/sram_midproxy/files/internal_attributes.yaml b/roles/sram_midproxy/files/internal_attributes.yaml
new file mode 100644
index 000000000..eb3dcd66e
--- /dev/null
+++ b/roles/sram_midproxy/files/internal_attributes.yaml
@@ -0,0 +1,22 @@
+attributes:
+ displayname:
+ openid: [name]
+ saml: [displayName]
+ givenname:
+ openid: [given_name]
+ saml: [givenName]
+ mail:
+ openid: [email]
+ saml: [mail]
+ name:
+ openid: [name]
+ saml: [cn]
+ surname:
+ openid: [family_name]
+ saml: [sn, surname]
+ uid:
+ openid: [sub]
+ saml: [uid]
+ schachomeorganization:
+ openid: [schac_home_organization]
+ saml: [schacHomeOrganization]
diff --git a/roles/sram_midproxy/files/plugins/attribute-maps/basic.py b/roles/sram_midproxy/files/plugins/attribute-maps/basic.py
new file mode 100644
index 000000000..f98466df5
--- /dev/null
+++ b/roles/sram_midproxy/files/plugins/attribute-maps/basic.py
@@ -0,0 +1,51 @@
+DEF = "urn:mace:dir:attribute-def:"
+TERENA = "urn:mace:terena.org:attribute-def:"
+
+MAP = {
+ "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic",
+ "fro": {
+ f"{TERENA}schacHomeOrganization": "schacHomeOrganization",
+ f"{DEF}cn": "cn",
+ f"{DEF}displayName": "displayName",
+ f"{DEF}eduPersonAffiliation": "eduPersonAffiliation",
+ f"{DEF}eduPersonEntitlement": "eduPersonEntitlement",
+ f"{DEF}eduPersonPrincipalName": "eduPersonPrincipalName",
+ f"{DEF}eduPersonScopedAffiliation": "eduPersonScopedAffiliation",
+ f"{DEF}eduPersonTargetedID": "eduPersonTargetedID",
+ f"{DEF}eduPersonAssurance": "eduPersonAssurance",
+ f"{DEF}email": "email",
+ f"{DEF}emailAddress": "emailAddress",
+ f"{DEF}givenName": "givenName",
+ f"{DEF}gn": "gn",
+ f"{DEF}isMemberOf": "isMemberOf",
+ f"{DEF}mail": "mail",
+ f"{DEF}member": "member",
+ f"{DEF}name": "name",
+ f"{DEF}sn": "sn",
+ f"{DEF}surname": "surname",
+ f"{DEF}uid": "uid",
+ },
+ "to": {
+ "schacHomeOrganization": f"{TERENA}schacHomeOrganization",
+ "cn": f"{DEF}cn",
+ "displayName": f"{DEF}displayName",
+ "eduPersonAffiliation": f"{DEF}eduPersonAffiliation",
+ "eduPersonEntitlement": f"{DEF}eduPersonEntitlement",
+ "eduPersonPrincipalName": f"{DEF}eduPersonPrincipalName",
+ "eduPersonScopedAffiliation": f"{DEF}eduPersonScopedAffiliation",
+ "eduPersonTargetedID": f"{DEF}eduPersonTargetedID",
+ "eduPersonAssurance": f"{DEF}eduPersonAssurance",
+ "eduPersonOrcid": f"{DEF}eduPersonOrcid",
+ "email": f"{DEF}email",
+ "emailAddress": f"{DEF}emailAddress",
+ "givenName": f"{DEF}givenName",
+ "gn": f"{DEF}gn",
+ "isMemberOf": f"{DEF}isMemberOf",
+ "mail": f"{DEF}mail",
+ "member": f"{DEF}member",
+ "name": f"{DEF}name",
+ "sn": f"{DEF}sn",
+ "surname": f"{DEF}surname",
+ "uid": f"{DEF}uid",
+ },
+}
diff --git a/roles/sram_midproxy/files/plugins/backends/openid_backend.yaml b/roles/sram_midproxy/files/plugins/backends/openid_backend.yaml
new file mode 100644
index 000000000..cb78fcccd
--- /dev/null
+++ b/roles/sram_midproxy/files/plugins/backends/openid_backend.yaml
@@ -0,0 +1,14 @@
+module: satosa.backends.openid_connect.OpenIDConnectBackend
+name: myaccessid
+config:
+ provider_metadata:
+ issuer: !ENV SATOSA_ISSUER
+ client:
+ verify_ssl: yes
+ auth_req_params:
+ response_type: code
+ scope: [openid, profile, email, schac_home_organization]
+ client_metadata:
+ client_id: !ENV SATOSA_CLIENT_ID
+ client_secret: !ENV SATOSA_CLIENT_SECRET
+ redirect_uris: [/]
diff --git a/roles/sram_midproxy/files/plugins/backends/saml2_backend.yaml b/roles/sram_midproxy/files/plugins/backends/saml2_backend.yaml
new file mode 100644
index 000000000..ed97d539c
--- /dev/null
+++ b/roles/sram_midproxy/files/plugins/backends/saml2_backend.yaml
@@ -0,0 +1 @@
+---
diff --git a/roles/sram_midproxy/files/plugins/frontends/ping_frontend.yaml b/roles/sram_midproxy/files/plugins/frontends/ping_frontend.yaml
new file mode 100644
index 000000000..c09b218b6
--- /dev/null
+++ b/roles/sram_midproxy/files/plugins/frontends/ping_frontend.yaml
@@ -0,0 +1,3 @@
+module: satosa.frontends.ping.PingFrontend
+name: ping
+config: null
diff --git a/roles/sram_midproxy/files/plugins/frontends/saml2_frontend.yaml b/roles/sram_midproxy/files/plugins/frontends/saml2_frontend.yaml
new file mode 100644
index 000000000..1f8029b66
--- /dev/null
+++ b/roles/sram_midproxy/files/plugins/frontends/saml2_frontend.yaml
@@ -0,0 +1,63 @@
+module: satosa.frontends.saml2.SAMLFrontend
+name: idp
+config:
+ #acr_mapping:
+ # "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified"
+ # "https://accounts.google.com": "http://eidas.europa.eu/LoA/low"
+
+ endpoints:
+ single_sign_on_service:
+ 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post
+ 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect
+
+ # If configured and not false or empty the common domain cookie _saml_idp will be set
+ # with or have appended the IdP used for authentication. The default is not to set the
+ # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie
+ # will be set to the value for the 'domain' key. If no 'domain' is set then the domain
+ # from the BASE defined for the proxy will be used.
+ #common_domain_cookie:
+ # domain: .example.com
+
+ entityid_endpoint: true
+ enable_metadata_reload: no
+
+ idp_config:
+ organization: {display_name: SURF, name: SURF, url: 'https://www.surf.nl/'}
+ contact_person:
+ - {contact_type: technical, email_address: 'mailto:sram-beheer@surf.nl', given_name: Technical}
+ - {contact_type: support, email_address: 'mailto:sram-beheer@surf.nl', given_name: Support}
+ - {contact_type: other, email_address: 'mailto:sram-beheer@surf.nl', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}}
+ key_file: frontend.key
+ cert_file: frontend.crt
+ metadata:
+ # remote:
+ # - url: https://engine.test2.surfconext.nl/authentication/sp/metadata
+ # cert: null
+ local: [!ENV SATOSA_SP_METADATA]
+ entityid: //proxy.xml
+ accepted_time_diff: 60
+ attribute_map_dir: plugins/attribute-maps
+ service:
+ idp:
+ endpoints:
+ single_sign_on_service: []
+ name: Proxy IdP
+ ui_info:
+ display_name:
+ - lang: en
+ text: "MyAccessID proxy"
+ description:
+ - lang: en
+ text: "MyAccessID proxy"
+ keywords:
+ - lang: en
+ text: ["MyAccessID", "proxy"]
+ name_id_format: ['urn:oasis:names:tc:SAML:2.0:nameid-format:persistent', 'urn:oasis:names:tc:SAML:2.0:nameid-format:transient']
+ policy:
+ default:
+ fail_on_missing_requested: false
+ name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:basic
+ attribute_restrictions: null
+ lifetime: {minutes: 15}
+ encrypt_assertion: false
+ encrypted_advice_attributes: false
diff --git a/roles/sram_midproxy/files/plugins/microservices/generate_attributes.yaml b/roles/sram_midproxy/files/plugins/microservices/generate_attributes.yaml
new file mode 100644
index 000000000..86ac4e1f1
--- /dev/null
+++ b/roles/sram_midproxy/files/plugins/microservices/generate_attributes.yaml
@@ -0,0 +1,8 @@
+module: satosa.micro_services.attribute_generation.AddSyntheticAttributes
+name: AddSyntheticAttributes
+config:
+ synthetic_attributes:
+ default:
+ default:
+ schachomeorganization: >-
+ {{ uid.scope }}
diff --git a/roles/sram_midproxy/files/plugins/microservices/regex_attributes.yaml b/roles/sram_midproxy/files/plugins/microservices/regex_attributes.yaml
new file mode 100644
index 000000000..e820311e7
--- /dev/null
+++ b/roles/sram_midproxy/files/plugins/microservices/regex_attributes.yaml
@@ -0,0 +1,10 @@
+module: satosa.micro_services.attribute_processor.AttributeProcessor
+name: RegexAttributeProcessor
+config:
+ process:
+ - attribute: uid
+ processors:
+ - name: RegexSubProcessor
+ module: satosa.micro_services.processors.regex_sub_processor
+ regex_sub_match_pattern: ^(.+)@.+$
+ regex_sub_replace_pattern: \1
diff --git a/roles/sram_midproxy/files/proxy_conf.yaml b/roles/sram_midproxy/files/proxy_conf.yaml
new file mode 100644
index 000000000..136268e61
--- /dev/null
+++ b/roles/sram_midproxy/files/proxy_conf.yaml
@@ -0,0 +1,74 @@
+# BASE: https://example.com
+BASE: !ENV SATOSA_BASE
+
+COOKIE_STATE_NAME: "SATOSA_STATE"
+CONTEXT_STATE_DELETE: yes
+#STATE_ENCRYPTION_KEY: "asdASD123"
+
+cookies_samesite_compat:
+ - ["SATOSA_STATE", "SATOSA_STATE_LEGACY"]
+
+INTERNAL_ATTRIBUTES: "internal_attributes.yaml"
+
+BACKEND_MODULES:
+ - "plugins/backends/openid_backend.yaml"
+
+FRONTEND_MODULES:
+ - "plugins/frontends/saml2_frontend.yaml"
+ - "plugins/frontends/ping_frontend.yaml"
+
+MICRO_SERVICES:
+ - "plugins/microservices/generate_attributes.yaml"
+ - "plugins/microservices/regex_attributes.yaml"
+
+LOGGING:
+ version: 1
+ formatters:
+ simple:
+ format: "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s"
+ handlers:
+ stdout:
+ class: logging.StreamHandler
+ stream: "ext://sys.stdout"
+ level: INFO
+ formatter: simple
+ syslog:
+ class: logging.handlers.SysLogHandler
+ address: "/dev/log"
+ level: INFO
+ formatter: simple
+ debug_file:
+ class: logging.FileHandler
+ filename: satosa-debug.log
+ encoding: utf8
+ level: INFO
+ formatter: simple
+ error_file:
+ class: logging.FileHandler
+ filename: satosa-error.log
+ encoding: utf8
+ level: ERROR
+ formatter: simple
+ info_file:
+ class: logging.handlers.RotatingFileHandler
+ filename: satosa-info.log
+ encoding: utf8
+ maxBytes: 10485760 # 10MB
+ backupCount: 20
+ level: INFO
+ formatter: simple
+ loggers:
+ satosa:
+ level: INFO
+ saml2:
+ level: INFO
+ oidcendpoint:
+ level: INFO
+ pyop:
+ level: INFO
+ oic:
+ level: INFO
+ root:
+ level: INFO
+ handlers:
+ - stdout
diff --git a/roles/sram_midproxy/tasks/main.yml b/roles/sram_midproxy/tasks/main.yml
new file mode 100644
index 000000000..d90d93e01
--- /dev/null
+++ b/roles/sram_midproxy/tasks/main.yml
@@ -0,0 +1,59 @@
+---
+- name: Create directory to keep configfile
+ ansible.builtin.file:
+ dest: "/opt/sram/midproxy"
+ state: directory
+ owner: 1000
+ group: 1000
+ mode: "0770"
+
+- name: Copy EB SP metadata
+ ansible.builtin.copy:
+ src: "{{ inventory_dir }}/files/midproxy/{{ sram_midproxy_sp_metadata }}"
+ dest: "/opt/sram/midproxy/{{ sram_midproxy_sp_metadata }}"
+ owner: 1000
+ group: 1000
+ mode: "0740"
+
+- name: Copy SATOSA conf files
+ ansible.builtin.copy:
+ src: "{{ item }}"
+ dest: "/opt/sram/midproxy/{{ item }}"
+ owner: 1000
+ group: 1000
+ with_items:
+ - internal_attributes.yaml
+ - proxy_conf.yaml
+ - plugins/
+
+- name: Create the SATOSA container
+ community.docker.docker_container:
+ name: sram-midproxy
+ image: "{{ sram_midproxy_satosa_image }}"
+ pull: true
+ restart_policy: "always"
+ state: started
+ restart: true
+ networks:
+ - name: "loadbalancer"
+ env:
+ SATOSA_BASE: 'https://midproxy.{{ openconextaccess_base_domain }}'
+ SATOSA_STATE_ENCRYPTION_KEY: '{{ sram_midproxy_state_encryption_key }}'
+ SATOSA_SP_METADATA: '{{ sram_midproxy_sp_metadata }}'
+ SATOSA_ISSUER: '{{ sram_midproxy_issuer }}'
+ SATOSA_CLIENT_ID: '{{ sram_midproxy_client_id }}'
+ SATOSA_CLIENT_SECRET: '{{ sram_midproxy_client_secret }}'
+ volumes:
+ - /opt/sram/midproxy:/etc/satosa
+ labels:
+ traefik.http.routers.midproxy.rule: "Host(`midproxy.{{ openconextaccess_base_domain }}`)"
+ traefik.http.routers.midproxy.tls: "true"
+ traefik.enable: "true"
+    # curl is not available in the minimized satosa image
+ # so this healthcheck won't work
+ # healthcheck:
+ # test: ["CMD", "curl", "--fail" , "http://localhost" ]
+ # interval: 10s
+ # timeout: 10s
+ # retries: 3
+ # start_period: 10s
diff --git a/roles/sram_plsc/defaults/main.yml b/roles/sram_plsc/defaults/main.yml
new file mode 100644
index 000000000..6dd2780a1
--- /dev/null
+++ b/roles/sram_plsc/defaults/main.yml
@@ -0,0 +1,12 @@
+---
+sram_plsc_image: "ghcr.io/surfscz/sram-plsc:main"
+sram_plsc_conf_dir: "{{current_release_appdir}}/sram/plsc"
+sram_plsc_ansible_nolog: false
+sram_plsc_ldap_uri: "ldap://ldap:389/"
+sram_plsc_ldap_basedn: "dc=services,dc=vnet"
+sram_plsc_ldap_binddn: "cn=admin,dc=vnet"
+sram_plsc_ldap_password: "secret"
+sram_plsc_sbs_host: "http://sbs-server:8080"
+sram_plsc_sbs_user: "sysread"
+sram_plsc_sbs_password: "secret"
+sram_plsc_retry: 3
diff --git a/roles/sram_plsc/handlers/main.yml b/roles/sram_plsc/handlers/main.yml
new file mode 100644
index 000000000..a0dee373a
--- /dev/null
+++ b/roles/sram_plsc/handlers/main.yml
@@ -0,0 +1,6 @@
+---
+- name: Restart the plsc container
+ community.docker.docker_container:
+ name: sram-plsc
+ restart: true
+ state: started
diff --git a/roles/sram_plsc/tasks/main.yml b/roles/sram_plsc/tasks/main.yml
new file mode 100644
index 000000000..2111b8646
--- /dev/null
+++ b/roles/sram_plsc/tasks/main.yml
@@ -0,0 +1,29 @@
+---
+- name: Make sure clients sync directory exists
+ file:
+ path: "{{ sram_plsc_conf_dir }}"
+ state: directory
+ mode: "0755"
+
+- name: "Create plsc.yml source if it doesn't exist"
+ template:
+ src: "plsc.yml.j2"
+ dest: "{{ sram_plsc_conf_dir }}/plsc.yml"
+ mode: "0640"
+ no_log: "{{ sram_plsc_ansible_nolog }}"
+ notify: "Restart the plsc container"
+
+- name: Create the plsc container
+ community.docker.docker_container:
+ name: "sram-plsc"
+ image: "{{ sram_plsc_image }}"
+ restart_policy: "always"
+ state: started
+ pull: true
+ mounts:
+ - type: bind
+ source: "{{ sram_plsc_conf_dir }}/plsc.yml"
+ target: "/opt/plsc/plsc.yml"
+ networks:
+ # TODO: Should this not be parametrized?
+ - name: "loadbalancer"
diff --git a/roles/sram_plsc/templates/plsc.yml.j2 b/roles/sram_plsc/templates/plsc.yml.j2
new file mode 100644
index 000000000..069f14d8b
--- /dev/null
+++ b/roles/sram_plsc/templates/plsc.yml.j2
@@ -0,0 +1,25 @@
+---
+ldap:
+ src:
+ uri: "{{ sram_plsc_ldap_uri }}"
+ basedn: "{{ sram_plsc_ldap_basedn }}"
+ binddn: "{{ sram_plsc_ldap_binddn }}"
+ passwd: "{{ sram_plsc_ldap_password }}"
+ sizelimit: 500
+ dst:
+ uri: "{{ sram_plsc_ldap_uri }}"
+ basedn: "{{ sram_plsc_ldap_basedn }}"
+ binddn: "{{ sram_plsc_ldap_binddn }}"
+ passwd: "{{ sram_plsc_ldap_password }}"
+ sizelimit: 500
+sbs:
+ src:
+ host: "{{ sram_plsc_sbs_host }}"
+ user: "{{ sram_plsc_sbs_user }}"
+ passwd: "{{ sram_plsc_sbs_password }}"
+ verify_ssl: {{ false if env=='vm' else true }}
+ timeout: 60
+ retry: {{ sram_plsc_retry }}
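+# "{CRYPT}!" presumably serves as the default userPassword for provisioned
+# entries; "!" is not a valid crypt hash, so such accounts start out locked.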
+pwd: "{CRYPT}!"
+uid: 1000
+gid: 1000
diff --git a/roles/sram_redis/defaults/main.yml b/roles/sram_redis/defaults/main.yml
new file mode 100644
index 000000000..28022a1af
--- /dev/null
+++ b/roles/sram_redis/defaults/main.yml
@@ -0,0 +1,9 @@
+---
+sram_redis_image: "docker.io/library/redis:7"
+sram_redis_conf_dir: "{{ current_release_appdir }}/sram/redis"
+sram_redis_data_dir: "{{ current_release_appdir }}/sram/redis/data"
+sram_redis_user: redis
+sram_redis_group: redis
+sram_redis_redis_user: default
+sram_redis_redis_password: changethispassword
+sram_redis_max_memory: 100mb
diff --git a/roles/sram_redis/handlers/main.yml b/roles/sram_redis/handlers/main.yml
new file mode 100644
index 000000000..b08f0b62b
--- /dev/null
+++ b/roles/sram_redis/handlers/main.yml
@@ -0,0 +1,6 @@
+---
+- name: Restart redis container
+ community.docker.docker_container:
+ name: sram-redis
+ state: started
+ restart: true
diff --git a/roles/sram_redis/tasks/main.yml b/roles/sram_redis/tasks/main.yml
new file mode 100644
index 000000000..b6fb8a694
--- /dev/null
+++ b/roles/sram_redis/tasks/main.yml
@@ -0,0 +1,62 @@
+---
+- name: "Create redis group"
+ group:
+ name: "{{ sram_redis_group }}"
+ state: "present"
+ register: "result"
+
+- name: "Save redis group gid"
+ set_fact:
+ redis_group_gid: "{{ result.gid }}"
+
+- name: "Create redis user"
+ user:
+ name: "{{ sram_redis_user }}"
+ group: "{{ sram_redis_group }}"
+ comment: "User to run SRAM Redis service"
+ shell: "/bin/false"
+ password: "!"
+ home: "{{ sram_redis_conf_dir }}"
+ create_home: false
+ state: "present"
+ register: "result"
+
+- name: "Save redis user uid"
+ set_fact:
+ redis_user_uid: "{{ result.uid }}"
+
+- name: "Create directories"
+ file:
+ path: "{{item.path}}"
+ state: "directory"
+ owner: "{{ sram_redis_user }}"
+ group: "{{ sram_redis_group }}"
+ mode: "{{item.mode}}"
+ with_items:
+ - { path: "{{sram_redis_conf_dir}}", mode: "0755" }
+ - { path: "{{sram_redis_data_dir}}", mode: "0755" }
+
+- name: "Create redis config"
+ template:
+ src: "redis.conf.j2"
+ dest: "{{ sram_redis_conf_dir }}/redis.conf"
+ owner: "{{ sram_redis_user }}"
+ group: "{{ sram_redis_group }}"
+ mode: "0644"
+ notify: "Restart redis container"
+
+- name: "Create redis container"
+ community.docker.docker_container:
+ name: "sram-redis"
+ image: "{{ sram_redis_image }}"
+ restart_policy: "always"
+ state: "started"
+ user: "{{ redis_user_uid }}:{{ redis_group_gid }}"
+ command: |
+ redis-server /usr/local/etc/redis/redis.conf
+ volumes:
+ - "{{ sram_redis_conf_dir }}:/usr/local/etc/redis"
+ - "{{ sram_redis_data_dir }}:/data"
+ networks:
+ # TODO: Should this not be parametrized?
+ - name: loadbalancer
diff --git a/roles/sram_redis/templates/redis.conf.j2 b/roles/sram_redis/templates/redis.conf.j2
new file mode 100644
index 000000000..14d3ef177
--- /dev/null
+++ b/roles/sram_redis/templates/redis.conf.j2
@@ -0,0 +1,3 @@
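+# Redis ACL: enable the user, grant all commands (+@all), all keys (~*), all
+# pub/sub channels (&*), and set the password (>...).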
+user {{ sram_redis_redis_user }} on +@all ~* &* >{{ sram_redis_redis_password }}
+maxmemory {{ sram_redis_max_memory }}
+maxmemory-policy allkeys-lru
diff --git a/roles/sram_sbs/defaults/main.yml b/roles/sram_sbs/defaults/main.yml
new file mode 100644
index 000000000..64bc443e4
--- /dev/null
+++ b/roles/sram_sbs/defaults/main.yml
@@ -0,0 +1,164 @@
+---
+sram_sbs_base_domain: "test2.sram.surf.nl"
+sram_sbs_ansible_nolog: true
+sram_sbs_base_url: "https://{{ sram_sbs_base_domain }}"
+sram_sbs_server_image: "ghcr.io/surfscz/sram-sbs-server:main"
+sram_sbs_client_image: "ghcr.io/surfscz/sram-sbs-client:main"
+
+sram_sbs_openidc_timeout: 86400
+sram_sbs_sram_conf_dir: "{{ current_release_appdir }}/sram"
+
+sram_sbs_work_dir: "{{ sram_sbs_sram_conf_dir }}/sbs"
+sram_sbs_git_dir: "{{ sram_sbs_work_dir }}/sbs"
+sram_sbs_env_dir: "{{ sram_sbs_work_dir }}/sbs-env"
+sram_sbs_conf_dir: "{{ sram_sbs_work_dir }}/config"
+sram_sbs_log_dir: "{{ sram_sbs_work_dir }}/log"
+sram_sbs_apache_conf: "{{ sram_sbs_work_dir }}/sbs.conf"
+sram_sbs_nginx_conf: "{{ sram_sbs_work_dir }}/nginx.conf"
+
+sram_sbs_db_name: "sbs"
+sram_sbs_db_user: "sbsrw"
+sram_sbs_migration_user: "sbsmigrate"
+
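+# The %s placeholders below are filled with a (user, password) pair: once for
+# the regular SBS connection and once for the migration user.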
+sram_sbs_db_connection: "\
+ mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sram_sbs_db_name }}\
+ ?ssl=true&charset=utf8mb4"
+sram_sbs_db_connection_sbs: "{{ sram_sbs_db_connection | format(sram_sbs_db_user, mysql_passwords.sbs) }}"
+sram_sbs_db_connection_migration: "\
+ {{ sram_sbs_db_connection | format(sram_sbs_migration_user, mysql_passwords.sbsmigrate) }}"
+
+sram_sbs_db_secret: secret
+sram_sbs_encryption_key: encryption_key
+
+sram_sbs_redis_host: sram-redis
+sram_sbs_redis_port: 6379
+sram_sbs_redis_ssl: false
+sram_sbs_redis_user: default
+
+sram_sbs_mail_host: "host.docker.internal"
+sram_sbs_mail_port: 25
+
+sram_sbs_user: "sbs"
+sram_sbs_group: "sbs"
+
+sram_sbs_session_lifetime: 1440
+sram_sbs_secret_key_suffix: ""
+
+sram_sbs_oidc_crypto_password: "CHANGEME"
+sram_sbs_uid_attribute: "sub"
+
+sram_sbs_disclaimer_color: "#a29c13"
+sram_sbs_disclaimer_label: wsgi
+
+sram_sbs_urn_namespace: "urn:example:sbs"
+sram_sbs_eppn_scope: "sbs.example.edu"
+sram_sbs_restricted_co_default_org: "example.org"
+
+sram_sbs_mail_sender_name: "SURF"
+sram_sbs_mail_sender_email: "no-reply@localhost"
+sram_sbs_exceptions_mail: "root@localhost"
+
+sram_sbs_support_email: "sram-support@localhost"
+sram_sbs_admin_email: "sram-beheer@localhost"
+sram_sbs_ticket_email: "sram-support@surf.nl"
+sram_sbs_eduteams_email: "eduteams@localhost"
+
+sram_sbs_suppress_mails: False
+
+sram_sbs_wiki_link: "https://www.example.org/wiki"
+
+sram_sbs_cron_hour_of_day: 4
+sram_sbs_seed_allowed: True
+sram_sbs_api_keys_enabled: True
+sram_sbs_feedback_enabled: True
+sram_sbs_audit_trail_notifications_enabled: True
+sram_sbs_send_exceptions: False
+sram_sbs_send_js_exceptions: False
+sram_sbs_second_factor_authentication_required: True
+sram_sbs_totp_token_name: "SRAM-example"
+sram_sbs_notifications_enabled: True
+sram_sbs_invitation_reminders_enabled: True
+sram_sbs_invitation_expirations_enabled: True
+sram_sbs_open_requests_enabled: True
+sram_sbs_scim_sweep: False
+sram_sbs_impersonation_allowed: True
+sram_sbs_admin_platform_backdoor_totp: True
+sram_sbs_past_dates_allowed: True
+sram_sbs_mock_scim_enabled: True
+sram_sbs_log_to_stdout: True
+
+sram_sbs_delete_orphaned: True
+sram_sbs_suspension_inactive_days: 365
+sram_sbs_suspension_reminder_days: 14
+sram_sbs_suspension_notify_admin: False
+
+sram_sbs_oidc_config_url: "http://localhost/.well-known/openid-configuration"
+sram_sbs_oidc_authz_endpoint: "http://localhost/OIDC/authorization"
+sram_sbs_oidc_token_endpoint: "http://localhost/OIDC/token"
+sram_sbs_oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo"
+sram_sbs_oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json"
+sram_sbs_oidc_redirect_uri: "https://{{sram_sbs_base_domain}}/api/users/resume-session"
+sram_sbs_oidc_jwt_audience: "https://localhost"
+sram_sbs_continue_eduteams_redirect_uri: "https://localhost/continue"
+sram_sbs_oidc_verify_peer: False
+sram_sbs_oidc_scopes:
+ - openid
+
+sram_sbs_mfa_idp_allowed: false
+sram_sbs_eduteams_continue_endpoint: "https://localhost/continue"
+sram_sbs_eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)"
+
+sram_sbs_manage_base_enabled: False
+sram_sbs_manage_base_url: "https://manage.{{base_domain}}"
+sram_sbs_manage_sram_rp_entity_id: "sbs.{{sram_sbs_base_domain}}"
+sram_sbs_manage_verify_peer: False
+
+sram_sbs_idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml"
+# backup_dir: "{{backup_base}}/sbs"
+
+sram_sbs_swagger_enabled: true
+
+sram_sbs_ssid_identity_providers: []
+sram_sbs_surf_secure_id:
+ environment: "unknown.example.org"
+ sp_entity_id: "https://sbs.{{sram_sbs_base_domain}}"
+ acs_url: "https://{{sram_sbs_base_domain}}/api/users/acs"
+ sa_gw_environment: "sa-gw.unknown.example.org"
+ sa_idp_certificate: |
+ -----BEGIN CERTIFICATE-----
+ 12345
+ -----END CERTIFICATE-----
+ priv: |
+ -----BEGIN RSA PRIVATE KEY-----
+ abcde
+ -----END RSA PRIVATE KEY-----
+ pub: |
+ -----BEGIN CERTIFICATE-----
+ 12345
+ -----END CERTIFICATE-----
+
+sram_sbs_ssid_authncontext: "\
+ http://{{ sram_sbs_surf_secure_id.environment }}/assurance/sfo-level2"
+sram_sbs_ssid_entityid: "\
+ https://{{ sram_sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/metadata"
+sram_sbs_ssid_sso_endpoint: "\
+ https://{{ sram_sbs_surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on"
+
+sram_sbs_mfa_sso_minutes: 10
+sram_sbs_mfa_fallback_enabled: true
+
+sram_sbs_ldap_url: "ldap://ldap.example.com/dc=example,dc=com"
+sram_sbs_ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl"
+
+sram_sbs_csp_style_hashes:
+ - 'sha256-0+ANsgYUJdh56RK8gGvTF2vnriYqvFHfWqtA8xXa+bA='
+ - 'sha256-3SnfHQolDHbZMbDAPmhrZf1keHiXfj/KJyh2phhFAAY='
+ - 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU='
+ - 'sha256-Ng6y+QCkPChG4Q49SIfXB5ToIDcDhITtQNFkDBPpCTw='
+ - 'sha256-orBPipbqpMvkNi+Z+m6qEn0XS6ymmAQE6+FwCNs1FbQ='
+ - 'sha256-vFt3L2qLqpJmRpcXGbYr2UVSmgSp9VCUzz2lnqWIATw='
+ - 'sha256-SU3XCwbQ/8qgzoGOWCYdkwIr3xRrl5rsvdFcpw8NSiE=' # on /new-service-request
+ - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request
+
+sram_sbs_engine_block_api_token: secret
diff --git a/roles/sram_sbs/files/yarn.gpg b/roles/sram_sbs/files/yarn.gpg
new file mode 100644
index 000000000..3e9e7d155
--- /dev/null
+++ b/roles/sram_sbs/files/yarn.gpg
@@ -0,0 +1,243 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mQINBFf0j5oBEADS6cItqCbf4lOLICohq2aHqM5I1jsz3DC4ddIU5ONbKXP1t0wk
+FEUPRzd6m80cTo7Q02Bw7enh4J6HvM5XVBSSGKENP6XAsiOZnY9nkXlcQAPFRnCn
+CjEfoOPZ0cBKjn2IpIXXcC+7xh4p1yruBpOsCbT6BuzA+Nm9j4cpRjdRdWSSmdID
+TyMZClmYm/NIfCPduYvNZxZXhW3QYeieP7HIonhZSHVu/jauEUyHLVsieUIvAOJI
+cXYpwLlrw0yy4flHe1ORJzuA7EZ4eOWCuKf1PgowEnVSS7Qp7lksCuljtfXgWelB
+XGJlAMD90mMbsNpQPF8ywQ2wjECM8Q6BGUcQuGMDBtFihobb+ufJxpUOm4uDt0y4
+zaw+MVSi+a56+zvY0VmMGVyJstldPAcUlFYBDsfC9+zpzyrAqRY+qFWOT2tj29R5
+ZNYvUUjEmA/kXPNIwmEr4oj7PVjSTUSpwoKamFFE6Bbha1bzIHpdPIRYc6cEulp3
+dTOWfp+Cniiblp9gwz3HeXOWu7npTTvJBnnyRSVtQgRnZrrtRt3oLZgmj2fpZFCE
+g8VcnQOb0iFcIM7VlWL0QR4SOz36/GFyezZkGsMlJwIGjXkqGhcEHYVDpg0nMoq1
+qUvizxv4nKLanZ5jKrV2J8V09PbL+BERIi6QSeXhXQIui/HfV5wHXC6DywARAQAB
+tBxZYXJuIFBhY2thZ2luZyA8eWFybkBkYW4uY3g+iQI5BBMBCAAjBQJX9I+aAhsD
+BwsJCAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQFkawG4blAxB52Q/9FcyGIEK2
+QamDhookuoUGGYjIeN+huQPWmc6mLPEKS2Vahk5jnJKVtAFiaqINiUtt/1jZuhF2
+bVGITvZK79kM6lg42xQcnhypzQPgkN7GQ/ApYqeKqCh1wV43KzT/CsJ9TrI0SC34
+qYHTEXXUprAuwQitgAJNi5QMdMtauCmpK+Xtl/72aetvL8jMFElOobeGwKgfLo9+
+We2EkKhSwyiy3W5TYI1UlV+evyyT+N0pmhRUSH6sJpzDnVYYPbCWa2b+0D/PHjXi
+edKcely/NvqyVGoWZ+j41wkp5Q0wK2ybURS1ajfaKt0OcMhRf9XCfeXAQvU98mEk
+FlfPaq0CXsjOy8eJXDeoc1dwxjDi2YbfHel0CafjrNp6qIFG9v3JxPUU19hG9lxD
+Iv7VXftvMpjJCo/J4Qk+MOv7KsabgXg1iZHmllyyH3TY4AA4VA+mlceiiOHdXbKk
+Q3BfS1jdXPV+2kBfqM4oWANArlrFTqtop8PPsDNqh/6SrVsthr7WTvC5q5h/Lmxy
+Krm4Laf7JJMvdisfAsBbGZcR0Xv/Vw9cf2OIEzeOWbj5xul0kHT1vHhVNrBNanfe
+t79RTDGESPbqz+bTS7olHWctl6TlwxA0/qKlI/PzXfOg63Nqy15woq9buca+uTcS
+ccYO5au+g4Z70IEeQHsq5SC56qDR5/FvYyu5Ag0EV/SPmgEQANDSEMBKp6ER86y+
+udfKdSLP9gOv6hPsAgCHhcvBsks+ixeX9U9KkK7vj/1q6wodKf9oEbbdykHgIIB1
+lzY1l7u7/biAtQhTjdEZPh/dt3vjogrJblUEC0rt+fZe325ociocS4Bt9I75Ttkd
+nWgkE4uOBJsSllpUbqfLBfYR58zz2Rz1pkBqRTkmJFetVNYErYi2tWbeJ59GjUN7
+w1K3GhxqbMbgx4dF5+rjGs+KI9k6jkGeeQHqhDk+FU70oLVLuH2Dmi9IFjklKmGa
+3BU7VpNxvDwdoV7ttRYEBcBnPOmL24Sn4Xhe2MDCqgJwwyohd9rk8neV7GtavVea
+Tv6bnzi1iJRgDld51HFWG8X+y55i5cYWaiXHdHOAG1+t35QUrczm9+sgkiKSk1II
+TlEFsfwRl16NTCMGzjP5kGCm/W+yyyvBMw7CkENQcd23fMsdaQ/2UNYJau2PoRH/
+m+IoRehIcmE0npKeLVTDeZNCzpmfY18T542ibK49kdjZiK6G/VyBhIbWEFVu5Ll9
++8GbcO9ucYaaeWkFS8Hg0FZafMk59VxKiICKLZ5he/C4f0UssXdyRYU6C5BH8UTC
+QLg0z8mSSL+Wb2iFVPrn39Do7Zm8ry6LBCmfCf3pI99Q/1VaLDauorooJV3rQ5kC
+JEiAeqQtLOvyoXIex1VbzlRUXmElABEBAAGJAh8EGAEIAAkFAlf0j5oCGwwACgkQ
+FkawG4blAxAUUQ//afD0KLHjClHsA/dFiW+5qVzI8kPMHwO1QcUjeXrB6I3SluOT
+rLSPhOsoS72yAaU9hFuq8g9ecmFrl3Skp/U4DHZXioEmozyZRp7eVsaHTewlfaOb
+6g7+v52ktYdomcp3BM5v/pPZCnB5rLrH2KaUWbpY6V6tqtCHbF7zftDqcBENJDXf
+hiCqS19J08GZFjDEqGDrEj3YEmEXZMN7PcXEISPIz6NYI6rw4yVH8AXfQW6vpPzm
+ycHwI0QsVW2NQdcZ6zZt+phm6shNUbN2iDdg3BJICmIvQf8qhO3bOh0Bwc11FLHu
+MKuGVxnWN82HyIsuUB7WDLBHEOtg61Zf1nAF1PQK52YuQz3EWI4LL9OqVqfSTY1J
+jqIfj+u1PY2UHrxZfxlz1M8pXb1grozjKQ5aNqBKRrcMZNx71itR5rv18qGjGR2i
+Sciu/xah7zAroEQrx72IjYt03tbk/007CvUlUqFIFB8kY1bbfX8JAA+TxelUniUR
+2CY8eom5HnaPpKE3kGXZ0jWkudbWb7uuWcW1FE/bO+VtexpBL3SoXmwbVMGnJIEi
+Uvy8m6ez0kzLXzJ/4K4b8bDO4NjFX2ocKdzLA89Z95KcZUxEG0O7kaDCu0x3BEge
+uArJLecD5je2/2HXAdvkOAOUi6Gc/LiJrtInc0vUFsdqWCUK5Ao/MKvdMFW5Ag0E
+V/SP2AEQALRcYv/hiv1n3VYuJbFnEfMkGwkdBYLGo3hiHKY8xrsFVePl9SkL8aqd
+C310KUFNI42gGY/lz54RUHOqfMszTdafFrmwU18ECWGo4oG9qEutIKG7fkxcvk2M
+tgsOMZFJqVDS1a9I4QTIkv1ellLBhVub9S7vhe/0jDjXs9IyOBpYQrpCXAm6SypC
+fpqkDJ4qt/yFheATcm3s8ZVTsk2hiz2jnbqfvpte3hr3XArDjZXr3mGAp3YY9JFT
+zVBOhyhT/92e6tURz8a/+IrMJzhSyIDel9L+2sHHo9E+fA3/h3lg2mo6EZmRTuvE
+v9GXf5xeP5lSCDwS6YBXevJ8OSPlocC8Qm8ziww6dy/23XTxPg4YTkdf42i7VOpS
+pa7EvBGne8YrmUzfbrxyAArK05lo56ZWb9ROgTnqM62wfvrCbEqSHidN3WQQEhMH
+N7vtXeDPhAd8vaDhYBk4A/yWXIwgIbMczYf7Pl7oY3bXlQHb0KW/y7N3OZCr5mPW
+94VLLH/v+T5R4DXaqTWeWtDGXLih7uXrG9vdlyrULEW+FDSpexKFUQe83a+Vkp6x
+GX7FdMC9tNKYnPeRYqPF9UQEJg+MSbfkHSAJgky+bbacz+eqacLXMNCEk2LXFV1B
+66u2EvSkGZiH7+6BNOar84I3qJrU7LBD7TmKBDHtnRr9JXrAxee3ABEBAAGJBEQE
+GAEIAA8FAlf0j9gCGwIFCQHhM4ACKQkQFkawG4blAxDBXSAEGQEIAAYFAlf0j9gA
+CgkQ0QH3iZ1B88PaoA//VuGdF5sjxRIOAOYqXypOD9/Kd7lYyxmtCwnvKdM7f8O5
+iD8oR2Pk1RhYHjpkfMRVjMkaLfxIRXfGQsWfKN2Zsa4zmTuNy7H6X26XW3rkFWpm
+dECz1siGRvcpL6NvwLPIPQe7tST72q03u1H7bcyLGk0sTppgMoBND7yuaBTBZkAO
+WizR+13x7FV+Y2j430Ft/DOe/NTc9dAlp6WmF5baOZClULfFzCTf9OcS2+bo68oP
+gwWwnciJHSSLm6WRjsgoDxo5f3xBJs0ELKCr4jMwpSOTYqbDgEYOQTmHKkX8ZeQA
+7mokc9guA0WK+DiGZis85lU95mneyJ2RuYcz6/VDwvT84ooe1swVkC2palDqBMwg
+jZSTzbcUVqZRRnSDCe9jtpvF48WK4ZRiqtGO6Avzg1ZwMmWSr0zHQrLrUMTq/62W
+KxLyj2oPxgptRg589hIwXVxJRWQjFijvK/xSjRMLgg73aNTq6Ojh98iyKAQ3HfzW
+6iXBLLuGfvxflFednUSdWorr38MspcFvjFBOly+NDSjPHamNQ2h19iHLrYT7t4ve
+nU9PvC+ORvXGxTN8mQR9btSdienQ8bBuU/mg/c417w6WbY7tkkqHqUuQC9LoaVdC
+QFeE/SKGNe+wWN/EKi0QhXR9+UgWA41Gddi83Bk5deuTwbUeYkMDeUlOq3yyemcG
+VxAA0PSktXnJgUj63+cdXu7ustVqzMjVJySCKSBtwJOge5aayonCNxz7KwoPO34m
+Gdr9P4iJfc9kjawNV79aQ5aUH9uU2qFlbZOdO8pHOTjy4E+J0wbJb3VtzCJc1Eaa
+83kZLFtJ45Fv2WQQ2Nv3Fo+yqAtkOkaBZv9Yq0UTaDkSYE9MMzHDVFx11TT21NZD
+xu2QiIiqBcZfqJtIFHN5jONjwPG08xLAQKfUNROzclZ1h4XYUT+TWouopmpNeay5
+JSNcp5LsC2Rn0jSFuZGPJ1rBwB9vSFVA/GvOj8qEdfhjN3XbqPLVdOeChKuhlK0/
+sOLZZG91SHmT5SjP2zM6QKKSwNgHX4xZt4uugSZiY13+XqnrOGO9zRH8uumhsQmI
+eFEdT27fsXTDTkWPI2zlHTltQjH1iebqqM9gfa2KUt671WyoL1yLhWrgePvDE+He
+r002OslvvW6aAIIBki3FntPDqdIH89EEB4UEGqiA1eIZ6hGaQfinC7/IOkkm/mEa
+qdeoI6NRS521/yf7i34NNj3IaL+rZQFbVWdbTEzAPtAs+bMJOHQXSGZeUUFrEQ/J
+ael6aNg7mlr7cacmDwZWYLoCfY4w9GW6JHi6i63np8EA34CXecfor7cAX4XfaokB
+XjyEkrnfV6OWYS7f01JJOcqYANhndxz1Ph8bxoRPelf5q+W5Ag0EWBU7dwEQAL1p
+wH4prFMFMNV7MJPAwEug0Mxf3OsTBtCBnBYNvgFB+SFwKQLyDXUujuGQudjqQPCz
+/09MOJPwGCOi0uA0BQScJ5JAfOq33qXi1iXCj9akeCfZXCOWtG3Izc3ofS6uee7K
+fWUF1hNyA3PUwpRtM2pll+sQEO3y/EN7xYGUOM0mlCawrYGtxSNMlWBlMk/y5HK9
+upz+iHwUaEJ4PjV+P4YmDq0PnPvXE4qhTIvxx0kO5oZF0tAJCoTg1HE7o99/xq9Z
+rejDR1JJj6btNw1YFQsRDLxRZv4rL9He10lmLhiQE8QN7zOWzyJbRP++tWY2d2zE
+yFzvsOsGPbBqLDNkbb9d8Bfvp+udG13sHAEtRzI2UWe5SEdVHobAgu5l+m10WlsN
+TG/L0gJe1eD1bwceWlnSrbqw+y+pam9YKWqdu18ETN6CeAbNo4w7honRkcRdZyoG
+p9zZf3o1bGBBMla6RbLuJBoRDOy2Ql7B+Z87N0td6KlHI6X8fNbatbtsXR7qLUBP
+5oRb6nXX4+DnTMDbvFpE2zxnkg+C354Tw5ysyHhM6abB2+zCXcZ3holeyxC+BUrO
+gGPyLH/s01mg2zmttwC1UbkaGkQ6SwCoQoFEVq9Dp96B6PgZxhEw0GMrKRw53LoX
+4rZif9Exv6qUFsGY8U9daEdDPF5UHYe7t/nPpfW3ABEBAAGJBD4EGAEIAAkFAlgV
+O3cCGwICKQkQFkawG4blAxDBXSAEGQEIAAYFAlgVO3cACgkQRsITDf0kl/VynQ/+
+P3Vksu4fno26vA7ml9bzV3mu/X/gzU1HqySqYv9Zwzk2o512Z4QkoT/8lRepIG7v
+AFRQzPn56Pz/vpMfiMDaf6thxs8wpv4y3m+rcQIQKO4sN3wwFPPbvM8wGoY6fGav
+IkLKKIXy1BpzRGltGduf0c29+ycvzccQpyuTrZk4Zl73kLyBS8fCt+MZWejMMolD
+uuLJiHbXci6+Pdi3ImabyStbNnJYmSyruNHcLHlgIbyugTiAcdTy0Bi/z8MfeYwj
+VAwEkX4b2NwtuweYLzupBOTv0SqYCmBduZObkS5LHMZ+5Yh9Hfrd04uMdO5cIiy0
+AsGehTRC3Xyaea7Qk993rNcGEzX7LNB1GB2BXSq9FYPb+q0ewf8k8Lr9E0WG0dvD
+OaJSkSGedgdA1QzvTgpAAkVWsXlksShVf4NVskxNUGDRaPLeRB+IV/5jO+kRsFuO
+g5Tlkn6cgu1+Bn5gIfv0ny9K7TeC697gRQIcK8db1t8XidgSKbRmsSYEaRCy3c9x
+w2/N7DLU/Js3gV8FUd7cZpaYN+k/erMdyfqLA7oFd+HLbA5Du/971yF8/6Bof8zp
+jB9+QPRIARpcROEcQXz09dtl8wW8M0r09xpna+0Jk6JxF+stD97+hzikQXIxUtCX
+j35ps9USSxv1cuz0MaFdWGW13OugtN4bQ2DNgelbTDUEKg//YTbBl9oGYQxHv9S5
+qvZVNvV3DuI18E5VW5ddyo/JfW24+Tukli/ZjPQYnMOP86nnIqo/LPGb4nV1uWL4
+KhmOCbH7t43+TkAwdwoxLjYP7iOqQp9VRPFjomUfvtmLjHp4r3cVEt5QeJEZLiSC
+zSKMjPKqRMo5nNs3Et+/FyWCMRYdSggwhBfkbKKo44H9pmL3bTLqyir7EJAcArla
+zjKMyZqRsK3gZfQgoASN5xAhemVWHnnecVSAqrOW599EBkc7Kf6lXjTVHtHN02vX
+YYRZ16zrEjrfwb23LR+lAxSfWxLDovKLBg2SPbpduEv1GxyEFgF7v9fco4aQbuh/
+fOGvA8nuXkC5nI6ukw4c4zwmJ5+SNQthFUYKWLd4hR4qrCoJkMEWZmsCRtqxjVCJ
+/i9ygRJHOGAWaam7bS+U7pdmq2mgF+qTxb2vX6mSzI3q3M7drGUA3EdaZo1hPA5u
+kWi7tMCGqPQmtUFRnUvHPzCDuXLYT8lRxhTxDi3T5MXdIUlAUTcNpwG8Ill0xkGc
+pMlh0D5p44GEdMFfJiXw6AUETHcqC2qZr2rP9kpzvVlapIrsPRg/DU+s70YnccI3
+iMCVm4/WrghFeK232zkjiwRVOm+IEWBlDFrm4MMjfguUeneYbK9WhqJnss9nc4QK
+Vhzuyn3GTtg1w/T6CaYVXBjcHFmJBEQEGAEIAA8CGwIFAlokZSMFCQQWmKMCKcFd
+IAQZAQgABgUCWBU7dwAKCRBGwhMN/SSX9XKdD/4/dWSy7h+ejbq8DuaX1vNXea79
+f+DNTUerJKpi/1nDOTajnXZnhCShP/yVF6kgbu8AVFDM+fno/P++kx+IwNp/q2HG
+zzCm/jLeb6txAhAo7iw3fDAU89u8zzAahjp8Zq8iQsoohfLUGnNEaW0Z25/Rzb37
+Jy/NxxCnK5OtmThmXveQvIFLx8K34xlZ6MwyiUO64smIdtdyLr492LciZpvJK1s2
+cliZLKu40dwseWAhvK6BOIBx1PLQGL/Pwx95jCNUDASRfhvY3C27B5gvO6kE5O/R
+KpgKYF25k5uRLkscxn7liH0d+t3Ti4x07lwiLLQCwZ6FNELdfJp5rtCT33es1wYT
+Nfss0HUYHYFdKr0Vg9v6rR7B/yTwuv0TRYbR28M5olKRIZ52B0DVDO9OCkACRVax
+eWSxKFV/g1WyTE1QYNFo8t5EH4hX/mM76RGwW46DlOWSfpyC7X4GfmAh+/SfL0rt
+N4Lr3uBFAhwrx1vW3xeJ2BIptGaxJgRpELLdz3HDb83sMtT8mzeBXwVR3txmlpg3
+6T96sx3J+osDugV34ctsDkO7/3vXIXz/oGh/zOmMH35A9EgBGlxE4RxBfPT122Xz
+BbwzSvT3Gmdr7QmTonEX6y0P3v6HOKRBcjFS0JePfmmz1RJLG/Vy7PQxoV1YZbXc
+66C03htDYM2B6VtMNQkQFkawG4blAxCiVRAAhq/1L5YlsmItiC6MROtPP+lfAWRm
+MSkoIuAtzkV/orqPetwWzjYLgApOvVXBuf9FdJ5vAx1IXG3mDx6mQQWkr4t9onwC
+UuQ7lE29qmvCHB3FpKVJPKiGC6xK38t5dGAJtbUMZBQb1vDuQ7new8dVLzBSH1VZ
+7gx9AT+WEptWznb1US1AbejO0uT8jsVc/McK4R3LQmVy9+hbTYZFz1zCImuv9SCN
+ZPSdLpDe41QxcMfKiW7XU4rshJULKd4HYG92KjeJU80zgCyppOm85ENiMz91tPT7
++A4O7XMlOaJEH8t/2SZGBE/dmHjSKcWIpJYrIZKXTrNv7rSQGvweNG5alvCAvnrL
+J2cRpU1Rziw7auEU1YiSse+hQ1ZBIzWhPMunIdnkL/BJunBTVE7hPMMG7alOLy5Z
+0ikNytVewasZlm/dj5tEsfvF7tisVTZWVjWCvEMTP5fecNMEAwbZdBDyQBAN00y7
+xp4Pwc/kPLuaqESyTTt8jGek/pe7/+6fu0GQmR2gZKGagAxeZEvXWrxSJp/q81XS
+QGcO6QYMff7VexY3ncdjSVLro+Z3ZtYt6aVIGAEEA5UE341yCGIeN+nr27CXD4fH
+F28aPh+AJzYh+uVjQhHbL8agwcyCMLgU88u1U0tT5Qtjwnw+w+3UNhROvn495REp
+eEwD60iVeiuF5FW5Ag0EWbWWowEQALCiEk5Ic40W7/v5hqYNjrRlxTE/1axOhhzt
+8eCB7eOeNOMQKwabYxqBceNmol/guzlnFqLtbaA6yZQkzz/K3eNwWQg7CfXO3+p/
+dN0HtktPfdCk+kY/t7StKRjINW6S9xk9KshiukmdiDq8JKS0HgxqphBB3tDjmo6/
+RiaOEFMoUlXKSU+BYYpBpLKg53P8F/8nIsK2aZJyk8XuBd0UXKI+N1gfCfzoDWnY
+Hs73LQKcjrTaZQauT81J7+TeWoLI28vkVxyjvTXAyjSBnhxTYfwUNGSoawEXyJ1u
+KCwhIpklxcCMI9Hykg7sKNsvmJ4uNcRJ7cSRfb0g5DR9dLhR+eEvFd+o4PblKk16
+AI48N8Zg1dLlJuV2cAtl0oBPk+tnbZukvkS5n1IzTSmiiPIXvK2t506VtfFEw4iZ
+rJWf2Q9//TszBM3r1FPATLH7EAeG5P8RV+ri7L7NvzP6ZQClRDUsxeimCSe8v/t0
+OpheCVMlM9TpVcKGMw8ig/WEodoLOP4iqBs4BKR7fuydjDqbU0k/sdJTltp7IIdK
+1e49POIQ7pt+SUrsq/HnPW4woLC1WjouBWyr2M7/a0SldPidZ2BUAK7O9oXosidZ
+MJT7dBp3eHrspY4bdkSxsd0nshj0ndtqNktxkrSFRkoFpMz0J/M3Q93CjdHuTLpT
+HQEWjm/7ABEBAAGJBEQEGAEIAA8FAlm1lqMCGwIFCQJ2LQACKQkQFkawG4blAxDB
+XSAEGQEIAAYFAlm1lqMACgkQ4HTRbrb/TeMpDQ//eOIsCWY2gYOGACw42JzMVvuT
+DrgRT4hMhgHCGeKzn1wFL1EsbSQV4Z6pYvnNayuEakgIz14wf4UFs5u1ehfBwatm
+akSQJn32ANcAvI0INAkLEoqqy81mROjMc9FFrOkdqjcN7yN0BzH9jNYL/gsvmOOw
+Ou+dIH3C1Lgei844ZR1BZK1900mohuRwcji0sdROMcrKrGjqd4yb6f7yl0wbdAxA
+3IHT3TFGczC7Y41P2OEpaJeVIZZgxkgQsJ14qK/QGpdKvmZAQpjHBipeO/H+qxyO
+T5Y+f15VLWGOOVL090+ZdtF7h3m4X2+L7xWsFIgdOprfO60gq3e79YFfgNBYU5BG
+tJGFGlJ0sGtnpzx5QCRka0j/1E5lIu00sW3WfGItFd48hW6wHCloyoi7pBR7xqSE
+oU/U5o7+nC8wHFrDYyqcyO9Q3mZDw4LvlgnyMOM+qLv/fNgO9USE4T30eSvc0t/5
+p1hCKNvyxHFghdRSJqn70bm6MQY+kd6+B/k62Oy8eCwRt4PR+LQEIPnxN7xGuNpV
+O1oMyhhO41osYruMrodzw81icBRKYFlSuDOQ5jlcSajc6TvF22y+VXy7nx1q/CN4
+tzB/ryUASU+vXS8/QNM6qI/QbbgBy7VtHqDbs2KHp4cP0j9KYQzMrKwtRwfHqVrw
+FLkCp61EHwSlPsEFiglpMg/8DQ92O4beY0n7eSrilwEdJg89IeepTBm1QYiLM33q
+WLR9CABYAIiDG7qxviHozVfX6kUwbkntVpyHAXSbWrM3kD6jPs3u/dimLKVyd29A
+VrBSn9FC04EjtDWsj1KB7HrFN4oo9o0JLSnXeJb8FnPf3MitaKltvj/kZhegozIs
++zvpzuri0LvoB4fNA0T4eAmxkGkZBB+mjNCrUHIakyPZVzWGL0QGsfK1Q9jvw0OE
+rqHJYX8A1wLre/HkBne+e5ezS6Mc7kFW33Y1arfbHFNAe12juPsOxqK76qNilUbQ
+pPtNvWP3FTpbkAdodMLq/gQ+M5yHwPe8SkpZ8wYCfcwEemz/P+4QhQB8tbYbpcPx
+J+aQjVjcHpsLdrlSY3JL/gqockR7+97GrCzqXbgvsqiWr16Zyn6mxYWEHn9HXMh3
+b+2IYKFFXHffbIBq/mfibDnZtQBrZpn2uyh6F2ZuOsZh0LTD7RL53KV3fi90nS00
+Gs1kbMkPycL1JLqvYQDpllE2oZ1dKDYkwivGyDQhRNfERL6JkjyiSxfZ2c84r2HP
+gnJTi/WBplloQkM+2NfXrBo6kLHSC6aBndRKk2UmUhrUluGcQUyfzYRFH5kVueIY
+fDaBPus9gb+sjnViFRpqVjefwlXSJEDHWP3Cl2cuo2mJjeDghj400U6pjSUW3bIC
+/PK5Ag0EXCxEEQEQAKVjsdljwPDGO+48879LDa1d7GEu/Jm9HRK6INCQiSiS/0mH
+keKa6t4DRgCY2ID9lFiegx2Er+sIgL0chs16XJrFO21ukw+bkBdm2HYUKSsUFmr/
+bms8DkmAM699vRYVUAzO9eXG/g8lVrAzlb3RT7eGHYKd15DT5KxXDQB+T+mWE9qD
+5RJwEyPjSU+4WjYF+Rr9gbSuAt5UySUb9jTR5HRNj9wtb4YutfP9jbfqy8esQVG9
+R/hpWKb2laxvn8Qc2Xj93qNIkBt/SILfx9WDJl0wNUmu+zUwpiC2wrLFTgNOpq7g
+9wRPtg5mi8MXExWwSF2DlD54yxOOAvdVACJFBXEcstQ3SWg8gxljG8eLMpDjwoIB
+ax3DZwiYZjkjJPeydSulh8vKoFBCQkf2PcImXdOk2HqOV1L7FROM6fKydeSLJbx1
+7SNjVdQnq1OsyqSO0catAFNptMHBsN+tiCI29gpGegaoumV9cnND69aYvyPBgvdt
+mzPChjSmc6rzW1yXCJDm2qzwm/BcwJNXW5B3EUPxc0qSWste9fUna0G4l/WMuaIz
+VkuTgXf1/r9HeQbjtxAztxH0d0VgdHAWPDkUYmztcZ4sd0PWkVa18qSrOvyhI96g
+CzdvMRLX17m1kPvP5PlPulvqizjDs8BScqeSzGgSbbQVm5Tx4w2uF4/n3FBnABEB
+AAGJBEQEGAECAA8FAlwsRBECGwIFCQIKEgACKQkQFkawG4blAxDBXSAEGQECAAYF
+AlwsRBEACgkQI+cWZ4i2Ph6B0g//cPis3v2M6XvAbVoM3GIMXnsVj1WAHuwA/ja7
+UfZJ9+kV/PiMLkAbW0fBj0/y0O3Ry12VVQGXhC+Vo4j6C8qwFP4OXa6EsxHXuvWM
+IztBaX1Kav613aXBtxp6tTrud0FFUh4sDc1RREb3tMr6y5cvFJgnrdWcX1gsl6OD
+cgWBGNc6ZX7H7j48hMR6KmNeZocW7p8W+BgDQJqXYwVNL15qOHzVAh0dWsFLE9gw
+BTmDCY03x9arxSNDGCXyxt6E77LbNVIoSRlEbkvi6j33nEbuERICYl6CltXQCyiV
+KjheJcLMjbgv5+bLCv2zfeJ/WyOmOGKpHRu+lBV1GvliRxUblVlmjWPhYPBZXGyj
+II16Tqr+ilREcZFW+STccbrVct75JWLbxwlEmix+W1HwSRCR+KHx3Cur4ZPMOBlP
+sFilOOsNa7ROUB56t7zv21Ef3BeeaCd9c4kzNGN8d1icEqSXoWWPqgST0LZPtZyq
+WZVnWrHChVHfrioxhSnw8O3wY1A2GSahiCSvvjvOeEoJyU21ZMw6AVyHCh6v42oY
+adBfGgFwNo5OCMhNxNy/CcUrBSDqyLVTM5QlNsT75Ys7kHHnc+Jk+xx4JpiyNCz5
+LzcPhlwpqnJQcjJdY1hDhK75Ormj/NfCMeZ8g1aVPX4xEq8AMyZYhZ5/lmM+13Rd
+v8ZW6FK7HQ/+IAKzntxOjw0MzCXkksKdmIOZ2bLeOVI8aSLaUmoT5CLuoia9g7iF
+HlYrSY+01riRrAaPtYx0x8onfyVxL9dlW/Fv5+qc1fF5FxdhyIgdqgzm82TnXHu/
+haUxYmUvNrbsmmNl5UTTOf+YQHMccKFdYfZ2rCBtbN2niXG1tuz2+k83pozu4mJ1
+rOOLNAsQoY3yR6OODte1FyOgp7blwDhTIoQb8/UiJ7CMBI3OPrfoXFAnhYoxeRSA
+N4UFu9/HIkqfaQgRPCZS1gNerWF6r6yz9AZWUZqjSJssjBqXCtK9bGbTYBZk+pw3
+H9Nd0RJ2WJ9qPqmlmUr1wdqct0ChsJx1xAT86QrssicJ/HFFmF45hlnGkHUBWLaV
+Jt8YkLb/DqOIbVbwyCLQtJ80VQLEeupfmu5QNsTpntRYNKf8cr00uc8vSYXYFRxa
+5H5oRT1eoFEEjDDvokNnHXfT+Hya44IjYpzaqvAgeDp6sYlOdtWIv/V3s+trxACw
+TkRN7zw3lLTbT8PK9szK0fYZ5KHG1/AKH+mbZ6qNc/25PNbAFRtttLGuEIC3HJ12
+IAp2JdjioeD2OnWLu4ZeCT2CKKFsleZPrSyCrn3gyZPmfYvv5h2JbQNO6uweOrZE
+NWX5SU43OBoplbuKJZsMP6p6NahuGnIeJLlv509JYAf/HN4ARyvvOpO5Ag0EXDf1
+bwEQAKBByJMoxQ7H6AsQP29qjY8/pfDiNloQDHasUXoOyTfUetam3rY/UWCHFrMD
+0jvOHNIqEVJPsSWrxBYf+i4NNECsCSj39JHdVLOkn6pJcRnMzmljS8ojOybYRUTT
+KdKlV+jYy6hqAjTvnf/pzZOrNseKyxAo/xETphN2UEBKOZwV5j5YV6VXptt6xn1x
+EL1wzahZr6qz/gXn5//mg6aPPUCJt7BPBtC34HGoyHUn4Cx/jSU7zlQLV11VyTyt
+/TY69Wgc1k21oS0tm44uw8D+4bIXYewxNq0utt75c75JK5rPKCpIkaSgE3YUPAhM
+fpoUxSgo+hrTaocLbQm3/fDfRqYhw9IWrOuWLYEEI5NqS0etq2X+nM2oEXymxUM1
+45dicUv27B1YU5IciRaoA3Bwkl3uyvLhkwBNgJGpBoRsgyWKhlUpdMOSAFPHag0D
+HNCKbFTGxZOJ1+BoDsIscK864AodI0YvhMFByWGRwQMszQpK/vg9uUdIMDYTzI0i
+nvCrOht4R91z/2VZXHlv4D38UYsVE5P6u7N8T6T4SzERBKSktWhnJmMRJK5FQQwM
+zWCnSj9TGMC5+JYeMjRV1pUwpZw8iOlDg0x8LfMQ3XbZ0/bvlPsXOjiYmHAjrLZf
+qL0vR5jPyrfVUxF/XHJBBC9SEvvXrEDK+G+V9NmNavUNrhLnABEBAAGJBEQEGAEC
+AA8FAlw39W8CGwIFCQH+NIACKQkQFkawG4blAxDBXSAEGQECAAYFAlw39W8ACgkQ
+T3dnk2lHW6p0eg/+K2JJu1RbTSLJPFYQhLcxX+5d2unkuNLIy3kArtZuB992E2Fw
+00okPGtuPdSyk2ygh4DeYnwmabIWChi7LDp+YnqcI4GfMxNG6RsHs+A/77rLBST3
+BB1sejZppmKCQZDSC2pvYaZBpS80UvftCZ9RFdY+kTC22Btn/5ekiQOfIqhUH9Cy
+GWS/YlGciomVIVn1hSPN8l4EpBCDtceRaephvzjQIZT3AxOfSlpwJviYjAOkSX4q
+WyIjC5Ke5kfEOldUuBN1JGAm45tKlrz/LD/+VOc2IWpbkOIAVSldUgpRyiIJQAZ8
+0trNxrJI7ncaID8lAa7pBptJiL0KorRjk3c6Y7p830Nwe0J5e5+W1RzN4wlR8+9u
+uRyP8Mcwz/Hz2jwMiv38Vk4tAOe4PYNZuDnpjZ28yCpF3UUgvzjarubFAcg2jd8S
+auCQFlmOfvT+1qIMSeLmWBOdlzJTUpJRcZqnkEE4WtiMSlxyWVFvUwOmKSGi8CLo
+GW1Ksh9thQ9zKhvVUiVoKn4Z79HXr4pX6rnp+mweJ2dEZtlqD7HxjVTlCHn9fzCl
+t/Nt0h721fJbS587AC/ZMgg5GV+GKu6Mij0sPAowUJVCIwN9uK/GHICZEAoMSngP
+8xzKnhU5FD38vwBvsqbKxTtICrv2NuwnQ0WBBQ58w5mv2RCMr2W6iegSKIDjwxAA
+hDpCw0dlUOodY4omJB19Ra9zIZO5IGxT2+oksks3uWkT/l+I7FY0+YNtIZnC01Ge
+RJxJtuDwQXigYEKn1UEJ7ymBKrAdCEY0OC344AffLx81aOYWbbW7XaO6rZn8nyZu
+0oC95dGlQQdWYJBLcTwANx50iQQGkR5a+XF87yVciFm6x5Cf78pzJ5OBvN3qLJzN
+4YBftPMKIgbozGm6/3I6DDT0SMeCOhamshoBf7Ksqd6N+XUjRHZr7UwprWDJlhSC
+XFF1e6tjlf22NwZ9UH29VswFkepT99tfBFpobjbzfABO0YnAj72WcR2ZKP7oYHf7
+EkhI2ssWQ9PRPTwdOSXZDEH0s4cJqO+ZzRoAPE+3hbHlGukAqZiiHRlNpOvPdO6Q
+mgVBRsURs5i+4vylfat59HUtzQWbTF1bnZbMlefttb5CHRJNb3PTuxHR562Uzp9/
+/SZfDhAx7SYgwRF+FANWJsvX+I7CbP4qvOzutvIYTsNchbCxrOl+0PxMxWaYZzVb
+ZW45mO0LFUNCFqcnr3Sot5e9n0C0vjKBV9XgICHKKgeHaMwOMirb1MKvvMpJ3+NI
+BYZJ6d+LyhFXL0xJXccUnEXsmk2h4SBEEZYIhAk9ntRmzOXhXFLAOS8agWlmvYwh
+xeeb76cVOYlpLw1utXV9hbuo+oM109vMs73mpF88g4g=
+=oMDY
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/roles/sram_sbs/handlers/main.yml b/roles/sram_sbs/handlers/main.yml
new file mode 100644
index 000000000..bc8be505b
--- /dev/null
+++ b/roles/sram_sbs/handlers/main.yml
@@ -0,0 +1,9 @@
+---
+- name: Restart sbs containers
+ community.docker.docker_container:
+ name: "{{ item }}"
+ state: started
+ restart: true
+ loop:
+ - sram-sbs-client
+ - sram-sbs-server
diff --git a/roles/sram_sbs/tasks/main.yml b/roles/sram_sbs/tasks/main.yml
new file mode 100644
index 000000000..a9378fa36
--- /dev/null
+++ b/roles/sram_sbs/tasks/main.yml
@@ -0,0 +1,155 @@
+---
+- name: "Create SBS group"
+ group:
+ name: "{{ sram_sbs_group }}"
+ state: "present"
+ register: "result"
+
+- name: "Save SBS group gid"
+ set_fact:
+ sbs_group_gid: "{{ result.gid }}"
+
+- name: "Create SBS user"
+ user:
+ name: "{{ sram_sbs_user }}"
+ group: "{{ sram_sbs_group }}"
+ comment: "User to run SBS service"
+ shell: "/bin/false"
+ password: "!"
+ home: "{{ sram_sbs_conf_dir }}"
+ create_home: false
+ state: "present"
+ register: "result"
+
+- name: "Save sbs user uid"
+ set_fact:
+ sbs_user_uid: "{{ result.uid }}"
+
+- name: "Create directories"
+ file:
+ path: "{{item.path}}"
+ state: "directory"
+ owner: "{{sbs_user_uid}}"
+ group: "{{sbs_group_gid}}"
+ mode: "{{item.mode}}"
+ with_items:
+ - { path: "{{sram_sbs_work_dir}}", mode: "0755" }
+ - { path: "{{sram_sbs_conf_dir}}", mode: "0755" }
+ - { path: "{{sram_sbs_conf_dir}}/saml", mode: "0755" }
+ - { path: "{{sram_sbs_log_dir}}", mode: "0775" }
+
+- name: "Fix file permissions"
+ file:
+ path: "{{sram_sbs_log_dir}}/{{item}}"
+ owner: "{{sbs_user_uid}}"
+ group: "{{sbs_group_gid}}"
+ mode: "0664"
+ state: "touch"
+ modification_time: "preserve"
+ access_time: "preserve"
+ with_items:
+ - "sbs.log"
+ - "sbs.debug.log"
+
+- name: "Create SBS config files"
+ template:
+ src: "{{item.name}}.j2"
+ dest: "{{ sram_sbs_conf_dir }}/{{item.name}}"
+ owner: "{{sbs_user_uid}}"
+ group: "{{sbs_group_gid}}"
+ mode: "{{item.mode}}"
+ with_items:
+ - { name: "config.yml", mode: "0644" }
+ - { name: "alembic.ini", mode: "0644" }
+ - { name: "disclaimer.css", mode: "0644" }
+ - { name: "sbs-apache.conf", mode: "0644" }
+ no_log: "{{ sram_sbs_ansible_nolog }}"
+ notify: "Restart sbs containers"
+
+- name: "Pull sbs image"
+ community.docker.docker_image_pull:
+ name: "{{ item }}"
+ with_items:
+ - "{{ sram_sbs_client_image }}"
+ - "{{ sram_sbs_server_image }}"
+ register: "sram_sbs_image"
+
+- name: "Migration"
+ # For some reason --check breaks this block
+ when: "sram_sbs_image is changed and not ansible_check_mode"
+ block:
+ - name: "Run SBS migrations"
+ throttle: 1
+ community.docker.docker_container:
+ name: "sram-sbs-migration"
+ image: "{{ sram_sbs_server_image }}"
+ pull: "never"
+ state: "started"
+ restart_policy: "no"
+ detach: false
+ env:
+ RUNAS_UID: "{{ sbs_user_uid | string }}"
+ RUNAS_GID: "{{ sbs_group_gid | string }}"
+ MIGRATIONS_ONLY: "1"
+ # don't actually run the server
+ command: "/bin/true"
+ volumes:
+ - "{{ sram_sbs_conf_dir }}:/sbs-config"
+ - "{{ sram_sbs_log_dir }}:/opt/sbs/log"
+ networks:
+ # TODO: Should we parametrize this?
+ - name: "loadbalancer"
+ register: "result"
+ failed_when: "'container' not in result or result.container.State.ExitCode != 0"
+ changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output | default('')"
+ notify: "Restart sbs containers"
+
+ # Remove the migration container; we can't do that with auto_remove,
+ # because if we use that, ansible will not save the output in result
+ - name: "Remove migration container"
+ community.docker.docker_container:
+ name: "sram-sbs-migration"
+ state: "absent"
+ # TODO: fix this by only running this if "sram_sbs_image is changed"
+ changed_when: false
+
+- name: "Start sbs client container"
+ community.docker.docker_container:
+ name: "sram-sbs-client"
+ image: "{{ sram_sbs_client_image }}"
+ pull: "never"
+ restart_policy: "always"
+ state: "started"
+ volumes:
+ - "{{ sram_sbs_conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro"
+ - "{{ sram_sbs_conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro"
+ networks:
+ - name: "loadbalancer"
+ labels:
+ traefik.http.routers.sbsclient.rule: "Host(`{{ sram_sbs_base_domain }}`)"
+ traefik.http.routers.sbsclient.tls: "true"
+ traefik.enable: "true"
+
+- name: "Start SBS server container"
+ community.docker.docker_container:
+ name: "sram-sbs-server"
+ image: "{{ sram_sbs_server_image }}"
+ restart_policy: "always"
+ state: "started"
+ env:
+ RUNAS_UID: "{{ sbs_user_uid | string }}"
+ RUNAS_GID: "{{ sbs_group_gid | string }}"
+ CONFIG: "/opt/sbs/server/config/config.yml"
+ REQUESTS_CA_BUNDLE: "/etc/ssl/certs/ca-certificates.crt"
+ RUN_MIGRATIONS: "0"
+ pull: "never"
+ volumes:
+ - "{{ sram_sbs_conf_dir }}:/sbs-config"
+ - "{{ sram_sbs_log_dir }}:/opt/sbs/log"
+ - "/tmp/ci-runner:/tmp/ci-runner"
+ networks:
+ - name: "loadbalancer"
+ # TODO: fix this: this is only for dev
+ etc_hosts:
+ oidc-op.scz-vm.net: "172.20.1.24"
+ host.docker.internal: host-gateway
diff --git a/roles/sram_sbs/templates/alembic.ini.j2 b/roles/sram_sbs/templates/alembic.ini.j2
new file mode 100644
index 000000000..e6049eebb
--- /dev/null
+++ b/roles/sram_sbs/templates/alembic.ini.j2
@@ -0,0 +1,72 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = migrations
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# timezone to use when rendering the date
+# within the migration file as well as the filename.
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+#truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to alembic/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat alembic/versions
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = {{ sram_sbs_db_connection_migration }}
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = NOTSET
+handlers = console
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = DEBUG
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/roles/sram_sbs/templates/config.yml.j2 b/roles/sram_sbs/templates/config.yml.j2
new file mode 100644
index 000000000..0325a043f
--- /dev/null
+++ b/roles/sram_sbs/templates/config.yml.j2
@@ -0,0 +1,264 @@
+---
+database:
+ uri: {{ sram_sbs_db_connection_sbs }}
+
+redis:
+{% if environment_shortname == 'test2' %}
+ uri: "redis://{{ sram_sbs_redis_user }}:{{ sram_sbs_redis_password }}@{{ sram_sbs_redis_host }}/"
+{% else %}
+ uri: "redis{% if sram_sbs_redis_ssl %}s{% endif %}://{{ sram_sbs_redis_user }}:{{ sram_sbs_redis_password }}@{{ sram_sbs_redis_host }}:{{ sram_sbs_redis_port }}/"
+{% endif %}
+
+# add a per-release suffix here to invalidate sessions on new releases
+secret_key: {{ sram_sbs_db_secret }}{{ sram_sbs_secret_key_suffix }}
+# Must be a base64 encoded key of 128, 192, or 256 bits.
+# Generate: base64.b64encode(os.urandom(256 // 8)).decode()
+encryption_key: {{ sram_sbs_encryption_key }}
+
+# Lifetime of session in minutes (one day is 60 * 24)
+permanent_session_lifetime: {{ sram_sbs_session_lifetime }}
+
+logging:
+ log_to_stdout: {{ sram_sbs_log_to_stdout }}
+
+# Valid scopes are "READ" and "WRITE"
+api_users:
+{% for name, user in sram_sbs_api_users.items() %}
+ - name: "{{ name }}"
+ password: "{{ user.password }}"
+ scopes: "[ {{ user.scopes | join(', ') }} ]"
+{% endfor %}
+
+oidc:
+ client_id: "{{ sram_sbs_oidc_client_id }}"
+ client_secret: "{{ sram_sbs_oidc_client_secret }}"
+ audience: "{{ sram_sbs_oidc_jwt_audience }}"
+ verify_peer: {{ sram_sbs_oidc_verify_peer }}
+  authorization_endpoint: "{{ sram_sbs_oidc_authz_endpoint }}"
+ token_endpoint: "{{ sram_sbs_oidc_token_endpoint }}"
+ userinfo_endpoint: "{{ sram_sbs_oidc_userinfo_endpoint }}"
+ jwks_endpoint: "{{ sram_sbs_oidc_jwks_endpoint }}"
+  # Note that the paths for these URIs are hardcoded; only domain and port differ per environment
+ redirect_uri: "{{ sram_sbs_oidc_redirect_uri }}"
+ continue_eduteams_redirect_uri: "{{ sram_sbs_eduteams_continue_endpoint }}"
+ continue_eb_redirect_uri: "{{ sram_sbs_eb_continue_endpoint }}"
+ second_factor_authentication_required: {{ sram_sbs_second_factor_authentication_required }}
+ totp_token_name: "{{ sram_sbs_totp_token_name }}"
+  # The service_id in the proxy_authz endpoint when logging into SBS; most likely equal to oidc.client_id
+ sram_service_entity_id: "{{ sram_sbs_oidc_client_id }}"
+ scopes: {{ sram_sbs_oidc_scopes }}
+
+base_scope: "{{ base_domain }}"
+entitlement_group_namespace: "{{ sram_sbs_urn_namespace }}"
+eppn_scope: "{{ sram_sbs_eppn_scope }}"
+scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension"
+collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co"
+
+{% if env == "prd" %}
+environment_disclaimer: ""
+{% else %}
+environment_disclaimer: "{{ sram_sbs_disclaimer_label }}"
+{% endif %}
+
+# All services on the white list can be requested via the create-restricted-co API
+# The default organisation is a fallback for when the administrator has no schac_home_org
+restricted_co:
+ services_white_list: [ "https://cloud" ]
+ default_organisation: "{{ sram_sbs_restricted_co_default_org }}"
+
+mail:
+ host: {{ sram_sbs_mail_host }}
+ port: {{ sram_sbs_mail_port }}
+ sender_name: {{ sram_sbs_mail_sender_name }}
+ sender_email: {{ sram_sbs_mail_sender_email }}
+ suppress_sending_mails: {{ sram_sbs_suppress_mails }}
+ info_email: {{ sram_sbs_support_email }}
+ beheer_email: {{ sram_sbs_admin_email }}
+ ticket_email: {{ sram_sbs_ticket_email }}
+ eduteams_email: {{ sram_sbs_eduteams_email }}
+ # Do we mail a summary of new Organizations and Services to the beheer_email?
+ audit_trail_notifications_enabled: {{ sram_sbs_audit_trail_notifications_enabled }}
+ account_deletion_notifications_enabled: True
+ send_exceptions: {{ sram_sbs_send_exceptions }}
+ send_js_exceptions: {{ sram_sbs_send_js_exceptions }}
+ send_exceptions_recipients: [ "{{ sram_sbs_exceptions_mail }}" ]
+ environment: "{{ base_domain }}"
+
+manage:
+ enabled: {{ sram_sbs_manage_base_enabled }}
+  # The entity_id of the SRAM RP in Manage for API retrieval, e.g. "sbs.test2.sram.surf.nl"
+ sram_rp_entity_id: "{{ sram_sbs_manage_sram_rp_entity_id }}"
+ base_url: "{{ sram_sbs_manage_base_url }}"
+ user: "{{ sram_sbs_manage_user }}"
+ password: "{{ sram_sbs_manage_password }}"
+ verify_peer: {{ sram_sbs_manage_verify_peer }}
+
+aup:
+ version: 1
+ url_aup_en: "https://edu.nl/6wb63"
+ url_aup_nl: "https://edu.nl/6wb63"
+
+base_url: {{ sram_sbs_base_url }}
+socket_url: {{ sram_sbs_base_url }}
+base_server_url: {{ sram_sbs_base_url }}
+wiki_link: {{ sram_sbs_wiki_link }}
+
+admin_users:
+{% for admin_user in sram_sbs_admin_users %}
+ - uid: "{{ admin_user.uid }}"
+{% endfor %}
+
+organisation_categories:
+ - "HBO"
+ - "MBO"
+ - "UMC"
+ - "University"
+ - "Research"
+ - "SURF"
+
+feature:
+ seed_allowed: {{ sram_sbs_seed_allowed }}
+ api_keys_enabled: {{ sram_sbs_api_keys_enabled }}
+ feedback_enabled: {{ sram_sbs_feedback_enabled }}
+ impersonation_allowed: {{ sram_sbs_impersonation_allowed }}
+ sbs_swagger_enabled: {{ sram_sbs_swagger_enabled }}
+ admin_platform_backdoor_totp: {{ sram_sbs_admin_platform_backdoor_totp }}
+ past_dates_allowed: {{ sram_sbs_past_dates_allowed }}
+ mock_scim_enabled: {{ sram_sbs_mock_scim_enabled }}
+
+metadata:
+ idp_url: "{{ sram_sbs_idp_metadata_url }}"
+ parse_at_startup: True
+  # No need for environment-specific values
+ scope_override:
+ knaw.nl: "Koninklijke Nederlandse Akademie van Wetenschappen (KNAW)"
+
+platform_admin_notifications:
+  # Do we check daily for open CO join_requests and CO requests and send a summary mail to beheer_email?
+ enabled: False
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # How long before we include open join_requests in the summary
+ outstanding_join_request_days_threshold: 7
+ # How long before we include open CO requests in the summary
+ outstanding_coll_request_days_threshold: 7
+
+user_requests_retention:
+  # Do we check daily for CO join_requests and CO requests and delete those that were approved or denied?
+ enabled: {{ sram_sbs_notifications_enabled }}
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # How long before we delete approved / denied join_requests
+ outstanding_join_request_days_threshold: 90
+ # How long before we delete approved / denied CO requests
+ outstanding_coll_request_days_threshold: 90
+
+# The retention config determines how long users may be inactive, how long the reminder email is valid, and when we resend the magic link
+retention:
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # how many days of inactivity before a user is suspended
+ # 0 allows for any last_login_date in the past to trigger suspension notification
+ allowed_inactive_period_days: {{ sram_sbs_suspension_inactive_days }}
+ # how many days before suspension do we send a warning
+  # -1 will suspend notified users on the second run of the suspension cron
+ reminder_suspend_period_days: {{ sram_sbs_suspension_reminder_days }}
+ # how many days after suspension do we delete the account
+ remove_suspended_users_period_days: 90
+ # how many days before deletion do we send a reminder
+ reminder_expiry_period_days: 7
+ # whether to send a notification of the result of the retention process to the beheer_email
+ admin_notification_mail: {{ sram_sbs_suspension_notify_admin }}
+
+collaboration_expiration:
+  # Do we check daily for COs that should be deleted because they have expired?
+ enabled: {{ sram_sbs_notifications_enabled }}
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # How long after expiration do we actually delete expired collaborations
+ expired_collaborations_days_threshold: 90
+ # How many days before actual expiration do we mail the organisation members
+ expired_warning_mail_days_threshold: 10
+
+collaboration_suspension:
+  # Do we check daily for COs that should be suspended because of inactivity?
+ enabled: {{ sram_sbs_notifications_enabled }}
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # After how many days of inactivity do we suspend collaborations
+ collaboration_inactivity_days_threshold: 365
+ # How many days before actual suspension do we mail the organisation members
+ inactivity_warning_mail_days_threshold: 10
+  # How many days after suspension do we actually delete the collaboration
+ collaboration_deletion_days_threshold: 90
+
+membership_expiration:
+  # Do we check daily for memberships that should be deleted because they have expired?
+ enabled: {{ sram_sbs_notifications_enabled }}
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # How long after expiration do we actually delete expired memberships
+ expired_memberships_days_threshold: 90
+ # How many days before actual expiration do we mail the co admin and member
+ expired_warning_mail_days_threshold: 10
+
+invitation_reminders:
+  # Do we check daily for invitations that need a reminder?
+ enabled: {{ sram_sbs_invitation_reminders_enabled }}
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # How many days before expiration of an invitation do we remind the user?
+ invitation_reminders_threshold: 5
+
+invitation_expirations:
+  # Do we check daily for invitations that are expired or accepted and therefore eligible for deletion?
+ enabled: {{ sram_sbs_invitation_expirations_enabled }}
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+ # How long after expiration of an invitation do we delete the invitation?
+ nbr_days_remove_expired_invitations: 10
+ # How long after expiration of an API created invitation do we delete the invitation?
+ nbr_days_remove_api_expired_invitations: 30
+
+orphan_users:
+  # Do we check daily for orphaned users so they can be deleted?
+ enabled: {{ sram_sbs_delete_orphaned }}
+ cron_hour_of_day: {{ sram_sbs_cron_hour_of_day }}
+  # How long after creation do we delete orphaned users
+ delete_days_threshold: 14
+
+open_requests:
+  # Do we check weekly for all open requests?
+ enabled: {{ sram_sbs_open_requests_enabled }}
+ cron_day_of_week: 1
+
+scim_sweep:
+ # Do we enable scim sweeps?
+ enabled: {{ sram_sbs_scim_sweep }}
+ # How often do we check if scim sweeps are needed per service
+ cron_minutes_expression: "*/15"
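+  # "*/15" runs the sweep at minutes 0, 15, 30 and 45 of every hour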
+
+ldap:
+ url: "{{ sram_sbs_ldap_url }}"
+ bind_account: "{{ sram_sbs_ldap_bind_account }}"
+
+# An MFA login in a different flow is valid for X minutes
+mfa_sso_time_in_minutes: {{ sram_sbs_mfa_sso_minutes }}
+
+# whether to fall back to TOTP MFA
+mfa_fallback_enabled: {{ sram_sbs_mfa_fallback_enabled }}
+
+# Lower-case entity IDs and schac_home values that are allowed to skip MFA.
+# Note that for a login directly into SRAM only schac_home can be used, as the entity_id of the IdP is unknown
+mfa_idp_allowed: {{ sram_sbs_mfa_idp_allowed }}
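+# e.g. (hypothetical values): [ "example.org", "https://idp.example.org/saml/metadata" ]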
+
+# Lower-case schac_home organisations / entity IDs where SURFsecureID is used for step-up
+ssid_identity_providers: {{ sram_sbs_ssid_identity_providers }}
+
+ssid_config_folder: saml
+
+pam_web_sso:
+ session_timeout_seconds: 300
+
+rate_limit_totp_guesses_per_30_seconds: 10
+
+# The uids of users that will never be suspended or deleted
+excluded_user_accounts:
+{% for excluded_user in sram_sbs_excluded_users %}
+ - uid: "{{ excluded_user.uid }}"
+{% endfor %}
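+# Renders entries like (hypothetical uid):
+#   - uid: "urn:john"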
+
+engine_block:
+ api_token: {{ sram_sbs_engine_block_api_token }}
diff --git a/roles/sram_sbs/templates/disclaimer.css.j2 b/roles/sram_sbs/templates/disclaimer.css.j2
new file mode 100644
index 000000000..455cb97cb
--- /dev/null
+++ b/roles/sram_sbs/templates/disclaimer.css.j2
@@ -0,0 +1,6 @@
+{% if env!="prd" -%}
+body::after {
+ background: {{ sram_sbs_disclaimer_color }};
+ content: "{{ sram_sbs_disclaimer_label }}";
+}
+{% endif %}
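+{# On a non-production host this renders e.g. (hypothetical values):
+   body::after { background: yellow; content: "test"; } #}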
diff --git a/roles/sram_sbs/templates/saml_advanced_settings.json.j2 b/roles/sram_sbs/templates/saml_advanced_settings.json.j2
new file mode 100644
index 000000000..0d03c63d7
--- /dev/null
+++ b/roles/sram_sbs/templates/saml_advanced_settings.json.j2
@@ -0,0 +1,35 @@
+{
+ "security": {
+ "nameIdEncrypted": false,
+ "authnRequestsSigned": true,
+ "logoutRequestSigned": false,
+ "logoutResponseSigned": false,
+ "signMetadata": false,
+ "wantMessagesSigned": false,
+ "wantAssertionsSigned": true,
+    "wantNameId": true,
+ "wantNameIdEncrypted": false,
+ "wantAttributeStatement": false,
+ "wantAssertionsEncrypted": false,
+ "requestedAuthnContext": ["{{ sram_sbs_ssid_authncontext }}"],
+ "requestedAuthnContextComparison": "minimum",
+ "failOnAuthnContextMismatch": false,
+ "allowSingleLabelDomains": false,
+ "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256",
+ "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
+ "rejectDeprecatedAlgorithm": true
+ },
+ "contactPerson": {
+ "technical": {
+ "givenName": "{{ mail.admin_name }}",
+ "emailAddress": "{{ mail.admin_address }}"
+ }
+ },
+ "organization": {
+ "en-US": {
+ "name": "{{ org.name }}",
+ "displayname": "{{ org.name }}",
+ "url": "{{ org.url }}"
+ }
+ }
+}
diff --git a/roles/sram_sbs/templates/saml_settings.json.j2 b/roles/sram_sbs/templates/saml_settings.json.j2
new file mode 100644
index 000000000..073651110
--- /dev/null
+++ b/roles/sram_sbs/templates/saml_settings.json.j2
@@ -0,0 +1,22 @@
+{
+ "strict": true,
+ "debug": true,
+ "sp": {
+ "entityId": "{{ sram_sbs_surf_secure_id.sp_entity_id }}",
+ "assertionConsumerService": {
+ "url": "{{ sram_sbs_surf_secure_id.acs_url }}",
+ "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
+ },
+ "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified",
+ "x509cert": "{{ sram_sbs_surf_secure_id.pub | barepem }}",
+ "privateKey": "{{ sram_sbs_surf_secure_id.priv | barepem }}"
+ },
+ "idp": {
+ "entityId": "{{ sram_sbs_ssid_entityid }}",
+ "singleSignOnService": {
+ "url": "{{ sram_sbs_ssid_sso_endpoint }}",
+ "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
+ },
+ "x509cert": "{{ sram_sbs_surf_secure_id.sa_idp_certificate | barepem }}"
+ }
+}
diff --git a/roles/sram_sbs/templates/sbs-apache.conf.j2 b/roles/sram_sbs/templates/sbs-apache.conf.j2
new file mode 100644
index 000000000..ebf19db99
--- /dev/null
+++ b/roles/sram_sbs/templates/sbs-apache.conf.j2
@@ -0,0 +1,30 @@
+ServerName {{ sram_sbs_base_domain }}
+#ErrorLog /proc/self/fd/2
+#CustomLog /proc/self/fd/1 common
+DocumentRoot /opt/sbs/client/dist
+
+Header set Content-Security-Policy "default-src 'self'; base-uri 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; frame-src 'none'; form-action 'self' https://*.{{ base_domain }}; frame-ancestors 'none'; block-all-mixed-content;"
+Header set Permissions-Policy "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(self), gamepad=(), speaker-selection=()"
+
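+# SPA fallback: any request that is not an API path and not an existing file is served index.html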
+RewriteEngine On
+RewriteCond %{REQUEST_URI} !^/(api|pam-weblogin|flasgger_static|swagger|health|config|info|socket.io)
+RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f
+RewriteRule ^/(.*)$ /index.html [L]
+
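+# Proxy the API paths to the SBS backend container; socket.io is proxied as a WebSocket connection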
+ProxyRequests off
+ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sram-sbs-server:8080/
+ProxyPassReverse / http://sram-sbs-server:8080/
+ProxyPass /socket.io/ ws://sram-sbs-server:8080/socket.io/
+ProxyPassReverse /socket.io/ ws://sram-sbs-server:8080/socket.io/
+
+# Long-lived caching for fingerprinted build assets; index.html is always revalidated.
+# NB: the exact match patterns here are assumptions; adjust them to the client build layout.
+<FilesMatch "\.(js|css|png|jpg|svg|ico|woff2?)$">
+    Header set Cache-Control: "public, max-age=31536000, immutable"
+</FilesMatch>
+<FilesMatch "^index\.html$">
+    Header set Cache-Control: "no-cache, private"
+</FilesMatch>
+
+<Directory /opt/sbs/client/dist>
+    Require all granted
+    Options -Indexes
+</Directory>
diff --git a/roles/stepupazuremfa/tasks/main.yml b/roles/stepupazuremfa/tasks/main.yml
index 7e01fdc56..dbee925c5 100644
--- a/roles/stepupazuremfa/tasks/main.yml
+++ b/roles/stepupazuremfa/tasks/main.yml
@@ -26,15 +26,16 @@
- "{{ current_release_config_dir_name }}"
- "{{ current_release_appdir }}/public/images"
-- name: Create federation-metadata cache dir
+- name: Empty and recreate the federation-metadata cache dir
ansible.builtin.file:
- state: directory
- dest: "{{ item }}"
+ state: "{{ item }}"
+ dest: "{{ current_release_appdir }}/federation-metadata"
owner: "{{ appname }}"
group: root
mode: "0755"
with_items:
- - "{{ current_release_appdir }}/federation-metadata"
+ - absent
+ - directory
- name: Install images
ansible.builtin.include_role: