diff --git a/README.md b/README.md index f48c680..6ed1de8 100644 --- a/README.md +++ b/README.md @@ -95,6 +95,21 @@ To destroy the infrastructure, go to the `xnat-aws/provision` directory and type terraform destroy ``` +If this command is interrupted, e.g. if you lose internet connection when running locally, you may find that you can no longer run `terraform destroy` successfully. +Therefore you need to manually delete some resources in the AWS console, but you can encounter errors when attempting to delete certain resources: +`The vpc 'vpc-id' has dependencies and cannot be deleted.` or +`Network interface is currently in use and is of type "interface".` + +To find the remaining VPC dependencies, go to the `xnat-aws/provision` directory and type: + +```bash + ./show_resources_to_delete.sh +``` + +N.B. You need to add your `VPC ID` and `region` to the `show_resources_to_delete.sh` script. + +After deleting the dependencies you can retry deleting your VPC and/or Network interface - [see more info](https://repost.aws/knowledge-center/troubleshoot-dependency-error-delete-vpc). + ## AWS cost estimate [It is estimated](provision/aws-cost-estimate.pdf) the AWS resources will cost approximately **$270 diff --git a/configure/.gitignore b/configure/.gitignore index a46f5a3..2ef38cc 100644 --- a/configure/.gitignore +++ b/configure/.gitignore @@ -9,5 +9,5 @@ hosts.yml .vault_password group_vars/all/vault -group_vars/web/vault +group_vars/xnat/vault diff --git a/configure/README.md b/configure/README.md index 0c717c8..aa27ebe 100644 --- a/configure/README.md +++ b/configure/README.md @@ -15,7 +15,7 @@ This will install the dependencies listed in [requirements.txt](requirements.txt To run the configuration with Ansible we will need to: - install required Ansible roles and collection -- run the `install_container_service.yml` and `install_xnat.yml` playbooks +- run the `install_xnat.yml` playbook These steps are done in the script `xnat-aws/configure/install_xnat.sh`. 
To run the script, go to the `xnat-aws/configure` directory and run the following command: diff --git a/configure/group_vars/all/vars b/configure/group_vars/all/vars new file mode 100644 index 0000000..fcbe17e --- /dev/null +++ b/configure/group_vars/all/vars @@ -0,0 +1,100 @@ +--- +xnat_data_dir: /data +xnat_root_dir: "{{ xnat_data_dir }}/xnat" +xnat_home_dir: "{{ xnat_root_dir }}/home" + +xnat_common_config: + admin_email: xnatadmin@{{ hostvars['xnat_web']['ansible_host'] }} + restrictUserListAccessToAdmins: true + uiAllowNonAdminProjectCreation: false + allowNonAdminsToClaimUnassignedSessions: true + reloadPrearcDatabaseOnStartup: true + par: false + primaryAdminUsername: "{{ xnat_service_admin.username }}" + receivedFileUser: "{{ xnat_service_admin.username }}" + ipsThatCanSendEmailsThroughRest: 127.0.0.1 + sessionXmlRebuilderInterval: "5" + # "^.*$" for all IPs + enabledProviders: + - localdb + enableSitewideAnonymizationScript: true + sitewideAnonymizationScript: + //\nversion \"6.1\"\nproject != \"Unassigned\" ? (0008,1030) := + project\n(0010,0010) := subject\n(0010,0020) := session + +xnat_service_admin: + username: "admin_user" + firstname: "admin" + lastname: "user" + password: "{{ vault_service_admin_password }}" + +package_registry: + enabled: false + url: "" + authentication_header: + Bearer {{ vault_package_registry_token | default(omit) }} + +# Set this to true if selinux is enabled on the hosting OS +selinux_enabled: true + +# XNAT supports PostgreSQL 11-14 +postgresql_version: 14 +postgresql_use_ssl: false + +java_keystore: + keystore_pass: "{{ vault_keystore_password }}" + +# JSON representation of the site-wide anonymisation script: this could be +# defined in a string, or extracted from a template file e.g. 
using +# lookup('template', 'foo.j2') | to_json +xnat_sitewide_anonymization_script: + "{{ xnat_common_config.sitewideAnonymizationScript | to_json }}" + +# web server VM +web_server: + host: "{{ hostvars['xnat_web']['ansible_host'] }}" + url: "http://{{ hostvars['xnat_web']['ansible_host'] }}" + ip: "{{ hostvars['xnat_web']['private_ip'] }}" + storage_dir: "{{ external_storage_drive }}/data" + +# database server VM +db_server: + host: "{{ hostvars['xnat_web']['database_hostname'] }}" + port: "{{ hostvars['xnat_web']['database_port'] }}" + postgresql_database: "{{ hostvars['xnat_web']['database_name'] }}" + postgresql_user: "{{ hostvars['xnat_web']['database_user'] }}" + postgresql_password: "{{ vault_postgres_xnat_password }}" + +# SSL certificate settings +ssl: + use_ssl: false + server_cert: "/etc/ssl/certs/{{ hostvars['xnat_web']['ansible_host'] }}.cert" + server_key: "/etc/ssl/certs/{{ hostvars['xnat_web']['ansible_host'] }}.key" + validate_certs: false + + +# XNAT configuration +xnat_config: + site_name: MIRSG_XNAT + site_description:

MIRSG XNAT

A test instance of XNAT. + admin_password: "{{ vault_admin_password }}" + +# mirsg.infrastructure.install_python +install_python: + version: "3" + pip_version: 21.3.1 + pip_executable: /usr/bin/pip3 + system_packages: + - python3 + - python3-pip + - python3-setuptools + pip_packages: + - cryptography + +# Mount point for external storage +external_storage_drive: "/storage" +mount_efs_src: "{{ hostvars[inventory_hostname]['efs_hostname'] }}:/" +mount_efs_directory: "{{ external_storage_drive }}" +mount_efs_fstype: "nfs4" +mount_efs_opts: "nfsvers=4.1,rsize=1048576,hard,timeo=600,retrans=2,noresvport" +mount_efs_state: mounted diff --git a/configure/group_vars/all/vars/docker.yml b/configure/group_vars/all/vars/docker.yml deleted file mode 100644 index 6e194a0..0000000 --- a/configure/group_vars/all/vars/docker.yml +++ /dev/null @@ -1,10 +0,0 @@ ---- -docker_client: - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.owner }}" - cert_dir: "/usr/share/tomcat/.docker" - ssl_key_file: "/usr/share/tomcat/.docker/key.pem" - ssl_csr_file: "/usr/share/tomcat/.docker/docker.csr" - ssl_pk8_file: "/usr/share/tomcat/.docker/docker.pk8" - ssl_cert_file: "/usr/share/tomcat/.docker/cert.pem" - server_ca_cert_file: "/usr/share/tomcat/.docker/ca.pem" diff --git a/configure/group_vars/all/vars/general.yml b/configure/group_vars/all/vars/general.yml deleted file mode 100644 index 48b0226..0000000 --- a/configure/group_vars/all/vars/general.yml +++ /dev/null @@ -1,35 +0,0 @@ ---- -package_registry: - enabled: false - url: "" - authentication_header: {} - -# Directory on the control node where the Ansible scripts can store files that -# need to be temporarily cached, such as certificate files that are copied -# between hosts. -# You can persist these files between runs to speed up future deployments. 
-# In a CI setup, it is best to choose a location that is not within the -# repository clone, as the files that are created could interfere with the CI's -# automated checkout and update processes -ansible_cache_dir: "{{ lookup('env', 'HOME') }}/ansible_persistent_files" - -# Locale for the servers -server_locale: "en_GB.UTF-8" - -# Set this to true if selinux is enabled on the hosting OS -selinux_enabled: true - -# Mount point for external storage -EXTERNAL_STORAGE_DRIVE: "/storage" - -# Infrastructure -monitoring_service_enabled: false -container_service_enabled: true -container_service_remote_xnat_root: "/storage/data/xnat" - -efs_mount: - src: "{{ hostvars[inventory_hostname]['efs_hostname'] }}:/" - directory: "{{ EXTERNAL_STORAGE_DRIVE }}" - fstype: "nfs4" - opts: "nfsvers=4.1,rsize=1048576,hard,timeo=600,retrans=2,noresvport" - state: mounted diff --git a/configure/group_vars/all/vars/python.yml b/configure/group_vars/all/vars/python.yml deleted file mode 100644 index e87db9c..0000000 --- a/configure/group_vars/all/vars/python.yml +++ /dev/null @@ -1,11 +0,0 @@ ---- -install_python: - version: "3" - pip_version: "21.3.1" - pip_executable: "pip3" - system_packages: - - python3 - - python3-pip - - python3-setuptools - pip_packages: - - cryptography diff --git a/configure/group_vars/all/vars/xnat.yml b/configure/group_vars/all/vars/xnat.yml deleted file mode 100644 index 798ff9c..0000000 --- a/configure/group_vars/all/vars/xnat.yml +++ /dev/null @@ -1,15 +0,0 @@ ---- -# web server VM -xnat_web_server: - host: "{{ hostvars['xnat_web']['ansible_host'] }}" - url: "http://{{ hostvars['xnat_web']['ansible_host'] }}" - ip: "{{ hostvars['xnat_web']['private_ip'] }}" - storage_dir: "{{ EXTERNAL_STORAGE_DRIVE }}/data" - -# database server VM -xnat_db: - host: "{{ hostvars['xnat_web']['database_hostname'] }}" - port: "{{ hostvars['xnat_web']['database_port'] }}" - postgres_xnat_database: "{{ hostvars['xnat_web']['database_name'] }}" - postgres_xnat_user: "{{ 
hostvars['xnat_web']['database_user'] }}" - postgres_xnat_password: "{{ vault_postgres_xnat_password }}" diff --git a/configure/group_vars/container_service.yml b/configure/group_vars/container_service.yml new file mode 100644 index 0000000..7042230 --- /dev/null +++ b/configure/group_vars/container_service.yml @@ -0,0 +1,8 @@ +--- +# mirsg.infrastructure.docker - only used by the container_service_host group +# but the container_service_client group needs access to these variables +docker_client_certificate_cache_directory: + "{{ ansible_cache_dir }}/cserv_certificates/cserv" +docker_server_hostname: "{{ hostvars['xnat_cserv']['ansible_host'] }}" +docker_server_ip: "{{ hostvars['xnat_cserv']['private_ip'] }}" +docker_server_port: 2376 diff --git a/configure/group_vars/container_service_client.yml b/configure/group_vars/container_service_client.yml new file mode 100644 index 0000000..abede34 --- /dev/null +++ b/configure/group_vars/container_service_client.yml @@ -0,0 +1,15 @@ +--- +# mirsg.xnat_container_service +xnat_container_service_name: Container Service +xnat_container_service_url: "{{ web_server.url }}/xapi/docker/server" +xnat_container_service_client_hostname: "{{ hostvars['xnat_web']['ansible_host'] }}" +xnat_container_service_validate_certs: "{{ ssl.validate_certs }}" + +xnat_container_service_hostname: "{{ docker_server_hostname }}" +xnat_container_service_ip: "{{ docker_server_ip }}" +xnat_container_service_port: "{{ docker_server_port }}" +xnat_container_service_certificate_cache_directory: + "{{ docker_client_certificate_cache_directory }}" + +xnat_container_service_path_translation_xnat_prefix: "{{ xnat_root_dir }}" +xnat_container_service_path_translation_docker_prefix: /storage/data/xnat diff --git a/configure/group_vars/container_service_host.yml b/configure/group_vars/container_service_host.yml new file mode 100644 index 0000000..2cf110d --- /dev/null +++ b/configure/group_vars/container_service_host.yml @@ -0,0 +1,10 @@ +--- +# 
mirsg.infrastructure.docker +docker_generate_certificates: true # generate TLS certs for clients +docker_client_hostnames: + - "{{ hostvars['xnat_web']['ansible_host'] }}" +docker_tls_verify: false + +# docker<25 is required for XNAT +# see: https://groups.google.com/g/xnat_discussion/c/yyPBkN4kayE/m/LUe5GQH5AAAJ +docker_version: 24.0.9 diff --git a/configure/group_vars/cserv_hosts/vars/docker.yml b/configure/group_vars/cserv_hosts/vars/docker.yml deleted file mode 100644 index 79bc034..0000000 --- a/configure/group_vars/cserv_hosts/vars/docker.yml +++ /dev/null @@ -1,18 +0,0 @@ ---- -docker: - owner: "root" - group: "root" - cert_dir: "/home/docker/.docker" - client_cert_dir: "/home/docker/.docker/client_certs" - ca_key: "/home/docker/.docker/ca.key" - ca_csr: "/home/docker/.docker/ca.csr" - ca_cert: "/home/docker/.docker/ca.pem" - server_key: "/home/docker/.docker/server-key.pem" - server_csr: "/home/docker/.docker/server.csr" - server_cert: "/home/docker/.docker/server-cert.pem" - config_dir: "/etc/docker" - daemon_conf_file: "/etc/docker/daemon.json" - service_file_dir: "/etc/systemd/system/docker.service.d" - service_name: "docker" - repo_url: "https://download.docker.com/linux/centos/docker-ce.repo" - yum_package: "docker" diff --git a/configure/group_vars/web/vars/java.yml b/configure/group_vars/web/vars/java.yml deleted file mode 100644 index 60ea99c..0000000 --- a/configure/group_vars/web/vars/java.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -java: - profile_d: "/etc/profile.d" - home: "/usr/lib/jvm/jre" - package: "java-1.8.0-openjdk-devel" - keystore_path: "/usr/lib/jvm/jre/lib/security/cacerts/" diff --git a/configure/group_vars/web/vars/nginx.yml b/configure/group_vars/web/vars/nginx.yml deleted file mode 100644 index 28fe241..0000000 --- a/configure/group_vars/web/vars/nginx.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -nginx: - owner: root - group: root - log_folder: "/var/log/nginx" - http_port: 80 - https_port: 443 - certs_dir: "/etc/nginx/ssl" - 
dh_params_file: "/etc/nginx/ssl/dhparam.pem" - conf_file: "/etc/nginx/nginx.conf" - ssl_cert_file: "/etc/nginx/ssl/server.cert" - ssl_key_file: "/etc/nginx/ssl/server.key" - -# Support for ipv6 -ipv6_enabled: false - -# Bit size for OpenSSL Diffie-Hellman Parameters. Higher bit sizes are more -# secure, but require exponentially larger times for the one-off parameter -# generation. Use 4096 for production. These may take 10mins+ to generate but -# are only generated once per server. -# For local testing (non-production), use 2096 to speed up deployment. -diffie_helman_size_bits: 2048 - -dicom_port: 8104 -xnat_dicom_port: 8105 diff --git a/configure/group_vars/web/vars/postgresql.yml b/configure/group_vars/web/vars/postgresql.yml deleted file mode 100644 index f050d95..0000000 --- a/configure/group_vars/web/vars/postgresql.yml +++ /dev/null @@ -1,14 +0,0 @@ ---- -# XNAT supports PostgreSQL 10 - 14 -# See: https://groups.google.com/g/xnat_discussion/c/INKizGBktEQ/m/jauYgo8oAAAJ -postgresql_version: "14" - -# For installing postgres -postgresql_rpm_gpg_key_pgdg: "https://www.postgresql.org/download/keys/RPM-GPG-KEY-PGDG" - -# mirsg.postgresql - download and install - we need to do this on both the web server and the db -postgresql_install: - disable_gpg_check: false - rpm: "https://download.postgresql.org/pub/repos/yum/reporpms/EL-{{ ansible_facts['distribution_major_version'] }}-x86_64/pgdg-redhat-repo-latest.noarch.rpm" - yum_package: "postgresql{{ postgresql_version }}-server" - yum_contrib_package: "postgresql{{ postgresql_version }}-contrib" diff --git a/configure/group_vars/web/vars/ssl.yml b/configure/group_vars/web/vars/ssl.yml deleted file mode 100644 index 5328770..0000000 --- a/configure/group_vars/web/vars/ssl.yml +++ /dev/null @@ -1,7 +0,0 @@ ---- -# SSL certificate settings -ssl: - use_ssl: false - server_cert: "/etc/ssl/certs/{{ hostvars['xnat_web']['ansible_host'] }}.cert" - server_key: "/etc/ssl/certs/{{ hostvars['xnat_web']['ansible_host'] }}.key" - 
validate_certs: no diff --git a/configure/group_vars/web/vars/tomcat.yml b/configure/group_vars/web/vars/tomcat.yml deleted file mode 100644 index 06e0791..0000000 --- a/configure/group_vars/web/vars/tomcat.yml +++ /dev/null @@ -1,29 +0,0 @@ ---- -tomcat_version: 9.0.76 - -tomcat_webapp_name: "ROOT" - -tomcat: - catalina_home: "/usr/share/tomcat" - config_file: "/usr/share/tomcat/conf/tomcat.conf" - server_config_file: "/usr/share/tomcat/conf/server.xml" - service_config_file: "/etc/systemd/system/tomcat.service" - owner: "tomcat" - group: "tomcat" - hostname: localhost - server_port: 8005 - catalina_port: 8983 - catalina_redirect_port: 8443 - shutdown_port: 8005 - port: 8080 - root: "/usr/share/tomcat/webapps/{{ tomcat_webapp_name }}" - root_webapp: "/usr/share/tomcat/webapps/{{ tomcat_webapp_name }}.war" - binary_url: "https://archive.apache.org/dist/tomcat/tomcat-\ - {{ tomcat_version.split('.')[0] }}/v{{ tomcat_version }}/bin/\ - apache-tomcat-{{ tomcat_version }}.tar.gz" - -# You may want to increase the heap space if you have enough RAM available -java_mem: - Xms: "512M" - Xmx: "16G" - MetaspaceSize: "300M" diff --git a/configure/group_vars/web/vars/xnat.yml b/configure/group_vars/web/vars/xnat.yml deleted file mode 100644 index 7241419..0000000 --- a/configure/group_vars/web/vars/xnat.yml +++ /dev/null @@ -1,108 +0,0 @@ ---- -xnat_data_dir: "/data" -xnat_root_dir: "{{ xnat_data_dir }}/xnat" -xnat_home_dir: "{{ xnat_root_dir }}/home" - -xnat: - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - install_downloads: "{{ tomcat.catalina_home }}/install_downloads" - pipeline_install_dir: "{{ tomcat.catalina_home }}/pipeline_installer" - processingUrl: "" - -xnat_version: 1.8.7.1 -xnat_pipeline_version: 1.8.3 - -xnat_source: - war_file_name: "xnat-web-{{ xnat_version }}.war" - plugins_downloads_dir: "/ansible/downloads/xnat_plugins" - xnat_downloads_dir: "/ansible/downloads/xnat" - pipeline_installer_file_name: "pipeline-installer-{{ xnat_pipeline_version 
}}.tar" - xnat_war_url: "https://api.bitbucket.org/2.0/repositories/xnatdev/xnat-web/downloads/xnat-web-{{ xnat_version }}.war" - pipelines_url: "https://api.github.com/repos/NrgXnat/xnat-pipeline-engine/tarball/{{ xnat_pipeline_version }}" - context_file_location: "/usr/share/tomcat/webapps/ROOT/META-INF/context.xml" - -xnat_plugin_urls: - - "https://api.bitbucket.org/2.0/repositories/xnatdev/xsync/downloads/xsync-plugin-all-1.5.0.jar" - #- "https://api.bitbucket.org/2.0/repositories/xnatx/ldap-auth-plugin/downloads/ldap-auth-plugin-1.1.0.jar" - - "https://api.bitbucket.org/2.0/repositories/xnatdev/container-service/downloads/container-service-3.3.0-fat.jar" - - "https://api.bitbucket.org/2.0/repositories/xnatx/xnatx-batch-launch-plugin/downloads/batch-launch-0.6.0.jar" - - "https://github.com/VUIIS/dax/raw/main/misc/xnat-plugins/dax-plugin-genProcData-1.4.2.jar" - - "https://api.bitbucket.org/2.0/repositories/icrimaginginformatics/ohif-viewer-xnat-plugin/downloads/ohif-viewer-3.4.1.jar" - - "https://api.bitbucket.org/2.0/repositories/xnatx/ml-plugin/downloads/ml-plugin-1.0.2.jar" - - "https://api.bitbucket.org/2.0/repositories/xnatx/datasets-plugin/downloads/datasets-plugin-1.0.2.jar" - - "https://api.bitbucket.org/2.0/repositories/xnatdev/xnat-image-viewer-plugin/downloads/ximgview-plugin-1.0.2.jar" - - "https://api.bitbucket.org/2.0/repositories/xnatx/xnatx-dxm-settings-plugin/downloads/dxm-settings-plugin-1.0.jar" - -xnat_plugin_bundle_urls: [] -xnat_plugin_packages: [] -xnat_server_specific_plugin_urls: [] -xnat_server_specific_plugin_packages: [] - -pipeline_engine_enabled: true - -java_keystore: - keystore_pass: "{{ vault_keystore_password }}" - -# JSON representation of the site-wide anonymisation script: this could be -# defined in a string, or extracted from a template file e.g. 
using -# lookup('template, 'foo.j2') | to_json -xnat_sitewide_anonymization_script: "{{ xnat_common_config.sitewideAnonymizationScript | to_json }}" - -# Path to server logo file -xnat_config_logo: "/images/logo.png" - -xnat_service_admin: - username: "admin_user" - firstname: "admin" - lastname: "user" - password: "{{ vault_service_admin_password }}" - -# XNAT configuration shared between all servers -xnat_common_config: - admin_email: "xnatadmin@{{ hostvars['xnat_web']['ansible_host'] }}" - restrictUserListAccessToAdmins: true - uiAllowNonAdminProjectCreation: false - allowNonAdminsToClaimUnassignedSessions: true - reloadPrearcDatabaseOnStartup: true - par: false - primaryAdminUsername: "{{ xnat_service_admin.username }}" - receivedFileUser: "{{ xnat_service_admin.username }}" - ipsThatCanSendEmailsThroughRest: "127.0.0.1" - sessionXmlRebuilderInterval: "5" - # "^.*$" for all IPs - enabledProviders: ["localdb"] - enableSitewideAnonymizationScript: true - sitewideAnonymizationScript: "//\nversion \"6.1\"\nproject != \"Unassigned\" ? (0008,1030) := project\n(0010,0010) := subject\n(0010,0020) := session" - -# XNAT configuration -xnat_config: - site_name: "XNAT" - site_description: "

XNAT

An instance of XNAT deployed on AWS." - admin_password: "{{ vault_admin_password }}" - -# LDAP configuration -ldap: - enabled: false - name: "" - address: "" - userdn: "" - password: "{{ vault_ldap_password }}" - base: "" - filter: "" - ca_cert: "" - keystore_alias: "" - -ldap_ca_cert_file_on_client: "{{ xnat.install_downloads }}/certs/ldap-ca.cert" - -# Mail server settings -smtp: - enabled: "false" - hostname: "192.168.56.101" - port: "2525" - protocol: "smtp" - auth: "" - username: "" - password: "{{ vault_smtp_password }}" - start_tls: "false" - ssl_trust: "*" diff --git a/configure/group_vars/web/vars/xnat_project.yml b/configure/group_vars/web/vars/xnat_project.yml deleted file mode 100644 index 606d1ea..0000000 --- a/configure/group_vars/web/vars/xnat_project.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -# Variables for creating an XNAT project -xnat_project: - id: ibash - metadata_file: 'ibash_project.xml' - -xnat_investigator: - title: "Prof" - firstname: "Charles" - lastname: "Xavier" - institution: "Xavier Institute For Higher Learning" - -xnat_project_owner: - username: "profX" - password: "carlos1602" - firstName: "Charles" - lastName: "Xavier" - email: "c.xavier@{{ hostvars['xnat_web']['ansible_host'] }}" - -xnat_download_dir: "ideas-workshop-datasets" -xnat_project_data: - url: "https://ideas-workshop-datasets.s3.amazonaws.com/xnat-ibash.zip" - zip_file: "{{ xnat_download_dir }}/ibash.zip" - unzip_target: "{{ xnat_download_dir }}/ibash" - sessions_metadata: "{{ xnat_download_dir }}/ibash/ibash-sessions.csv" diff --git a/configure/group_vars/xnat/vars b/configure/group_vars/xnat/vars new file mode 100644 index 0000000..99b7478 --- /dev/null +++ b/configure/group_vars/xnat/vars @@ -0,0 +1,44 @@ +# mirsg.xnat.xnat +# Some times the default admin account hasn't finished creating even after tomcat has started +# Add a delay here to give the admin account a chance to be created +# Note, this issue only seems to happen in CI +xnat_wait_for_tomcat: 15 + +# You 
may want to increase the heap space if you have enough RAM available +java_mem: + Xms: "512M" + Xmx: "6G" + MetaspaceSize: "300M" + +xnat_plugin_urls: + - https://api.bitbucket.org/2.0/repositories/xnatdev/container-service/downloads/container-service-3.4.3-fat.jar + - https://api.bitbucket.org/2.0/repositories/icrimaginginformatics/ohif-viewer-xnat-plugin/downloads/ohif-viewer-3.6.2.jar + - https://api.bitbucket.org/2.0/repositories/xnatx/ml-plugin/downloads/ml-plugin-1.0.2.jar + - https://api.bitbucket.org/2.0/repositories/xnatx/datasets-plugin/downloads/datasets-plugin-1.0.3.jar + - https://api.bitbucket.org/2.0/repositories/xnatdev/xnat-image-viewer-plugin/downloads/ximgview-plugin-1.0.2.jar + - https://api.bitbucket.org/2.0/repositories/xnatx/xnatx-dxm-settings-plugin/downloads/dxm-settings-plugin-1.0.jar + +# Variables for creating an XNAT project +xnat_project: + id: ibash + metadata_file: 'ibash_project.xml' + +xnat_investigator: + title: "Prof" + firstname: "Charles" + lastname: "Xavier" + institution: "Xavier Institute For Higher Learning" + +xnat_project_owner: + username: "profX" + password: "carlos1602" + firstName: "Charles" + lastName: "Xavier" + email: "c.xavier@{{ hostvars['xnat_web']['ansible_host'] }}" + +xnat_download_dir: "ideas-workshop-datasets" +xnat_project_data: + url: "https://ideas-workshop-datasets.s3.amazonaws.com/xnat-ibash.zip" + zip_file: "{{ xnat_download_dir }}/ibash.zip" + unzip_target: "{{ xnat_download_dir }}/ibash" + sessions_metadata: "{{ xnat_download_dir }}/ibash/ibash-sessions.csv" diff --git a/configure/group_vars/xnat_container_service/vars/container_service.yml b/configure/group_vars/xnat_container_service/vars/container_service.yml deleted file mode 100644 index 24bfd21..0000000 --- a/configure/group_vars/xnat_container_service/vars/container_service.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- -# Variables for the Container Service hosts and the clients (web servers) it serves -xnat_container_service: - name: "Container 
Service" - host: "{{ hostvars['xnat_cserv']['ansible_host'] }}" - ip: "{{ hostvars['xnat_cserv']['private_ip'] }}" - port: "2376" - clients: - - "{{ hostvars['xnat_web']['ansible_host'] }}" - client_ips: - - "{{ hostvars['xnat_web']['ansible_ip'] }}" - -xnat_container_service_temp_files_cert_dir: "{{ ansible_cache_dir }}/cserv_certificates/cserv" diff --git a/configure/install_xnat.sh b/configure/install_xnat.sh index 8c9b340..167165f 100755 --- a/configure/install_xnat.sh +++ b/configure/install_xnat.sh @@ -4,8 +4,5 @@ set -e echo "Install the required Ansible dependencies" ansible-galaxy install -r playbooks/roles/requirements.yml --force -echo "Install the XNAT Container service" -ansible-playbook playbooks/install_container_service.yml -i hosts.yml --vault-password-file=.vault_password - echo "Install and configure XNAT" ansible-playbook playbooks/install_xnat.yml -i hosts.yml --vault-password-file=.vault_password diff --git a/configure/playbooks/group_vars/all.yml b/configure/playbooks/group_vars/all.yml new file mode 100644 index 0000000..d917b2f --- /dev/null +++ b/configure/playbooks/group_vars/all.yml @@ -0,0 +1,13 @@ +--- +ansible_cache_dir: "{{ lookup('env', 'HOME') }}/ansible_persistent_files" + +# mirsg.infrastructure.postgresql - download and install - we need to do this on both the web server and the db +postgresql_install: + disable_gpg_check: false + rpm: >- + https://download.postgresql.org/pub/repos/yum/reporpms/EL-{{ + ansible_facts['distribution_major_version'] }}-{{ + ansible_facts['architecture'] }}/pgdg-redhat-repo-latest.noarch.rpm + yum_package: postgresql{{ postgresql_version }}-server + yum_contrib_package: postgresql{{ postgresql_version }}-contrib # required only on CentOS 7 + yum_client_package: postgresql{{ postgresql_version }} diff --git a/configure/playbooks/group_vars/xnat.yml b/configure/playbooks/group_vars/xnat.yml new file mode 100644 index 0000000..e3bb105 --- /dev/null +++ b/configure/playbooks/group_vars/xnat.yml @@ -0,0 
+1,62 @@ +--- +xnat: + owner: "{{ tomcat_owner }}" + group: "{{ tomcat_group }}" + install_downloads: "{{ tomcat_catalina_home }}/install_downloads" + pipeline_install_dir: "{{ tomcat_catalina_home }}/pipeline_installer" + processingUrl: "" + +xnat_source: + war_file_name: xnat-web-{{ xnat_version }}.war + plugins_downloads_dir: /ansible/downloads/xnat_plugins + xnat_downloads_dir: /ansible/downloads/xnat + pipeline_installer_file_name: + pipeline-installer-{{ xnat_pipeline_version }}.tar + xnat_war_url: + https://api.bitbucket.org/2.0/repositories/xnatdev/xnat-web/downloads/xnat-web-{{ + xnat_version }}.war + pipelines_url: + https://api.github.com/repos/NrgXnat/xnat-pipeline-engine/tarball/{{ + xnat_pipeline_version }} + context_file_location: /usr/share/tomcat/webapps/ROOT/META-INF/context.xml + +# mirsg.infrastructure.tomcat +tomcat_version: 9.0.82 +tomcat_owner: tomcat +tomcat_group: tomcat + +tomcat_webapp_name: ROOT +tomcat_root: /usr/share/tomcat/webapps/{{ tomcat_webapp_name }} +tomcat_root_webapp: "{{ tomcat_root }}.war" + +tomcat_catalina_home: /usr/share/tomcat +tomcat_catalina_opts: >- + -Dxnat.home={{ xnat_home_dir }} -Xms{{ java_mem.Xms | default("512M") }} + -Xmx{{ java_mem.Xmx | default("1G") }} -XX:MetaspaceSize={{ + java_mem.MetaspaceSize | default("100M") }} -XX:+UseG1GC -server + +tomcat_hostname: localhost +tomcat_port: 8080 + +tomcat_backup_directory: /usr/share/tomcat_bkp +tomcat_items_to_restore: + - "{{ tomcat_backup_directory }}/webapps" + - "{{ tomcat_backup_directory }}/logs" + - "{{ tomcat_backup_directory }}/install_downloads" + - "{{ tomcat_backup_directory }}/.postgresql" + +java: + keystore_path: /usr/lib/jvm/jre/lib/security/cacerts/ + +ldap_ca_cert_file_on_client: "{{ xnat.install_downloads }}/certs/ldap-ca.cert" + +# mirsg.infrastructure.nginx +nginx_use_ssl: "{{ ssl.use_ssl }}" +nginx_server_name: "{{ web_server.host }}" +nginx_upstream_port: 104 +nginx_upstream_listen_port: 8104 +nginx_proxy_port: 8080 # tomcat +nginx_root: 
/usr/share/tomcat/webapps/ROOT +nginx_app_access_log: "{{ nginx_log_folder }}/xnat.access.log" +nginx_app_error_log: "{{ nginx_log_folder }}/xnat.error.log" +nginx_conf_template: nginx_reverse_proxy_aws.j2 diff --git a/configure/playbooks/install_container_service.yml b/configure/playbooks/install_container_service.yml deleted file mode 100644 index 38b7204..0000000 --- a/configure/playbooks/install_container_service.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -# Set up the Container Service on the hosts -# The client configuration is done in `install_xnat.yml` -- name: Wait until instance is running - hosts: cserv_hosts - gather_facts: false - - roles: - - { role: wait_until_running } - -- name: Provision container service host - hosts: cserv_hosts - become: true - become_user: root - become_method: sudo - gather_facts: true - - roles: - - { role: provision } - - { role: mirsg.install_python } - - { role: docker } - - { - role: sign_docker_client_cert, - clients: "{{ xnat_container_service.clients }}", - } diff --git a/configure/playbooks/install_xnat.yml b/configure/playbooks/install_xnat.yml index 6dc3fa3..0e27d41 100644 --- a/configure/playbooks/install_xnat.yml +++ b/configure/playbooks/install_xnat.yml @@ -1,32 +1,36 @@ --- -- name: Wait until instance is running - hosts: web - gather_facts: false +- name: Install and configure XNAT + hosts: xnat + become: true + gather_facts: true roles: - - { role: wait_until_running } + - role: wait_until_running + - role: mount_efs + - role: mirsg.infrastructure.provision + - role: mirsg.infrastructure.install_python + - role: mirsg.infrastructure.install_java + - role: mirsg.infrastructure.tomcat + - role: mirsg.infrastructure.nginx + - role: mirsg.infrastructure.xnat -- name: Install dependencies - hosts: web +- name: Setup Container Service on hosts + hosts: container_service_host become: true - become_user: root - become_method: sudo gather_facts: true roles: - - { role: provision } - - { role: mirsg.install_python } - - 
{ role: java } - - { role: tomcat } - - { role: nginx } + - role: wait_until_running + - role: mount_efs + - role: mirsg.infrastructure.provision + - role: mirsg.infrastructure.install_python + - role: mirsg.infrastructure.docker -- name: Install and configure XNAT - hosts: web +- name: Setup Container Service on clients (i.e. the web servers) + hosts: container_service_client become: true - become_user: root - become_method: sudo gather_facts: true roles: - - { role: xnat } - - { role: container_service_client, when: container_service_enabled } + - role: mirsg.infrastructure.xnat_container_service + - role: container_service_images diff --git a/configure/playbooks/rerun_tomcat.yml b/configure/playbooks/rerun_tomcat.yml deleted file mode 100644 index 319b668..0000000 --- a/configure/playbooks/rerun_tomcat.yml +++ /dev/null @@ -1,22 +0,0 @@ ---- -- name: Stop and restart tomcat - hosts: web - become: true - become_user: root - become_method: sudo - gather_facts: true - - pre_tasks: - - name: stop tomcat - ansible.builtin.service: - name: tomcat - state: stopped - - roles: - - { role: tomcat } - - post_tasks: - - name: restart tomcat - ansible.builtin.service: - name: tomcat - state: restarted diff --git a/configure/playbooks/roles/container_service_client/tasks/main.yml b/configure/playbooks/roles/container_service_client/tasks/main.yml deleted file mode 100644 index a196aa9..0000000 --- a/configure/playbooks/roles/container_service_client/tasks/main.yml +++ /dev/null @@ -1,69 +0,0 @@ ---- -- name: Ensure docker cert dir exists on client - ansible.builtin.file: - path: "{{ docker_client.cert_dir }}" - state: directory - owner: "{{ docker_client.owner }}" - group: "{{ docker_client.group }}" - mode: 0700 - -- name: Copy docker server certificate to client - ansible.builtin.copy: - src: "{{ xnat_container_service_temp_files_cert_dir }}/ca.pem" - dest: "{{ docker_client.server_ca_cert_file }}" - owner: "{{ docker_client.owner }}" - group: "{{ docker_client.group }}" - 
mode: 0600 - -- name: Copy signed docker client certificate to client - ansible.builtin.copy: - src: "{{ xnat_container_service_temp_files_cert_dir }}/{{ xnat_web_server.host }}.cert" - dest: "{{ docker_client.ssl_cert_file }}" - owner: "{{ docker_client.owner }}" - group: "{{ docker_client.group }}" - mode: 0600 - -- name: Copy private key to client - ansible.builtin.copy: - src: "{{ xnat_container_service_temp_files_cert_dir }}/key.pem" - dest: "{{ docker_client.ssl_key_file }}" - owner: "{{ docker_client.owner }}" - group: "{{ docker_client.group }}" - mode: 0600 - -- name: "Configure XNAT to talk to container service" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/docker/server" - user: "{{ xnat_service_admin.username }}" - password: "{{ xnat_service_admin.password }}" - method: POST - body_format: json - body: - name: "{{ xnat_container_service.name }}" - host: "https://{{ xnat_container_service.host }}:{{ xnat_container_service.port }}" - cert-path: "{{ docker_client.cert_dir }}" - swarm-mode: false - path-translation-xnat-prefix: "{{ xnat_root_dir }}" - path-translation-docker-prefix: "{{ container_service_remote_xnat_root }}" - pull-images-on-xnat-init: false - container-user: "" - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200, 201 - -# Return 201 if added, 400 if already exists -- name: Add images to Container Service - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/commands" - user: "{{ xnat_service_admin.username }}" - password: "{{ xnat_service_admin.password }}" - method: POST - body: "{{ lookup('file', command_file) }}" - body_format: json - validate_certs: "{{ ssl.validate_certs }}" - status_code: [201, 400] - register: xnat_commands_added - changed_when: xnat_commands_added.status == 201 - loop_control: - loop_var: "command_file" - with_fileglob: - - "files/*-command.json" diff --git a/configure/playbooks/roles/container_service_client/files/dcm2niix-command.json 
b/configure/playbooks/roles/container_service_images/files/dcm2niix-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/dcm2niix-command.json rename to configure/playbooks/roles/container_service_images/files/dcm2niix-command.json diff --git a/configure/playbooks/roles/container_service_client/files/defaced-mri-convert-command.json b/configure/playbooks/roles/container_service_images/files/defaced-mri-convert-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/defaced-mri-convert-command.json rename to configure/playbooks/roles/container_service_images/files/defaced-mri-convert-command.json diff --git a/configure/playbooks/roles/container_service_client/files/defaced-recon-all-command.json b/configure/playbooks/roles/container_service_images/files/defaced-recon-all-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/defaced-recon-all-command.json rename to configure/playbooks/roles/container_service_images/files/defaced-recon-all-command.json diff --git a/configure/playbooks/roles/container_service_client/files/defaced-recon-all-gpu-command.json b/configure/playbooks/roles/container_service_images/files/defaced-recon-all-gpu-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/defaced-recon-all-gpu-command.json rename to configure/playbooks/roles/container_service_images/files/defaced-recon-all-gpu-command.json diff --git a/configure/playbooks/roles/container_service_client/files/mri-convert-command.json b/configure/playbooks/roles/container_service_images/files/mri-convert-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/mri-convert-command.json rename to configure/playbooks/roles/container_service_images/files/mri-convert-command.json diff --git 
a/configure/playbooks/roles/container_service_client/files/pydeface-command.json b/configure/playbooks/roles/container_service_images/files/pydeface-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/pydeface-command.json rename to configure/playbooks/roles/container_service_images/files/pydeface-command.json diff --git a/configure/playbooks/roles/container_service_client/files/recon-all-command.json b/configure/playbooks/roles/container_service_images/files/recon-all-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/recon-all-command.json rename to configure/playbooks/roles/container_service_images/files/recon-all-command.json diff --git a/configure/playbooks/roles/container_service_client/files/recon-all-gpu-command.json b/configure/playbooks/roles/container_service_images/files/recon-all-gpu-command.json similarity index 100% rename from configure/playbooks/roles/container_service_client/files/recon-all-gpu-command.json rename to configure/playbooks/roles/container_service_images/files/recon-all-gpu-command.json diff --git a/configure/playbooks/roles/container_service_images/tasks/main.yml b/configure/playbooks/roles/container_service_images/tasks/main.yml new file mode 100644 index 0000000..52ad20b --- /dev/null +++ b/configure/playbooks/roles/container_service_images/tasks/main.yml @@ -0,0 +1,18 @@ +--- +# Return 201 if added, 400 if already exists +- name: Add images to Container Service + ansible.builtin.uri: + url: "{{ web_server.url }}/xapi/commands" + user: "{{ xnat_service_admin.username }}" + password: "{{ xnat_service_admin.password }}" + method: POST + body: "{{ lookup('file', command_file) }}" + body_format: json + validate_certs: "{{ ssl.validate_certs }}" + status_code: [201, 400] + register: xnat_commands_added + changed_when: xnat_commands_added.status == 201 + loop_control: + loop_var: "command_file" + with_fileglob: + - 
"files/*-command.json" diff --git a/configure/playbooks/roles/docker/handlers/main.yml b/configure/playbooks/roles/docker/handlers/main.yml deleted file mode 100644 index c91c97e..0000000 --- a/configure/playbooks/roles/docker/handlers/main.yml +++ /dev/null @@ -1,10 +0,0 @@ ---- -- name: restart docker - ansible.builtin.service: - name: "{{ docker.service_name }}" - state: restarted - -- name: reload docker - ansible.builtin.service: - name: "{{ docker.service_name }}" - state: reloaded diff --git a/configure/playbooks/roles/docker/tasks/create_docker_server_cert.yml b/configure/playbooks/roles/docker/tasks/create_docker_server_cert.yml deleted file mode 100644 index 8f1de0f..0000000 --- a/configure/playbooks/roles/docker/tasks/create_docker_server_cert.yml +++ /dev/null @@ -1,67 +0,0 @@ ---- -- name: Ensure docker cert dir exists - ansible.builtin.file: - path: "{{ docker.cert_dir }}" - state: directory - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0700 - -- name: Generate CA private key - community.crypto.openssl_privatekey: - path: "{{ docker.ca_key }}" - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0400 - -- name: Generate CA CSR - community.crypto.openssl_csr: - path: "{{ docker.ca_csr }}" - privatekey_path: "{{ docker.ca_key }}" - common_name: "{{ xnat_container_service.host }}" - subject_alt_name: "IP:{{ xnat_container_service.ip }}" - basic_constraints_critical: true - basic_constraints: ["CA:TRUE"] - -- name: Generate self-signed CA certificate - community.crypto.x509_certificate: - path: "{{ docker.ca_cert }}" - privatekey_path: "{{ docker.ca_key }}" - csr_path: "{{ docker.ca_csr }}" - provider: selfsigned - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0400 - notify: restart docker - -- name: Generate server private key - community.crypto.openssl_privatekey: - path: "{{ docker.server_key }}" - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0400 - -- name: Generate server 
CSR - community.crypto.openssl_csr: - path: "{{ docker.server_csr }}" - privatekey_path: "{{ docker.server_key }}" - common_name: "{{ xnat_container_service.host }}" - subject_alt_name: "IP:{{ xnat_container_service.ip }}" - -- name: Generate server certificate - community.crypto.x509_certificate: - path: "{{ docker.server_cert }}" - csr_path: "{{ docker.server_csr }}" - provider: ownca - ownca_path: "{{ docker.ca_cert }}" - ownca_privatekey_path: "{{ docker.ca_key }}" - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0400 - notify: restart docker - -- name: Copy server certificate - ansible.builtin.fetch: - src: "{{ docker.ca_cert }}" - dest: "{{ xnat_container_service_temp_files_cert_dir }}/ca.pem" - flat: true diff --git a/configure/playbooks/roles/docker/tasks/main.yml b/configure/playbooks/roles/docker/tasks/main.yml deleted file mode 100644 index f6613a2..0000000 --- a/configure/playbooks/roles/docker/tasks/main.yml +++ /dev/null @@ -1,74 +0,0 @@ ---- -- name: Ensure Ansible sefcontext dependencies are installed (Python 2) - ansible.builtin.yum: - name: ["libselinux-python", "policycoreutils-python"] - state: installed - when: ansible_facts["python"]["version"]["major"] is version("2") - -- name: Ensure Ansible sefcontext dependencies are installed (Python 3) - ansible.builtin.yum: - name: ["python3-libselinux", "policycoreutils-python-utils"] - state: installed - when: ansible_facts["python"]["version"]["major"] is version("3") - -- name: Ensure docker dependencies are installed - ansible.builtin.yum: - name: ["yum-utils", "device-mapper-persistent-data", "lvm2", "epel-release"] - state: installed - -- name: Add Docker repository - ansible.builtin.command: "yum-config-manager --add-repo={{ docker.repo_url }}" - args: - creates: /etc/yum.repos.d/docker.repo - -- name: Ensure docker is installed - ansible.builtin.yum: - name: ["docker-ce", "docker-ce-cli", "containerd.io"] - state: installed - -- name: Ensure docker service directory {{ 
docker.service_file_dir }} exists - ansible.builtin.file: - path: "{{ docker.service_file_dir }}" - owner: "root" - group: "root" - state: directory - mode: 0700 - -- name: Set custom docker service configuration - ansible.builtin.template: - src: docker.service.j2 - dest: "{{ docker.service_file_dir }}/docker.conf" - owner: "root" - group: "root" - mode: 0644 - notify: reload docker - -- name: Ensure docker config directory {{ docker.config_dir }} exists - ansible.builtin.file: - path: "{{ docker.config_dir }}" - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - state: directory - mode: 0700 - -- name: Write docker daemon configuration file - ansible.builtin.template: - src: daemon.json.j2 - dest: "{{ docker.daemon_conf_file }}" - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0640 - notify: reload docker - -- name: Generate container server certificate - import_tasks: create_docker_server_cert.yml - notify: restart docker - -- name: "Ensure docker service configuraiton is reloaded before restarting the service" - ansible.builtin.meta: flush_handlers - -- name: Ensure docker daemon is running - ansible.builtin.service: - name: "{{ docker.service_name }}" - state: started - enabled: true diff --git a/configure/playbooks/roles/docker/templates/daemon.json.j2 b/configure/playbooks/roles/docker/templates/daemon.json.j2 deleted file mode 100644 index 89108b4..0000000 --- a/configure/playbooks/roles/docker/templates/daemon.json.j2 +++ /dev/null @@ -1,7 +0,0 @@ -{ - "hosts": ["tcp://{{ xnat_container_service.ip }}:{{ xnat_container_service.port }}", "unix:///var/run/docker.sock"], - "tlsverify": true, - "tlscacert": "{{ docker.ca_cert }}", - "tlscert": "{{ docker.server_cert }}", - "tlskey": "{{ docker.server_key }}" - } diff --git a/configure/playbooks/roles/docker/templates/docker.service.j2 b/configure/playbooks/roles/docker/templates/docker.service.j2 deleted file mode 100644 index 04c3d0e..0000000 --- 
a/configure/playbooks/roles/docker/templates/docker.service.j2 +++ /dev/null @@ -1,5 +0,0 @@ -.include /lib/systemd/system/{{ docker.service_name }}.service - -[Service] -ExecStart= -ExecStart=/usr/bin/dockerd diff --git a/configure/playbooks/roles/java/tasks/main.yml b/configure/playbooks/roles/java/tasks/main.yml deleted file mode 100644 index 4cd508b..0000000 --- a/configure/playbooks/roles/java/tasks/main.yml +++ /dev/null @@ -1,12 +0,0 @@ ---- -- name: ensure Java version {{ java.package }} is installed - ansible.builtin.package: - name: "{{ java.package }}" - state: installed - -- name: Set JAVA_HOME through shell script - ansible.builtin.template: - src: "java_home.sh.j2" - dest: "{{ java.profile_d }}/java_home.sh" - mode: 0644 - when: java.home is defined and java.home != '' diff --git a/configure/playbooks/roles/java/templates/java_home.sh.j2 b/configure/playbooks/roles/java/templates/java_home.sh.j2 deleted file mode 100644 index ab5bbdb..0000000 --- a/configure/playbooks/roles/java/templates/java_home.sh.j2 +++ /dev/null @@ -1 +0,0 @@ -export JAVA_HOME="{{ java.home }}" diff --git a/configure/playbooks/roles/provision/tasks/mount_efs.yml b/configure/playbooks/roles/mount_efs/tasks/main.yml similarity index 56% rename from configure/playbooks/roles/provision/tasks/mount_efs.yml rename to configure/playbooks/roles/mount_efs/tasks/main.yml index 6daf184..2df96df 100644 --- a/configure/playbooks/roles/provision/tasks/mount_efs.yml +++ b/configure/playbooks/roles/mount_efs/tasks/main.yml @@ -6,14 +6,14 @@ - name: Ensure mount directory exists ansible.builtin.file: - path: "{{ efs_mount.directory }}" + path: "{{ mount_efs_directory }}" state: directory mode: "0755" - name: Mount the volume ansible.posix.mount: - src: "{{ efs_mount.src }}" - name: "{{ efs_mount.directory }}" - fstype: "{{ efs_mount.fstype }}" - opts: "{{ efs_mount.opts }}" - state: "{{ efs_mount.state }}" + src: "{{ mount_efs_src }}" + name: "{{ mount_efs_directory }}" + fstype: "{{ 
mount_efs_fstype }}" + opts: "{{ mount_efs_opts }}" + state: "{{ mount_efs_state }}" diff --git a/configure/playbooks/roles/nginx/handlers/main.yml b/configure/playbooks/roles/nginx/handlers/main.yml deleted file mode 100644 index adf8c39..0000000 --- a/configure/playbooks/roles/nginx/handlers/main.yml +++ /dev/null @@ -1,10 +0,0 @@ ---- -- name: restart nginx - ansible.builtin.service: - name: nginx - state: restarted - -- name: reload nginx - ansible.builtin.service: - name: nginx - state: reloaded diff --git a/configure/playbooks/roles/nginx/tasks/main.yml b/configure/playbooks/roles/nginx/tasks/main.yml deleted file mode 100644 index 24f6f51..0000000 --- a/configure/playbooks/roles/nginx/tasks/main.yml +++ /dev/null @@ -1,88 +0,0 @@ ---- -- name: Check if SELinux is in 'enforcing' mode - ansible.builtin.lineinfile: - path: /etc/selinux/config - regexp: "^SELINUX=enforcing" - state: absent - check_mode: true - changed_when: false - register: selinux_enforced - -- name: Check SELinux flag on but VM off - ansible.builtin.fail: - msg: "SELinux flag enabled but disabled on VM" - when: selinux_enabled and not selinux_enforced.found - -- name: Check SELinux flag off but VM on - ansible.builtin.fail: - msg: "SELinux flag disabled but enabled on VM" - when: not selinux_enabled and selinux_enforced.found - -- name: Configure SELinux to allow nginx to listen on port {{ dicom_port }} - community.general.seport: - ports: "{{ dicom_port }}" - proto: tcp - setype: http_port_t - state: present - when: selinux_enabled - -# See: https://stackoverflow.com/a/24830777/17623640 -- name: Configure SELinux to allow httpd to act as relay and keep it persistent across reboots - ansible.posix.seboolean: - name: httpd_can_network_relay - state: true - persistent: true - -- name: Ensure epel is installed - ansible.builtin.yum: - name: "epel-release" - state: installed - -- name: Ensure nginx is installed - ansible.builtin.yum: - name: ["nginx", "nginx-mod-stream"] - state: installed - -- 
name: Ensure nginx certs directory exists - ansible.builtin.file: - path: "{{ nginx.certs_dir }}" - owner: "{{ nginx.owner }}" - group: "{{ nginx.group }}" - state: directory - mode: 0700 - -- name: Copy server certificates to nginx - ansible.builtin.copy: - remote_src: true - src: "{{ item.src }}" - dest: "{{ item.dest }}" - owner: "{{ nginx.owner }}" - group: "{{ nginx.group }}" - mode: 0600 - with_items: - - { src: "{{ ssl.server_cert }}", dest: "{{ nginx.ssl_cert_file }}" } - - { src: "{{ ssl.server_key }}", dest: "{{ nginx.ssl_key_file }}" } - notify: reload nginx - when: ssl.use_ssl - -- name: Generate Diffie-Hellman (DH) parameters with {{ diffie_helman_size_bits }} bits. - community.crypto.openssl_dhparam: - path: "{{ nginx.dh_params_file }}" - size: "{{ diffie_helman_size_bits }}" - notify: reload nginx - -- name: Copy nginx config file - ansible.builtin.template: - src: "nginx.j2" - dest: "{{ nginx.conf_file }}" - owner: "{{ nginx.owner }}" - group: "{{ nginx.group }}" - mode: 0644 - force: true - notify: reload nginx - -- name: Ensure nginx is running - ansible.builtin.service: - name: nginx - state: started - enabled: true diff --git a/configure/playbooks/roles/provision/tasks/locale.yml b/configure/playbooks/roles/provision/tasks/locale.yml deleted file mode 100644 index d93aa62..0000000 --- a/configure/playbooks/roles/provision/tasks/locale.yml +++ /dev/null @@ -1,11 +0,0 @@ ---- -- name: Install locale language pack - ansible.builtin.yum: - name: glibc-langpack-en - state: present - -- name: Set locale # noqa: no-changed when - ansible.builtin.command: "localectl set-locale LANGUAGE={{ server_locale | quote }}" - register: set_locale - failed_when: "'Failed to issue method call: ' in set_locale.stderr" - \ No newline at end of file diff --git a/configure/playbooks/roles/provision/tasks/main.yml b/configure/playbooks/roles/provision/tasks/main.yml deleted file mode 100644 index d0615bd..0000000 --- a/configure/playbooks/roles/provision/tasks/main.yml 
+++ /dev/null @@ -1,9 +0,0 @@ ---- -- name: "Update packages" - import_tasks: package_update.yml - -- name: "Set locale" - import_tasks: locale.yml - -- name: "Mount EFS volume" - ansible.builtin.include_tasks: "mount_efs.yml" diff --git a/configure/playbooks/roles/provision/tasks/package_update.yml b/configure/playbooks/roles/provision/tasks/package_update.yml deleted file mode 100644 index df68937..0000000 --- a/configure/playbooks/roles/provision/tasks/package_update.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -- name: Ensure epel is installed - become: true - ansible.builtin.yum: - name: "epel-release" - state: installed - -- name: Disable default Postgres module (Red Hat 8+) # noqa command-instead-of-module - ansible.builtin.command: yum module disable -y postgresql - register: disable_postgresql_module - changed_when: - - "'Disabling modules:' in disable_postgresql_module.stdout" - -- name: Install PostgreSQL RPM key - ansible.builtin.rpm_key: - state: present - key: "{{ postgresql_rpm_gpg_key_pgdg }}" - when: "'cserv_hosts' not in group_names" - -- name: Ensure postgres RPM is installed on the web server - ansible.builtin.yum: - name: "{{ postgresql_install.rpm }}" - state: present - disable_gpg_check: "{{ postgresql_install.disable_gpg_check }}" - when: "'web' in group_names" diff --git a/configure/playbooks/roles/requirements.yml b/configure/playbooks/roles/requirements.yml index f4e174a..5df3047 100644 --- a/configure/playbooks/roles/requirements.yml +++ b/configure/playbooks/roles/requirements.yml @@ -1,17 +1,8 @@ --- collections: - - community.general - - ansible.posix - - community.docker - - community.crypto - - amazon.aws - -roles: - - - src: https://github.com/UCL-MIRSG/ansible-role-install-python.git - version: 2023.02.9.0 - name: mirsg.install_python - - - src: https://github.com/UCL-MIRSG/ansible-role-ssl-certificates.git - version: 2023.02.8.0 - name: mirsg.ssl_certificates + - name: ansible.posix + - name: amazon.aws + - name: mirsg.infrastructure 
+ type: git + source: https://github.com/UCL-MIRSG/ansible-collection-infra.git + version: 1.22.0 diff --git a/configure/playbooks/roles/setup_xnat_db/tasks/main.yml b/configure/playbooks/roles/setup_xnat_db/tasks/main.yml deleted file mode 100644 index 8698c8f..0000000 --- a/configure/playbooks/roles/setup_xnat_db/tasks/main.yml +++ /dev/null @@ -1,22 +0,0 @@ ---- -- name: Ensure postgres is running - ansible.builtin.service: - name: "{{ postgresql_service.name }}" - state: started - enabled: true - -- name: "Create XNAT PostgreSQL user" - become: true - become_user: postgres - community.postgresql.postgresql_user: - name: "{{ xnat_db.postgres_xnat_user }}" - password: "{{ xnat_db.postgres_xnat_password }}" - port: "{{ xnat_db.port }}" - -- name: "Create XNAT PostgreSQL db" - become: true - become_user: postgres - community.postgresql.postgresql_db: - name: "{{ xnat_db.postgres_xnat_database }}" - owner: "{{ xnat_db.postgres_xnat_user }}" - port: "{{ xnat_db.port }}" diff --git a/configure/playbooks/roles/setup_xnat_project/tasks/create_investigator.yml b/configure/playbooks/roles/setup_xnat_project/tasks/create_investigator.yml index 3157fb1..02f78fb 100644 --- a/configure/playbooks/roles/setup_xnat_project/tasks/create_investigator.yml +++ b/configure/playbooks/roles/setup_xnat_project/tasks/create_investigator.yml @@ -3,7 +3,7 @@ # Returns 200 if created, 409 if already exists - name: "Create new investigator" ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/investigators/" + url: "{{ web_server.url }}/xapi/investigators/" user: "{{ xnat_service_admin.username }}" password: "{{ xnat_service_admin.password }}" method: POST diff --git a/configure/playbooks/roles/setup_xnat_project/tasks/create_owner.yml b/configure/playbooks/roles/setup_xnat_project/tasks/create_owner.yml index 08e3d66..057e41b 100644 --- a/configure/playbooks/roles/setup_xnat_project/tasks/create_owner.yml +++ b/configure/playbooks/roles/setup_xnat_project/tasks/create_owner.yml @@ 
-3,7 +3,7 @@ # Returns 201 if created, 409 if already exists - name: "Create project owner" ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/" + url: "{{ web_server.url }}/xapi/users/" user: "{{ xnat_service_admin.username }}" password: "{{ xnat_service_admin.password }}" method: POST @@ -25,7 +25,7 @@ # Set project owner - name: Make user owner of the project ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/data/projects/{{ xnat_project.id }}/users/Owners/{{ xnat_project_owner.username }}" + url: "{{ web_server.url }}/data/projects/{{ xnat_project.id }}/users/Owners/{{ xnat_project_owner.username }}" user: "{{ xnat_service_admin.username }}" password: "{{ xnat_service_admin.password }}" method: PUT diff --git a/configure/playbooks/roles/setup_xnat_project/tasks/create_project.yml b/configure/playbooks/roles/setup_xnat_project/tasks/create_project.yml index ee766f8..0c3b596 100644 --- a/configure/playbooks/roles/setup_xnat_project/tasks/create_project.yml +++ b/configure/playbooks/roles/setup_xnat_project/tasks/create_project.yml @@ -3,7 +3,7 @@ # Return 200 if created, 417 if already exists - name: "Create I-BASH project if it doesn't exist" ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/data/projects/" + url: "{{ web_server.url }}/data/projects/" user: "{{ xnat_service_admin.username }}" password: "{{ xnat_service_admin.password }}" method: POST @@ -18,7 +18,7 @@ - name: Make the project public ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/data/projects/{{ xnat_project.id }}/accessibility/public" + url: "{{ web_server.url }}/data/projects/{{ xnat_project.id }}/accessibility/public" user: "{{ xnat_service_admin.username }}" password: "{{ xnat_service_admin.password }}" method: PUT diff --git a/configure/playbooks/roles/setup_xnat_project/tasks/upload_data.yml b/configure/playbooks/roles/setup_xnat_project/tasks/upload_data.yml index d1971da..cfe7e92 100644 --- 
a/configure/playbooks/roles/setup_xnat_project/tasks/upload_data.yml +++ b/configure/playbooks/roles/setup_xnat_project/tasks/upload_data.yml @@ -1,3 +1,8 @@ +- name: Remove python3-requests + ansible.builtin.yum: + name: python3-requests + state: absent + - name: "Install necessary Python dependencies on host" ansible.builtin.pip: name: diff --git a/configure/playbooks/roles/sign_docker_client_cert/tasks/main.yml b/configure/playbooks/roles/sign_docker_client_cert/tasks/main.yml deleted file mode 100644 index 6d50f52..0000000 --- a/configure/playbooks/roles/sign_docker_client_cert/tasks/main.yml +++ /dev/null @@ -1,48 +0,0 @@ ---- -- name: Ensure docker client cert dir exists on server - ansible.builtin.file: - path: "{{ docker.client_cert_dir }}" - state: directory - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0700 - -- name: Generate OpenSSL client private key - community.crypto.openssl_privatekey: - path: "{{ docker.client_cert_dir }}/key.pem" - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - mode: 0400 - -- name: Generate OpenSSL CSR for each client using private key - community.crypto.openssl_csr: - path: "{{ docker.client_cert_dir }}/{{ item }}.csr" - privatekey_path: "{{ docker.client_cert_dir }}/key.pem" - common_name: "{{ item }}" - register: new_docker_client_csr_generated - loop: "{{ clients }}" - -- name: Generate client certificates signed by server CA - community.crypto.x509_certificate: - path: "{{ docker.client_cert_dir }}/{{ item }}.cert" - csr_path: "{{ docker.client_cert_dir }}/{{ item }}.csr" - provider: ownca - ownca_path: "{{ docker.ca_cert }}" - ownca_privatekey_path: "{{ docker.ca_key }}" - mode: 0400 - owner: "{{ docker.owner }}" - group: "{{ docker.group }}" - loop: "{{ clients }}" - -- name: Copy signed client certificates to temp dir on Ansible controller - ansible.builtin.fetch: - src: "{{ docker.client_cert_dir }}/{{ item }}.cert" - dest: "{{ xnat_container_service_temp_files_cert_dir }}/{{ item 
}}.cert" - flat: true - loop: "{{ clients }}" - -- name: Copy private key to temp dir on Ansible controller - ansible.builtin.fetch: - src: "{{ docker.client_cert_dir }}/key.pem" - dest: "{{ xnat_container_service_temp_files_cert_dir }}/key.pem" - flat: true diff --git a/configure/playbooks/roles/test-multiple_xnat_users/tasks/main.yml b/configure/playbooks/roles/test-multiple_xnat_users/tasks/main.yml index fa99fbd..b1a53f3 100644 --- a/configure/playbooks/roles/test-multiple_xnat_users/tasks/main.yml +++ b/configure/playbooks/roles/test-multiple_xnat_users/tasks/main.yml @@ -13,7 +13,7 @@ loop_control: loop_var: user_id vars: - xnat_web_url: "{{ xnat_web_server.url }}" + xnat_web_url: "{{ web_server.url }}" admin_username: "{{ xnat_service_admin.username }}" admin_password: "{{ xnat_service_admin.password }}" ssl_certs: "{{ ssl.validate_certs }}" diff --git a/configure/playbooks/roles/test-multiple_xnat_users/templates/.netrc-testuser.j2 b/configure/playbooks/roles/test-multiple_xnat_users/templates/.netrc-testuser.j2 index 130a6f8..26b37f1 100644 --- a/configure/playbooks/roles/test-multiple_xnat_users/templates/.netrc-testuser.j2 +++ b/configure/playbooks/roles/test-multiple_xnat_users/templates/.netrc-testuser.j2 @@ -1,3 +1,3 @@ -machine {{ xnat_web_server.host }} +machine {{ web_server.host }} login {{ user_name }} password {{ user_password }} diff --git a/configure/playbooks/roles/tomcat/handlers/main.yml b/configure/playbooks/roles/tomcat/handlers/main.yml deleted file mode 100644 index 0050fa4..0000000 --- a/configure/playbooks/roles/tomcat/handlers/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -- name: restart tomcat - ansible.builtin.service: name=tomcat state=restarted diff --git a/configure/playbooks/roles/tomcat/tasks/main.yml b/configure/playbooks/roles/tomcat/tasks/main.yml deleted file mode 100644 index f83528c..0000000 --- a/configure/playbooks/roles/tomcat/tasks/main.yml +++ /dev/null @@ -1,107 +0,0 @@ ---- -- name: Ensure Ansible sefcontext 
dependencies are installed (Python 2) - ansible.builtin.yum: - name: ["libselinux-python", "policycoreutils-python"] - state: installed - when: ansible_facts["python"]["version"]["major"] is version("2") - -- name: Ensure Ansible sefcontext dependencies are installed (Python 3) - ansible.builtin.yum: - name: ["python3-libselinux", "policycoreutils-python-utils"] - state: installed - when: ansible_facts["python"]["version"]["major"] is version("3") - -- name: Ensure epel is installed - ansible.builtin.yum: - name: "epel-release" - state: installed - -- name: Ensure Tomcat Native library is installed - ansible.builtin.yum: - name: "tomcat-native" - state: installed - -- name: Configure SELinux to allow Tomcat to listen on port {{ tomcat.port }} - community.general.seport: - ports: "{{ tomcat.port }}" - proto: tcp - setype: http_port_t - state: present - when: selinux_enabled - -- name: Add tomcat group - ansible.builtin.group: - name: "{{ tomcat.group }}" - -- name: Add "tomcat" user - ansible.builtin.user: - name: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - home: "{{ tomcat.catalina_home }}" - createhome: false - system: true - -- name: Check for existing Tomcat install - ansible.builtin.stat: - path: "{{ tomcat.catalina_home }}/bin" - register: tomcat_check - -- name: Check installed Tomcat version - ansible.builtin.shell: | - set -o pipefail - ./version.sh | grep -oP '(?<=Apache Tomcat/)([0-9]+\.?)+' - args: - chdir: "{{ tomcat.catalina_home }}/bin" - register: tomcat_check_version - when: "tomcat_check.stat.exists" - changed_when: false - failed_when: false - -- name: Upgrade/install Tomcat if needed - import_tasks: tasks/upgrade.yml - when: "not tomcat_check.stat.exists or tomcat_version not in tomcat_check_version.stdout" - -- name: Copy tomcat service file - ansible.builtin.template: - src: templates/tomcat.service.j2 - dest: "{{ tomcat.service_config_file }}" - mode: 0644 - when: ansible_service_mgr == "systemd" - -- name: Copy Tomcat config file - 
ansible.builtin.template: - src: tomcat.conf.j2 - dest: "{{ tomcat.config_file }}" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: 0644 - force: true - notify: restart tomcat - -- name: Start and enable tomcat - ansible.builtin.service: - daemon_reload: true - name: tomcat - state: started - enabled: true - when: ansible_service_mgr == "systemd" - -- name: Copy Tomcat setenv.sh file - ansible.builtin.template: - src: setenv.sh.j2 - dest: "{{ tomcat.catalina_home }}/bin/setenv.sh" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: 0644 - force: true - notify: restart tomcat - -- name: Copy Tomcat server config file - ansible.builtin.template: - src: server.xml.j2 - dest: "{{ tomcat.server_config_file }}" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: 0644 - force: true - notify: restart tomcat diff --git a/configure/playbooks/roles/tomcat/tasks/upgrade.yml b/configure/playbooks/roles/tomcat/tasks/upgrade.yml deleted file mode 100644 index f102f79..0000000 --- a/configure/playbooks/roles/tomcat/tasks/upgrade.yml +++ /dev/null @@ -1,92 +0,0 @@ ---- -- name: Stop Tomcat - ansible.builtin.service: - name: tomcat - state: stopped - when: "tomcat_check.stat.exists" - -- name: Back up existing Tomcat {{ tomcat.catalina_home }} - ansible.builtin.command: "mv {{ tomcat.catalina_home }} /usr/share/tomcat_bkp" - args: - creates: "/usr/share/tomcat_bkp" - when: "tomcat_check.stat.exists" - -- name: Remove currently installed Tomcat folder - ansible.builtin.file: - path: "{{ tomcat.catalina_home }}" - state: absent - -- name: Ensure Tomcat directory exists - ansible.builtin.file: - path: "{{ tomcat.catalina_home }}" - state: directory - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: 0755 - -- name: Download and unarchive Tomcat v{{ tomcat_version }} - ansible.builtin.unarchive: - src: "{{ tomcat.binary_url }}" - dest: "{{ tomcat.catalina_home }}" - remote_src: true - owner: "{{ tomcat.owner }}" - group: 
"{{ tomcat.group }}" - extra_opts: "--strip-components=1" - creates: "{{ tomcat.catalina_home }}/bin" - -- name: Remove default Tomcat webapps - ansible.builtin.file: - state: absent - path: "{{ item }}" - with_items: - - "{{ tomcat.catalina_home }}/webapps/examples" - - "{{ tomcat.catalina_home }}/webapps/manager" - - "{{ tomcat.catalina_home }}/webapps/host-manager" - - "{{ tomcat.catalina_home }}/webapps/docs" - -- name: Remove default Tomcat ROOT application - ansible.builtin.file: - state: absent - path: "{{ tomcat.catalina_home }}/webapps/ROOT" - -- name: Check if a pipeline_installer exists - ansible.builtin.stat: - path: "/usr/share/tomcat_bkp/pipeline_installer" - register: pipeline_installer_folder - -- name: "Restore pipeline installer if it exists" - ansible.builtin.copy: - src: "/usr/share/tomcat_bkp/pipeline_installer" - dest: "{{ tomcat.catalina_home }}" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: preserve - remote_src: true - when: - - "tomcat_check.stat.exists" - - "pipeline_installer_folder.stat.exists" - notify: restart tomcat - -- name: "Restore previously installed XNAT and files" - ansible.builtin.copy: - src: "{{ item }}" - dest: "{{ tomcat.catalina_home }}" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: preserve - remote_src: true - with_items: - - "/usr/share/tomcat_bkp/webapps" - - "/usr/share/tomcat_bkp/.postgresql" - - "/usr/share/tomcat_bkp/logs" - - "/usr/share/tomcat_bkp/install_downloads" - when: "tomcat_check.stat.exists" - notify: restart tomcat - -- name: Show suggested commands for removing backed-up Tomcat folder - ansible.builtin.debug: - msg: - - "A backup of the previously installed Tomcat folder was created at /usr/share/tomcat_bkp" - - "You may wish to remove this" - - "e.g sudo rm -rf /usr/share/tomcat_bkp" - when: "tomcat_check.stat.exists" diff --git a/configure/playbooks/roles/tomcat/templates/server.xml.j2 b/configure/playbooks/roles/tomcat/templates/server.xml.j2 deleted 
file mode 100644 index f14eaf9..0000000 --- a/configure/playbooks/roles/tomcat/templates/server.xml.j2 +++ /dev/null @@ -1,40 +0,0 @@ - - - - - {% if tomcat_version.split('.')[0] == "7" %} - - {% endif %} - - - - - - - - - - - - - - - - - - {% if tomcat_webapp_name != "ROOT" %} - - {% endif %} - - - - diff --git a/configure/playbooks/roles/tomcat/templates/setenv.sh.j2 b/configure/playbooks/roles/tomcat/templates/setenv.sh.j2 deleted file mode 100644 index 982f89a..0000000 --- a/configure/playbooks/roles/tomcat/templates/setenv.sh.j2 +++ /dev/null @@ -1,5 +0,0 @@ -{% if tomcat_version.split('.')[0] == "7" %} -CATALINA_OPTS="-Dxnat.home={{ xnat_home_dir }} -Xms{{ java_mem.Xms }} -Xmx{{ java_mem.Xmx }} -XX:MaxPermSize=512m -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -Djavax.sql.DataSource.Factory=org.apache.commons.dbcp.BasicDataSourceFactory" -{% else %} -CATALINA_OPTS="-Dxnat.home={{ xnat_home_dir }} -Xms{{ java_mem.Xms }} -Xmx{{ java_mem.Xmx }} -XX:MetaspaceSize={{ java_mem.MetaspaceSize }} -XX:+UseG1GC -server" -{% endif %} diff --git a/configure/playbooks/roles/tomcat/templates/tomcat.conf.j2 b/configure/playbooks/roles/tomcat/templates/tomcat.conf.j2 deleted file mode 100644 index 8ef9395..0000000 --- a/configure/playbooks/roles/tomcat/templates/tomcat.conf.j2 +++ /dev/null @@ -1,52 +0,0 @@ -# System-wide configuration file for tomcat services -# This will be loaded by systemd as an environment file, -# so please keep the syntax. For shell expansion support -# place your custom files as /etc/tomcat/conf.d/*.conf -# -# There are 2 "classes" of startup behavior in this package. -# The old one, the default service named tomcat.service. -# The new named instances are called tomcat@instance.service. -# -# Use this file to change default values for all services. -# Change the service specific ones to affect only one service. -# For tomcat.service it's /etc/sysconfig/tomcat, for -# tomcat@instance it's /etc/sysconfig/tomcat@instance. 
- -# This variable is used to figure out if config is loaded or not. -TOMCAT_CFG_LOADED="1" - -# In new-style instances, if CATALINA_BASE isn't specified, it will -# be constructed by joining TOMCATS_BASE and NAME. -TOMCATS_BASE="/var/lib/tomcats/" - -# Where your java installation lives -JAVA_HOME="{{ java.home }}" - -# Where your tomcat installation lives -CATALINA_HOME="{{ tomcat.catalina_home }}" - -# System-wide tmp -CATALINA_TMPDIR="{{ tomcat.catalina_home }}/temp" - -# You can pass some parameters to java here if you wish to -# If the parameters are only for tomcat, in most cases it is sufficient to -# put them in CATALINA_OPTS below (unless needed for stop command) -#JAVA_OPTS="-Xminf0.1 -Xmaxf0.3" - -# Use JAVA_OPTS to set java.library.path for libtcnative.so -#JAVA_OPTS="-Djava.library.path=/usr/lib" - -# You can change your tomcat locale here -#LANG="en_US" - -# Run tomcat under the Java Security Manager -SECURITY_MANAGER="false" - -# If you wish to further customize your tomcat environment, -# put your own definitions here -# (i.e. 
LD_LIBRARY_PATH for some jdbc drivers) - -# Set additional Java parameters for tomcat running XNAT - -# If we need to add JAVA_HOME to the path -export PATH="${JAVA_HOME}/bin:${PATH}" diff --git a/configure/playbooks/roles/tomcat/templates/tomcat.service.j2 b/configure/playbooks/roles/tomcat/templates/tomcat.service.j2 deleted file mode 100644 index 63abb52..0000000 --- a/configure/playbooks/roles/tomcat/templates/tomcat.service.j2 +++ /dev/null @@ -1,20 +0,0 @@ -[Unit] -Description=Tomcat -After=syslog.target network.target - -[Service] -Type=forking - -User={{ tomcat.owner }} -Group={{ tomcat.owner }} - -EnvironmentFile={{ tomcat.config_file }} - -Environment=CATALINA_BASE={{ tomcat.catalina_home }} -Environment=CATALINA_PID={{ tomcat.catalina_home }}/temp/tomcat.pid - -ExecStart={{ tomcat.catalina_home }}/bin/catalina.sh start -ExecStop={{ tomcat.catalina_home }}/bin/catalina.sh stop - -[Install] -WantedBy=multi-user.target diff --git a/configure/playbooks/roles/xnat/defaults/main.yml b/configure/playbooks/roles/xnat/defaults/main.yml deleted file mode 100644 index 01bd07a..0000000 --- a/configure/playbooks/roles/xnat/defaults/main.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- -xnat_archive_dir: "{{ xnat_root_dir }}/archive" -xnat_prearchive_dir: "{{ xnat_root_dir }}/prearchive" -xnat_cache_dir: "{{ xnat_root_dir }}/cache" -xnat_build_dir: "{{ xnat_root_dir }}/build" -xnat_ftp_dir: "{{ xnat_root_dir }}/ftp" -xnat_pipeline_dir: "{{ xnat_root_dir }}/pipeline" -xnat_inbox_dir: "{{ xnat_root_dir }}/inbox" -xnat_plugins_dir: "{{ xnat_home_dir }}/plugins" -xnat_config_dir: "{{ xnat_home_dir }}/config" -xnat_logs_dir: "{{ xnat_home_dir }}/logs" -xnat_work_dir: "{{ xnat_home_dir }}/work" -xnat_create_data_directory: true diff --git a/configure/playbooks/roles/xnat/handlers/main.yml b/configure/playbooks/roles/xnat/handlers/main.yml deleted file mode 100644 index 0050fa4..0000000 --- a/configure/playbooks/roles/xnat/handlers/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -- name: 
restart tomcat - ansible.builtin.service: name=tomcat state=restarted diff --git a/configure/playbooks/roles/xnat/tasks/configure.yml b/configure/playbooks/roles/xnat/tasks/configure.yml deleted file mode 100644 index ee68633..0000000 --- a/configure/playbooks/roles/xnat/tasks/configure.yml +++ /dev/null @@ -1,179 +0,0 @@ ---- -- name: "Ensure any Tomcat restart handlers have been called before REST configuration" - ansible.builtin.meta: flush_handlers - -- name: "Ensure Tomcat is running" - ansible.builtin.service: - name: "tomcat" - state: started - enabled: true - -- name: "Waiting for Tomcat to start - this could take several minutes: {{ xnat_web_server.url }}" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}" - method: GET - validate_certs: "{{ ssl.validate_certs }}" - register: _result - until: _result.status == 200 - retries: 10 - delay: 30 - -# XNAT initally creates a default admin account with a default password. -# On the first run, we change the default admin password, then use this account to create a service user, and then disable the default admin account. - -# First, test if the default admin password needs to be changed. -# To do this, first request a restricted page using the default admin credentials. -# If this succeeds (200), this is the initial run and the password needs to be changed. -# Subsequently, this should return 401 indicating the password has already been changed and/or the account is disabled. 
-- name: "Checking if the default admin credentials are the default values" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/siteConfig" - user: "admin" - password: "admin" - method: GET - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200, 401 - register: admin_default_auth - -# If the previous call succeeded, it means the default admin password has not yet been changed so we change it now -- name: "Changing default admin password" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/admin" - user: "admin" - password: "admin" - method: PUT - body_format: json - body: '{"password":"{{ xnat_config.admin_password }}", "confirmPassword":"{{ xnat_config.admin_password }}"}' - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200 - register: create - when: admin_default_auth.status == 200 - -# At this point the default admin password has been changed. -# Next, ensure the service account exists, and then that the default admin account has been disabled. 
-# To do this, use the admin user with the updated admin password to request the XNAT API details for the service user (/xpi/users/USERNAME) -# If an authentication error occurs (401) then the default admin is disabled, so we assume the service account must already exist -# If this succeeds (200) then the service_admin is already present, but the default admin is still enabled -# If user is not found (404 or 500) then the service_admin has not yet been created and the default admin is still enabled -- name: "Check existence of service admin user using default admin" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/{{ xnat_service_admin.username }}" - user: "admin" - password: "{{ xnat_config.admin_password }}" - method: GET - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200, 404, 401, 500 - register: service_admin_check - -# If the above service_admin_check failed with a non-authentication error (404, 500), then we create the service account -- name: "Create service admin user" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/" - user: "admin" - password: "{{ xnat_config.admin_password }}" - method: POST - body_format: json - body: - admin: true - username: "{{ xnat_service_admin.username }}" - password: "{{ xnat_service_admin.password }}" - firstName: "{{ xnat_service_admin.firstname }}" - lastName: "{{ xnat_service_admin.lastname }}" - email: "{{ xnat_common_config.admin_email }}" - verified: true - enabled: true - validate_certs: "{{ ssl.validate_certs }}" - status_code: 201 - when: service_admin_check.status not in [200, 401] - -# Ensure that the created service_admin user has the correct admin role -- name: "Set service admin user roles" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/{{ xnat_service_admin.username }}/roles/Administrator" - user: "admin" - password: "{{ xnat_config.admin_password }}" - method: PUT - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200 - when: 
service_admin_check.status != 401 - -# Ensure that the created service_admin user is non-expiring -- name: "Set service admin account to non-expiring" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/{{ xnat_service_admin.username }}/roles/non_expiring" - user: "admin" - password: "{{ xnat_config.admin_password }}" - method: PUT - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200 - when: service_admin_check.status != 401 - - # Give the created service_admin access to all data -- name: "Set service admin role to all data admin" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/{{ xnat_service_admin.username }}/groups/ALL_DATA_ADMIN" - user: "admin" - password: "{{ xnat_config.admin_password }}" - method: PUT - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200 - when: service_admin_check.status != 401 - -# All further admin actions can then be undertaken using the new service_admin -- name: "XNAT configuration" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/siteConfig" - user: "{{ xnat_service_admin.username }}" - password: "{{ xnat_service_admin.password }}" - method: POST - body_format: json - body: "{{ lookup('template', 'roles/xnat/templates/xnat-settings.json.j2') }}" - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200 - register: login - -# The default admin is disabled once it is verified that the service_admin works -- name: "Disable default admin user" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/admin" - user: "{{ xnat_service_admin.username }}" - password: "{{ xnat_service_admin.password }}" - method: PUT - body_format: json - body: - enabled: false - verified: false - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200, 201, 304 - when: service_admin_check.status != 401 - -- name: "Disable the guest user" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/xapi/users/guest" - user: "{{ xnat_service_admin.username }}" - password: 
"{{ xnat_service_admin.password }}" - method: PUT - body_format: json - body: - enabled: false - verified: false - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200, 201, 304 - when: service_admin_check.status != 401 - -- name: "Store automation scripts" - ansible.builtin.uri: - url: "{{ xnat_web_server.url }}/data/automation/scripts/{{ item.id }}" - user: "{{ xnat_service_admin.username }}" - password: "{{ xnat_service_admin.password }}" - method: PUT - body_format: json - body: - scriptLabel: "{{ item.label }}" - description: "{{ item.description }}" - scriptVersion: "{{ item.version }}" - language: "{{ item.language }}" - content: "{{ item.script }}" - validate_certs: "{{ ssl.validate_certs }}" - status_code: 200, 201, 304 - loop: "{{ automation_scripts | default([]) }}" diff --git a/configure/playbooks/roles/xnat/tasks/directories.yml b/configure/playbooks/roles/xnat/tasks/directories.yml deleted file mode 100644 index 2aee990..0000000 --- a/configure/playbooks/roles/xnat/tasks/directories.yml +++ /dev/null @@ -1,46 +0,0 @@ ---- -- name: Ensure XNAT external storage subdirectory exists - ansible.builtin.file: - path: "{{ xnat_web_server.storage_dir }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - state: directory - mode: 0700 - when: xnat_create_data_directory and EXTERNAL_STORAGE_DRIVE is defined - -- name: Ensure there is a symbolic link from {{ xnat_data_dir }} to external storage {{ xnat_web_server.storage_dir }} - ansible.builtin.file: - src: "{{ xnat_web_server.storage_dir }}" - dest: "{{ xnat_data_dir }}" - state: link - when: xnat_create_data_directory and EXTERNAL_STORAGE_DRIVE is defined - -- name: "Ensure XNAT subdirectories exist" - ansible.builtin.file: - path: "{{ item }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - state: directory - mode: 0755 - with_items: - - "{{ xnat_root_dir }}" - - "{{ xnat_archive_dir }}" - - "{{ xnat_build_dir }}" - - "{{ xnat_cache_dir }}" - - "{{ xnat_ftp_dir }}" - - "{{ 
xnat_pipeline_dir }}" - - "{{ xnat_prearchive_dir }}" - - "{{ xnat_home_dir }}" - - "{{ xnat_config_dir }}" - - "{{ xnat_logs_dir }}" - - "{{ xnat_plugins_dir }}" - - "{{ xnat_work_dir }}" - -- name: "Ensure XNAT subdirectory ownership is correct" - ansible.builtin.file: - path: "{{ xnat_root_dir }}" - state: directory - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0755 - notify: restart tomcat diff --git a/configure/playbooks/roles/xnat/tasks/ldap.yml b/configure/playbooks/roles/xnat/tasks/ldap.yml deleted file mode 100644 index 7105a41..0000000 --- a/configure/playbooks/roles/xnat/tasks/ldap.yml +++ /dev/null @@ -1,47 +0,0 @@ ---- -- name: "Ensure directories exist for cert files" - ansible.builtin.file: - path: "{{ item }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - state: directory - mode: 0755 - with_items: - - "{{ xnat.install_downloads }}" - - "{{ xnat.install_downloads }}/certs" - -- name: Copy ldap server certificate to client - ansible.builtin.copy: - src: "{{ ldap.ca_cert }}" - dest: "{{ ldap_ca_cert_file_on_client }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0600 - -- name: Ensure XNAT LDAP auth folder exists - ansible.builtin.file: - path: "{{ xnat_config_dir }}/auth" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - state: directory - mode: 0755 - -- name: Configure LDAP for XNAT - ansible.builtin.template: - src: "ldap1-provider.properties.j2" - dest: "{{ xnat_config_dir }}/auth/ldap1-provider.properties" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0644 - force: true - notify: restart tomcat - -- name: Add CA certificate for LDAP server to keystore - community.general.java_cert: - cert_path: "{{ ldap_ca_cert_file_on_client }}" - keystore_path: "{{ java.keystore_path }}" - keystore_pass: "{{ java_keystore.keystore_pass }}" - cert_alias: "{{ ldap.keystore_alias }}" - keystore_create: true - state: present - notify: restart tomcat diff --git 
a/configure/playbooks/roles/xnat/tasks/main.yml b/configure/playbooks/roles/xnat/tasks/main.yml deleted file mode 100644 index 002378a..0000000 --- a/configure/playbooks/roles/xnat/tasks/main.yml +++ /dev/null @@ -1,33 +0,0 @@ ---- -- name: Install lxml using pip - ansible.builtin.pip: - name: - - lxml - -- name: Ensure dependencies are installed - ansible.builtin.yum: - name: ["postgresql{{ postgresql_version }}", "wget"] - state: installed - -- name: "Configure XNAT directories" - import_tasks: directories.yml - -- name: "Add or upgrade XNAT code" - import_tasks: upgrade_xnat.yml - -- name: "Add or upgrade plugins" - import_tasks: plugins.yml - -- name: "Add or upgrade pipeline installer" - import_tasks: pipelines.yml - when: pipeline_engine_enabled - -- name: "Configure XNAT settings files" - import_tasks: settings_files.yml - -- name: "Configure LDAP for XNAT" - import_tasks: ldap.yml - when: ldap.enabled - -- name: "XNAT site configuration" - import_tasks: configure.yml diff --git a/configure/playbooks/roles/xnat/tasks/pipelines.yml b/configure/playbooks/roles/xnat/tasks/pipelines.yml deleted file mode 100644 index e16f453..0000000 --- a/configure/playbooks/roles/xnat/tasks/pipelines.yml +++ /dev/null @@ -1,67 +0,0 @@ ---- -- name: Download pipelines installer - ansible.builtin.get_url: - url: "{{ xnat_source.pipelines_url }}" - dest: "{{ xnat_source.xnat_downloads_dir }}/{{ xnat_source.pipeline_installer_file_name }}" - timeout: 30 - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0644 - -- name: "Ensure pipeline installer folder exists" - ansible.builtin.file: - path: "{{ xnat.pipeline_install_dir }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - state: directory - mode: 0755 - -- name: "Extract pipeline installer files" - ansible.builtin.unarchive: - src: "{{ xnat_source.xnat_downloads_dir }}/{{ xnat_source.pipeline_installer_file_name }}" - dest: "{{ xnat.pipeline_install_dir }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group 
}}" - remote_src: true - extra_opts: [--strip-components=1] - register: pipeline_extract - -- name: "Configure pipeline installer gradle.properties" - ansible.builtin.template: - src: "gradle.properties.j2" - dest: "{{ xnat.pipeline_install_dir }}/gradle.properties" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0644 - force: true - register: pipeline_config - -- name: check if pipelines already built - ansible.builtin.stat: - path: "{{ xnat_pipeline_dir }}/config" - register: pipeline_deployment - -- name: "Run pipeline builder" - ansible.builtin.command: ./gradlew - args: - chdir: "{{ xnat.pipeline_install_dir }}" - when: pipeline_extract.changed or pipeline_config.changed or not pipeline_deployment.stat.exists - -- name: "Ensure pipelines subdirectory ownership is correct" - ansible.builtin.file: - path: "{{ xnat_pipeline_dir }}" - state: directory - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - recurse: true - notify: restart tomcat - -- name: Add self-signed web certificate for web server to keystore - community.general.java_cert: - cert_path: "{{ ssl.server_cert }}" - keystore_path: "{{ java.keystore_path }}" - keystore_pass: "{{ java_keystore.keystore_pass }}" - cert_alias: "{{ xnat_web_server.host }}" - keystore_create: true - state: present - when: ssl.use_ssl and not ssl.validate_certs diff --git a/configure/playbooks/roles/xnat/tasks/plugins.yml b/configure/playbooks/roles/xnat/tasks/plugins.yml deleted file mode 100644 index f9c3568..0000000 --- a/configure/playbooks/roles/xnat/tasks/plugins.yml +++ /dev/null @@ -1,146 +0,0 @@ ---- -- name: Ensure unzip is installed - ansible.builtin.yum: - name: "unzip" - state: installed - -- name: "Ensure download directories exist" - ansible.builtin.file: - path: "{{ item }}" - state: directory - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0755 - with_items: - - "{{ xnat_source.plugins_downloads_dir }}/" - -- name: Download XNAT plugins - ansible.builtin.get_url: - url: 
"{{ item }}" - dest: "{{ xnat_source.plugins_downloads_dir }}" - timeout: 30 - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0644 - register: plugins_downloaded - with_items: - - "{{ xnat_plugin_urls }}" - - "{{ xnat_server_specific_plugin_urls | default([]) }}" - -- name: Download plugins from package registry - community.general.maven_artifact: - group_id: "{{ item.group_id }}" - artifact_id: "{{ item.artifact_id }}" - version: "{{ item.version }}" - classifier: "{{ item.classifier | default(omit) }}" - repository_url: "{{ package_registry.url }}" - keep_name: true - dest: "{{ xnat_source.plugins_downloads_dir }}" - timeout: 30 - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - verify_checksum: always - headers: "{{ package_registry.authentication_header }}" - register: package_plugins_downloaded - with_items: - - "{{ xnat_plugin_packages | default([]) }}" - - "{{ xnat_server_specific_plugin_packages | default([]) }}" - when: - - package_registry.enabled - -- name: Get current plugins from remote file cache - ansible.builtin.find: - paths: "{{ xnat_source.plugins_downloads_dir }}" - patterns: "*.jar" - register: cached_plugins - -- name: Removing old plugins from local file cache - ansible.builtin.file: - path: "{{ item.path }}" - state: absent - with_items: - - "{{ cached_plugins.files }}" - when: - - item.path | basename not in plugins_downloaded.results | map(attribute='dest')|map('basename')|list - - item.path | basename not in package_plugins_downloaded.results | map(attribute='dest')|map('basename')|list - -- name: "Get list of plugins on the local cache" - ansible.builtin.find: - paths: "{{ xnat_source.plugins_downloads_dir }}" - patterns: "*.jar" - file_type: file - register: local_plugin_list - -- name: "Get stats for local plugins" - ansible.builtin.stat: - path: "{{ item.path }}" - with_items: "{{ local_plugin_list.files }}" - register: local_plugins_stats - -- name: "Get list of hashes for local plugins" - vars: - local_stats: 
[] - ansible.builtin.set_fact: - local_stats: "{{ local_stats + [{'name': item.stat.path | basename, 'hash': item.stat.checksum}] }}" - with_items: "{{ local_plugins_stats.results }}" - -- name: "Ensure directories exist for install files" - ansible.builtin.file: - path: "{{ item }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - state: directory - mode: 0755 - with_items: - - "{{ xnat.install_downloads }}" - -- name: "Get list of plugins on the remote server" - ansible.builtin.find: - paths: "{{ xnat_plugins_dir }}" - patterns: "*.jar" - file_type: file - register: remote_plugin_list - -- name: "Get stats for remote plugins" - ansible.builtin.stat: - path: "{{ item.path }}" - with_items: "{{ remote_plugin_list.files }}" - register: remote_plugins_stats - -- name: "Get list of hashes for remote plugins" - ansible.builtin.set_fact: - remote_stats: "{{ remote_stats | default([]) + [{'name': item.stat.path | basename, 'hash': item.stat.checksum}] }}" - with_items: "{{ remote_plugins_stats.results }}" - -- name: "Check if any plugins have changed" - ansible.builtin.set_fact: - plugin_changes: "{{ remote_stats | default([]) | symmetric_difference(local_stats) }}" - -- name: "Ensure tomcat is not running when plugins are being updated" - ansible.builtin.service: - name: tomcat - state: stopped - when: plugin_changes | length > 0 - -- name: "Copy XNAT plugins from {{ xnat_source.plugins_downloads_dir }} to {{ xnat_plugins_dir }}" - ansible.builtin.copy: - src: "{{ item.path }}" - dest: "{{ xnat_plugins_dir }}" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: 0600 - remote_src: true - with_items: - - "{{ local_plugin_list.files }}" - register: plugins_files - when: plugin_changes | length > 0 - -- name: Removing old plugins from server - ansible.builtin.file: - path: "{{ item.path }}" - state: absent - with_items: - - "{{ remote_plugin_list.files }}" - when: - - "item.path | basename not in local_plugin_list.files | map(attribute='path') | 
map('basename') | list" - - "plugin_changes | length > 0" diff --git a/configure/playbooks/roles/xnat/tasks/settings_files.yml b/configure/playbooks/roles/xnat/tasks/settings_files.yml deleted file mode 100644 index 5112b72..0000000 --- a/configure/playbooks/roles/xnat/tasks/settings_files.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -- name: "Configure xnat-conf.properties" - ansible.builtin.template: - src: "xnat-conf.properties.j2" - dest: "{{ xnat_config_dir }}/xnat-conf.properties" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0644 - force: true - notify: restart tomcat - -- name: "Configure prefs-init" - ansible.builtin.template: - src: "prefs-init.j2" - dest: "{{ xnat_config_dir }}/prefs-init.ini" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0644 - force: true - notify: restart tomcat diff --git a/configure/playbooks/roles/xnat/tasks/upgrade_xnat.yml b/configure/playbooks/roles/xnat/tasks/upgrade_xnat.yml deleted file mode 100644 index 73c9309..0000000 --- a/configure/playbooks/roles/xnat/tasks/upgrade_xnat.yml +++ /dev/null @@ -1,76 +0,0 @@ ---- -- name: "Ensure download directories exist" - ansible.builtin.file: - path: "{{ item }}" - state: directory - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0755 - with_items: - - "{{ xnat_source.xnat_downloads_dir }}" - -- name: Download XNAT WAR - ansible.builtin.get_url: - url: "{{ xnat_source.xnat_war_url }}" - dest: "{{ xnat_source.xnat_downloads_dir }}/{{ xnat_source.war_file_name }}" - timeout: 30 - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - mode: 0644 - -- name: "Ensure directories exist for install files" - ansible.builtin.file: - path: "{{ item }}" - owner: "{{ xnat.owner }}" - group: "{{ xnat.group }}" - state: directory - mode: 0755 - with_items: - - "{{ xnat.install_downloads }}" - -# We do an advance check without modification using check_mode. This allows us -# to stop Tomcat if required before updating the WAR file. 
-- name: "Check if XNAT war file has changed" - ansible.builtin.copy: - src: "{{ xnat_source.xnat_downloads_dir }}/{{ xnat_source.war_file_name }}" - dest: "{{ tomcat.root_webapp }}" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: 0600 - remote_src: true - check_mode: true - register: xnat_war_file_check - -# If the war file did not change but XNAT has not already been deployed (eg due -# to a previous failed deployment), we may need to force tomcat to restart -- name: check if XNAT is already deployed - ansible.builtin.stat: - path: "{{ tomcat.root }}/xnat-templates" - register: deployed_xnat_found - -# Stop Tomcat before deploying the WAR file, otherwise Tomcat may start extracting the file before it has finished copying -# But do not stop Tomcat unless we are going to deploy the new file -- name: Ensure tomcat is not running during deployment - ansible.builtin.service: - name: tomcat - state: stopped - when: xnat_war_file_check.changed or not deployed_xnat_found.stat.exists - -# Tomcat will not redeploy a WAR file that has already been expanded, even if -# the WAR file changes. We need to clear out the existing deployment. 
-- name: Remove existing XNAT deployment because of updated WAR file - ansible.builtin.file: - state: absent - path: "{{ tomcat.root }}" - when: xnat_war_file_check.changed - -- name: "Deploy XNAT war file" - ansible.builtin.copy: - src: "{{ xnat_source.xnat_downloads_dir }}/{{ xnat_source.war_file_name }}" - dest: "{{ tomcat.root_webapp }}" - owner: "{{ tomcat.owner }}" - group: "{{ tomcat.group }}" - mode: 0600 - remote_src: true - register: deployed_war_file - when: xnat_war_file_check.changed diff --git a/configure/playbooks/roles/xnat/templates/gradle.properties.j2 b/configure/playbooks/roles/xnat/templates/gradle.properties.j2 deleted file mode 100644 index 2939339..0000000 --- a/configure/playbooks/roles/xnat/templates/gradle.properties.j2 +++ /dev/null @@ -1,14 +0,0 @@ -# Gradle properties file for XNAT pipeline building - -# Copy this file to gradle.properties to set the default properties for your pipeline build. For pipeline module paths, -# you can specify pathoutside of the pipeline engine by uncommenting the modulePaths property below and specifying the -# pathwhere your pipeline modules are stored. Separate each module path with commas (whitespace around the commas is -# acceptable). The pipeline engine build will also take modules from the subfolder modules inside the pipeline engine -# itself (if that subfolder exists). 
-# -xnatUrl={{ tomcat.hostname }}:{{ tomcat.port }} -siteName={{ xnat_config.site_name }} -adminEmail={{ xnat_common_config.admin_email }} -smtpServer={{ smtp.hostname | default("localhost", true) }} -destination={{ xnat_pipeline_dir }} -# modulePaths=/path1/to/modules, /path2/to/modules diff --git a/configure/playbooks/roles/xnat/templates/ldap1-provider.properties.j2 b/configure/playbooks/roles/xnat/templates/ldap1-provider.properties.j2 deleted file mode 100644 index f812d84..0000000 --- a/configure/playbooks/roles/xnat/templates/ldap1-provider.properties.j2 +++ /dev/null @@ -1,8 +0,0 @@ -name={{ ldap.name }} -provider.id=ldap1 -auth.method=ldap -address={{ ldap.address }} -userdn={{ ldap.userdn }} -password={{ ldap.password }} -search.base={{ ldap.base }} -search.filter={{ ldap.filter }} diff --git a/configure/playbooks/roles/xnat/templates/prefs-init.j2 b/configure/playbooks/roles/xnat/templates/prefs-init.j2 deleted file mode 100644 index 4856c01..0000000 --- a/configure/playbooks/roles/xnat/templates/prefs-init.j2 +++ /dev/null @@ -1,32 +0,0 @@ -[siteConfig] - -siteId={{ xnat_config.site_name }} -siteUrl={{ xnat_web_server.url }} -adminEmail={{ xnat_common_config.admin_email }} - -archivePath={{ xnat_archive_dir }} -prearchivePath={{ xnat_prearchive_dir }} -cachePath={{ xnat_cache_dir }} -buildPath={{ xnat_build_dir }} -ftpPath={{ xnat_ftp_dir }} -pipelinePath={{ xnat_pipeline_dir }} - -requireLogin=true -userRegistration=false -enableCsrfToken=true -sessionTimeout=1 hour -initialized=false - -[notifications] - -smtpEnabled={{ smtp.enabled }} -smtpHostname={{ smtp.hostname | default("localhost", true) }} -smtpPort={{ smtp.port }} -smtpProtocol={{ smtp.protocol }} -smtpAuth={{ smtp.auth }} -smtpUsername={{ smtp.username }} -smtpPassword={{ smtp.password }} -smtpStartTls={{ smtp.start_tls }} -smtpSslTrust={{ smtp.ssl_trust }} - -emailPrefix={{ xnat_config.site_name }} diff --git a/configure/playbooks/roles/xnat/templates/xnat-conf.properties.j2 
b/configure/playbooks/roles/xnat/templates/xnat-conf.properties.j2 deleted file mode 100644 index 58fb1a8..0000000 --- a/configure/playbooks/roles/xnat/templates/xnat-conf.properties.j2 +++ /dev/null @@ -1,10 +0,0 @@ -datasource.driver=org.postgresql.Driver -datasource.url=jdbc:postgresql://{{ xnat_db.host }}:{{ xnat_db.port }}/{{ xnat_db.postgres_xnat_database }} -datasource.username={{ xnat_db.postgres_xnat_user }} -datasource.password={{ xnat_db.postgres_xnat_password }} - -hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect -hibernate.hbm2ddl.auto=update -hibernate.show_sql=false -hibernate.cache.use_second_level_cache=true -hibernate.cache.use_query_cache=true diff --git a/configure/playbooks/roles/xnat/templates/xnat-settings.json.j2 b/configure/playbooks/roles/xnat/templates/xnat-settings.json.j2 deleted file mode 100644 index 3a04a3e..0000000 --- a/configure/playbooks/roles/xnat/templates/xnat-settings.json.j2 +++ /dev/null @@ -1,121 +0,0 @@ -{ - "enableCsrfToken": true, - "aliasTokenTimeout": "2 days", - "uiDisplaySeriesDescription": true, - "uiShowLeftBar": true, - "emailVerification": true, - "requireChangeJustification": false, - "maxFailedLoginsLockoutDuration": "1 hour", - "passwordHistoryDuration": "1 year", - "reloadPrearcDatabaseOnStartup": "{{ xnat_common_config.reloadPrearcDatabaseOnStartup }}", - "siteWideAlertType": "message", - "allowNonAdminsToClaimUnassignedSessions": "{{ xnat_common_config.allowNonAdminsToClaimUnassignedSessions }}", - "uiShowLeftBarProjects": true, - "requireImageAssessorLabels": false, - "sessionTimeoutMessage": "You have been signed out due to inactivity.", - "adminEmail": "{{ xnat_common_config.admin_email }}", - "dataPaths": [ - "/xapi/**", - "/data/**", - "/REST/**", - "/fs/**" - ], - "uiAllowBlockedSubjectAssessorView": false, - "requireSaltedPasswords": true, - "failMergeOn": [], - "requireEventName": false, - "siteWideAlertMessage": "", - "uiAllowProjectDelete": true, - "enableSitewideSeriesImportFilter": 
false, - "concurrentMaxSessions": 1000, - "siteDescriptionType": "Text", - "siteDescriptionText": "{{ xnat_config.site_description }}", - "cachePath": "{{ xnat_cache_dir }}", - "uiShowLeftBarBrowse": true, - "securityChannel": "any", - "ipsThatCanSendEmailsThroughRest": "{{ xnat_common_config.ipsThatCanSendEmailsThroughRest }}", - "checksums": true, - "zipExtensions": "zip,jar,rar,ear,gar,mrb", - "passwordExpirationType": "Interval", - "siteLogoPath": "{{ xnat_config_logo }}", - "sitewideSeriesImportFilter": "", - "allowHtmlResourceRendering": true, - "passwordExpirationDate": "", - "userRegistration": false, - "uiAllowScanAddition": true, - "siteLoginLanding": "/screens/QuickSearch.vm", - "sitewideAnonymizationScript": {{ xnat_sitewide_anonymization_script }}, - "uiAllowScanTypeModification": true, - "roleRepositoryService": "org.nrg.xdat.security.services.impl.RoleRepositoryServiceImpl", - "initialized": true, - "ftpPath": "{{ xnat_ftp_dir }}", - "primaryAdminUsername": "{{ xnat_common_config.primaryAdminUsername }}", - "restrictUserListAccessToAdmins": "{{ xnat_common_config.restrictUserListAccessToAdmins }}", - "siteDescriptionPage": "/screens/site_description.vm", - "dicomFileNameTemplate": "${StudyInstanceUID}-${SeriesNumber}-${InstanceNumber}-${HashSOPClassUIDWithSOPInstanceUID}", - "sessionXmlRebuilderRepeat": 60000, - "uiDebugExtensionPoints": false, - "imageSessionDisplayNamePlural": "Sessions", - "aliasTokenTimeoutSchedule": "0 0 * * * *", - "interactiveAgentIds": [ - ".*MSIE.*", - ".*Mozilla.*", - ".*AppleWebKit.*", - ".*Opera.*" - ], - "passwordComplexityMessage": "The password must contain at least 8 characters.", - "uiShowLeftBarFavorites": true, - "emailVerificationMessage": "Dear FULL_NAME,\n

We received a request to create an account for you on SITE_NAME. If you made this request, please confirm your email address by clicking this link: Verify Email\n (This link will expire in 24 hours.)AUTO_ENABLE_TEXT

If you did not initiate this request, you can safely ignore this email.", - "pathErrorWarning": "", - "csrfEmailAlert": true, - "showChangeJustification": false, - "uiShowProjectManageFiles": true, - "featureService": "org.nrg.xdat.security.services.impl.FeatureServiceImpl", - "siteUrl": "{{ xnat_web_server.url }}", - "passwordExpirationInterval": "2 years", - "uiLoginFailureMessage": "The username or password you entered is incorrect. Please try again or reset your password.

After %d failed login attempts, your user account will be locked. If you believe your account is currently locked, you can:

", - "sitewidePetTracers": "PIB\nFDG", - "imageSessionDisplayNameSingular": "Session", - "scanTypeMapping": true, - "uiAllowNonAdminProjectCreation": "{{ xnat_common_config.uiAllowNonAdminProjectCreation }}", - "receivedFileUser": "{{ xnat_common_config.receivedFileUser }}", - "maxFailedLogins": 6, - "enableSitewideAnonymizationScript": "{{ xnat_common_config.enableSitewideAnonymizationScript }}", - "pipelinePath": "{{ xnat_pipeline_dir }}", - "uiAllowSubjectCreateFromExptEdit": true, - "par": "{{ xnat_common_config.par }}", - "processingUrl": "{{ xnat.processingUrl }}", - "inactivityBeforeLockout": "3 years", - "requireLogin": true, - "uiAllowQuarantine": true, - "uiShowLeftBarSearch": true, - "canResetFailedLoginsWithForgotPassword": true, - "sessionXmlRebuilderInterval": "{{ xnat_common_config.sessionXmlRebuilderInterval }}", - "inactivityBeforeLockoutSchedule": "0 0 1 * * ?", - "sessionTimeout": "1 hour", - "roleService": "org.nrg.xdat.security.services.impl.RoleServiceImpl", - "enableDicomReceiver": true, - "enableDicomReceiverPropertyChangedListener": "org.nrg.dcm.DicomSCPSiteConfigurationListener", - "prearchivePath": "{{ xnat_prearchive_dir }}", - "passwordComplexity": "^.{8,}$", - "archivePath": "{{ xnat_archive_dir }}", - "buildPath": "{{ xnat_build_dir }}", - "enabledProviders": {{ custom_enabled_providers | default(xnat_common_config.enabledProviders) | to_json }}, - "allowDataAdmins": true, - "uiShowManageFiles": true, - "passwordReuseRestriction": "Historical", - "matchSecurityProtocol": false, - "siteWideAlertStatus": 0, - "inboxPath": "{{ xnat_inbox_dir }}", - "projectAllowAutoArchive": true, - "resetFailedLoginsSchedule": "0 0 * * * *", - "uiAllowAdvancedSearch": true, - "featureRepositoryService": "org.nrg.xdat.security.services.impl.FeatureRepositoryServiceImpl", - "siteHomeLayout": "/Index.vm", - "siteLandingLayout": "/Index.vm", - "sitewidePetMr": "", - "sitewideSeriesImportFilterMode": "blacklist", - "siteId": "{{ xnat_config.site_name }}", - 
"uiAllowNewUserComments": true, - "siteHome": "/screens/QuickSearch.vm" -} diff --git a/configure/playbooks/setup_xnat_project.yml b/configure/playbooks/setup_xnat_project.yml index 394fcd6..cb5e60f 100644 --- a/configure/playbooks/setup_xnat_project.yml +++ b/configure/playbooks/setup_xnat_project.yml @@ -1,7 +1,7 @@ --- - name: Set up IBASH project - hosts: web + hosts: xnat become: true gather_facts: true roles: - - { role: setup_xnat_project } + - role: setup_xnat_project diff --git a/configure/playbooks/roles/nginx/templates/nginx.j2 b/configure/playbooks/templates/nginx_reverse_proxy_aws.j2 similarity index 60% rename from configure/playbooks/roles/nginx/templates/nginx.j2 rename to configure/playbooks/templates/nginx_reverse_proxy_aws.j2 index 70de271..e8488af 100644 --- a/configure/playbooks/roles/nginx/templates/nginx.j2 +++ b/configure/playbooks/templates/nginx_reverse_proxy_aws.j2 @@ -1,33 +1,38 @@ +# Configure a reverse proxy +# Optionally create an additional default_server load_module '/usr/lib64/nginx/modules/ngx_stream_module.so'; user nginx; worker_processes auto; -error_log {{ nginx.log_folder }}/error.log warn; +error_log {{ nginx_error_log }} warn; pid /run/nginx.pid; events { worker_connections 1024; } +{% if nginx_upstream_port is defined and nginx_upstream_listen_port is defined %} stream { - upstream dicom { - server localhost:{{ xnat_dicom_port }}; + upstream backend { + server localhost:{{ nginx_upstream_port }}; } server { - listen {{ dicom_port }}; - proxy_pass dicom; + listen {{ nginx_upstream_listen_port }}; + proxy_pass backend; } } +{% endif %} http { server_names_hash_bucket_size 128; - sendfile on; + sendfile off; +# sendfile on; can cause a problem in VirtualBox tcp_nopush on; tcp_nodelay on; keepalive_timeout 65; types_hash_max_size 2048; default_type application/octet-stream; - access_log {{ nginx.log_folder }}/access.log; + access_log {{ nginx_access_log }}; # Good security practice is not to expose nginx version server_tokens 
off; @@ -36,19 +41,19 @@ http { gzip on; gzip_disable "msie6"; -{% if ssl.use_ssl %} +{% if nginx_use_ssl %} # SSL parameters must be specified outside of the server block. # Otherwise may only use the nginx may end up using parameters specified in # the default_server block even if a different serer is matched - ssl_certificate {{ nginx.ssl_cert_file }}; - ssl_certificate_key {{ nginx.ssl_key_file }}; + ssl_certificate {{ nginx_ssl_cert_file }}; + ssl_certificate_key {{ nginx_ssl_key_file }}; # TLS 1.0 and TLS 1.1 should be disabled - # TLS 1.3 may not be supported by Red Hat 7 - ssl_protocols TLSv1.3; + # TLS 1.3 may not be supported by Centos7 + ssl_protocols TLSv1.2; - ssl_dhparam {{ nginx.dh_params_file }}; + ssl_dhparam {{ nginx_dh_params_file }}; ssl_ecdh_curve secp384r1; # Increase the cache lifetime to improve performance; this requires a larger cache size @@ -71,57 +76,59 @@ http { # Note that nginx only uses server_name for disambiguation purposes; if # there is no matching server_name then nginx will use the default_server # regardless of its value of server_name +{% if nginx_add_default_server %} server { listen 80 default_server; -{% if ssl.use_ssl %} + {% if nginx_use_ssl %} listen 443 ssl default_server; -{% endif %} + {% endif %} -{% if ipv6_enabled %} + {% if nginx_ipv6_enabled %} listen [::]:80 default_server; - {% if ssl.use_ssl %} + {% if nginx_use_ssl %} listen [::]:443 ssl default_server; + {% endif %} {% endif %} -{% endif %} server_name _; return 444; } +{% endif %} -{% if ssl.use_ssl %} +{% if nginx_use_ssl %} # Redirect to https server { - listen {{ nginx.http_port }}; + listen {{ nginx_http_port }}; - server_name {{ xnat_web_server.host }}; - return 301 https://{{ xnat_web_server.host }}:{{ nginx.https_port }}$request_uri; + server_name {{ nginx_server_name }}; + return 301 https://{{ nginx_server_name }}:{{ nginx_https_port }}$request_uri; } {% endif %} server { -{% if ssl.use_ssl %} - listen {{ nginx.https_port }} ssl http2; - {% if 
ipv6_enabled %} - listen [::]:{{ nginx.https_port }} ssl http2; +{% if nginx_use_ssl %} + listen {{ nginx_https_port }} ssl http2{% if not nginx_add_default_server %} default_server{% endif %}; + {% if nginx_ipv6_enabled %} + listen [::]:{{ nginx_https_port }} ssl http2{% if not nginx_add_default_server %} default_server{% endif %}; {% endif %} {% else %} - listen {{ nginx.http_port }}; - {% if ipv6_enabled %} - listen [::]:{{ nginx.http_port }}; + listen {{ nginx_http_port }}{% if not nginx_add_default_server %} default_server{% endif %}; + {% if nginx_ipv6_enabled %} + listen [::]:{{ nginx_http_port }}{% if not nginx_add_default_server %} default_server{% endif %}; {% endif %} {% endif %} - # Note: server_name is only used for disambiguation - server_name {{ xnat_web_server.host }}; - - root {{ tomcat.root }}; + server_name {{ nginx_server_name }}; + {% if nginx_root is defined %} + root {{ nginx_root }}; + {% endif %} location / { - proxy_pass http://localhost:{{ tomcat.port }}; + proxy_pass http://localhost:{{ nginx_proxy_port }}; proxy_redirect http:// $scheme://; proxy_set_header Host $http_host; proxy_set_header X-Real-IP $remote_addr; @@ -129,15 +136,15 @@ http { proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Port $remote_port; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_connect_timeout 3600; - proxy_send_timeout 3600; - proxy_read_timeout 3600; + proxy_connect_timeout 600; + proxy_send_timeout 600; + proxy_read_timeout 600; proxy_buffers 4 32k; client_max_body_size 0; client_body_buffer_size 128k; proxy_max_temp_file_size 0; } - access_log {{ nginx.log_folder }}/xnat.access.log; - error_log {{ nginx.log_folder }}/xnat.error.log; + access_log {{ nginx_app_access_log }}; + error_log {{ nginx_app_error_log }}; } } diff --git a/configure/playbooks/test-multiple_xnat_users.yml b/configure/playbooks/test-multiple_xnat_users.yml index 44a4c29..5c9b669 100644 --- a/configure/playbooks/test-multiple_xnat_users.yml 
+++ b/configure/playbooks/test-multiple_xnat_users.yml @@ -1,10 +1,10 @@ --- - name: Create multiple XNAT users with their own projects - hosts: web + hosts: xnat become: false gather_facts: true vars: n_users: 4 roles: - - { role: test-multiple_xnat_users } + - role: test-multiple_xnat_users diff --git a/provision/.terraform.lock.hcl b/provision/.terraform.lock.hcl index 8160da9..2352b9a 100644 --- a/provision/.terraform.lock.hcl +++ b/provision/.terraform.lock.hcl @@ -1,104 +1,86 @@ # This file is maintained automatically by "terraform init". # Manual edits may be lost in future updates. -provider "registry.terraform.io/hashicorp/archive" { - version = "2.4.0" - hashes = [ - "h1:cJokkjeH1jfpG4QEHdRx0t2j8rr52H33A7C/oX73Ok4=", - "zh:18e408596dd53048f7fc8229098d0e3ad940b92036a24287eff63e2caec72594", - "zh:392d4216ecd1a1fd933d23f4486b642a8480f934c13e2cae3c13b6b6a7e34a7b", - "zh:655dd1fa5ca753a4ace21d0de3792d96fff429445717f2ce31c125d19c38f3ff", - "zh:70dae36c176aa2b258331ad366a471176417a94dd3b4985a911b8be9ff842b00", - "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:7d8c8e3925f1e21daf73f85983894fbe8868e326910e6df3720265bc657b9c9c", - "zh:a032ec0f0aee27a789726e348e8ad20778c3a1c9190ef25e7cff602c8d175f44", - "zh:b8e50de62ba185745b0fe9713755079ad0e9f7ac8638d204de6762cc36870410", - "zh:c8ad0c7697a3d444df21ff97f3473a8604c8639be64afe3f31b8ec7ad7571e18", - "zh:df736c5a2a7c3a82c5493665f659437a22f0baf8c2d157e45f4dd7ca40e739fc", - "zh:e8ffbf578a0977074f6d08aa8734e36c726e53dc79894cfc4f25fadc4f45f1df", - "zh:efea57ff23b141551f92b2699024d356c7ffd1a4ad62931da7ed7a386aef7f1f", - ] -} - provider "registry.terraform.io/hashicorp/aws" { - version = "5.14.0" - constraints = ">= 4.45.0, >= 5.0.0" + version = "5.52.0" + constraints = ">= 4.45.0, >= 5.30.0" hashes = [ - "h1:MkK5wbWd3g9MAR+LqyWhqlGXolkrryf6LWkOAtOdG9k=", - "zh:03b80869b97dfca4ce6ee94a005e15ccec4d98af0876084a963963b05c9ab743", - "zh:11d148800fe028fcd10590f0473c5df306e220776e359aa838c2f07e5a89187e", 
- "zh:15d696cf583dc2917b257891e4a33afe7c3e8f20b63183f510267d709baaaf3d", - "zh:34c41e44534fbbf95a5f89b38404ee52b41c6c70af68f7e63a423b276fbcf797", - "zh:4211d0fd4753f7ba202f3e4a8afb2e03d12112dd4db4f9267c472bd597dc71ca", - "zh:47b6017d0cdd2f62b9e46137de38cd618441f658f8570a8e2687cce7643bf953", - "zh:51785b942d6f588825f4bfa86e05502be8721194b289c474121072e49acff6c3", - "zh:565f76885d41ecfea192b8a2e2f3d4b3dd278790d1d82b204706ae3582d51cf6", - "zh:703d670e1d73360d2533b02dbe9e2e055bf6f36a478cd4d66f2349861575c2ed", - "zh:7e4701f38590c22066da90b75dd92d81a685225d2d222d22425b7ccb26e92b4a", + "h1:LEJSEOCO8LPIO6uxJDYrFXdr+Y9hSmTWVcSgG6EdGvw=", + "h1:ucZxfJtHMHBp4Amnk0K3Bdr7Umbk6he8byey/+u41Lc=", + "zh:22c4599d47cd59e5519c52afc528fa2aec43b4434f369870ee2806daa071449d", + "zh:3c2edc482662a654f84db4cd3f2cdd8f200147207d053d2e95082744b7814e6d", + "zh:57edc36f908c64de37e92a978f3d675604315a725268da936fcd1e270199db47", + "zh:79e7afd5fb161f2eb2b7f8e7fd5cbb7f56a2c64f141b56f511ec69337ad3e96b", + "zh:82c6ae9a7f971b6ee8c476b6eb7f1be9d24ddd183cbf025f52628084ddb3a5ae", + "zh:92faecc0a8f573f57f37d24415862380a40341eb13d66beb738dd0873899a58e", + "zh:963d3c0e1aa22c872cd96f04ceb41c388137b972f714efbde989221bf7f6f723", "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", - "zh:ca3449252d70df14ad713d5b95fa0610da8087f12c9deb87beffe788f518d06d", - "zh:e2ed3d6d8c12d3fe56fb03fe272779270a92f6157ade8c3db1c987b83b62e68c", - "zh:f0b07b84a43d1afc3a9790ca699771970525c132fa8551e7b326d1f263414dd1", - "zh:f1d83b3e5a29bae471f9841a4e0153eac5bccedbdece369e2f6186e9044db64e", + "zh:af6d3bb94aa8a84d740e3731d2379cc5e12aa48d5db0f7489c4639f3814a22d7", + "zh:b9f7aceeaf5daf71394eab9bf0f9f56fdc762cac90e4d62e63aa3fcdf6c1c127", + "zh:c3dcfc2569edae4f36b798c76da7f7633e7bf322505d447d7c370a56c2a30dd2", + "zh:c8abb21c5ceba857f0eaff9e531d781dc655f8cdfae1cf056066daae72546a7f", + "zh:d92004a6a2a770d2542fd9c01b685418ab8d7ab422cf2cdce35dde789bc8593c", + 
"zh:dc794660b1d6d8f26a917e0ffab1875aa75144736875efaa60f29c72bf02afbf", + "zh:df931c4905e35ae43d558f6cda15f05710a7a24ecbb94533f8822e7572126512", ] } provider "registry.terraform.io/hashicorp/http" { - version = "3.4.0" + version = "3.4.2" hashes = [ - "h1:AaRLrzxA1t02OIwO32uLp85npqRLZSwPFgrHxb9qp0c=", - "h1:m0d6+9xK/9TJSE9Z6nM4IwHXZgod4/jkdsf7CZSpUvo=", - "zh:56712497a87bc4e91bbaf1a5a2be4b3f9cfa2384baeb20fc9fad0aff8f063914", - "zh:6661355e1090ebacab16a40ede35b029caffc279d67da73a000b6eecf0b58eba", - "zh:67b92d343e808b92d7e6c3bbcb9b9d5475fecfed0836963f7feb9d9908bd4c4f", + "h1:v6Hn+15SfN2SI281Sp+uNXdWhD197ycP07fnaoGpPcc=", + "h1:vaoPfsLm6mOk6avKTrWi35o+9p4fEeZAY3hzYoXVTfo=", + "zh:0ba051c9c8659ce0fec94a3d50926745f11759509c4d6de0ad5f5eb289f0edd9", + "zh:23e6760e8406fef645913bf47bfab1ca984c1c5805d2bb0ef8310b16913d29cd", + "zh:3c69fde4548bfe65b968534c4df8d699648c921d6a065b97fec5faece73a442b", + "zh:41c7f9a8c117704b7a8fa96a57ebfb92b72129d9625128eeb0dee7d5a09d1110", + "zh:59d09d2e00727df10565cc82a33250b44201fcd353eb2b1579507a5a0adcce18", "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:86ebb9be9b685c96dbb5c024b55d87526d57a4b127796d6046344f8294d3f28e", - "zh:902be7cfca4308cba3e1e7ba6fc292629dfd150eb9a9f054a854fa1532b0ceba", - "zh:9ba26e0215cd53b21fe26a0a98c007de1348b7d13a75ae3cfaf7729e0f2c50bb", - "zh:a195c941e1f1526147134c257ff549bea4c89c953685acd3d48d9de7a38f39dc", - "zh:a7967b3d2a8c3e7e1dc9ae381ca753268f9fce756466fe2fc9e414ca2d85a92e", - "zh:bde56542e9a093434d96bea21c341285737c6d38fea2f05e12ba7b333f3e9c05", - "zh:c0306f76903024c497fd01f9fd9bace5854c263e87a97bc2e89dcc96d35ca3cc", - "zh:f9335a6c336171e85f8e3e99c3d31758811a19aeb21fa8c9013d427e155ae2a9", + "zh:c95b2f63d4357b3068531b90d9dca62a32551d7693defb7ab14b650b5d139c57", + "zh:cc0a3bbd3026191b35f417d3a8f26bdfad376d15be9e8d99a8803487ca5b0105", + "zh:d1185c6abb3ba25123fb7df1ad7dbe2b9cd8f43962628da551040fbe1934656f", + "zh:dfb26fccab7ecdc150f67415e6cfe19d699dc43e8bf5722f36032b17b46a0fbe", + 
"zh:eb1fcc00073bc0463f64e49600a73d925b1a0c0ae5b94dd7b67d3ebac248a113", + "zh:ec9b9ad69cf790cb0603a1036d758063bbbc35c0c75f72dd04a1eddaf46ad010", ] } provider "registry.terraform.io/hashicorp/local" { - version = "2.4.0" + version = "2.5.1" hashes = [ - "h1:Bs7LAkV/iQTLv72j+cTMrvx2U3KyXrcVHaGbdns1NcE=", - "h1:ZUEYUmm2t4vxwzxy1BvN1wL6SDWrDxfH7pxtzX8c6d0=", - "zh:53604cd29cb92538668fe09565c739358dc53ca56f9f11312b9d7de81e48fab9", - "zh:66a46e9c508716a1c98efbf793092f03d50049fa4a83cd6b2251e9a06aca2acf", - "zh:70a6f6a852dd83768d0778ce9817d81d4b3f073fab8fa570bff92dcb0824f732", + "h1:/GAVA/xheGQcbOZEq0qxANOg+KVLCA7Wv8qluxhTjhU=", + "h1:tjcGlQAFA0kmQ4vKkIPPUC4it1UYxLbg4YvHOWRAJHA=", + "zh:0af29ce2b7b5712319bf6424cb58d13b852bf9a777011a545fac99c7fdcdf561", + "zh:126063ea0d79dad1f68fa4e4d556793c0108ce278034f101d1dbbb2463924561", + "zh:196bfb49086f22fd4db46033e01655b0e5e036a5582d250412cc690fa7995de5", + "zh:37c92ec084d059d37d6cffdb683ccf68e3a5f8d2eb69dd73c8e43ad003ef8d24", + "zh:4269f01a98513651ad66763c16b268f4c2da76cc892ccfd54b401fff6cc11667", + "zh:51904350b9c728f963eef0c28f1d43e73d010333133eb7f30999a8fb6a0cc3d8", + "zh:73a66611359b83d0c3fcba2984610273f7954002febb8a57242bbb86d967b635", "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:82a803f2f484c8b766e2e9c32343e9c89b91997b9f8d2697f9f3837f62926b35", - "zh:9708a4e40d6cc4b8afd1352e5186e6e1502f6ae599867c120967aebe9d90ed04", - "zh:973f65ce0d67c585f4ec250c1e634c9b22d9c4288b484ee2a871d7fa1e317406", - "zh:c8fa0f98f9316e4cfef082aa9b785ba16e36ff754d6aba8b456dab9500e671c6", - "zh:cfa5342a5f5188b20db246c73ac823918c189468e1382cb3c48a9c0c08fc5bf7", - "zh:e0e2b477c7e899c63b06b38cd8684a893d834d6d0b5e9b033cedc06dd7ffe9e2", - "zh:f62d7d05ea1ee566f732505200ab38d94315a4add27947a60afa29860822d3fc", - "zh:fa7ce69dde358e172bd719014ad637634bbdabc49363104f4fca759b4b73f2ce", + "zh:7ae387993a92bcc379063229b3cce8af7eaf082dd9306598fcd42352994d2de0", + "zh:9e0f365f807b088646db6e4a8d4b188129d9ebdbcf2568c8ab33bddd1b82c867", + 
"zh:b5263acbd8ae51c9cbffa79743fbcadcb7908057c87eb22fd9048268056efbc4", + "zh:dfcd88ac5f13c0d04e24be00b686d069b4879cc4add1b7b1a8ae545783d97520", ] } provider "registry.terraform.io/hashicorp/random" { - version = "3.5.1" + version = "3.6.2" hashes = [ - "h1:IL9mSatmwov+e0+++YX2V6uel+dV6bn+fC/cnGDK3Ck=", - "h1:sZ7MTSD4FLekNN2wSNFGpM+5slfvpm5A/NLVZiB7CO0=", - "zh:04e3fbd610cb52c1017d282531364b9c53ef72b6bc533acb2a90671957324a64", - "zh:119197103301ebaf7efb91df8f0b6e0dd31e6ff943d231af35ee1831c599188d", - "zh:4d2b219d09abf3b1bb4df93d399ed156cadd61f44ad3baf5cf2954df2fba0831", - "zh:6130bdde527587bbe2dcaa7150363e96dbc5250ea20154176d82bc69df5d4ce3", - "zh:6cc326cd4000f724d3086ee05587e7710f032f94fc9af35e96a386a1c6f2214f", + "h1:R5qdQjKzOU16TziCN1vR3Exr/B+8WGK80glLTT4ZCPk=", + "h1:VavG5unYCa3SYISMKF9pzc3718M0bhPlcbUZZGl7wuo=", + "zh:0ef01a4f81147b32c1bea3429974d4d104bbc4be2ba3cfa667031a8183ef88ec", + "zh:1bcd2d8161e89e39886119965ef0f37fcce2da9c1aca34263dd3002ba05fcb53", + "zh:37c75d15e9514556a5f4ed02e1548aaa95c0ecd6ff9af1119ac905144c70c114", + "zh:4210550a767226976bc7e57d988b9ce48f4411fa8a60cd74a6b246baf7589dad", + "zh:562007382520cd4baa7320f35e1370ffe84e46ed4e2071fdc7e4b1a9b1f8ae9b", + "zh:5efb9da90f665e43f22c2e13e0ce48e86cae2d960aaf1abf721b497f32025916", + "zh:6f71257a6b1218d02a573fc9bff0657410404fb2ef23bc66ae8cd968f98d5ff6", "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", - "zh:b6d88e1d28cf2dfa24e9fdcc3efc77adcdc1c3c3b5c7ce503a423efbdd6de57b", - "zh:ba74c592622ecbcef9dc2a4d81ed321c4e44cddf7da799faa324da9bf52a22b2", - "zh:c7c5cde98fe4ef1143bd1b3ec5dc04baf0d4cc3ca2c5c7d40d17c0e9b2076865", - "zh:dac4bad52c940cd0dfc27893507c1e92393846b024c5a9db159a93c534a3da03", - "zh:de8febe2a2acd9ac454b844a4106ed295ae9520ef54dc8ed2faf29f12716b602", - "zh:eab0d0495e7e711cca367f7d4df6e322e6c562fc52151ec931176115b83ed014", + "zh:9647e18f221380a85f2f0ab387c68fdafd58af6193a932417299cdcae4710150", + "zh:bb6297ce412c3c2fa9fec726114e5e0508dd2638cad6a0cb433194930c97a544", + 
"zh:f83e925ed73ff8a5ef6e3608ad9225baa5376446349572c2449c0c0b3cf184b7", + "zh:fbef0781cb64de76b1df1ca11078aecba7800d82fd4a956302734999cfd9a4af", ] } diff --git a/provision/README.md b/provision/README.md index 3a38d0d..5ec7033 100644 --- a/provision/README.md +++ b/provision/README.md @@ -35,10 +35,10 @@ ec2_instance_types = { Alternatively, you could use a [GPU-enabled instance](https://aws.amazon.com/ec2/instance-types/g4/) for the container service and run the GPU version of the FastSurfer pipeline (see the -[`run_fastsurfer_gpu`](../configure/playbooks/roles/container_service_client/files/recon-all-gpu-command.json) +[`run_fastsurfer_gpu`](../configure/playbooks/roles/container_service_images/files/recon-all-gpu-command.json) command). However, this will **significantly drive up the costs**. -You may also have to increase the amount of RAM reserved for Java (and thus XNAT) in the Ansible configuration. In the file `xnat-aws/configure/group_vars/web/vars/tomcat.yml` you would need to modify the `java.mem` variable, e.g.: +You may also have to increase the amount of RAM reserved for Java (and thus XNAT) in the Ansible configuration. In the file `xnat-aws/configure/group_vars/xnat.yml` you would need to modify the `java.mem` variable, e.g.: ```yaml java_mem: @@ -156,13 +156,15 @@ to skip the prompt. 
| Name | Version | |------|---------| | [terraform](#requirement\_terraform) | >=0.15 | +| [terraform](#requirement\_terraform) | >= 1.1.4 | +| [aws](#requirement\_aws) | >= 5.30.0 | ## Providers | Name | Version | |------|---------| -| [aws](#provider\_aws) | 5.14.0 | -| [local](#provider\_local) | 2.4.0 | +| [aws](#provider\_aws) | 5.52.0 | +| [local](#provider\_local) | 2.5.1 | ## Modules diff --git a/provision/modules/database/database.tf b/provision/modules/database/database.tf index c5d3001..ce73146 100644 --- a/provision/modules/database/database.tf +++ b/provision/modules/database/database.tf @@ -111,6 +111,6 @@ locals { tcp_protocol = "tcp" any_protocol = "-1" all_ips = ["0.0.0.0/0"] - ansible_vault_file = "../configure/group_vars/web/vault" + ansible_vault_file = "../configure/group_vars/xnat/vault" encryption_password_file = "../configure/.vault_password" } diff --git a/provision/show_resources_to_delete.sh b/provision/show_resources_to_delete.sh new file mode 100644 index 0000000..3d9884f --- /dev/null +++ b/provision/show_resources_to_delete.sh @@ -0,0 +1,17 @@ +#!/bin/bash +vpc="vpc-xxxxxxxxxxxxx" +region="eu-west-2" +aws ec2 describe-internet-gateways --region $region --filters 'Name=attachment.vpc-id,Values='$vpc | grep InternetGatewayId +aws ec2 describe-subnets --region $region --filters 'Name=vpc-id,Values='$vpc | grep SubnetId +aws ec2 describe-route-tables --region $region --filters 'Name=vpc-id,Values='$vpc | grep RouteTableId +aws ec2 describe-network-acls --region $region --filters 'Name=vpc-id,Values='$vpc | grep NetworkAclId +aws ec2 describe-vpc-peering-connections --region $region --filters 'Name=requester-vpc-info.vpc-id,Values='$vpc | grep VpcPeeringConnectionId +aws ec2 describe-vpc-endpoints --region $region --filters 'Name=vpc-id,Values='$vpc | grep VpcEndpointId +aws ec2 describe-nat-gateways --region $region --filter 'Name=vpc-id,Values='$vpc | grep NatGatewayId +aws ec2 describe-security-groups --region $region --filters 
'Name=vpc-id,Values='$vpc | grep GroupId +aws ec2 describe-instances --region $region --filters 'Name=vpc-id,Values='$vpc | grep InstanceId +aws ec2 describe-vpn-connections --region $region --filters 'Name=vpc-id,Values='$vpc | grep VpnConnectionId +aws ec2 describe-vpn-gateways --region $region --filters 'Name=attachment.vpc-id,Values='$vpc | grep VpnGatewayId +aws ec2 describe-network-interfaces --region $region --filters 'Name=vpc-id,Values='$vpc | grep NetworkInterfaceId +aws ec2 describe-carrier-gateways --region $region --filters Name=vpc-id,Values=$vpc | grep CarrierGatewayId +aws ec2 describe-local-gateway-route-table-vpc-associations --region $region --filters Name=vpc-id,Values=$vpc | grep LocalGatewayRouteTableVpcAssociationId \ No newline at end of file diff --git a/provision/templates/ansible_hosts.yml.tftpl b/provision/templates/ansible_hosts.yml.tftpl index d99b7c8..4a3229f 100644 --- a/provision/templates/ansible_hosts.yml.tftpl +++ b/provision/templates/ansible_hosts.yml.tftpl @@ -20,17 +20,21 @@ all: children: # All web servers - web: + xnat: hosts: xnat_web: # All Container Service server hosts - cserv_hosts: + container_service_host: hosts: xnat_cserv: + container_service_client: + hosts: + xnat_web: + # xnat_cserv hosts and all the clients (web servers) it serves - xnat_container_service: + container_service: hosts: xnat_cserv: xnat_web: diff --git a/provision/versions.tf b/provision/versions.tf new file mode 100644 index 0000000..0124acd --- /dev/null +++ b/provision/versions.tf @@ -0,0 +1,11 @@ +# Enforce minimum Terraform and provider version numbers. +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.30.0" + } + } + + required_version = ">= 1.1.4" +} \ No newline at end of file