From 4b1e6ff3b85effed84ec0c1a952b80923b346046 Mon Sep 17 00:00:00 2001
From: Miguel Angel
Date: Tue, 18 Feb 2020 18:46:03 +0100
Subject: [PATCH] am-configure: Configure Pipeline Local FS

- Create a new task file (`configure-pipeline-local-fs.yml`) to configure
  the Pipeline Local FS Space and its locations.
- Create a new task file (`configure-create-location.yml`) that can be used
  to create locations of any purpose, checking whether the location already
  exists before creating it.
---
 defaults/main.yml                     |  37 +++++++
 tasks/configure-create-location.yml   |  56 ++++++++++
 tasks/configure-pipeline-local-fs.yml | 147 ++++++++++++++++++++++++++
 tasks/main.yml                        |  11 ++
 4 files changed, 251 insertions(+)
 create mode 100644 tasks/configure-create-location.yml
 create mode 100644 tasks/configure-pipeline-local-fs.yml

diff --git a/defaults/main.yml b/defaults/main.yml
index f4ab9b02..46b0ae97 100644
--- a/defaults/main.yml
+++ b/defaults/main.yml
@@ -220,6 +220,31 @@ archivematica_src_syslog_mcpserver_level: "DEBUG"
 # enabled: '0'
 # field_name: 'command_id'
 
+#
+# Configure locations
+#
+# For several space protocols, locations are configured as a list of dictionaries, one dictionary per location.
+# Each dictionary has 4 keys:
+# 1) location_purpose: valid values are:
+#
+#      "AR": AIP RECOVERY
+#      "AS": AIP STORAGE
+#      "CP": CURRENTLY PROCESSING
+#      "DS": DIP STORAGE
+#      "SD": SWORD DEPOSIT
+#      "SS": STORAGE SERVICE INTERNAL PROCESSING
+#      "BL": TRANSFER BACKLOG
+#      "TS": TRANSFER SOURCE
+#      "RP": REPLICATOR
+#
+# 2) location_path: the tasks assume that the Space absolute path is "/" and strip the leading "/"
+#                   when creating the location. If the Space uses a different absolute path, write
+#                   the path relative to the Space path, prefixed with a "/" character.
+#
+# 3) location_description: a free-text description of the location
+#
+# 4) location_default: boolean; whether the location is the default one for its purpose
+
 #
 # Configure SS locations
 #
@@ -228,3 +253,15 @@
 # - { location_purpose: "AS", location_path: "/aipstore", location_description: "AipStore", location_default: "true" }
 # - { location_purpose: "TS", location_path: "/transfer-source/", location_description: "Transfer Source", location_default: "false" }
 
+#
+# Configure SS Pipeline Local Filesystem locations. The CP location is mandatory when the pipeline runs on a different server than the Storage Service.
+# Pipeline Local Filesystems refer to storage that is local to the Archivematica pipeline,
+# but remote to the Storage Service. It is an SSH-based remote space.
+# More info:
+# https://www.archivematica.org/en/docs/storage-service-0.16/administrators/#pipeline-local-filesystem
+#
+#archivematica_src_configure_pipeline_localfs_locations:
+# - { location_purpose: "CP", location_path: "{{ archivematica_src_shareddir }}", location_description: "Pipeline Local Filesystem CP", location_default: "false" }
+# - { location_purpose: "BL", location_path: "{{ archivematica_src_shareddir }}/www/AIPsStore/transferBacklog", location_description: "Pipeline Local Filesystem BL", location_default: "false" }
+# - { location_purpose: "AS", location_path: "/aipstore", location_description: "AipStore", location_default: "true" }
+# - { location_purpose: "TS", location_path: "/transfer-source/", location_description: "Transfer Source", location_default: "false" }
diff --git a/tasks/configure-create-location.yml b/tasks/configure-create-location.yml
new file mode 100644
index 00000000..dd872797
--- /dev/null
+++ b/tasks/configure-create-location.yml
@@ -0,0 +1,56 @@
+---
+
+# This task file works as a function to create locations. The arguments are:
+#  - am_configure_space_uuid
+#  - am_configure_pipeline_uuid
+#  - am_configure_location: a dictionary with the fields:
+#      - location_purpose
+#      - location_path (path with a leading "/"; see the location notes in defaults/main.yml)
+#      - location_description
+#      - location_default
+#
+# For instance, you can call this task file from another task file with:
+#
+#- name: "Create Pipeline Local Filesystem locations"
+#  include_tasks: configure-create-location.yml
+#  vars:
+#    am_configure_space_uuid: "{{ am_configure_pipelinelocalfs_space_id.stdout }}"
+#    am_configure_pipeline_uuid: "{{ am_configure_pipelinelocalfs_pipeline_uuid.stdout }}"
+#  with_items: "{{ archivematica_src_configure_pipeline_localfs_locations }}"
+#  loop_control:
+#    loop_var: am_configure_location
+
+- name: "Check if '{{ am_configure_location.location_purpose }}' location with '{{ am_configure_location.location_path }}' path exists in '{{ am_configure_space_uuid }}' space"
+  shell: >
+    echo "select location_id from locations_locationpipeline where location_id in
+    (select uuid from locations_location where purpose='{{ am_configure_location.location_purpose }}'
+    and relative_path='{{ am_configure_location.location_path | regex_replace('^/', '') }}'
+    and space_id='{{ am_configure_space_uuid }}');"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_check_location_id
+
+
+- name: "Create '{{ am_configure_location.location_purpose }}' location with '{{ am_configure_location.location_path }}' path in '{{ am_configure_space_uuid }}' space"
+  uri:
+    url: "{{ archivematica_src_configure_ss_url }}/api/v2/location/"
+    headers:
+      Content-Type: "application/json"
+      Authorization: "ApiKey {{ archivematica_src_configure_ss_user }}:{{ archivematica_src_configure_ss_api_key }}"
+    body:
+      pipeline: ["/api/v2/pipeline/{{ am_configure_pipeline_uuid }}/"]
+      purpose: "{{ am_configure_location.location_purpose }}"
+      relative_path: "{{ am_configure_location.location_path | regex_replace('^\\/', '') }}"
+      description: "{{ am_configure_location.location_description }}"
+      space: "/api/v2/space/{{ am_configure_space_uuid }}/"
default: "{{ am_configure_location.location_default }}" + body_format: json + status_code: 201 + method: POST + when: am_configure_check_location_id.stdout == "" diff --git a/tasks/configure-pipeline-local-fs.yml b/tasks/configure-pipeline-local-fs.yml new file mode 100644 index 00000000..48f19ac6 --- /dev/null +++ b/tasks/configure-pipeline-local-fs.yml @@ -0,0 +1,147 @@ +--- +# This tasks file configures the Pipeline Local Filesystem Spaces +# Pipeline Local Filesystems refer to the storage that is local to the Archivematica pipeline, +# but remote to the Storage Service. It's a ssh-based remote space. +# More info: +# https://www.archivematica.org/en/docs/storage-service-0.16/administrators/#pipeline-local-filesystem + + +# Get pipeline uuid +- name: Get pipeline uuid + become: "yes" + shell: > + echo: "select value from DashboardSettings where name='dashboard_uuid';" + | {{ archivematica_src_am_dashboard_virtualenv }}/bin/python manage.py dbshell + | tail -n1 + args: + chdir: "{{ archivematica_src_am_dashboard_app }}" + executable: /bin/bash + environment: "{{ archivematica_src_am_dashboard_environment }}" + register: am_configure_pipelinelocalfs_pipeline_uuid + +# Define archivematica_src_configure_pipeline_remote_name +- set_fact: + am_configure_pipelinelocalfs_pipeline_remote_name: "{{ ansible_host }}" + +# Redefine archivematica_src_configure_pipeline_remote_name when archivematica_src_configure_am_site_url is defined +- set_fact: + am_configure_pipelinelocalfs_pipeline_remote_name: "{{ archivematica_src_configure_am_site_url|urlsplit('hostname') }}" + when: "archivematica_src_configure_am_site_url is defined" + +# Check when Pipeline Local Filesystem already exists +- name: "Get Pipeline Local Filesystem Space ID when it is already configured" + become: "yes" + shell: > + echo "select space_id from locations_pipelinelocalfs where remote_name='{{ am_configure_pipelinelocalfs_pipeline_remote_name }}';" + | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell + | tail -n1 + args: + chdir: "{{ archivematica_src_ss_app }}" + executable: /bin/bash + environment: "{{ archivematica_src_ss_environment }}" + delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}" + remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}" + register: am_configure_pipelinelocalfs_space_id + +# Create Pipeline Local Filesystem Space when it doesn't exist + +- name: "Create Pipeline Local Filesystem Space" + uri: + url: "{{ archivematica_src_configure_ss_url }}/api/v2/space/" + headers: + Content-Type: "application/json" + Authorization: "ApiKey {{ archivematica_src_configure_ss_user }}:{{ archivematica_src_configure_ss_api_key }}" + body: + access_protocol: "PIPE_FS" + path: "/" + staging_path: "/var/archivematica/storage_service" + remote_user: "archivematica" + remote_name: "{{ am_configure_pipelinelocalfs_pipeline_remote_name }}" + rsync_password: "" + assume_rsync_daemon: False + body_format: json + status_code: 201 + method: POST + when: + - am_configure_pipelinelocalfs_space_id.stdout is defined + - am_configure_pipelinelocalfs_space_id.stdout == "" + +- name: "Get Pipeline Local Filesystem Space ID when it is created" + shell: > + echo "select space_id from locations_pipelinelocalfs where remote_name='{{ am_configure_pipelinelocalfs_pipeline_remote_name }}';" + | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell + | tail -n1 + args: + chdir: "{{ 
archivematica_src_ss_app }}" + executable: /bin/bash + environment: "{{ archivematica_src_ss_environment }}" + delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}" + remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}" + register: am_configure_pipelinelocalfs_space_id_new + when: + - am_configure_pipelinelocalfs_space_id.stdout is defined + - am_configure_pipelinelocalfs_space_id.stdout == "" + + +# Reassign pipelinelocalfs_space_id when the Space is created +- set_fact: am_configure_pipelinelocalfs_space_id={{ am_configure_pipelinelocalfs_space_id_new }} + when: am_configure_pipelinelocalfs_space_id_new.stdout is defined + + +- name: "Create Pipeline Local Filesystem locations" + include_tasks: configure-create-location.yml + vars: + am_configure_space_uuid: "{{ am_configure_pipelinelocalfs_space_id.stdout }}" + am_configure_pipeline_uuid: "{{ am_configure_pipelinelocalfs_pipeline_uuid.stdout }}" + with_items: "{{ archivematica_src_configure_pipeline_localfs_locations }}" + loop_control: + loop_var: am_configure_location + +# Transfer Backlog and Currently Processing locations must be exactly one per pipeline. +# So it is necessary to remove the pipeline from default locations when these locations_locationpipelinenew locations are added +- name: "Delete pipeline from default LocalFilesystem Currently Procession or Transfer Backlog location when added to Pipeline LocalFS" + shell: > + echo "delete from locations_locationpipeline where pipeline_id='{{ am_configure_pipelinelocalfs_pipeline_uuid.stdout }}' and location_id in + (select uuid from locations_location where purpose='{{ item }}' and space_id in + (select space_id from locations_localfilesystem where id='1'));" + | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell + | tail -n1 + args: + chdir: "{{ archivematica_src_ss_app }}" + executable: /bin/bash + environment: "{{ archivematica_src_ss_environment }}" + delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}" + remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}" + when: "item in (archivematica_src_configure_pipeline_localfs_locations | map(attribute='location_purpose') | list)" + loop: + - "BL" + - "CP" + +- name: "Create ssh key in SS" + user: + name: "archivematica" + generate_ssh_key: "yes" + ssh_key_file: ".ssh/id_rsa" + delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}" + remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}" + register: am_configure_pipelinelocalfs_ss_ssh_key + +- name: "Use StrictHostKeyChecking=no ssh option for archivematica user" + lineinfile: + create: "yes" + path: "/var/lib/archivematica/.ssh/config" + owner: "archivematica" + group: "archivematica" + mode: "0600" + line: "StrictHostKeyChecking no" + delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}" + remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}" + +- name: "Show ssh key" + debug: msg={{ am_configure_pipelinelocalfs_ss_ssh_key.ssh_public_key }} + +- name: "Add SS ssh key to pipeline server" + authorized_key: + user: "archivematica" + state: "present" + key: "{{ 
am_configure_pipelinelocalfs_ss_ssh_key.ssh_public_key }}" diff --git a/tasks/main.yml b/tasks/main.yml index 058a734b..82df7aa0 100644 --- a/tasks/main.yml +++ b/tasks/main.yml @@ -286,3 +286,14 @@ when: - "archivematica_src_install_ss|bool or archivematica_src_install_ss=='rpm'" - "archivematica_src_configure_gpg is defined" + +# +# Configure Pipeline Local Filesystem Space +# + +- include: "configure-pipeline-local-fs.yml" + tags: + - "amsrc-configure" + when: + - "archivematica_src_configure_dashboard|bool" + - "archivematica_src_configure_pipeline_localfs_locations is defined"
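
Example usage (a minimal sketch, not part of the patch itself): the new include in tasks/main.yml only
runs when the dashboard is configured and the new list variable is defined, so a deployment could set
variables along the following lines in its inventory or group_vars. The file name, Storage Service URL,
credentials and the Transfer Source path shown here are illustrative placeholders, not values shipped
by the role.

  # group_vars/archivematica.yml (hypothetical file name)
  archivematica_src_configure_dashboard: "yes"
  archivematica_src_configure_ss_url: "http://ss.example.com:8000"    # placeholder Storage Service URL
  archivematica_src_configure_ss_user: "admin"                        # placeholder SS API user
  archivematica_src_configure_ss_api_key: "REPLACE_WITH_SS_API_KEY"   # placeholder SS API key
  archivematica_src_configure_pipeline_localfs_locations:
    - { location_purpose: "CP", location_path: "{{ archivematica_src_shareddir }}", location_description: "Pipeline Local Filesystem CP", location_default: "false" }
    - { location_purpose: "TS", location_path: "/home/transfer-source", location_description: "Transfer Source", location_default: "false" }

  # Then run only the configuration tasks, e.g.:
  #   ansible-playbook playbook.yml --tags "amsrc-configure"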