diff --git a/defaults/main.yml b/defaults/main.yml
index 196f553..a033f72 100644
--- a/defaults/main.yml
+++ b/defaults/main.yml
@@ -231,6 +231,31 @@ archivematica_src_syslog_mcpserver_level: "DEBUG"
 #   enabled: '0'
 #   field_name: 'command_id'
 
+#
+# Configure locations
+#
+# For some space protocols, locations are configured with a list of dictionaries.
+# Each dictionary has 4 keys:
+# 1) location_purpose: valid values are:
+#
+#      "AR": AIP RECOVERY
+#      "AS": AIP STORAGE
+#      "CP": CURRENTLY PROCESSING
+#      "DS": DIP STORAGE
+#      "SD": SWORD DEPOSIT
+#      "SS": STORAGE SERVICE INTERNAL PROCESSING
+#      "BL": TRANSFER BACKLOG
+#      "TS": TRANSFER SOURCE
+#      "RP": REPLICATOR
+#
+# 2) location_path: the Space absolute path is assumed to be "/", and the leading "/" is stripped when the
+#      location is created. If the Space uses a different absolute path, give the path relative to the Space
+#      path, prefixed with a "/" character.
+#
+# 3) location_description: any location description
+#
+# 4) location_default: boolean
+
 #
 # Configure SS locations
 #
@@ -239,3 +264,15 @@ archivematica_src_syslog_mcpserver_level: "DEBUG"
 #  - { location_purpose: "AS", location_path: "/aipstore", location_description: "AipStore", location_default: "true" }
 #  - { location_purpose: "TS", location_path: "/transfer-source/", location_description: "Transfer Source", location_default: "false" }
 
+#
+# Configure SS Pipeline Local FileSystem locations. The CP location is mandatory when the pipeline runs on a different server than the SS.
+# Pipeline Local Filesystems refer to storage that is local to the Archivematica pipeline,
+# but remote to the Storage Service. It is an ssh-based remote space.
+# More info:
+# https://www.archivematica.org/en/docs/storage-service-0.16/administrators/#pipeline-local-filesystem
+#
+#archivematica_src_configure_pipeline_localfs_locations:
+#  - { location_purpose: "CP", location_path: "{{ archivematica_src_shareddir }}", location_description: "Pipeline Local Filesystem CP", location_default: "false" }
+#  - { location_purpose: "BL", location_path: "{{ archivematica_src_shareddir }}/www/AIPsStore/transferBacklog", location_description: "Pipeline Local Filesystem BL", location_default: "false" }
+#  - { location_purpose: "AS", location_path: "/aipstore", location_description: "AipStore", location_default: "true" }
+#  - { location_purpose: "TS", location_path: "/transfer-source/", location_description: "Transfer Source", location_default: "false" }
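In addition to the four keys above, the location-creation task file introduced below also understands an optional location_replicaof key on "RP" entries, holding the description of the location to replicate. A minimal sketch of such a list, with hypothetical paths and descriptions:

    archivematica_src_configure_pipeline_localfs_locations:
      - { location_purpose: "AS", location_path: "/aipstore", location_description: "AipStore", location_default: "true" }
      - { location_purpose: "RP", location_path: "/aipstore-replica", location_description: "AipStore Replica", location_default: "false", location_replicaof: "AipStore" }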
diff --git a/tasks/configure-create-location.yml b/tasks/configure-create-location.yml
new file mode 100644
index 0000000..6a1f938
--- /dev/null
+++ b/tasks/configure-create-location.yml
@@ -0,0 +1,106 @@
+---
+
+# This file works as a function to create locations. The arguments are:
+# - am_configure_space_uuid
+# - am_configure_pipeline_uuid
+# - am_configure_location: one-record dictionary with the fields:
+#     - location_purpose
+#     - location_path (absolute path)
+#     - location_description
+#     - location_default
+#
+# For instance, you can call this task file from another task file with:
+#
+#- name: "Create Pipeline Local Filesystem locations"
+#  include_tasks: configure-create-location.yml
+#  vars:
+#    am_configure_space_uuid: "{{ am_configure_pipelinelocalfs_space_id.stdout }}"
+#    am_configure_pipeline_uuid: "{{ am_configure_pipelinelocalfs_pipeline_uuid.stdout }}"
+#  with_items: "{{ archivematica_src_configure_pipeline_localfs_locations }}"
+#  loop_control:
+#    loop_var: am_configure_location
+
+- name: "Check if '{{ am_configure_location.location_purpose }}' location with '{{ am_configure_location.location_path }}' path exists in '{{ am_configure_space_uuid }}' space"
+  shell: >
+    echo "select location_id from locations_locationpipeline where location_id in
+    (select uuid from locations_location where purpose='{{ am_configure_location.location_purpose }}'
+    and relative_path='{{ am_configure_location.location_path | regex_replace('^/', '') }}'
+    and space_id='{{ am_configure_space_uuid }}');"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_check_location_id
+
+- name: "Create '{{ am_configure_location.location_purpose }}' location with '{{ am_configure_location.location_path }}' path in '{{ am_configure_space_uuid }}' space"
+  uri:
+    url: "{{ archivematica_src_configure_ss_url }}/api/v2/location/"
+    headers:
+      Content-Type: "application/json"
+      Authorization: "ApiKey {{ archivematica_src_configure_ss_user }}:{{ archivematica_src_configure_ss_api_key }}"
+    body:
+      pipeline: ["/api/v2/pipeline/{{ am_configure_pipeline_uuid }}/"]
+      purpose: "{{ am_configure_location.location_purpose }}"
+      relative_path: "{{ am_configure_location.location_path | regex_replace('^\\/', '') }}"
+      description: "{{ am_configure_location.location_description }}"
+      space: "/api/v2/space/{{ am_configure_space_uuid }}/"
+      default: "{{ am_configure_location.location_default }}"
+    body_format: json
+    status_code: 201
+    method: POST
+  when: am_configure_check_location_id.stdout == ""
+
+- name: "Get the location id to be replicated when the location is a replicator"
+  shell: >
+    echo "select id from locations_location where description='{{ am_configure_location.location_replicaof }}';"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_replica_location_id
+  when:
+    - am_configure_location.location_purpose == "RP"
+    - am_configure_location.location_replicaof is defined
+
+- name: "Get the location id when the location is a replicator"
+  shell: >
+    echo "select id from locations_location where description='{{ am_configure_location.location_description }}';"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_location_id
+  when:
+    - am_configure_location.location_purpose == "RP"
+    - am_configure_location.location_replicaof is defined
+
+- name: "Configure replication for location with description: '{{ am_configure_location.location_replicaof }}'"
+  shell: >
+    echo "insert into locations_location_replicators (from_location_id, to_location_id) values('{{ am_configure_replica_location_id.stdout }}','{{ am_configure_location_id.stdout }}');"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_replication_insert
+  when:
+    - am_configure_location.location_purpose == "RP"
+    - am_configure_location.location_replicaof is defined
+    - am_configure_replica_location_id.stdout != ""
+    - am_configure_location_id.stdout != ""
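The task file above can also be included for a single location without a loop. A minimal sketch, assuming the space_uuid and pipeline_uuid facts registered in tasks/configure.yml and a hypothetical location entry:

    - name: "Create a single AIP Storage location"
      include_tasks: configure-create-location.yml
      vars:
        am_configure_space_uuid: "{{ space_uuid.stdout }}"
        am_configure_pipeline_uuid: "{{ pipeline_uuid.stdout }}"
        am_configure_location: { location_purpose: "AS", location_path: "/aipstore", location_description: "AipStore", location_default: "true" }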
diff --git a/tasks/configure-pipeline-local-fs.yml b/tasks/configure-pipeline-local-fs.yml
new file mode 100644
index 0000000..dbbf9b1
--- /dev/null
+++ b/tasks/configure-pipeline-local-fs.yml
@@ -0,0 +1,147 @@
+---
+# This task file configures the Pipeline Local Filesystem Space.
+# Pipeline Local Filesystems refer to storage that is local to the Archivematica pipeline,
+# but remote to the Storage Service. It is an ssh-based remote space.
+# More info:
+# https://www.archivematica.org/en/docs/storage-service-0.16/administrators/#pipeline-local-filesystem
+
+
+# Get the pipeline uuid
+- name: Get pipeline uuid
+  become: "yes"
+  shell: >
+    echo "select value from DashboardSettings where name='dashboard_uuid';"
+    | PYTHONPATH={{ archivematica_src_am_common_app }}:{{ archivematica_src_am_dashboard_app }} {{ archivematica_src_am_dashboard_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_am_dashboard_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_am_dashboard_environment }}"
+  register: am_configure_pipelinelocalfs_pipeline_uuid
+
+# Define am_configure_pipelinelocalfs_pipeline_remote_name
+- set_fact:
+    am_configure_pipelinelocalfs_pipeline_remote_name: "{{ ansible_host }}"
+
+# Redefine am_configure_pipelinelocalfs_pipeline_remote_name when archivematica_src_configure_am_site_url is defined
+- set_fact:
+    am_configure_pipelinelocalfs_pipeline_remote_name: "{{ archivematica_src_configure_am_site_url|urlsplit('hostname') }}"
+  when: "archivematica_src_configure_am_site_url is defined"
+
+# Check whether a Pipeline Local Filesystem Space already exists
+- name: "Get Pipeline Local Filesystem Space ID when it is already configured"
+  become: "yes"
+  shell: >
+    echo "select space_id from locations_pipelinelocalfs where remote_name='{{ am_configure_pipelinelocalfs_pipeline_remote_name }}';"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_pipelinelocalfs_space_id
+
+# Create the Pipeline Local Filesystem Space when it doesn't exist
+- name: "Create Pipeline Local Filesystem Space"
+  uri:
+    url: "{{ archivematica_src_configure_ss_url }}/api/v2/space/"
+    headers:
+      Content-Type: "application/json"
+      Authorization: "ApiKey {{ archivematica_src_configure_ss_user }}:{{ archivematica_src_configure_ss_api_key }}"
+    body:
+      access_protocol: "PIPE_FS"
+      path: "/"
+      staging_path: "/var/archivematica/storage_service"
+      remote_user: "archivematica"
+      remote_name: "{{ am_configure_pipelinelocalfs_pipeline_remote_name }}"
+      rsync_password: ""
+      assume_rsync_daemon: False
+    body_format: json
+    status_code: 201
+    method: POST
+  when:
+    - am_configure_pipelinelocalfs_space_id.stdout is defined
+    - am_configure_pipelinelocalfs_space_id.stdout == ""
+
+- name: "Get Pipeline Local Filesystem Space ID when it is created"
+  shell: >
+    echo "select space_id from locations_pipelinelocalfs where remote_name='{{ am_configure_pipelinelocalfs_pipeline_remote_name }}';"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_pipelinelocalfs_space_id_new
+  when:
+    - am_configure_pipelinelocalfs_space_id.stdout is defined
+    - am_configure_pipelinelocalfs_space_id.stdout == ""
+
+# Reassign am_configure_pipelinelocalfs_space_id when the Space has just been created
+- set_fact:
+    am_configure_pipelinelocalfs_space_id: "{{ am_configure_pipelinelocalfs_space_id_new }}"
+  when: am_configure_pipelinelocalfs_space_id_new.stdout is defined
+
+- name: "Create Pipeline Local Filesystem locations"
+  include_tasks: configure-create-location.yml
+  vars:
+    am_configure_space_uuid: "{{ am_configure_pipelinelocalfs_space_id.stdout }}"
+    am_configure_pipeline_uuid: "{{ am_configure_pipelinelocalfs_pipeline_uuid.stdout }}"
+  with_items: "{{ archivematica_src_configure_pipeline_localfs_locations }}"
+  loop_control:
+    loop_var: am_configure_location
+
+# There must be exactly one Transfer Backlog and one Currently Processing location per pipeline,
+# so the pipeline has to be removed from the default locations when these new locations are added.
+- name: "Delete pipeline from default Local Filesystem Currently Processing or Transfer Backlog location when added to Pipeline Local FS"
+  shell: >
+    echo "delete from locations_locationpipeline where pipeline_id='{{ am_configure_pipelinelocalfs_pipeline_uuid.stdout }}' and location_id in
+    (select uuid from locations_location where purpose='{{ item }}' and space_id in
+    (select space_id from locations_localfilesystem where id='1'));"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell
+    | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  when: "item in (archivematica_src_configure_pipeline_localfs_locations | map(attribute='location_purpose') | list)"
+  loop:
+    - "BL"
+    - "CP"
+
+- name: "Create ssh key in SS"
+  user:
+    name: "archivematica"
+    generate_ssh_key: "yes"
+    ssh_key_file: ".ssh/id_rsa"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+  register: am_configure_pipelinelocalfs_ss_ssh_key
+
+- name: "Use StrictHostKeyChecking=no ssh option for archivematica user"
+  lineinfile:
+    create: "yes"
+    path: "/var/lib/archivematica/.ssh/config"
+    owner: "archivematica"
+    group: "archivematica"
+    mode: "0600"
+    line: "StrictHostKeyChecking no"
+  delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
+  remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"
+
+- name: "Show ssh key"
+  debug:
+    msg: "{{ am_configure_pipelinelocalfs_ss_ssh_key.ssh_public_key }}"
+
+- name: "Add SS ssh key to pipeline server"
+  authorized_key:
+    user: "archivematica"
+    state: "present"
+    key: "{{ am_configure_pipelinelocalfs_ss_ssh_key.ssh_public_key }}"
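Once the key exchange above has run, the ssh trust can be checked with an ad-hoc task. A rough sketch (the delegate and user values mirror the tasks above; the check itself is illustrative, not part of the role):

    - name: "Verify the SS can reach the pipeline host as archivematica"
      command: ssh -o BatchMode=yes archivematica@{{ am_configure_pipelinelocalfs_pipeline_remote_name }} true
      become: "yes"
      become_user: "archivematica"
      changed_when: false
      delegate_to: "{{ archivematica_src_configure_ss_inventory_hostname | default(archivematica_src_configure_ss_url|urlsplit('hostname')) }}"
      remote_user: "{{ archivematica_src_configure_ss_ssh_user | default('artefactual') }}"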
diff --git a/tasks/configure.yml b/tasks/configure.yml
index ca0d992..8a857b2 100644
--- a/tasks/configure.yml
+++ b/tasks/configure.yml
@@ -236,20 +236,38 @@
 # Get id of the first registered pipeline (id=1)
 - name: "Configure AM: get default pipeline UUID from SS database"
   become: "yes"
-  command: mysql {{ archivematica_src_ss_db_name }} -Ns -e "select uuid from locations_pipeline where id='1';"
+  shell: >
+    echo "select \`uuid\` from locations_pipeline where id='1';"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
   register: pipeline_uuid
   tags: "configure-am"
 
 # Gets the uuid of the first registered space (id=1)
 - name: "Configure AM: get default Space UUID from SS database"
   become: "yes"
-  command: mysql {{ archivematica_src_ss_db_name }} -Ns -e "select uuid from locations_space where id='1';"
+  shell: >
+    echo "select \`uuid\` from locations_space where id='1';"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell | tail -n1
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
   register: space_uuid
   tags: "configure-am"
 
 - name: "Configure AM: get all TS descriptions from SS database"
   become: "yes"
-  command: mysql {{ archivematica_src_ss_db_name }} -Ns -e "select description from locations_location;"
+  shell: >
+    echo "select description from locations_location;"
+    | {{ archivematica_src_ss_virtualenv }}/bin/python manage.py dbshell | tail -n +2
+  args:
+    chdir: "{{ archivematica_src_ss_app }}"
+    executable: /bin/bash
+  environment: "{{ archivematica_src_ss_environment }}"
   register: location_descriptions
   tags: "configure-am"
diff --git a/tasks/main.yml b/tasks/main.yml
index cbfd892..89a24fe 100644
--- a/tasks/main.yml
+++ b/tasks/main.yml
@@ -293,3 +293,14 @@
   when:
     - "archivematica_src_install_ss|bool or archivematica_src_install_ss=='rpm'"
     - "archivematica_src_configure_gpg is defined"
+
+#
+# Configure Pipeline Local Filesystem Space
+#
+
+- include: "configure-pipeline-local-fs.yml"
+  tags:
+    - "amsrc-configure"
+  when:
+    - "archivematica_src_configure_dashboard|bool"
+    - "archivematica_src_configure_pipeline_localfs_locations is defined"
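For reference, a sketch of the variables a playbook or inventory might set to enable this feature end to end; hostnames, URLs, the API key, and the extra locations are placeholders:

    archivematica_src_configure_dashboard: "yes"
    archivematica_src_configure_ss_url: "http://ss.example.org:8000"
    archivematica_src_configure_ss_user: "admin"
    archivematica_src_configure_ss_api_key: "CHANGEME"
    archivematica_src_configure_ss_inventory_hostname: "ss.example.org"
    archivematica_src_configure_ss_ssh_user: "artefactual"
    archivematica_src_configure_pipeline_localfs_locations:
      - { location_purpose: "CP", location_path: "{{ archivematica_src_shareddir }}", location_description: "Pipeline Local Filesystem CP", location_default: "false" }
      - { location_purpose: "TS", location_path: "/home", location_description: "Pipeline Transfer Source", location_default: "false" }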