diff --git a/jenkins-scripts/dsl/_configs_/Globals.groovy b/jenkins-scripts/dsl/_configs_/Globals.groovy index bb4e41bae..3da1e8c38 100644 --- a/jenkins-scripts/dsl/_configs_/Globals.groovy +++ b/jenkins-scripts/dsl/_configs_/Globals.groovy @@ -136,16 +136,16 @@ class Globals return package_name.replaceAll('\\d*$', '') } - static String _s3_releases_dir(String package_name) { + static String s3_releases_dir(String package_name) { return get_canonical_package_name(package_name) + '/releases' } static String s3_upload_tarball_path(String package_name) { - return 's3://osrf-distributions/' + _s3_releases_dir(package_name) + return 's3://osrf-distributions/' + s3_releases_dir(package_name) } static String s3_download_url_basedir(String package_name) { - return 'https://osrf-distributions.s3.amazonaws.com/' + _s3_releases_dir(package_name) + return 'https://osrf-distributions.s3.amazonaws.com/' + s3_releases_dir(package_name) } /* rest of the s3 paths need to be cumputed during job running time since diff --git a/jenkins-scripts/dsl/_configs_/OSRFSourceCreation.groovy b/jenkins-scripts/dsl/_configs_/OSRFSourceCreation.groovy index 63463b9ff..bef933b00 100644 --- a/jenkins-scripts/dsl/_configs_/OSRFSourceCreation.groovy +++ b/jenkins-scripts/dsl/_configs_/OSRFSourceCreation.groovy @@ -33,9 +33,6 @@ class OSRFSourceCreation stringParam("UPLOAD_TO_REPO", default_params.find{ it.key == "UPLOAD_TO_REPO"}?.value, "For downstream jobs: OSRF repo name to upload the package to: stable | prerelease | nightly | none (for testing proposes)") - stringParam("PROJECT_NAME_TO_COPY_ARTIFACTS", - "", - "Internal use: parent job name passed by the job to be used in copy artifacts") } } } @@ -99,7 +96,7 @@ class OSRFSourceCreation exit 1 fi - echo "TARBALL_NAME=\${tarball}" >> ${properties_file} + echo "S3_FILES_TO_UPLOAD=\${tarball}" >> ${properties_file} echo "SOURCE_TARBALL_URI=$s3_download_url_basedir/\${tarball}" >> ${properties_file} """.stripIndent() ) @@ -130,8 +127,8 @@ class 
OSRFSourceCreation parameters { currentBuild() predefinedProps([PROJECT_NAME_TO_COPY_ARTIFACTS: '${JOB_NAME}', - S3_UPLOAD_PATH: Globals.s3_upload_tarball_path(package_name)]) + S3_UPLOAD_PATH: Globals.s3_releases_dir(package_name)]) // relative path - propertiesFile(properties_file) // TARBALL_NAME + propertiesFile(properties_file) // S3_FILES_TO_UPLOAD } } } diff --git a/jenkins-scripts/dsl/test.dsl b/jenkins-scripts/dsl/test.dsl index 5b0c381df..08ae6156c 100644 --- a/jenkins-scripts/dsl/test.dsl +++ b/jenkins-scripts/dsl/test.dsl @@ -52,6 +52,7 @@ repo_uploader.with stringParam('PACKAGE','','Package name') stringParam('TARBALL_NAME', '', 'Tarball name to upload') stringParam('S3_UPLOAD_PATH','', 'S3 path to upload') + stringParam('S3_FILES_TO_UPLOAD','', 'S3 file names to upload') stringParam('UPLOAD_TO_REPO','none','repo to upload') stringParam("PROJECT_NAME_TO_COPY_ARTIFACTS", "", @@ -76,7 +77,22 @@ repo_uploader.with ls -R \${WORKSPACE} test -f \${WORKSPACE}/${pkg_sources_dir}/\${TARBALL_NAME} - echo "Fake upload of \${TARBALL_NAME} to \${S3_UPLOAD_PATH}" + echo "Fake upload of \${S3_FILES_TO_UPLOAD} to \${S3_UPLOAD_PATH}" + # code copied from repository_uploader + pkgs_path="\$WORKSPACE/pkgs" + + for pkg in \${S3_FILES_TO_UPLOAD}; do + # S3_UPLOAD_PATH should be sent by the upstream job + if [[ -z \${S3_UPLOAD_PATH} ]]; then + echo "S3_UPLOAD_PATH was not defined. Not uploading" + exit 1 + fi + + # Seems important to upload the path with a final slash + echo "WILL RUN: s3cmd \${pkgs_path}/\${pkg} \${S3_UPLOAD_PATH}" + done + + """.stripIndent()) } }