diff --git a/pom.xml b/pom.xml
index 56b9436d..ee9d1d02 100644
--- a/pom.xml
+++ b/pom.xml
@@ -111,6 +111,12 @@
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.jenkins-ci.plugins.workflow</groupId>
+ <artifactId>workflow-step-api</artifactId>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.jenkins-ci.test</groupId>
<artifactId>docker-fixtures</artifactId>
diff --git a/src/main/java/io/jenkins/plugins/artifact_manager_jclouds/JCloudsArtifactManager.java b/src/main/java/io/jenkins/plugins/artifact_manager_jclouds/JCloudsArtifactManager.java
index 40fa42c6..56bc7814 100644
--- a/src/main/java/io/jenkins/plugins/artifact_manager_jclouds/JCloudsArtifactManager.java
+++ b/src/main/java/io/jenkins/plugins/artifact_manager_jclouds/JCloudsArtifactManager.java
@@ -121,7 +121,7 @@ private String getBlobPath(String key, String path) {
public void archive(FilePath workspace, Launcher launcher, BuildListener listener, Map<String, String> artifacts)
throws IOException, InterruptedException {
LOGGER.log(Level.FINE, "Archiving from {0}: {1}", new Object[] { workspace, artifacts });
- Map<String, String> contentTypes = workspace.act(new ContentTypeGuesser(new ArrayList<>(artifacts.keySet()), listener));
+ Map<String, String> contentTypes = workspace.act(new ContentTypeGuesser(new ArrayList<>(artifacts.values()), listener));
LOGGER.fine(() -> "guessing content types: " + contentTypes);
Map<String, URL> artifactUrls = new HashMap<>();
BlobStore blobStore = getContext().getBlobStore();
@@ -132,7 +132,7 @@ public void archive(FilePath workspace, Launcher launcher, BuildListener listene
String blobPath = getBlobPath(path);
Blob blob = blobStore.blobBuilder(blobPath).build();
blob.getMetadata().setContainer(provider.getContainer());
- blob.getMetadata().getContentMetadata().setContentType(contentTypes.get(entry.getKey()));
+ blob.getMetadata().getContentMetadata().setContentType(contentTypes.get(entry.getValue()));
artifactUrls.put(entry.getValue(), provider.toExternalURL(blob, HttpMethod.PUT));
}
@@ -167,6 +167,8 @@ public Map<String, String> invoke(File f, VirtualChannel channel) {
contentTypes.put(relPath, contentType);
} catch (IOException e) {
Functions.printStackTrace(e, listener.error("Unable to determine content type for file: " + theFile));
+ // A content type must be specified; otherwise, the metadata signature will be computed from data that includes "Content-Type:", but no such HTTP header will be sent, and AWS will reject the request.
+ contentTypes.put(relPath, "application/octet-stream");
}
}
return contentTypes;
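
The fallback above guarantees that every uploaded blob carries a content type. As a rough, standalone sketch of the same idea (not the plugin's actual ContentTypeGuesser; it assumes Files.probeContentType as the detection mechanism and also defaults when the probe returns null, which the hunk above does not show):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Standalone sketch only; the plugin's ContentTypeGuesser runs on the agent via FilePath.act.
class ContentTypeFallbackSketch {
    static Map<String, String> guess(File root, List<String> workspacePaths) {
        Map<String, String> contentTypes = new HashMap<>();
        for (String relPath : workspacePaths) {
            String type;
            try {
                // probeContentType may return null or throw, depending on platform and file.
                type = Files.probeContentType(new File(root, relPath).toPath());
            } catch (IOException e) {
                type = null;
            }
            if (type == null) {
                // Same fallback as the hunk above: never leave an entry without a content type,
                // or the signed PUT and the actual PUT would disagree about the Content-Type header.
                type = "application/octet-stream";
            }
            contentTypes.put(relPath, type);
        }
        return contentTypes;
    }
}
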
diff --git a/src/test/java/io/jenkins/plugins/artifact_manager_jclouds/s3/JCloudsArtifactManagerTest.java b/src/test/java/io/jenkins/plugins/artifact_manager_jclouds/s3/JCloudsArtifactManagerTest.java
index 3d290d5b..d67083ee 100644
--- a/src/test/java/io/jenkins/plugins/artifact_manager_jclouds/s3/JCloudsArtifactManagerTest.java
+++ b/src/test/java/io/jenkins/plugins/artifact_manager_jclouds/s3/JCloudsArtifactManagerTest.java
@@ -27,8 +27,11 @@
import io.jenkins.plugins.artifact_manager_jclouds.BlobStoreProviderDescriptor;
import io.jenkins.plugins.artifact_manager_jclouds.BlobStoreProvider;
import io.jenkins.plugins.artifact_manager_jclouds.JCloudsArtifactManagerFactory;
+import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.junit.Assume.*;
import java.io.IOException;
@@ -68,14 +71,18 @@
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import hudson.model.Item;
+import hudson.model.Run;
+import hudson.model.TaskListener;
import hudson.plugins.sshslaves.SSHLauncher;
import hudson.remoting.Which;
import hudson.slaves.DumbSlave;
import hudson.tasks.ArtifactArchiver;
import io.jenkins.plugins.aws.global_configuration.CredentialsAwsGlobalConfiguration;
+import java.io.Serializable;
import java.net.URI;
import java.net.URL;
import java.util.Collections;
+import java.util.Set;
import jenkins.branch.BranchSource;
import jenkins.model.ArtifactManagerConfiguration;
import jenkins.model.ArtifactManagerFactory;
@@ -84,6 +91,7 @@
import jenkins.plugins.git.GitSampleRepoRule;
import jenkins.plugins.git.traits.BranchDiscoveryTrait;
import jenkins.security.MasterToSlaveCallable;
+import jenkins.util.BuildListenerAdapter;
import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition;
import org.jenkinsci.plugins.workflow.job.WorkflowJob;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
@@ -93,7 +101,14 @@
import org.jenkinsci.plugins.workflow.flow.FlowCopier;
import org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject;
import org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProjectTest;
+import org.jenkinsci.plugins.workflow.steps.Step;
+import org.jenkinsci.plugins.workflow.steps.StepContext;
+import org.jenkinsci.plugins.workflow.steps.StepDescriptor;
+import org.jenkinsci.plugins.workflow.steps.StepExecution;
+import org.jenkinsci.plugins.workflow.steps.StepExecutions;
import org.jvnet.hudson.test.MockAuthorizationStrategy;
+import org.jvnet.hudson.test.TestExtension;
+import org.kohsuke.stapler.DataBoundConstructor;
public class JCloudsArtifactManagerTest extends S3AbstractTest {
@@ -306,6 +321,26 @@ public void contentType() throws Exception {
assertThat(response.getContentType(), equalTo("application/json"));
}
+ @Test
+ public void archiveWithDistinctArchiveAndWorkspacePaths() throws Exception {
+ String text = "some regular text";
+ ArtifactManagerConfiguration.get().getArtifactManagerFactories().add(getArtifactManagerFactory(null, null));
+
+ j.createSlave("remote", null, null);
+
+ WorkflowJob p = j.createProject(WorkflowJob.class, "p");
+ p.setDefinition(new CpsFlowDefinition(
+ "node('remote') {\n" +
+ " writeFile file: 'f.txt', text: '" + text + "'\n" +
+ " archiveWithCustomPath(archivePath: 'what/an/interesting/path/to/f.txt', workspacePath: 'f.txt')\n" +
+ "}", true));
+ j.buildAndAssertSuccess(p);
+
+ WebResponse response = j.createWebClient().goTo("job/p/1/artifact/what/an/interesting/path/to/f.txt", null).getWebResponse();
+ assertThat(response.getContentAsString(), equalTo(text));
+ assertThat(response.getContentType(), equalTo("text/plain"));
+ }
+
//@Test
public void archiveSingleLargeFile() throws Exception {
ArtifactManagerConfiguration.get().getArtifactManagerFactories().add(getArtifactManagerFactory(null, null));
@@ -339,4 +374,36 @@ public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListen
}
}
+ public static class ArchiveArtifactWithCustomPathStep extends Step implements Serializable {
+ private static final long serialVersionUID = 1L;
+ private final String archivePath;
+ private final String workspacePath;
+ @DataBoundConstructor
+ public ArchiveArtifactWithCustomPathStep(String archivePath, String workspacePath) {
+ this.archivePath = archivePath;
+ this.workspacePath = workspacePath;
+ }
+ @Override
+ public StepExecution start(StepContext context) throws Exception {
+ return StepExecutions.synchronousNonBlocking(context, context2 -> {
+ context.get(Run.class).pickArtifactManager().archive(
+ context.get(FilePath.class),
+ context.get(Launcher.class),
+ new BuildListenerAdapter(context.get(TaskListener.class)),
+ Collections.singletonMap(archivePath, workspacePath));
+ return null;
+ });
+ }
+ @TestExtension("archiveWithDistinctArchiveAndWorkspacePaths")
+ public static class DescriptorImpl extends StepDescriptor {
+ @Override
+ public String getFunctionName() {
+ return "archiveWithCustomPath";
+ }
+ @Override
+ public Set<? extends Class<?>> getRequiredContext() {
+ return Set.of(FilePath.class, Launcher.class, TaskListener.class);
+ }
+ }
+ }
}
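
For reference, a hypothetical, self-contained sketch (not part of the plugin; assumes Java 9+ for Map.of) of the archive() map contract that both the keySet()-to-values() fix and the new test rely on: keys are paths under the build's artifacts/ area, values are workspace-relative source paths, and content types must be looked up by the latter.

import java.util.Map;

// Hypothetical demo class, not part of the plugin.
class ArchiveMapSemantics {
    public static void main(String[] args) {
        // Key = path under the build's artifacts/ area, value = workspace-relative source path,
        // mirroring what the archiveWithCustomPath test step passes to archive().
        Map<String, String> artifacts = Map.of("what/an/interesting/path/to/f.txt", "f.txt");

        // Content types are guessed from files on disk, so the guesser is fed workspace paths
        // (artifacts.values()) and its result is keyed by workspace path.
        Map<String, String> contentTypes = Map.of("f.txt", "text/plain");

        for (Map.Entry<String, String> entry : artifacts.entrySet()) {
            String archivePath = entry.getKey();
            String workspacePath = entry.getValue();
            // Looking this up with entry.getKey() was the bug fixed above.
            String contentType = contentTypes.get(workspacePath);
            System.out.println(archivePath + " <- " + workspacePath + " (" + contentType + ")");
        }
    }
}
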