diff --git a/deploy/pipeline/mw-pipeline-v0.1.yaml b/deploy/pipeline/mw-pipeline-v0.1.yaml
new file mode 100644
index 000000000..01e926cb8
--- /dev/null
+++ b/deploy/pipeline/mw-pipeline-v0.1.yaml
@@ -0,0 +1,163 @@
+apiVersion: tekton.dev/v1
+kind: Pipeline
+metadata:
+ name: mw-pipeline
+spec:
+ params:
+ - name: QUAY_REPO
+ type: string
+ - name: URL
+ description: Repository URL to clone from
+ type: string
+ - name: REVISION
+ description: 'Revision to checkout. (branch, tag, sha, ref, etc...)'
+ type: string
+ default: main
+ - name: BUILD_SCRIPT
+ description: 'The build script to embed with the Containerfile'
+ type: string
+ default: |
+ date
+ - name: RECIPE_IMAGE
+ type: string
+ - name: BUILD_TOOL
+ type: string
+ - name: BUILD_TOOL_VERSION
+ type: string
+ - name: JAVA_VERSION
+ type: string
+ - name: MVN_REPO_DEPLOY_URL
+ description: Maven repository to deploy to
+ type: string
+ default: http://indyhost/ #TODO remove default
+ - name: MVN_REPO_DEPENDENCIES_URL
+ description: Maven repository to get dependencies from
+ type: string
+ - name: ACCESS_TOKEN
+ type: string
+ - name: BUILD_ID
+ type: string
+ - name: caTrustConfigMapName
+ type: string
+ - name: ENABLE_INDY_PROXY
+ type: string
+ - name: JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE
+ type: string
+ workspaces:
+ - name: source
+ description: Workspace containing the source code
+ # - name: ssh-directory
+ # configMap:
+ # name: ssh-directory
+ tasks:
+ - name: git-clone
+ params:
+ - name: url
+ value: $(params.URL)
+ - name: revision
+ value: $(params.REVISION)
+ - name: verbose
+ value: 'true'
+ taskRef:
+ resolver: bundles
+ params:
+ - name: name
+ value: git-clone
+ - name: bundle
+ value: quay.io/konflux-ci/tekton-catalog/task-git-clone:0.1
+ - name: kind
+ value: task
+ workspaces:
+ - name: output
+ workspace: source
+ # - name: ssh-directory
+ # workspace: ssh-directory
+ - name: pre-build
+ runAfter:
+ - git-clone
+ params:
+ - name: JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE
+ value: $(params.JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE)
+ - name: IMAGE_URL
+ value: $(params.QUAY_REPO):trusted-source-$(context.pipelineRun.name)
+ - name: NAME
+ value: $(context.pipelineRun.name)
+ - name: RECIPE_IMAGE
+ value: $(params.RECIPE_IMAGE)
+ - name: BUILD_TOOL
+ value: $(params.BUILD_TOOL)
+ - name: BUILD_TOOL_VERSION
+ value: $(params.BUILD_TOOL_VERSION)
+ - name: JAVA_VERSION
+ value: $(params.JAVA_VERSION)
+ - name: BUILD_SCRIPT
+ value: $(params.BUILD_SCRIPT)
+ # TODO: Below is needed for deploy-pre-build-source step and could be skipped if its removed.
+ - name: SCM_URL
+ value: $(params.URL)
+ - name: SCM_HASH
+ value: $(params.REVISION)
+ taskRef:
+ resolver: http
+ params:
+ - name: url
+ value: https://raw.githubusercontent.com/rnc/jvm-build-service/refs/heads/NCL8774/deploy/tasks/pre-build.yaml
+ workspaces:
+ - name: source
+ workspace: source
+
+ - name: buildah-oci-ta
+ runAfter:
+ - pre-build
+ params:
+ - name: SOURCE_ARTIFACT
+ value: $(tasks.pre-build.results.PRE_BUILD_IMAGE_DIGEST)
+ - name: HERMETIC
+ value: "false"
+ - name: IMAGE # output image
+ value: $(params.QUAY_REPO):build-$(context.pipelineRun.name)
+ - name: DOCKERFILE # local path to the containerfile
+ value: .jbs/Containerfile
+ - name: caTrustConfigMapName
+ value: $(params.caTrustConfigMapName)
+ - name: ENABLE_INDY_PROXY
+ value: $(params.ENABLE_INDY_PROXY)
+ - name: BUILD_ARGS #TODO this should be baked in the OCI source image only a ACCESS_TOKEN should be passed
+ value:
+ - PROXY_URL=$(params.MVN_REPO_DEPENDENCIES_URL)
+ - BUILD_ID=$(params.BUILD_ID)
+ - ACCESS_TOKEN=$(params.ACCESS_TOKEN)
+ taskRef:
+ #resolver: bundles
+ #params:
+ # - name: name
+ # value: buildah-oci-ta
+ # - name: bundle
+ # value: quay.io/konflux-ci/tekton-catalog/task-buildah-oci-ta:0.2
+ # - name: kind
+ # value: task
+
+ resolver: http
+ params:
+ - name: url
+ value: https://raw.githubusercontent.com/rnc/jvm-build-service/refs/heads/NCL8774/deploy/tasks/buildah-oci-ta.yaml
+
+ - name: maven-deployment
+ runAfter:
+ - buildah-oci-ta
+ params:
+ - name: JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE
+ value: $(params.JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE)
+ - name: IMAGE_URL
+ value: $(tasks.buildah-oci-ta.results.IMAGE_URL)
+ - name: IMAGE_DIGEST
+ value: $(tasks.buildah-oci-ta.results.IMAGE_DIGEST)
+ - name: MVN_REPO
+ value: $(params.MVN_REPO_DEPLOY_URL)
+ - name: ACCESS_TOKEN
+ value: $(params.ACCESS_TOKEN)
+ taskRef:
+ resolver: http
+ params:
+ - name: url
+ value: https://raw.githubusercontent.com/rnc/jvm-build-service/refs/heads/NCL8774/deploy/tasks/maven-deployment.yaml
diff --git a/deploy/tasks/buildah-oci-ta.yaml b/deploy/tasks/buildah-oci-ta.yaml
new file mode 100644
index 000000000..95c014ab5
--- /dev/null
+++ b/deploy/tasks/buildah-oci-ta.yaml
@@ -0,0 +1,757 @@
+---
+apiVersion: tekton.dev/v1
+kind: Task
+metadata:
+ name: buildah-oci-ta
+ annotations:
+    tekton.dev/pipelines.minVersion: "0.12.1"
+ tekton.dev/tags: image-build, konflux
+ labels:
+ app.kubernetes.io/version: "0.2"
+ build.appstudio.redhat.com/build_type: docker
+spec:
+ description: |-
+ Buildah task builds source code into a container image and pushes the image into container registry using buildah tool.
+ In addition it generates a SBOM file, injects the SBOM file into final container image and pushes the SBOM file as separate image using cosign tool.
+ When [Java dependency rebuild](https://redhat-appstudio.github.io/docs.stonesoup.io/Documentation/main/cli/proc_enabled_java_dependencies.html) is enabled it triggers rebuilds of Java artifacts.
+ When prefetch-dependencies task was activated it is using its artifacts to run build in hermetic environment.
+ params:
+ - name: ACTIVATION_KEY
+ description: Name of secret which contains subscription activation key
+ type: string
+ default: activation-key
+ - name: ADDITIONAL_SECRET
+ description: Name of a secret which will be made available to the build
+ with 'buildah build --secret' at /run/secrets/$ADDITIONAL_SECRET
+ type: string
+ default: does-not-exist
+ - name: ADD_CAPABILITIES
+ description: Comma separated list of extra capabilities to add when
+ running 'buildah build'
+ type: string
+ default: ""
+ - name: BUILD_ARGS
+ description: Array of --build-arg values ("arg=value" strings)
+ type: array
+ default: []
+ - name: BUILD_ARGS_FILE
+ description: Path to a file with build arguments, see https://www.mankier.com/1/buildah-build#--build-arg-file
+ type: string
+ default: ""
+ - name: CACHI2_ARTIFACT
+ description: The Trusted Artifact URI pointing to the artifact with
+ the prefetched dependencies.
+ type: string
+ default: ""
+ - name: COMMIT_SHA
+ description: The image is built from this commit.
+ type: string
+ default: ""
+ - name: CONTEXT
+ description: Path to the directory to use as context.
+ type: string
+ default: .
+ - name: DOCKERFILE
+ description: Path to the Dockerfile to build.
+ type: string
+ default: ./Dockerfile
+ - name: ENTITLEMENT_SECRET
+ description: Name of secret which contains the entitlement certificates
+ type: string
+ default: etc-pki-entitlement
+ - name: HERMETIC
+ description: Determines if build will be executed without network access.
+ type: string
+ default: "false"
+ - name: IMAGE
+ description: Reference of the image buildah will produce.
+ type: string
+ - name: IMAGE_EXPIRES_AFTER
+ description: Delete image tag after specified time. Empty means to keep
+ the image tag. Time values could be something like 1h, 2d, 3w for
+ hours, days, and weeks, respectively.
+ type: string
+ default: ""
+ - name: LABELS
+ description: Additional key=value labels that should be applied to the
+ image
+ type: array
+ default: []
+ - name: PREFETCH_INPUT
+ description: In case it is not empty, the prefetched content should
+ be made available to the build.
+ type: string
+ default: ""
+ - name: SKIP_UNUSED_STAGES
+ description: Whether to skip stages in Containerfile that seem unused
+ by subsequent stages
+ type: string
+ default: "true"
+ - name: SOURCE_ARTIFACT
+ description: The Trusted Artifact URI pointing to the artifact with
+ the application source code.
+ type: string
+ - name: SQUASH
+ description: Squash all new and previous layers added as a part of this
+ build, as per --squash
+ type: string
+ default: "false"
+ - name: STORAGE_DRIVER
+ description: Storage driver to configure for buildah
+ type: string
+ default: vfs
+ - name: TARGET_STAGE
+ description: Target stage in Dockerfile to build. If not specified,
+ the Dockerfile is processed entirely to (and including) its last stage.
+ type: string
+ default: ""
+ - name: TLSVERIFY
+ description: Verify the TLS on the registry endpoint (for push/pull
+ to a non-TLS registry)
+ type: string
+ default: "true"
+ - name: YUM_REPOS_D_FETCHED
+ description: Path in source workspace where dynamically-fetched repos
+ are present
+ default: fetched.repos.d
+ - name: YUM_REPOS_D_SRC
+ description: Path in the git repository in which yum repository files
+ are stored
+ default: repos.d
+ - name: YUM_REPOS_D_TARGET
+ description: Target path on the container in which yum repository files
+ should be made available
+ default: /etc/yum.repos.d
+ - name: caTrustConfigMapKey
+ description: The name of the key in the ConfigMap that contains the
+ CA bundle data.
+ type: string
+ default: ca-bundle.crt
+ - name: caTrustConfigMapName
+ description: The name of the ConfigMap to read CA bundle data from.
+ type: string
+ default: trusted-ca
+ - name: ENABLE_INDY_PROXY
+ type: string
+ description: Enable the indy generic proxy (true/false)
+ default: "false"
+ results:
+ - name: IMAGE_DIGEST
+ description: Digest of the image just built
+ - name: IMAGE_REF
+ description: Image reference of the built image
+ - name: IMAGE_URL
+ description: Image repository and tag where the built image was pushed
+ - name: JAVA_COMMUNITY_DEPENDENCIES
+ description: The Java dependencies that came from community sources
+ such as Maven central.
+ - name: SBOM_BLOB_URL
+ description: Reference of SBOM blob digest to enable digest-based verification
+ from provenance
+ type: string
+ - name: SBOM_JAVA_COMPONENTS_COUNT
+ description: The counting of Java components by publisher in JSON format
+ type: string
+ volumes:
+ - name: activation-key
+ secret:
+ optional: true
+ secretName: $(params.ACTIVATION_KEY)
+ - name: additional-secret
+ secret:
+ optional: true
+ secretName: $(params.ADDITIONAL_SECRET)
+ - name: etc-pki-entitlement
+ secret:
+ optional: true
+ secretName: $(params.ENTITLEMENT_SECRET)
+ - name: shared
+ emptyDir: {}
+ - name: indy-generic-proxy-stage-secrets
+ secret:
+ optional: true
+ secretName: indy-generic-proxy-secrets
+ - name: indy-generic-proxy-stage-config
+ configMap:
+ items:
+ - key: application.yaml
+ path: application.yaml
+ name: indy-generic-proxy-stage-config
+ optional: true
+ - name: trusted-ca
+ configMap:
+ items:
+ - key: $(params.caTrustConfigMapKey)
+ path: ca-bundle.crt
+ name: $(params.caTrustConfigMapName)
+ optional: true
+ - name: varlibcontainers
+ emptyDir: {}
+ - name: workdir
+ emptyDir: {}
+ stepTemplate:
+ env:
+ - name: ACTIVATION_KEY
+ value: $(params.ACTIVATION_KEY)
+ - name: ADDITIONAL_SECRET
+ value: $(params.ADDITIONAL_SECRET)
+ - name: ADD_CAPABILITIES
+ value: $(params.ADD_CAPABILITIES)
+ - name: BUILDAH_FORMAT
+ value: oci
+ - name: BUILD_ARGS_FILE
+ value: $(params.BUILD_ARGS_FILE)
+ - name: CONTEXT
+ value: $(params.CONTEXT)
+ - name: ENTITLEMENT_SECRET
+ value: $(params.ENTITLEMENT_SECRET)
+ - name: HERMETIC
+ value: $(params.HERMETIC)
+ - name: IMAGE
+ value: $(params.IMAGE)
+ - name: IMAGE_EXPIRES_AFTER
+ value: $(params.IMAGE_EXPIRES_AFTER)
+ - name: SKIP_UNUSED_STAGES
+ value: $(params.SKIP_UNUSED_STAGES)
+ - name: SOURCE_CODE_DIR
+ value: source
+ - name: SQUASH
+ value: $(params.SQUASH)
+ - name: STORAGE_DRIVER
+ value: $(params.STORAGE_DRIVER)
+ - name: TARGET_STAGE
+ value: $(params.TARGET_STAGE)
+ - name: TLSVERIFY
+ value: $(params.TLSVERIFY)
+ - name: YUM_REPOS_D_FETCHED
+ value: $(params.YUM_REPOS_D_FETCHED)
+ - name: YUM_REPOS_D_SRC
+ value: $(params.YUM_REPOS_D_SRC)
+ - name: YUM_REPOS_D_TARGET
+ value: $(params.YUM_REPOS_D_TARGET)
+ volumeMounts:
+ - mountPath: /shared
+ name: shared
+ - mountPath: /var/workdir
+ name: workdir
+ steps:
+ - name: use-trusted-artifact
+ image: quay.io/redhat-appstudio/build-trusted-artifacts:latest@sha256:e0e457b6af10e44ff6b90208a9e69adc863a865e1c062c4cb84bf3846037d74d
+ args:
+ - use
+ - $(params.SOURCE_ARTIFACT)=/var/workdir/source
+ - $(params.CACHI2_ARTIFACT)=/var/workdir/cachi2
+ - name: build
+ image: quay.io/konflux-ci/buildah-task:latest@sha256:b2d6c32d1e05e91920cd4475b2761d58bb7ee11ad5dff3ecb59831c7572b4d0c
+ args:
+ - --build-args
+ - $(params.BUILD_ARGS[*])
+ - --labels
+ - $(params.LABELS[*])
+ workingDir: /var/workdir
+ volumeMounts:
+ - mountPath: /var/lib/containers
+ name: varlibcontainers
+ - mountPath: /entitlement
+ name: etc-pki-entitlement
+ - mountPath: /activation-key
+ name: activation-key
+ - mountPath: /additional-secret
+ name: additional-secret
+ - mountPath: /mnt/trusted-ca
+ name: trusted-ca
+ readOnly: true
+ env:
+ - name: COMMIT_SHA
+ value: $(params.COMMIT_SHA)
+ - name: DOCKERFILE
+ value: $(params.DOCKERFILE)
+ script: |
+ #!/bin/bash
+ set -e
+ ca_bundle=/mnt/trusted-ca/ca-bundle.crt
+ if [ -f "$ca_bundle" ]; then
+ echo "INFO: Using mounted CA bundle: $ca_bundle"
+ cp -vf $ca_bundle /etc/pki/ca-trust/source/anchors
+ update-ca-trust
+ fi
+
+ if [ -e "$SOURCE_CODE_DIR/$CONTEXT/$DOCKERFILE" ]; then
+ dockerfile_path="$(pwd)/$SOURCE_CODE_DIR/$CONTEXT/$DOCKERFILE"
+ elif [ -e "$SOURCE_CODE_DIR/$DOCKERFILE" ]; then
+ dockerfile_path="$(pwd)/$SOURCE_CODE_DIR/$DOCKERFILE"
+ elif [ -e "$DOCKERFILE" ]; then
+ # Custom Dockerfile location is mainly used for instrumented builds for SAST scanning and analyzing.
+ # Instrumented builds use this step as their base and also need to provide modified Dockerfile.
+ dockerfile_path="$DOCKERFILE"
+ elif echo "$DOCKERFILE" | grep -q "^https\?://"; then
+ echo "Fetch Dockerfile from $DOCKERFILE"
+ dockerfile_path=$(mktemp --suffix=-Dockerfile)
+ http_code=$(curl -s -S -L -w "%{http_code}" --output "$dockerfile_path" "$DOCKERFILE")
+        if [ "$http_code" != 200 ]; then
+ echo "No Dockerfile is fetched. Server responds $http_code"
+ exit 1
+ fi
+ http_code=$(curl -s -S -L -w "%{http_code}" --output "$dockerfile_path.dockerignore.tmp" "$DOCKERFILE.dockerignore")
+ if [ $http_code = 200 ]; then
+ echo "Fetched .dockerignore from $DOCKERFILE.dockerignore"
+ mv "$dockerfile_path.dockerignore.tmp" $SOURCE_CODE_DIR/$CONTEXT/.dockerignore
+ fi
+ else
+ echo "Cannot find Dockerfile $DOCKERFILE"
+ exit 1
+ fi
+
+ dockerfile_copy=$(mktemp --tmpdir "$(basename "$dockerfile_path").XXXXXX")
+ cp "$dockerfile_path" "$dockerfile_copy"
+
+ if [ -n "$JVM_BUILD_WORKSPACE_ARTIFACT_CACHE_PORT_80_TCP_ADDR" ] && grep -q '^\s*RUN \(./\)\?mvn' "$dockerfile_copy"; then
+        sed -i -e "s|^\s*RUN \(\(./\)\?mvn\)\(.*\)|RUN echo \"mirror.default=http://$JVM_BUILD_WORKSPACE_ARTIFACT_CACHE_PORT_80_TCP_ADDR/v1/cache/default/0/*\" > /tmp/settings.yaml; \1 -s /tmp/settings.yaml \3|g" "$dockerfile_copy"
+ touch /var/lib/containers/java
+ fi
+
+ # Fixing group permission on /var/lib/containers
+ chown root:root /var/lib/containers
+
+ sed -i 's/^\s*short-name-mode\s*=\s*.*/short-name-mode = "disabled"/' /etc/containers/registries.conf
+
+ # Setting new namespace to run buildah - 2^32-2
+ echo 'root:1:4294967294' | tee -a /etc/subuid >>/etc/subgid
+
+ build_args=()
+ if [ -n "${BUILD_ARGS_FILE}" ]; then
+ # Parse BUILD_ARGS_FILE ourselves because dockerfile-json doesn't support it
+ echo "Parsing ARGs from $BUILD_ARGS_FILE"
+ mapfile -t build_args < <(
+ # https://www.mankier.com/1/buildah-build#--build-arg-file
+ # delete lines that start with #
+ # delete blank lines
+ sed -e '/^#/d' -e '/^\s*$/d' "${SOURCE_CODE_DIR}/${BUILD_ARGS_FILE}"
+ )
+ fi
+
+ LABELS=()
+ # Split `args` into two sets of arguments.
+ while [[ $# -gt 0 ]]; do
+ case $1 in
+ --build-args)
+ shift
+ # Note: this may result in multiple --build-arg=KEY=value flags with the same KEY being
+ # passed to buildah. In that case, the *last* occurrence takes precedence. This is why
+ # we append BUILD_ARGS after the content of the BUILD_ARGS_FILE - they take precedence.
+ while [[ $# -gt 0 && $1 != --* ]]; do
+ build_args+=("$1")
+ shift
+ done
+ ;;
+ --labels)
+ shift
+ while [[ $# -gt 0 && $1 != --* ]]; do
+ LABELS+=("--label" "$1")
+ shift
+ done
+ ;;
+ *)
+ echo "unexpected argument: $1" >&2
+ exit 2
+ ;;
+ esac
+ done
+
+ BUILD_ARG_FLAGS=()
+ for build_arg in "${build_args[@]}"; do
+ BUILD_ARG_FLAGS+=("--build-arg=$build_arg")
+ done
+
+ BASE_IMAGES=$(
+ dockerfile-json "${BUILD_ARG_FLAGS[@]}" "$dockerfile_copy" |
+ jq -r '.Stages[] | select(.From | .Stage or .Scratch | not) | .BaseName | select(test("^oci-archive:") | not)'
+ )
+
+ BUILDAH_ARGS=()
+ UNSHARE_ARGS=()
+
+ if [ "${HERMETIC}" == "true" ]; then
+ BUILDAH_ARGS+=("--pull=never")
+ UNSHARE_ARGS+=("--net")
+
+ for image in $BASE_IMAGES; do
+ unshare -Ufp --keep-caps -r --map-users 1,1,65536 --map-groups 1,1,65536 -- buildah pull $image
+ done
+ echo "Build will be executed with network isolation"
+ fi
+
+ if [ -n "${TARGET_STAGE}" ]; then
+ BUILDAH_ARGS+=("--target=${TARGET_STAGE}")
+ fi
+
+ BUILDAH_ARGS+=("${BUILD_ARG_FLAGS[@]}")
+
+ if [ -n "${ADD_CAPABILITIES}" ]; then
+ BUILDAH_ARGS+=("--cap-add=${ADD_CAPABILITIES}")
+ fi
+
+ if [ "${SQUASH}" == "true" ]; then
+ BUILDAH_ARGS+=("--squash")
+ fi
+
+ if [ "${SKIP_UNUSED_STAGES}" != "true" ]; then
+ BUILDAH_ARGS+=("--skip-unused-stages=false")
+ fi
+
+ VOLUME_MOUNTS=()
+
+ if [ -f "/var/workdir/cachi2/cachi2.env" ]; then
+ cp -r "/var/workdir/cachi2" /tmp/
+ chmod -R go+rwX /tmp/cachi2
+ VOLUME_MOUNTS+=(--volume /tmp/cachi2:/cachi2)
+ # Read in the whole file (https://unix.stackexchange.com/questions/533277), then
+ # for each RUN ... line insert the cachi2.env command *after* any options like --mount
+ sed -E -i \
+ -e 'H;1h;$!d;x' \
+ -e 's@^\s*(run((\s|\\\n)+-\S+)*(\s|\\\n)+)@\1. /cachi2/cachi2.env \&\& \\\n @igM' \
+ "$dockerfile_copy"
+ echo "Prefetched content will be made available"
+
+ prefetched_repo_for_my_arch="/tmp/cachi2/output/deps/rpm/$(uname -m)/repos.d/cachi2.repo"
+ if [ -f "$prefetched_repo_for_my_arch" ]; then
+ echo "Adding $prefetched_repo_for_my_arch to $YUM_REPOS_D_FETCHED"
+ mkdir -p "$YUM_REPOS_D_FETCHED"
+ cp --no-clobber "$prefetched_repo_for_my_arch" "$YUM_REPOS_D_FETCHED"
+ fi
+ fi
+
+ # if yum repofiles stored in git, copy them to mount point outside the source dir
+ if [ -d "${SOURCE_CODE_DIR}/${YUM_REPOS_D_SRC}" ]; then
+ mkdir -p ${YUM_REPOS_D_FETCHED}
+ cp -r ${SOURCE_CODE_DIR}/${YUM_REPOS_D_SRC}/* ${YUM_REPOS_D_FETCHED}
+ fi
+
+ # if anything in the repofiles mount point (either fetched or from git), mount it
+ if [ -d "${YUM_REPOS_D_FETCHED}" ]; then
+ chmod -R go+rwX ${YUM_REPOS_D_FETCHED}
+ mount_point=$(realpath ${YUM_REPOS_D_FETCHED})
+ VOLUME_MOUNTS+=(--volume "${mount_point}:${YUM_REPOS_D_TARGET}")
+ fi
+
+ DEFAULT_LABELS=(
+ "--label" "build-date=$(date -u +'%Y-%m-%dT%H:%M:%S')"
+ "--label" "architecture=$(uname -m)"
+ "--label" "vcs-type=git"
+ )
+ [ -n "$COMMIT_SHA" ] && DEFAULT_LABELS+=("--label" "vcs-ref=$COMMIT_SHA")
+ [ -n "$IMAGE_EXPIRES_AFTER" ] && DEFAULT_LABELS+=("--label" "quay.expires-after=$IMAGE_EXPIRES_AFTER")
+
+ # Concatenate defaults and explicit labels. If a label appears twice, the last one wins.
+ LABELS=("${DEFAULT_LABELS[@]}" "${LABELS[@]}")
+
+ ACTIVATION_KEY_PATH="/activation-key"
+ ENTITLEMENT_PATH="/entitlement"
+
+ # 1. do not enable activation key and entitlement at same time. If both vars are provided, prefer activation key.
+ # 2. Activation-keys will be used when the key 'org' exists in the activation key secret.
+      # 3. try to pre-register and mount files to the correct location so that users do not need to modify Dockerfiles.
+      # 4. If the Dockerfile contains the string "subscription-manager register", add the activation-keys volume
+ # to buildah but don't pre-register for backwards compatibility. In this case mount an empty directory on
+ # shared emptydir volume to "/etc/pki/entitlement" to prevent certificates from being included in the produced
+ # container.
+
+ REGISTERED="false"
+ if [ -e /activation-key/org ]; then
+ cp -r --preserve=mode "$ACTIVATION_KEY_PATH" /tmp/activation-key
+ mkdir /shared/rhsm-tmp
+ VOLUME_MOUNTS+=(--volume /tmp/activation-key:/activation-key -v /shared/rhsm-tmp:/etc/pki/entitlement:Z)
+ echo "Adding activation key to the build"
+
+ if ! grep subscription-manager "$dockerfile_path" | grep -q register; then
+ # user is not running registration in the Containerfile: pre-register.
+ echo "Pre-registering with subscription manager."
+ subscription-manager register --org "$(cat /tmp/activation-key/org)" --activationkey "$(cat /tmp/activation-key/activationkey)"
+ REGISTERED=$?
+ # copy generated certificates to /shared/rhsm-tmp
+ cp /etc/pki/entitlement/*.pem /shared/rhsm-tmp
+
+ # and then mount get /etc/rhsm/ca/redhat-uep.pem into /run/secrets/rhsm/ca
+ VOLUME_MOUNTS+=(--volume /etc/rhsm/ca/redhat-uep.pem:/run/secrets/rhsm/ca/redhat-uep.pem)
+ fi
+
+ # was: if [ -d "$ACTIVATION_KEY_PATH" ]; then
+      elif [ -n "$(find /entitlement -name '*.pem' 2>/dev/null)" ]; then
+ cp -r --preserve=mode "$ENTITLEMENT_PATH" /tmp/entitlement
+ VOLUME_MOUNTS+=(--volume /tmp/entitlement:/etc/pki/entitlement)
+ echo "Adding the entitlement to the build"
+ fi
+
+ if [ -n "$ADDITIONAL_VOLUME_MOUNTS" ]; then
+ # ADDITIONAL_VOLUME_MOUNTS allows to specify more volumes for the build.
+ # This is primarily used in instrumented builds for SAST scanning and analyzing.
+ # Instrumented builds use this step as their base and add some other tools.
+ while read -r volume_mount; do
+ VOLUME_MOUNTS+=("--volume=$volume_mount")
+ done <<<"$ADDITIONAL_VOLUME_MOUNTS"
+ fi
+
+ ADDITIONAL_SECRET_PATH="/additional-secret"
+ ADDITIONAL_SECRET_TMP="/tmp/additional-secret"
+ if [ -d "$ADDITIONAL_SECRET_PATH" ]; then
+ cp -r --preserve=mode -L "$ADDITIONAL_SECRET_PATH" $ADDITIONAL_SECRET_TMP
+ while read -r filename; do
+          echo "Adding the secret ${ADDITIONAL_SECRET}/${filename} to the build, available at /run/secrets/${ADDITIONAL_SECRET}/${filename}"
+          BUILDAH_ARGS+=("--secret=id=${ADDITIONAL_SECRET}/${filename},src=$ADDITIONAL_SECRET_TMP/${filename}")
+ done < <(find $ADDITIONAL_SECRET_TMP -maxdepth 1 -type f -exec basename {} \;)
+ fi
+
+ if [ -f "$ca_bundle" ]; then
+ cp -r --preserve=mode /mnt/trusted-ca /tmp/trusted-ca
+ VOLUME_MOUNTS+=(--volume /tmp/trusted-ca:/etc/pki/ca-trust/source/anchors)
+ echo "Adding the trusted-ca to the build"
+ fi
+
+ # Prevent ShellCheck from giving a warning because 'image' is defined and 'IMAGE' is not.
+ declare IMAGE
+
+ buildah_cmd_array=(
+ buildah build
+ "${VOLUME_MOUNTS[@]}"
+ "${BUILDAH_ARGS[@]}"
+ "${LABELS[@]}"
+ --tls-verify="$TLSVERIFY" --no-cache
+ --ulimit nofile=4096:4096
+ -f "$dockerfile_copy" -t "$IMAGE" .
+ )
+ buildah_cmd=$(printf "%q " "${buildah_cmd_array[@]}")
+
+ if [ "${HERMETIC}" == "true" ]; then
+ # enabling loopback adapter enables Bazel builds to work in hermetic mode.
+ command="ip link set lo up && $buildah_cmd"
+ else
+ command="$buildah_cmd"
+ fi
+
+ unshare -Uf "${UNSHARE_ARGS[@]}" --keep-caps -r --map-users 1,1,65536 --map-groups 1,1,65536 -w "${SOURCE_CODE_DIR}/$CONTEXT" -- sh -c "$command"
+
+ container=$(buildah from --pull-never "$IMAGE")
+ buildah mount $container | tee /shared/container_path
+ # delete symlinks - they may point outside the container rootfs, messing with SBOM scanners
+ find $(cat /shared/container_path) -xtype l -delete
+ echo $container >/shared/container_name
+
+ # Save the SBOM produced by Cachi2 so it can be merged into the final SBOM later
+ if [ -f "/tmp/cachi2/output/bom.json" ]; then
+ cp /tmp/cachi2/output/bom.json ./sbom-cachi2.json
+ fi
+
+ touch /shared/base_images_digests
+ for image in $BASE_IMAGES; do
+ buildah images --format '{{ .Name }}:{{ .Tag }}@{{ .Digest }}' --filter reference="$image" >>/shared/base_images_digests
+ done
+
+ # Needed to generate base images SBOM
+ echo "$BASE_IMAGES" >/shared/base_images_from_dockerfile
+
+ # unregister pod from subscription manager
+ if [ "$REGISTERED" == "0" ]; then
+ subscription-manager unregister
+ fi
+ computeResources:
+ limits:
+ cpu: "4"
+ memory: 8Gi
+ requests:
+ cpu: "1"
+ memory: 2Gi
+ securityContext:
+ capabilities:
+ add:
+ - SETFCAP
+ - name: sbom-syft-generate
+ image: registry.access.redhat.com/rh-syft-tech-preview/syft-rhel9:1.4.1@sha256:34d7065427085a31dc4949bd283c001b91794d427e1e4cdf1b21ea4faf9fee3f
+ workingDir: /var/workdir/source
+ volumeMounts:
+ - mountPath: /var/lib/containers
+ name: varlibcontainers
+ - mountPath: /shared
+ name: shared
+ script: |
+ echo "Running syft on the source directory"
+ syft dir:"/var/workdir/$SOURCE_CODE_DIR/$CONTEXT" --output cyclonedx-json="/var/workdir/sbom-source.json"
+ echo "Running syft on the image filesystem"
+ syft dir:"$(cat /shared/container_path)" --output cyclonedx-json="/var/workdir/sbom-image.json"
+ computeResources:
+ limits:
+ cpu: "2"
+ memory: 4Gi
+ requests:
+ cpu: 500m
+ memory: 1Gi
+ - name: analyse-dependencies-java-sbom
+ image: quay.io/redhat-appstudio/hacbs-jvm-build-request-processor:127ee0c223a2b56a9bd20a6f2eaeed3bd6015f77
+ volumeMounts:
+ - mountPath: /var/lib/containers
+ name: varlibcontainers
+ - mountPath: /shared
+ name: shared
+ script: |
+ if [ -f /var/lib/containers/java ]; then
+ /opt/jboss/container/java/run/run-java.sh analyse-dependencies path $(cat /shared/container_path) -s /var/workdir/sbom-image.json --task-run-name $(context.taskRun.name) --publishers $(results.SBOM_JAVA_COMPONENTS_COUNT.path)
+ sed -i 's/^/ /' $(results.SBOM_JAVA_COMPONENTS_COUNT.path) # Workaround for SRVKP-2875
+ else
+ touch $(results.JAVA_COMMUNITY_DEPENDENCIES.path)
+ fi
+ computeResources:
+ limits:
+ cpu: 200m
+ memory: 512Mi
+ requests:
+ cpu: 100m
+ memory: 256Mi
+ securityContext:
+ runAsUser: 0
+ - name: prepare-sboms
+ image: quay.io/redhat-appstudio/sbom-utility-scripts-image@sha256:53a3041dff341b7fd1765b9cc2c324625d19e804b2eaff10a6e6d9dcdbde3a91
+ workingDir: /var/workdir
+ script: |
+ echo "Merging contents of sbom-source.json and sbom-image.json into sbom-cyclonedx.json"
+ python3 /scripts/merge_syft_sboms.py
+
+ if [ -f "sbom-cachi2.json" ]; then
+ echo "Merging contents of sbom-cachi2.json into sbom-cyclonedx.json"
+ python3 /scripts/merge_cachi2_sboms.py sbom-cachi2.json sbom-cyclonedx.json >sbom-temp.json
+ mv sbom-temp.json sbom-cyclonedx.json
+ fi
+
+ echo "Creating sbom-purl.json"
+ python3 /scripts/create_purl_sbom.py
+
+ echo "Adding base images data to sbom-cyclonedx.json"
+ python3 /scripts/base_images_sbom_script.py \
+ --sbom=sbom-cyclonedx.json \
+ --base-images-from-dockerfile=/shared/base_images_from_dockerfile \
+ --base-images-digests=/shared/base_images_digests
+ computeResources:
+ limits:
+ cpu: 200m
+ memory: 512Mi
+ requests:
+ cpu: 100m
+ memory: 256Mi
+ securityContext:
+ runAsUser: 0
+ - name: inject-sbom-and-push
+ image: quay.io/konflux-ci/buildah-task:latest@sha256:b2d6c32d1e05e91920cd4475b2761d58bb7ee11ad5dff3ecb59831c7572b4d0c
+ workingDir: /var/workdir
+ volumeMounts:
+ - mountPath: /var/lib/containers
+ name: varlibcontainers
+ - mountPath: /mnt/trusted-ca
+ name: trusted-ca
+ readOnly: true
+ script: |
+ #!/bin/bash
+ set -e
+
+ ca_bundle=/mnt/trusted-ca/ca-bundle.crt
+ if [ -f "$ca_bundle" ]; then
+ echo "INFO: Using mounted CA bundle: $ca_bundle"
+ cp -vf $ca_bundle /etc/pki/ca-trust/source/anchors
+ update-ca-trust
+ fi
+
+ base_image_name=$(buildah inspect --format '{{ index .ImageAnnotations "org.opencontainers.image.base.name"}}' $IMAGE | cut -f1 -d'@')
+ base_image_digest=$(buildah inspect --format '{{ index .ImageAnnotations "org.opencontainers.image.base.digest"}}' $IMAGE)
+ container=$(buildah from --pull-never $IMAGE)
+ buildah copy $container sbom-cyclonedx.json sbom-purl.json /root/buildinfo/content_manifests/
+ buildah config -a org.opencontainers.image.base.name=${base_image_name} -a org.opencontainers.image.base.digest=${base_image_digest} $container
+
+ BUILDAH_ARGS=()
+ if [ "${SQUASH}" == "true" ]; then
+ BUILDAH_ARGS+=("--squash")
+ fi
+
+ buildah commit "${BUILDAH_ARGS[@]}" $container $IMAGE
+
+ status=-1
+ max_run=5
+ sleep_sec=10
+ for run in $(seq 1 $max_run); do
+ status=0
+ [ "$run" -gt 1 ] && sleep $sleep_sec
+ echo "Pushing sbom image to registry"
+ buildah push \
+ --tls-verify=$TLSVERIFY \
+ --digestfile /var/workdir/image-digest $IMAGE \
+ docker://$IMAGE && break || status=$?
+ done
+ if [ "$status" -ne 0 ]; then
+ echo "Failed to push sbom image to registry after ${max_run} tries"
+ exit 1
+ fi
+
+ cat "/var/workdir"/image-digest | tee $(results.IMAGE_DIGEST.path)
+ echo -n "$IMAGE" | tee $(results.IMAGE_URL.path)
+ {
+ echo -n "${IMAGE}@"
+ cat "/var/workdir/image-digest"
+ } >"$(results.IMAGE_REF.path)"
+
+ # Remove tag from IMAGE while allowing registry to contain a port number.
+ sbom_repo="${IMAGE%:*}"
+ sbom_digest="$(sha256sum sbom-cyclonedx.json | cut -d' ' -f1)"
+ # The SBOM_BLOB_URL is created by `cosign attach sbom`.
+ echo -n "${sbom_repo}@sha256:${sbom_digest}" | tee "$(results.SBOM_BLOB_URL.path)"
+ computeResources:
+ limits:
+ cpu: "4"
+ memory: 4Gi
+ requests:
+ cpu: "1"
+ memory: 1Gi
+ securityContext:
+ capabilities:
+ add:
+ - SETFCAP
+ runAsUser: 0
+ - name: upload-sbom
+ image: quay.io/konflux-ci/appstudio-utils:ab6b0b8e40e440158e7288c73aff1cf83a2cc8a9@sha256:24179f0efd06c65d16868c2d7eb82573cce8e43533de6cea14fec3b7446e0b14
+ workingDir: /var/workdir
+ volumeMounts:
+ - mountPath: /mnt/trusted-ca
+ name: trusted-ca
+ readOnly: true
+ script: |
+ ca_bundle=/mnt/trusted-ca/ca-bundle.crt
+ if [ -f "$ca_bundle" ]; then
+ echo "INFO: Using mounted CA bundle: $ca_bundle"
+ cp -vf $ca_bundle /etc/pki/ca-trust/source/anchors
+ update-ca-trust
+ fi
+
+ cosign attach sbom --sbom sbom-cyclonedx.json --type cyclonedx "$(cat "$(results.IMAGE_REF.path)")"
+ computeResources:
+ limits:
+ cpu: 200m
+ memory: 512Mi
+ requests:
+ cpu: 100m
+ memory: 256Mi
+ sidecars:
+ - name: indy-generic-proxy
+ image: quay.io/factory2/indy-generic-proxy-service:latest-stage-mpplus
+ volumeMounts:
+ - name: indy-generic-proxy-stage-secrets
+ readOnly: true
+ mountPath: /mnt/secrets-generic-proxy
+ - name: indy-generic-proxy-stage-config
+ readOnly: true
+ mountPath: /deployment/config
+ computeResources:
+ limits:
+ cpu: 1
+ memory: 2Gi
+ requests:
+ cpu: 200m
+ memory: 512Mi
+ script: |
+ if [ "$(params.ENABLE_INDY_PROXY)" == "true" ]; then
+ /usr/local/bin/dumb-init /deployment/start-service.sh
+ fi
diff --git a/deploy/tasks/maven-deployment.yaml b/deploy/tasks/maven-deployment.yaml
index bd5311a1b..b25d2a92f 100644
--- a/deploy/tasks/maven-deployment.yaml
+++ b/deploy/tasks/maven-deployment.yaml
@@ -29,6 +29,8 @@ spec:
- name: MVN_PASSWORD
description: Name of the secret holding the Maven repository password
type: string
+ # https://github.com/tektoncd/pipeline/issues/8249
+ default: ""
- name: MVN_SERVER_ID
description: Server identification in Maven settings.
type: string
diff --git a/deploy/tasks/pre-build.yaml b/deploy/tasks/pre-build.yaml
index afeacf62e..717b2da8f 100644
--- a/deploy/tasks/pre-build.yaml
+++ b/deploy/tasks/pre-build.yaml
@@ -57,7 +57,8 @@ spec:
- name: JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE
description: Name of the processor image. Useful to override for development.
type: string
- default: "quay.io/redhat-user-workloads/konflux-jbs-pnc-tenant/jvm-build-service/build-request-processor:latest"
+ default: "quay.io/ncross/hacbs-jvm-build-request-processor:latest"
+ # default: "quay.io/redhat-user-workloads/konflux-jbs-pnc-tenant/jvm-build-service/build-request-processor:latest"
results:
- name: PRE_BUILD_IMAGE_DIGEST
description: Digest of the image just built
diff --git a/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/AbstractPreprocessor.java b/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/AbstractPreprocessor.java
index 79548932c..9a3076f9a 100644
--- a/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/AbstractPreprocessor.java
+++ b/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/AbstractPreprocessor.java
@@ -72,7 +72,6 @@ public void run() {
}
Log.warnf("### Using tool %s with version %s and javaHome %s", type, buildToolVersion, javaVersion);
- Log.warnf("### ENV %s", System.getenv("jvm-build-service"));
String javaHome;
if (javaVersion.equals("7") || javaVersion.equals("8")) {
@@ -146,14 +145,27 @@ public void run() {
fi
echo "PATH:$PATH"
+ update-ca-trust
+
+ # Go through certificates and insert them into the cacerts
+ for cert in $(find /etc/pki/ca-trust/source/anchors -type f); do
+ echo "Inserting $cert into java cacerts"
+ keytool -import -alias $(basename $cert)-ca \\
+ -file $cert \\
+ -keystore /etc/pki/java/cacerts \\
+ -storepass changeit --noprompt
+ done
+
# End of generic build script
+
+ echo "Building the project ..."
""";
if (isNotEmpty(buildScript)) {
// Now add in the build script from either JBS or PNC. This might contain e.g. "mvn -Pfoo install"
runBuild += buildScript;
}
- Log.warnf("### runBuild is\n%s", runBuild);
+ Log.debugf("### runBuild is\n%s", runBuild);
try {
Path runBuildSh = Paths.get(jbsDirectory.toString(), "run-build.sh");
diff --git a/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/maven/MavenPrepareCommand.java b/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/maven/MavenPrepareCommand.java
index 3ef199ef1..64db8adef 100644
--- a/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/maven/MavenPrepareCommand.java
+++ b/java-components/build-request-processor/src/main/java/com/redhat/hacbs/container/build/preprocessor/maven/MavenPrepareCommand.java
@@ -1,5 +1,7 @@
package com.redhat.hacbs.container.build.preprocessor.maven;
+import static org.apache.commons.lang3.StringUtils.isNotEmpty;
+
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.FileVisitResult;
@@ -128,13 +130,14 @@ private boolean handlePlugins(List plugins, boolean topLevel)
if (disabledPlugins != null) {
for (String s : disabledPlugins) {
- String[] ga = s.split(":");
+ if (isNotEmpty(s)) {
+ String[] ga = s.split(":");
- if (ga.length != 2) {
- throw new IOException("Error parsing groupId/artifactId: " + s);
+ if (ga.length != 2) {
+ throw new IOException("Error parsing groupId/artifactId: " + s);
+ }
+ toRemove.add(new PluginInfo(ga[0], ga[1]));
}
-
- toRemove.add(new PluginInfo(ga[0], ga[1]));
}
}
diff --git a/java-components/cli/pom.xml b/java-components/cli/pom.xml
index 7d7d7ff6f..cdaf8954a 100644
--- a/java-components/cli/pom.xml
+++ b/java-components/cli/pom.xml
@@ -24,6 +24,12 @@
io.github.redhat-appstudio.jvmbuild
hacbs-resource-model
+
+
+ io.github.redhat-appstudio.jvmbuild
+ driver
+
+
info.picocli
picocli-shell-jline2
@@ -102,10 +108,12 @@
com.github.stefanbirkner
system-lambda
+ test
org.wiremock
wiremock
+ test
diff --git a/java-components/cli/src/main/docker/Dockerfile.all-in-one b/java-components/cli/src/main/docker/Dockerfile.all-in-one
index 0c2422c99..e1f30403c 100644
--- a/java-components/cli/src/main/docker/Dockerfile.all-in-one
+++ b/java-components/cli/src/main/docker/Dockerfile.all-in-one
@@ -1,9 +1,18 @@
FROM registry.access.redhat.com/ubi8/openjdk-21@sha256:5ab904e6262629fca79d6f65fa859bfa376405522d2f660bdbfaaae2742586f1 AS builder
+USER root
+RUN microdnf --setopt=install_weak_deps=0 --setopt=tsflags=nodocs install -y git
+
+USER 185
WORKDIR /work
COPY ./ .
-RUN mvn -V -B package -pl cli -am -DskipTests
+# Quarkus GitInfo causes injection failure if there isn't a git commit to use.
+RUN git init && \
+ git config user.email "HACBS@redhat.com" && \
+ git config user.name "HACBS" && \
+ git add cli && git commit -m "CLI" -a && \
+ mvn -V -B package -pl cli -am -DskipTests
FROM registry.access.redhat.com/ubi8/openjdk-21-runtime@sha256:0a8cf41082f11f5bc56bd9438851e54593e17051df49592e953fb59376c7d539
WORKDIR /work/
diff --git a/java-components/cli/src/main/java/com/redhat/hacbs/cli/MainCommand.java b/java-components/cli/src/main/java/com/redhat/hacbs/cli/MainCommand.java
index 2ce8ca361..a500361ec 100644
--- a/java-components/cli/src/main/java/com/redhat/hacbs/cli/MainCommand.java
+++ b/java-components/cli/src/main/java/com/redhat/hacbs/cli/MainCommand.java
@@ -6,6 +6,7 @@
import com.redhat.hacbs.cli.artifacts.ArtifactCommand;
import com.redhat.hacbs.cli.builds.BuildCommand;
+import com.redhat.hacbs.cli.driver.DriverCommand;
import com.redhat.hacbs.cli.rebuilt.RebuiltCommand;
import com.redhat.hacbs.cli.settings.SetupCommand;
@@ -20,7 +21,8 @@
ArtifactCommand.class,
RebuiltCommand.class,
SetupCommand.class,
- DiagnosticCommand.class
+ DiagnosticCommand.class,
+ DriverCommand.class
})
@Vetoed
public class MainCommand {
diff --git a/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/Base.java b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/Base.java
new file mode 100644
index 000000000..1bb6412df
--- /dev/null
+++ b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/Base.java
@@ -0,0 +1,43 @@
+package com.redhat.hacbs.cli.driver;
+
+import java.util.Optional;
+
+import org.eclipse.microprofile.config.inject.ConfigProperty;
+
+import picocli.CommandLine;
+
+public abstract class Base {
+ @CommandLine.Option(names = "-n", description = "Namespace", defaultValue = "pnc-devel-tenant")
+ String namespace;
+
+ @CommandLine.Option(names = "-u", description = "URL", required = true)
+ String url;
+
+ @CommandLine.Option(names = "-r", description = "Revision", required = true)
+ String revision;
+
+ @CommandLine.Option(names = "-t", description = "Build Tool", required = true)
+ String buildTool;
+
+ @CommandLine.Option(names = "--tool-version", description = "Tool Version", required = true)
+ String buildToolVersion;
+
+ @CommandLine.Option(names = "-j", description = "Java Version", required = true)
+ String javaVersion;
+
+ @CommandLine.Option(names = "-s", description = "Build Script", required = true)
+ String buildScript;
+
+ @CommandLine.Option(names = "--deploy", description = "Deploy URL", required = true)
+ String deploy;
+
+ @CommandLine.Option(names = "--dependencies", description = "Dependencies URL", required = true)
+ String dependencies;
+
+ @CommandLine.Option(names = "-i", description = "Recipe Image", defaultValue = "quay.io/redhat-user-workloads/konflux-jbs-pnc-tenant/jvm-build-service-builder-images/ubi8:latest")
+ String recipeImage;
+
+ @ConfigProperty(name = "access.token")
+ Optional accessToken;
+
+}
diff --git a/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/CancelPipeline.java b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/CancelPipeline.java
new file mode 100644
index 000000000..c1f2a5b24
--- /dev/null
+++ b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/CancelPipeline.java
@@ -0,0 +1,38 @@
+package com.redhat.hacbs.cli.driver;
+
+import jakarta.enterprise.context.control.ActivateRequestContext;
+import jakarta.inject.Inject;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.redhat.hacbs.driver.Driver;
+import com.redhat.hacbs.driver.dto.CancelRequest;
+
+import picocli.CommandLine;
+
+@CommandLine.Command(name = "cancel-pipeline", mixinStandardHelpOptions = true, description = "Cancels a pipeline")
+public class CancelPipeline implements Runnable {
+
+ private static final Logger logger = LoggerFactory.getLogger(CancelPipeline.class);
+
+ @Inject
+ Driver driver;
+
+ @CommandLine.Option(names = "-n", description = "Namespace", defaultValue = "pnc-devel-tenant")
+ String namespace;
+
+ @CommandLine.Option(names = "-p", description = "Pipeline name")
+ String pipeline;
+
+ @ActivateRequestContext // https://github.com/quarkusio/quarkus/issues/8758
+ @Override
+ public void run() {
+ var cancel = CancelRequest.builder()
+ .namespace(namespace)
+ .pipelineId(pipeline)
+ .build();
+
+ driver.cancel(cancel);
+ }
+}
diff --git a/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/CreatePipeline.java b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/CreatePipeline.java
new file mode 100644
index 000000000..919721f85
--- /dev/null
+++ b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/CreatePipeline.java
@@ -0,0 +1,55 @@
+package com.redhat.hacbs.cli.driver;
+
+import jakarta.enterprise.context.control.ActivateRequestContext;
+import jakarta.inject.Inject;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.redhat.hacbs.driver.Driver;
+import com.redhat.hacbs.driver.dto.BuildRequest;
+import com.redhat.hacbs.driver.dto.BuildResponse;
+
+import picocli.CommandLine;
+
+@CommandLine.Command(name = "create-pipeline", mixinStandardHelpOptions = true, description = "Creates a pipeline")
+public class CreatePipeline extends Base implements Runnable {
+
+ private static final Logger logger = LoggerFactory.getLogger(CreatePipeline.class);
+
+ @Inject
+ Driver driver;
+
+ @CommandLine.Option(names = "--quay", description = "Quay repo", defaultValue = "quay.io/redhat-user-workloads-stage/pnc-devel-tenant/pnc-konflux")
+ String quayRepo;
+
+ @CommandLine.Option(names = "--processor", description = "Request Process Image", defaultValue = "quay.io/redhat-user-workloads/konflux-jbs-pnc-tenant/jvm-build-service/build-request-processor:latest")
+ String processor;
+
+ @ActivateRequestContext // https://github.com/quarkusio/quarkus/issues/8758
+ @Override
+ public void run() {
+ driver.setQuayRepo(quayRepo);
+ driver.setProcessor(processor);
+ driver.setAccessToken(accessToken.orElse(""));
+
+ BuildRequest request = BuildRequest.builder()
+ .namespace(namespace)
+ .scmUrl(url)
+ .scmRevision(revision)
+ .buildTool(buildTool)
+ .buildToolVersion(buildToolVersion)
+ .javaVersion(javaVersion)
+ .buildScript(buildScript)
+ .repositoryDeployUrl(deploy)
+ .repositoryDependencyUrl(dependencies)
+ .repositoryBuildContentId("test-maven-konflux-int-0001")
+ .recipeImage(recipeImage)
+ // Just use default from buildah-oci-ta for now.
+ .podMemoryOverride("4Gi")
+ .build();
+ BuildResponse b = driver.create(request);
+
+ logger.info("Got response {}", b);
+ }
+}
diff --git a/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/DriverCommand.java b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/DriverCommand.java
new file mode 100644
index 000000000..0aad27088
--- /dev/null
+++ b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/DriverCommand.java
@@ -0,0 +1,8 @@
+package com.redhat.hacbs.cli.driver;
+
+import picocli.CommandLine;
+
+@CommandLine.Command(name = "driver", subcommands = {
+ Fabric8.class, CreatePipeline.class, CancelPipeline.class }, mixinStandardHelpOptions = true)
+public class DriverCommand {
+}
diff --git a/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/Fabric8.java b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/Fabric8.java
new file mode 100644
index 000000000..a37c73387
--- /dev/null
+++ b/java-components/cli/src/main/java/com/redhat/hacbs/cli/driver/Fabric8.java
@@ -0,0 +1,142 @@
+package com.redhat.hacbs.cli.driver;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import io.fabric8.kubernetes.api.model.Quantity;
+import io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder;
+import io.fabric8.kubernetes.client.KubernetesClient;
+import io.fabric8.tekton.client.TektonClient;
+import io.fabric8.tekton.pipeline.v1.Param;
+import io.fabric8.tekton.pipeline.v1.ParamBuilder;
+import io.fabric8.tekton.pipeline.v1.PipelineRun;
+import io.fabric8.tekton.pipeline.v1.PipelineRunBuilder;
+import io.fabric8.tekton.pipeline.v1.PipelineTaskRunSpec;
+import io.fabric8.tekton.pipeline.v1.PipelineTaskRunSpecBuilder;
+import io.fabric8.tekton.pipeline.v1.TaskRunStepSpecBuilder;
+import io.fabric8.tekton.pipeline.v1.WorkspaceBinding;
+import io.fabric8.tekton.pipeline.v1.WorkspaceBindingBuilder;
+import io.quarkus.arc.Arc;
+import io.quarkus.arc.InstanceHandle;
+import picocli.CommandLine;
+
+/**
+ * Experiment only - see if Fabric8 can be used to create the entire pipelinerun object rather
+ * than reading a definition from yaml.
+ */
+@Deprecated
+@CommandLine.Command(name = "fabric8", mixinStandardHelpOptions = true, description = "Creates a pipeline")
+public class Fabric8 extends Base implements Runnable {
+
+ @Override
+ public void run() {
+
+ PipelineRun run;
+
+ try (InstanceHandle instanceHandle = Arc.container().instance(TektonClient.class)) {
+
+ // Experiment with creating gitlab project-ncl/konflux-integration/-/blob/main/deploy/mw-pipeline-run-v0.1.yaml
+ PipelineRunBuilder pipelineRunBuilder = new PipelineRunBuilder()
+ .withNewMetadata().withGenerateName("hacbs-pipeline-").endMetadata()
+ .withNewSpec()
+ .withNewPipelineRef().withResolver("git").withParams(getGitParams()).endPipelineRef()
+ .withWorkspaces(getWorkspace())
+ .withParams(embedParams())
+ .withTaskRunSpecs(configureTaskRunSpecs())
+ .endSpec();
+ run = pipelineRunBuilder.build();
+ }
+ try (InstanceHandle instanceHandle = Arc.container().instance(KubernetesClient.class)) {
+ PipelineRun created = instanceHandle.get().resource(run).create();
+ System.err.println("### run created : " + created);
+ // final CountDownLatch closeLatch = new CountDownLatch(1);
+ // instanceHandle.get().resource(run).watch(new Watcher<>() {
+ // @Override
+ // public void eventReceived(Action action, PipelineRun resource) {
+ // System.out.println("### event action " + action.name());
+ // switch (action.name()) {
+ // case "ADDED":
+ // System.out.println("### added " + resource.getMetadata().getName());
+ // break;
+ // case "DELETED":
+ // break;
+ // case "MODIFIED":
+ // System.out.println(
+ // "### added " + resource.getMetadata().getName() + " and status " + resource.getStatus()
+ // .getResults());
+ // break;
+ // // default:
+ // }
+ // }
+ //
+ // @Override
+ // public void onClose(WatcherException cause) {
+ // System.out.println("### close " + cause);
+ // closeLatch.countDown();
+ // }
+ // });
+ // closeLatch.await();
+ // } catch (InterruptedException e) {
+ // throw new RuntimeException(e);
+
+ // created.getStatus()
+ }
+ }
+
+ private List embedParams() {
+ List result = new ArrayList<>();
+ // The actual parameters to be customized...
+ result.add(new ParamBuilder().withName("URL").withNewValue(url).build());
+ result.add(new ParamBuilder().withName("REVISION").withNewValue(revision).build());
+ result.add(new ParamBuilder().withName("BUILD_TOOL").withNewValue(buildTool).build());
+ result.add(new ParamBuilder().withName("BUILD_TOOL_VERSION").withNewValue(buildToolVersion).build());
+ result.add(new ParamBuilder().withName("JAVA_VERSION").withNewValue(javaVersion).build());
+ result.add(new ParamBuilder().withName("BUILD_SCRIPT").withNewValue(buildScript).build());
+ if (accessToken.isPresent()) {
+ result.add(new ParamBuilder().withName("ACCESS_TOKEN").withNewValue(accessToken.get()).build());
+ } else {
+ System.err.println("Access token not set");
+ }
+ // TODO: Hard code these per now, same as in pipelinerun yaml
+ result.add(new ParamBuilder().withName("MVN_REPO_DEPLOY_URL").withNewValue(deploy)
+ .build());
+ result.add(new ParamBuilder().withName("MVN_REPO_DEPENDENCIES_URL").withNewValue(dependencies)
+ .build());
+ result.add(new ParamBuilder().withName("BUILD_ID").withNewValue("test-maven-konflux-int-0001").build());
+
+ return result;
+ }
+
+ // TODO: The memory settings in this function should be customizable for different build sizes
+ private List configureTaskRunSpecs() {
+ var stepSpec = new PipelineTaskRunSpecBuilder().withPipelineTaskName("buildah-oci-ta")
+ .withStepSpecs(new TaskRunStepSpecBuilder()
+ .withName("build")
+ .withComputeResources(new ResourceRequirementsBuilder()
+ .withLimits(Collections.singletonMap("memory", new Quantity("5Gi")))
+ .withRequests(Collections.singletonMap("memory", new Quantity("5Gi"))).build())
+ .build())
+ .build();
+ return Collections.singletonList(stepSpec);
+ }
+
+ private List getGitParams() {
+ List result = new ArrayList<>();
+ result.add(new ParamBuilder().withName("url")
+ .withNewValue("https://gitlab.cee.redhat.com/project-ncl/konflux-integration.git").build());
+ result.add(new ParamBuilder().withName("revision").withNewValue("main").build());
+ result.add(new ParamBuilder().withName("pathInRepo").withNewValue(".tekton/mw-pipeline-v0.1.yaml").build());
+ return result;
+ }
+
+ private WorkspaceBinding getWorkspace() {
+ return new WorkspaceBindingBuilder().withName("source").withNewVolumeClaimTemplate()
+ .withNewSpec()
+ .addToAccessModes("ReadWriteOnce")
+ .withNewResources().withRequests(Collections.singletonMap("storage", new Quantity("1Gi"))).endResources()
+ .endSpec()
+ .endVolumeClaimTemplate()
+ .build();
+ }
+}
diff --git a/java-components/cli/src/test/java/com/redhat/hacbs/cli/MockOidcClient.java b/java-components/cli/src/test/java/com/redhat/hacbs/cli/MockOidcClient.java
new file mode 100644
index 000000000..c7f0bf077
--- /dev/null
+++ b/java-components/cli/src/test/java/com/redhat/hacbs/cli/MockOidcClient.java
@@ -0,0 +1,41 @@
+package com.redhat.hacbs.cli;
+
+import java.io.IOException;
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
+import java.util.Map;
+
+import io.quarkus.oidc.client.OidcClient;
+import io.quarkus.oidc.client.Tokens;
+import io.quarkus.test.Mock;
+import io.smallrye.mutiny.Uni;
+
+/**
+ * From PNC
+ * BuildDriver
+ */
+@Mock
+public class MockOidcClient implements OidcClient {
+
+ @Override
+ public Uni getTokens(Map additionalGrantParameters) {
+ return Uni.createFrom()
+ .item(new Tokens("accessToken", 1L, Duration.of(5, ChronoUnit.MINUTES), "refreshToken", 1L, null, null));
+ }
+
+ @Override
+ public Uni refreshTokens(String refreshToken, Map additionalGrantParameters) {
+ return null;
+ }
+
+ @Override
+ public Uni revokeAccessToken(String accessToken, Map additionalParameters) {
+ return null;
+ }
+
+ @Override
+ public void close() throws IOException {
+
+ }
+}
diff --git a/java-components/cli/src/test/resources/application.yaml b/java-components/cli/src/test/resources/application.yaml
new file mode 100644
index 000000000..3fd73ac10
--- /dev/null
+++ b/java-components/cli/src/test/resources/application.yaml
@@ -0,0 +1,5 @@
+quarkus:
+ oidc:
+ enabled: false
+ oidc-client:
+ enabled: false
diff --git a/java-components/driver/pom.xml b/java-components/driver/pom.xml
new file mode 100644
index 000000000..33eb872f2
--- /dev/null
+++ b/java-components/driver/pom.xml
@@ -0,0 +1,166 @@
+
+
+ 4.0.0
+
+ io.github.redhat-appstudio.jvmbuild
+ jvm-build-service-parent
+ 999-SNAPSHOT
+
+ driver
+ Driver
+
+
+ 1.18.32
+ 1.12.0
+
+
+
+
+ io.quarkus
+ quarkus-oidc
+
+
+ io.quarkus
+ quarkus-oidc-client
+
+
+ io.quarkus
+ quarkus-openshift-client
+
+
+ io.quarkus
+ quarkus-rest-jackson
+
+
+ io.quarkus
+ quarkus-rest-client-jackson
+
+
+ io.quarkiverse.tektonclient
+ quarkus-tekton-client
+
+
+ org.apache.commons
+ commons-text
+ ${version.commons-text}
+
+
+ io.quarkus
+ quarkus-info
+
+
+ org.jboss.pnc
+ pnc-api
+ 3.0.0
+
+
+
+
+
+
+
+
+
+ org.projectlombok
+ lombok
+ ${version.lombok}
+ provided
+
+
+ io.quarkus
+ quarkus-junit5
+ test
+
+
+ io.quarkus
+ quarkus-junit5-mockito
+ test
+
+
+ io.rest-assured
+ rest-assured
+ test
+
+
+ io.quarkus
+ quarkus-test-kubernetes-client
+ test
+
+
+
+
+
+ ${quarkus.platform.group-id}
+ quarkus-maven-plugin
+ ${quarkus.platform.version}
+ true
+
+
+
+ build
+ generate-code
+ generate-code-tests
+
+
+
+
+
+ maven-surefire-plugin
+ ${surefire-plugin.version}
+
+
+ org.jboss.logmanager.LogManager
+ ${maven.home}
+
+
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+
+ ${maven.build.timestamp}
+
+
+
+
+
+
+
+
+ native
+
+
+ native
+
+
+
+
+
+ maven-failsafe-plugin
+ ${surefire-plugin.version}
+
+
+
+ integration-test
+ verify
+
+
+
+ ${project.build.directory}/${project.build.finalName}-runner
+ org.jboss.logmanager.LogManager
+ ${maven.home}
+
+
+
+
+
+
+
+
+ native
+
+
+
+
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/Driver.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/Driver.java
new file mode 100644
index 000000000..77b84ba65
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/Driver.java
@@ -0,0 +1,153 @@
+package com.redhat.hacbs.driver;
+
+import static org.apache.commons.lang3.StringUtils.isEmpty;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.inject.Inject;
+
+import org.apache.commons.io.IOUtils;
+import org.eclipse.microprofile.config.ConfigProvider;
+import org.eclipse.microprofile.config.inject.ConfigProperty;
+import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.redhat.hacbs.driver.clients.IndyService;
+import com.redhat.hacbs.driver.clients.IndyTokenRequestDTO;
+import com.redhat.hacbs.driver.clients.IndyTokenResponseDTO;
+import com.redhat.hacbs.driver.dto.BuildRequest;
+import com.redhat.hacbs.driver.dto.BuildResponse;
+import com.redhat.hacbs.driver.dto.CancelRequest;
+
+import io.fabric8.knative.internal.pkg.apis.Condition;
+import io.fabric8.kubernetes.api.model.Quantity;
+import io.fabric8.kubernetes.client.KubernetesClient;
+import io.fabric8.tekton.client.TektonClient;
+import io.fabric8.tekton.pipeline.v1.ParamBuilder;
+import io.fabric8.tekton.pipeline.v1.PipelineRun;
+import io.quarkus.oidc.client.OidcClient;
+import lombok.Setter;
+
+@RequestScoped
+public class Driver {
+
+ private static final Logger logger = LoggerFactory.getLogger(Driver.class);
+
+ @Inject
+ OidcClient oidcClient;
+
+ @RestClient
+ IndyService indyService;
+
+ @Inject
+ KubernetesClient client;
+
+ @Setter
+ private String accessToken;
+
+ @Setter
+ @ConfigProperty(name = "konflux-build-driver.konflux-processor")
+ String processor;
+
+ @Setter
+ @ConfigProperty(name = "konflux-build-driver.quay-repo")
+ String quayRepo;
+
+ @ConfigProperty(name = "konflux-build-driver.pipeline-resolver")
+ String resolverTarget;
+
+ public BuildResponse create(BuildRequest buildRequest) {
+ IndyTokenResponseDTO tokenResponseDTO = new IndyTokenResponseDTO(accessToken);
+
+ if (isEmpty(accessToken)) {
+ logger.info("Establishing token from Indy using clientId {}",
+ ConfigProvider.getConfig().getConfigValue("quarkus.oidc.client-id").getValue());
+ tokenResponseDTO = indyService.getAuthToken(
+ new IndyTokenRequestDTO(buildRequest.repositoryBuildContentId()),
+ "Bearer " + getFreshAccessToken());
+ }
+
+ Map templateProperties = new HashMap<>();
+ templateProperties.put("ACCESS_TOKEN", tokenResponseDTO.token());
+ templateProperties.put("BUILD_ID", buildRequest.repositoryBuildContentId());
+ templateProperties.put("BUILD_SCRIPT", buildRequest.buildScript());
+ templateProperties.put("BUILD_TOOL", buildRequest.buildTool());
+ templateProperties.put("BUILD_TOOL_VERSION", buildRequest.buildToolVersion());
+ templateProperties.put("JAVA_VERSION", buildRequest.javaVersion());
+ templateProperties.put("MVN_REPO_DEPENDENCIES_URL", buildRequest.repositoryDependencyUrl());
+ templateProperties.put("MVN_REPO_DEPLOY_URL", buildRequest.repositoryDeployUrl());
+ templateProperties.put("QUAY_REPO", quayRepo);
+ templateProperties.put("RECIPE_IMAGE", buildRequest.recipeImage());
+ templateProperties.put("JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE", processor);
+ templateProperties.put("REVISION", buildRequest.scmRevision());
+ templateProperties.put("URL", buildRequest.scmUrl());
+
+ PipelineRun pipelineRun = null;
+ try {
+ var tc = client.adapt(TektonClient.class);
+ // Various ways to create the initial PipelineRun object. We can use an objectmapper,
+ // client.getKubernetesSerialization() or the load calls on the Fabric8 objects.
+ pipelineRun = tc.v1().pipelineRuns()
+ .load(IOUtils.resourceToURL("pipeline.yaml", Thread.currentThread().getContextClassLoader())).item();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ pipelineRun = pipelineRun.edit().editOrNewSpec()
+ .editPipelineRef()
+ .editFirstParam().editOrNewValue().withStringVal(resolverTarget).endValue()
+ .endParam()
+ .endPipelineRef()
+ .addAllToParams(templateProperties.entrySet().stream()
+ .map(t -> new ParamBuilder().withName(t.getKey()).withNewValue(t.getValue()).build()).toList())
+ .editFirstTaskRunSpec()
+ .editFirstStepSpec()
+ .editComputeResources()
+ .addToLimits("memory", new Quantity(buildRequest.podMemoryOverride()))
+ .addToRequests("memory", new Quantity(buildRequest.podMemoryOverride()))
+ .endComputeResources()
+ .endStepSpec()
+ .endTaskRunSpec()
+ .endSpec().build();
+
+ var created = client.resource(pipelineRun).inNamespace(buildRequest.namespace()).create();
+
+ return BuildResponse.builder().namespace(buildRequest.namespace()).pipelineId(created.getMetadata().getName()).build();
+ }
+
+ public void cancel(CancelRequest request) {
+ var tc = client.adapt(TektonClient.class);
+ var pipeline = tc.v1beta1().pipelineRuns().inNamespace(request.namespace()).withName(request.pipelineId()).get(); // TODO(review): create() uses v1(); confirm v1beta1 here is intentional
+
+ logger.info("Retrieved pipeline {}", pipeline.getMetadata().getName());
+
+ List conditions = new ArrayList<>();
+ // https://tekton.dev/docs/pipelines/pipelineruns/#monitoring-execution-status
+ Condition cancelCondition = new Condition();
+ cancelCondition.setType("Succeeded");
+ cancelCondition.setStatus("False");
+ // https://github.com/tektoncd/community/blob/main/teps/0058-graceful-pipeline-run-termination.md
+ cancelCondition.setReason("CancelledRunFinally");
+ cancelCondition.setMessage("The PipelineRun was cancelled");
+ conditions.add(cancelCondition);
+
+ pipeline.getStatus().setConditions(conditions);
+
+ tc.v1beta1().pipelineRuns().inNamespace(request.namespace()).resource(pipeline).updateStatus();
+ }
+
+ /**
+ * Get a fresh access token for the service account. This is done because we want to get a
+ * super-new token to be used since we're not entirely sure when the http request will be done.
+ *
+ * @return fresh access token
+ */
+ public String getFreshAccessToken() {
+ return oidcClient.getTokens().await().indefinitely().getAccessToken();
+ }
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyService.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyService.java
new file mode 100644
index 000000000..d44fd6649
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyService.java
@@ -0,0 +1,51 @@
+/**
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2021 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.redhat.hacbs.driver.clients;
+
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.HeaderParam;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
+
+import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
+
+/**
+ * Indy service representing the Indy server. It uses Quarkus magical rest client to generate the client implementation
+ */
+@RegisterRestClient(configKey = "indy-service")
+public interface IndyService {
+
+ /**
+ * Ask Indy to give us the token that we will use for Maven communication with Indy, in the builder pod for the
+ * particular buildId
+ *
+ * @param indyTokenRequestDTO the DTO to send to Indy
+ * @param accessToken accessToken required to send data. Note that it should include "Bearer "
+ *
+ * @return Token DTO
+ */
+ @Path("/api/security/auth/token")
+ @Produces(MediaType.APPLICATION_JSON)
+ @Consumes(MediaType.APPLICATION_JSON)
+ @POST
+ IndyTokenResponseDTO getAuthToken(
+ IndyTokenRequestDTO indyTokenRequestDTO,
+ @HeaderParam("Authorization") String accessToken);
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyTokenRequestDTO.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyTokenRequestDTO.java
new file mode 100644
index 000000000..c5c25e321
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyTokenRequestDTO.java
@@ -0,0 +1,13 @@
+package com.redhat.hacbs.driver.clients;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import lombok.Builder;
+
+/**
+ * DTO of the Indy token endpoint request
+ */
+@Builder
+public record IndyTokenRequestDTO(@JsonProperty("build-id") String buildId) {
+
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyTokenResponseDTO.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyTokenResponseDTO.java
new file mode 100644
index 000000000..17cf82adb
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/clients/IndyTokenResponseDTO.java
@@ -0,0 +1,11 @@
+package com.redhat.hacbs.driver.clients;
+
+import lombok.Builder;
+
+/**
+ * DTO of the Indy token endpoint response
+ */
+@Builder
+public record IndyTokenResponseDTO(String token) {
+
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/BuildRequest.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/BuildRequest.java
new file mode 100644
index 000000000..abf49b382
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/BuildRequest.java
@@ -0,0 +1,14 @@
+package com.redhat.hacbs.driver.dto;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+import lombok.Builder;
+
+@Builder(builderClassName = "Builder")
+@JsonIgnoreProperties(ignoreUnknown = true)
+public record BuildRequest(String recipeImage, String buildTool, String buildToolVersion, String javaVersion,
+ String projectName, String scmUrl, String scmRevision, String buildScript,
+ String repositoryDependencyUrl, String repositoryDeployUrl, String repositoryBuildContentId,
+ String namespace, String podMemoryOverride) {
+
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/BuildResponse.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/BuildResponse.java
new file mode 100644
index 000000000..208f44796
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/BuildResponse.java
@@ -0,0 +1,11 @@
+package com.redhat.hacbs.driver.dto;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+import lombok.Builder;
+
+@Builder(builderClassName = "Builder")
+@JsonIgnoreProperties(ignoreUnknown = true)
+public record BuildResponse(String pipelineId, String namespace) {
+
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/CancelRequest.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/CancelRequest.java
new file mode 100644
index 000000000..36aa37320
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/dto/CancelRequest.java
@@ -0,0 +1,11 @@
+package com.redhat.hacbs.driver.dto;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+import lombok.Builder;
+
+@Builder(builderClassName = "Builder")
+@JsonIgnoreProperties(ignoreUnknown = true)
+public record CancelRequest(String pipelineId, String namespace) {
+
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/endpoints/Public.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/endpoints/Public.java
new file mode 100644
index 000000000..4b49e9fd1
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/endpoints/Public.java
@@ -0,0 +1,87 @@
+/**
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2021 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.redhat.hacbs.driver.endpoints;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
+
+import org.jboss.pnc.api.dto.ComponentVersion;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.redhat.hacbs.driver.Driver;
+import com.redhat.hacbs.driver.dto.BuildRequest;
+import com.redhat.hacbs.driver.dto.BuildResponse;
+import com.redhat.hacbs.driver.dto.CancelRequest;
+import com.redhat.hacbs.driver.util.Info;
+
+import io.smallrye.common.annotation.RunOnVirtualThread;
+
+/**
+ * Endpoint to start/cancel the build.
+ *
+ * @author Matej Lazar
+ */
+@Path("/") // Root JAX-RS resource: POST /build, PUT /cancel, GET /version.
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class Public {
+
+ private static final Logger logger = LoggerFactory.getLogger(Public.class);
+
+ @Inject
+ Driver driver; // Performs the actual pipeline creation/cancellation against the cluster.
+
+ @Inject
+ Info info; // Supplies build/git version metadata for GET /version.
+
+ @POST
+ @Path("/build")
+ @RunOnVirtualThread // Run on a virtual thread so the blocking driver call does not pin an event-loop thread.
+ // public CompletionStage build(BuildRequest buildRequest) {
+ public BuildResponse build(BuildRequest buildRequest) { // Starts a build; returns the created pipeline's id/namespace from Driver.create.
+ logger.info("Requested project build: {}", buildRequest.projectName());
+ var result = driver.create(buildRequest);
+ logger.info("### Got {}", result); // NOTE(review): "###" reads like leftover debug output -- consider dropping or demoting to debug level.
+ return result;
+ }
+
+ @PUT
+ @Path("/cancel")
+ @RunOnVirtualThread
+ public void cancel(CancelRequest cancelRequest) { // Cancels the pipeline identified in the request; no response body.
+ logger.info("Requested cancel: {}", cancelRequest.pipelineId());
+ driver.cancel(cancelRequest);
+ }
+
+ @Path("/version")
+ @GET
+ @RunOnVirtualThread
+ public ComponentVersion getVersion() { // Reports component name, version, commit and build time (see Info).
+ var r = info.getVersion();
+ logger.info("Requested version {}", r);
+ return r;
+ }
+}
diff --git a/java-components/driver/src/main/java/com/redhat/hacbs/driver/util/Info.java b/java-components/driver/src/main/java/com/redhat/hacbs/driver/util/Info.java
new file mode 100644
index 000000000..c1780723a
--- /dev/null
+++ b/java-components/driver/src/main/java/com/redhat/hacbs/driver/util/Info.java
@@ -0,0 +1,33 @@
+package com.redhat.hacbs.driver.util;
+
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.inject.Inject;
+
+import org.eclipse.microprofile.config.inject.ConfigProperty;
+import org.jboss.pnc.api.dto.ComponentVersion;
+
+import io.quarkus.info.BuildInfo;
+import io.quarkus.info.GitInfo;
+
+@ApplicationScoped
+public class Info { // Aggregates the application name plus Quarkus build/git metadata into a ComponentVersion DTO for GET /version.
+
+ @ConfigProperty(name = "quarkus.application.name")
+ String name; // "konflux-build-driver" per application.yaml.
+
+ @Inject
+ GitInfo gitInfo; // From the quarkus-info extension: git metadata captured at build time.
+
+ @Inject
+ BuildInfo buildInfo; // From the quarkus-info extension: build timestamp and version.
+
+ public ComponentVersion getVersion() { // Never null; all fields come from injected build-time data.
+ return ComponentVersion.builder()
+ .name(name)
+ .builtOn(buildInfo.time().toZonedDateTime())
+ .commit(gitInfo.latestCommitId())
+ .version(buildInfo.version())
+ .build();
+ }
+
+}
diff --git a/java-components/driver/src/main/resources/META-INF/beans.xml b/java-components/driver/src/main/resources/META-INF/beans.xml
new file mode 100644
index 000000000..e69de29bb
diff --git a/java-components/driver/src/main/resources/application.yaml b/java-components/driver/src/main/resources/application.yaml
new file mode 100644
index 000000000..25b24dad2
--- /dev/null
+++ b/java-components/driver/src/main/resources/application.yaml
@@ -0,0 +1,66 @@
+konflux-build-driver: # Driver-specific settings (bound in the application's config classes).
+ quay-repo: "quay.io/redhat-user-workloads-stage/pnc-devel-tenant/pnc-konflux" # NOTE(review): stage repo -- presumably where built images are pushed; confirm before production use.
+ konflux-processor: "quay.io/redhat-user-workloads/konflux-jbs-pnc-tenant/jvm-build-service/build-request-processor:latest" # Image passed as JVM_BUILD_SERVICE_REQPROCESSOR_IMAGE; ":latest" is not reproducible.
+ # TODO: This will eventually be build-definitions repository
+ pipeline-resolver: "https://raw.githubusercontent.com/rnc/jvm-build-service/refs/heads/NCL8774/deploy/pipeline/mw-pipeline-v0.1.yaml" # Fetched by the Tekton http resolver (see pipeline.yaml); points at a personal fork/branch.
+quarkus:
+ application:
+ name: konflux-build-driver # Also surfaced via GET /version (Info bean).
+ shutdown:
+ timeout: 300 # Graceful-shutdown timeout; plain number -- presumably seconds, TODO confirm.
+ log:
+ category:
+ "com.redhat.hacbs":
+ level: DEBUG # Verbose logging for the driver's own packages only.
+# console:
+# format: "%d{HH:mm:ss,SSS} %-5p traceId=%X{traceId}, parentId=%X{parentId}, spanId=%X{spanId}, sampled=%X{sampled} [%c{3.}] (%t) %s%e mdc:[%X]%n"
+# http:
+# cors:
+# ~: true
+# origins: "/.*/"
+# kubernetes-client:
+# connection-timeout: PT30S
+# request-timeout: PT30S
+ #namespace: default
+ #token: secretToken
+ oidc: # Placeholder endpoint/credentials -- expected to be overridden per deployment (env/secret).
+ auth-server-url: https://keycloak-host/auth/realms/pncredhat
+ client-id: client
+ credentials:
+ secret: secret
+ # tls:
+ # verification: none
+# keycloak:
+# policy-enforcer:
+# enable: false
+ oidc-client: # Service-account client used for outbound calls; placeholders as above.
+ auth-server-url: https://keycloak-host/auth/realms/pncredhat
+ client-id: service-account
+ credentials:
+ secret: secret-of-service-account
+ rest-client:
+ indy-service:
+ url: "http://localhost:8080" # IndyService REST client base URL (mocked in tests).
+#"%test":
+# quarkus:
+# oidc:
+# enabled: false
+# oidc-client:
+# enabled: false
+# log:
+# console:
+# json: false
+# environment-driver:
+# http-client:
+# connect-timeout: 1
+# request-timeout: 3
+# build-agent:
+# ping-path: PingHandler
+# host: localhost
+# running-wait-for: 3
+#
+#"%dev":
+# quarkus:
+# log:
+# console:
+# json: false
diff --git a/java-components/driver/src/main/resources/pipeline.yaml b/java-components/driver/src/main/resources/pipeline.yaml
new file mode 100644
index 000000000..0ebc272b6
--- /dev/null
+++ b/java-components/driver/src/main/resources/pipeline.yaml
@@ -0,0 +1,43 @@
+apiVersion: tekton.dev/v1
+kind: PipelineRun # Template the driver loads, completes, and submits to the cluster.
+metadata:
+ generateName: run-mw-pipeline- # Cluster generates a unique suffix per run.
+spec:
+ pipelineRef:
+ # TODO: Might want in future to change this to OCI bundle reference?
+ resolver: http # Pipeline definition is fetched from a URL at run time.
+ params:
+ - name: url
+ value: "" # Intentionally empty -- presumably filled in from konflux-build-driver.pipeline-resolver; TODO confirm in Driver.
+ workspaces:
+ - name: source
+ # TODO: If we have a custom git step we can share this with prebuild thereby eliminating the need for a volumeClaimTemplate
+ #
+ # emptyDir: {} - does not share the data between tasks
+ # When the volume is created from a template in a PipelineRun or TaskRun it will be deleted when the PipelineRun or TaskRun is deleted.
+ volumeClaimTemplate:
+ metadata:
+ spec:
+ accessModes:
+ - ReadWriteOnce # Single-node access is enough: tasks share one workspace volume.
+ resources:
+ requests:
+ storage: 1Gi
+ status: {} # Required empty stub for the embedded PVC template.
+ params:
+ - name: caTrustConfigMapName
+ value: custom-ca
+ - name: ENABLE_INDY_PROXY
+ value: "false" # Quoted: the pipeline param is of type string.
+ # TODO: Should PNC set both limits and requests? See
+ # https://home.robusta.dev/blog/kubernetes-memory-limit
+ # https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/
+ taskRunSpecs:
+ - pipelineTaskName: buildah-oci-ta
+ stepSpecs:
+ - name: build
+ computeResources:
+ # limits/requests memory are configured dynamically in Driver.
+ taskRunTemplate:
+ podTemplate:
+ env: # Left empty -- presumably populated dynamically by the driver; TODO confirm.
diff --git a/java-components/driver/src/test/java/com/redhat/hacbs/driver/EndpointTest.java b/java-components/driver/src/test/java/com/redhat/hacbs/driver/EndpointTest.java
new file mode 100644
index 000000000..79e8db882
--- /dev/null
+++ b/java-components/driver/src/test/java/com/redhat/hacbs/driver/EndpointTest.java
@@ -0,0 +1,68 @@
+package com.redhat.hacbs.driver;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+
+import jakarta.inject.Inject;
+
+import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.jboss.pnc.api.dto.ComponentVersion;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.redhat.hacbs.driver.clients.IndyService;
+import com.redhat.hacbs.driver.clients.IndyTokenRequestDTO;
+import com.redhat.hacbs.driver.clients.IndyTokenResponseDTO;
+import com.redhat.hacbs.driver.dto.BuildRequest;
+
+import io.fabric8.kubernetes.client.KubernetesClient;
+import io.fabric8.kubernetes.client.server.mock.KubernetesServer;
+import io.quarkus.test.InjectMock;
+import io.quarkus.test.junit.QuarkusTest;
+import io.quarkus.test.kubernetes.client.KubernetesTestServer;
+import io.quarkus.test.kubernetes.client.WithKubernetesTestServer;
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
+
+@WithKubernetesTestServer // Replaces the real cluster with fabric8's mock Kubernetes API server.
+@QuarkusTest
+public class EndpointTest { // Exercises the Public REST resource end-to-end against mocked externals.
+
+ @KubernetesTestServer
+ KubernetesServer mockServer; // Injected handle to the mock API server (available for per-test expectations).
+
+ @Inject
+ KubernetesClient client; // Client wired to the mock server by the test extension.
+
+ @InjectMock
+ @RestClient
+ IndyService indyService; // Mocked so no real Indy endpoint is contacted.
+
+ @BeforeEach
+ public void setup() {
+ when(indyService.getAuthToken(any(IndyTokenRequestDTO.class), any(String.class)))
+ .thenReturn(new IndyTokenResponseDTO("token-for-builder-pod")); // Canned token for any auth request.
+ }
+
+ @Test
+ void verify() { // POST /build with a minimal request must succeed (HTTP 200).
+
+ BuildRequest request = BuildRequest.builder().namespace("default").podMemoryOverride("1Gi").build();
+ RestAssured.given().contentType(ContentType.JSON)
+ .body(request)
+ .when()
+ .post("/build")
+ .then()
+ .statusCode(200);
+ }
+
+ @Test
+ void version() { // GET /version must report the configured application name.
+ var result = RestAssured.given()
+ .when()
+ .get("/version")
+ .as(ComponentVersion.class);
+ Assertions.assertEquals("konflux-build-driver", result.getName());
+ }
+}
diff --git a/java-components/driver/src/test/java/com/redhat/hacbs/driver/MockOidcClient.java b/java-components/driver/src/test/java/com/redhat/hacbs/driver/MockOidcClient.java
new file mode 100644
index 000000000..9f8bc88a1
--- /dev/null
+++ b/java-components/driver/src/test/java/com/redhat/hacbs/driver/MockOidcClient.java
@@ -0,0 +1,41 @@
+package com.redhat.hacbs.driver;
+
+import java.io.IOException;
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
+import java.util.Map;
+
+import io.quarkus.oidc.client.OidcClient;
+import io.quarkus.oidc.client.Tokens;
+import io.quarkus.test.Mock;
+import io.smallrye.mutiny.Uni;
+
+/**
+ * CDI @Mock OidcClient for tests: returns a fixed access token so no real
+ * auth server is needed. Adapted from the PNC BuildDriver. (Generic type
+ * parameters restored to match the io.quarkus.oidc.client.OidcClient API.)
+ */
+@Mock
+public class MockOidcClient implements OidcClient {
+
+ @Override
+ public Uni<Tokens> getTokens(Map<String, String> additionalGrantParameters) { // Fixed token, 5-minute refresh skew.
+ return Uni.createFrom()
+ .item(new Tokens("accessToken", 1L, Duration.of(5, ChronoUnit.MINUTES), "refreshToken", 1L, null, null));
+ }
+
+ @Override
+ public Uni<Tokens> refreshTokens(String refreshToken, Map<String, String> additionalGrantParameters) { // Unused in tests.
+ return null;
+ }
+
+ @Override
+ public Uni<Boolean> revokeAccessToken(String accessToken, Map<String, String> additionalParameters) { // Unused in tests.
+ return null;
+ }
+
+ @Override
+ public void close() throws IOException {
+
+ }
+}
diff --git a/java-components/driver/src/test/resources/application.yaml b/java-components/driver/src/test/resources/application.yaml
new file mode 100644
index 000000000..5b08310b0
--- /dev/null
+++ b/java-components/driver/src/test/resources/application.yaml
@@ -0,0 +1,9 @@
+quarkus:
+ oidc:
+ enabled: false # Disable inbound auth in tests; endpoints are called unauthenticated.
+ oidc-client:
+ enabled: false # Outbound tokens come from MockOidcClient instead.
+ log:
+ category:
+ "com.redhat.hacbs":
+ level: DEBUG # Verbose driver logging during test runs.
diff --git a/java-components/pom.xml b/java-components/pom.xml
index 42a6c52a8..8bb23cef5 100644
--- a/java-components/pom.xml
+++ b/java-components/pom.xml
@@ -4,6 +4,12 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4.0.0
+
+ org.jboss
+ jboss-parent
+ 47
+
+
io.github.redhat-appstudio.jvmbuild
jvm-build-service-parent
999-SNAPSHOT
@@ -40,6 +46,8 @@
UTF-8
UTF-8
+ 21
+ 21
21
io.quarkus.platform
@@ -83,6 +91,7 @@
common-images
common-maven
domain-proxy
+ driver
@@ -145,6 +154,11 @@
common-maven
${project.version}
+
+ io.github.redhat-appstudio.jvmbuild
+ driver
+ ${project.version}
+
io.quarkiverse.quinoa
@@ -182,6 +196,11 @@
quarkus-maven-resolver
0.0.4
+
+ io.quarkiverse.tektonclient
+ quarkus-tekton-client
+ 1.0.1
+
org.apache.maven.release