Build for Remote Workflow: latest-main #1848
Workflow file for this run

# We use a single workflow to build all packages because github.run_number is
# specific to each workflow. This ensures that each package has an
# OTC_BUILD_NUMBER that is greater than in previous runs, which allows package
# upgrades from one build to the next.
name: 'Build packages'
# Sets the name of the CI run based on whether the run was triggered with or
# without a workflow_id set.
run-name: >
  ${{
    inputs.workflow_id == '' && format('Build for Remote Workflow: latest-main')
    ||
    inputs.workflow_id != '' && format('Build for Remote Workflow: {0}', inputs.workflow_id)
  }}
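# GitHub Actions expressions have no ternary operator, so the && / || chain
# above selects between the two formatted run names.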
on:
  push:
    branches:
      - 'main'
  pull_request:
  workflow_dispatch:
    inputs:
      workflow_id:
        description: |
          Workflow Run ID from the SumoLogic/sumologic-otel-collector repository
          to download artifacts from. The artifacts for the specified workflow
          must contain an otelcol-sumo binary for each platform that packages
          are being built for.
        required: false
        type: string
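        # Illustrative manual dispatch against a specific upstream run using
        # the gh CLI (the run id below is hypothetical):
        #   gh workflow run 'Build packages' -f workflow_id=1234567890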
jobs:
  determine_workflow:
    runs-on: ubuntu-latest
    name: Determine workflow_id
    outputs:
      workflow_id: ${{ steps.workflow.outputs.id }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Determine the latest successful run of the "Dev builds" workflow for
      # the "main" branch. This is skipped if inputs.workflow_id is set.
      - name: Determine latest successful workflow run
        id: latest-workflow
        if: inputs.workflow_id == ''
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          R="SumoLogic/sumologic-otel-collector"
          WFN="Dev builds"
          W=$(gh run list -R "$R" -w "$WFN" -s success -b main --json databaseId -q '.[0].databaseId')
          echo "id=$W" >> "$GITHUB_OUTPUT"
      - name: Set output workflow
        id: workflow
        run: |
          echo "id=${{ inputs.workflow_id || steps.latest-workflow.outputs.id }}" >> $GITHUB_OUTPUT
      - name: Output Remote Workflow URL
        run: echo ::notice title=Remote Workflow URL::https://github.com/SumoLogic/sumologic-otel-collector/actions/runs/${{ steps.workflow.outputs.id }}
  # Builds a package for each target in the matrix. The target must be an
  # existing file name (without extension) in the targets directory when
  # build_tool is cmake.
  build_packages:
    name: ${{ matrix.target }}
    uses: ./.github/workflows/_reusable_build_package.yml
    needs:
      - determine_workflow
    with:
      otc_build_number: ${{ github.run_number }}
      cmake_target: ${{ matrix.target }}
      workflow_id: ${{ needs.determine_workflow.outputs.workflow_id }}
      runs_on: ${{ matrix.runs_on }}
      goarch: ${{ matrix.goarch }}
      package_arch: ${{ matrix.package_arch }}
      build_tool: ${{ matrix.build_tool }}
      fips: ${{ matrix.fips || false }}
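    # Signing material and access tokens are forwarded to the reusable
    # workflow; the Apple and Microsoft certificate secrets are presumably
    # used to sign the macOS and Windows packages.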
    secrets:
      apple_developer_certificate_p12_base64: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_P12_BASE64 }}
      apple_developer_certificate_password: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_PASSWORD }}
      productbuild_identity_name: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_IDENTITY }}
      gh_artifacts_token: ${{ secrets.GH_ARTIFACTS_TOKEN }}
      microsoft_certificate: ${{ secrets.MICROSOFT_CERTIFICATE }}
      microsoft_certificate_password: ${{ secrets.MICROSOFT_CERTIFICATE_PASSWORD }}
      microsoft_certhash: ${{ secrets.MICROSOFT_CERTHASH }}
      microsoft_certname: ${{ secrets.MICROSOFT_CERTNAME }}
      microsoft_description: ${{ secrets.MICROSOFT_DESCRIPTION }}
      gh_ci_token: ${{ secrets.GH_CI_TOKEN }}
      packagecloud_token: ${{ secrets.PACKAGECLOUD_TOKEN }}
      aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
      aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
    strategy:
      matrix:
        include:
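          # Linux deb/rpm packages for amd64 and arm64, macOS productbuild
          # packages for amd64 and arm64, and a Windows MSI built with WiX,
          # plus FIPS variants of the Linux and Windows targets.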
          - target: otc_linux_amd64_deb
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_linux_amd64_rpm
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_linux_arm64_deb
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_linux_arm64_rpm
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_darwin_amd64_productbuild
            runs_on: macos-latest
            build_tool: cmake
          - target: otc_darwin_arm64_productbuild
            runs_on: macos-latest
            build_tool: cmake
          - target: otc_windows_amd64_wix
            runs_on: windows-2019
            goarch: amd64
            package_arch: x64
            build_tool: wix
          # fips targets
          - target: otc_fips_linux_amd64_deb
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_fips_linux_amd64_rpm
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_fips_linux_arm64_deb
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_fips_linux_arm64_rpm
            runs_on: ubuntu-latest
            build_tool: cmake
          - target: otc_fips_windows_amd64_wix
            runs_on: windows-2019
            goarch: amd64
            package_arch: x64
            build_tool: wix
            fips: true
  install-script:
    name: Store install script
    runs-on: ubuntu-latest
    needs:
      - build_packages
    env:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: us-east-1
      AWS_S3_BUCKET: sumologic-osc-ci-builds
      OTC_VERSION: ${{ needs.build_packages.outputs.otc_version }}
      OTC_BUILD_NUMBER: ${{ needs.build_packages.outputs.otc_build_number }}
    steps:
      - uses: actions/checkout@v4
      - name: Store Linux install script as action artifact
        uses: actions/upload-artifact@v4
        with:
          name: install.sh
          path: ./install-script/install.sh
          if-no-files-found: error
      - name: Store Windows install script as action artifact
        uses: actions/upload-artifact@v4
        with:
          name: install.ps1
          path: ./install-script/install.ps1
          if-no-files-found: error
      - name: Store install scripts on S3
        run: |
          version=${OTC_VERSION}-${OTC_BUILD_NUMBER}
          s3_path="${version}/"
          aws s3 cp install-script/install.ps1 s3://${AWS_S3_BUCKET}/${s3_path}
          aws s3 cp install-script/install.sh s3://${AWS_S3_BUCKET}/${s3_path}
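      # With an illustrative version 0.0.0 and build number 1848, the scripts
      # land at s3://sumologic-osc-ci-builds/0.0.0-1848/install.sh and
      # install.ps1; the step below records that same version string in a
      # latest_version file at the bucket root.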
      - name: Create latest_version file
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        run: |
          version=${OTC_VERSION}-${OTC_BUILD_NUMBER}
          echo "${version}" >> latest_version
          aws s3 cp --content-type "text/plain" latest_version \
            s3://${AWS_S3_BUCKET}/
  test-install-script:
    name: Test Install Script
    runs-on: ${{ matrix.runs_on }}
    timeout-minutes: 60
    needs:
      - build_packages
    strategy:
      fail-fast: false
      matrix:
        include:
          - arch_os: linux_amd64
            runs_on: ubuntu-20.04
          - arch_os: darwin_amd64
            runs_on: macos-latest
          - arch_os: windows_amd64
            runs_on: windows-2022
    env:
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      GH_CI_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      OTC_VERSION: ${{ needs.build_packages.outputs.otc_version }}
      OTC_BUILD_NUMBER: ${{ github.run_number }}
      PACKAGECLOUD_MASTER_TOKEN: ${{ secrets.PACKAGECLOUD_MASTER_TOKEN }}
      PACKAGECLOUD_REPO: ci-builds
    steps:
      - uses: actions/checkout@v4
      - name: Check if test related files changed
        id: changed-files
        uses: tj-actions/changed-files@v44
        with:
          files: |
            install-script/**/*
            .github/**
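      # All remaining steps are gated on any_changed, so the install-script
      # tests only run when the install scripts or the CI configuration itself
      # changed.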
      - name: Setup go
        if: steps.changed-files.outputs.any_changed == 'true'
        uses: WillAbides/setup-go-faster@v1
        with:
          go-version: stable
      - name: Download macOS package and use it for install.sh
        if: ${{ steps.changed-files.outputs.any_changed == 'true' && runner.os == 'macOS' }}
        uses: actions/download-artifact@v4
        with:
          path: artifacts/
          pattern: otelcol-sumo_*-intel.pkg
      - name: Show packages
        if: ${{ steps.changed-files.outputs.any_changed == 'true' && runner.os == 'macOS' }}
        run: |
          ls -l artifacts/
          ls -l artifacts/**/*
      - name: Set DARWIN_PKG_URL (macOS)
        if: ${{ steps.changed-files.outputs.any_changed == 'true' && runner.os == 'macOS' }}
        run: |
          fp="$(readlink -f artifacts/otelcol-sumo_*-intel.pkg/otelcol-sumo_*-intel.pkg)"
          echo DARWIN_PKG_URL="file://${fp}" >> $GITHUB_ENV
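      # The file:// URL points install.sh at the package built in this run,
      # presumably so the test exercises this build rather than a previously
      # published package.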
      - name: Set S3_BUCKET (macOS)
        if: ${{ steps.changed-files.outputs.any_changed == 'true' && runner.os == 'macOS' }}
        run: |
          echo S3_BUCKET="sumologic-osc-ci-builds" >> $GITHUB_ENV
      - name: Run install script tests (*nix)
        if: steps.changed-files.outputs.any_changed == 'true' && runner.os != 'Windows'
        working-directory: install-script/test
        run: make test
      - name: Run install script tests (Windows)
        shell: powershell
        if: steps.changed-files.outputs.any_changed == 'true' && runner.os == 'Windows'
        working-directory: install-script/test
        env:
          S3_BUCKET: sumologic-osc-ci-builds
          OVERRIDE_ARCH: x64
          SKIP_ARCH_DETECTION: 1
        run: make test
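
# Rough local equivalent of the "*nix" test step above (assumes Go, make and
# any other test prerequisites are installed):
#   cd install-script/test && make test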