# fossa.yml
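# Enterprise FOSSA report generation: for each matrix repository, dispatch that repo's
# fossa.yml workflow and wait for it to finish, then pull the per-repo CSV reports from S3,
# merge them into a single deduplicated report, upload it as an artifact and back to S3,
# and trigger FOSSA report generation for datical-service.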
name: Enterprise - FOSSA Report Generation
on:
workflow_dispatch:
inputs:
version_number_for_report_generation:
type: string
        description: 'DaticalDB-installer version used for report generation and in the S3 report path, e.g. 8.7.352'
required: false
jobs:
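  # One matrix leg per enterprise repository: dispatch its fossa.yml workflow and wait for the remote run to complete.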
wait-for-fossa-report-generation:
runs-on: ubuntu-latest
strategy:
matrix:
        repo: [
          # { name: "DaticalDB-installer", ref: "DAT-18919", owner: "Datical" },
          # { name: "ephemeral-database", ref: "master", owner: "liquibase" }, # TODO: produces an HTML report instead of CSV; a CSV report is already uploaded to S3 to unblock the combine-fossa-reports job
          # { name: "drivers", ref: "DAT-18919", owner: "Datical" },
          { name: "protoclub", ref: "DAT-18919", owner: "Datical" }
          # { name: "datical-sqlparser", ref: "DAT-18919", owner: "Datical" },
          # { name: "storedlogic", ref: "DAT-18919", owner: "Datical" },
          # { name: "AppDBA", ref: "DAT-18919", owner: "Datical" },
          # { name: "liquibase-bundle", ref: "DAT-18919", owner: "Datical" },
          # { name: "liquibase", ref: "DAT-18919", owner: "Datical" }
        ]
name: "${{ matrix.repo.name }} - Fossa Report"
steps:
      - name: Set workflow inputs
        run: |
          # Forward the version to the dispatched workflow only when one was supplied
          if [[ -n "${{ github.event.inputs.version_number_for_report_generation }}" ]]; then
            echo "WORKFLOW_INPUTS={ \"version_number_for_report_generation\": \"${{ github.event.inputs.version_number_for_report_generation }}\" }" >> $GITHUB_ENV
          else
            echo "WORKFLOW_INPUTS={}" >> $GITHUB_ENV
          fi
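      # Dispatch fossa.yml in the target repo and capture the remote run ID so the run can be awaited below.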
- name: Dispatch an action and get the run ID
uses: codex-/return-dispatch@v1
id: return_dispatch
continue-on-error: true
with:
token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }}
ref: ${{ matrix.repo.ref }}
repo: ${{ matrix.repo.name }}
owner: ${{ matrix.repo.owner }}
workflow: fossa.yml
workflow_inputs: ${{ env.WORKFLOW_INPUTS }}
      - name: Retry fetching run ID (max 4 attempts with a 5-second delay)
run: |
retries=4
delay=5 # Delay of 5 seconds between retries
for i in $(seq 1 $retries); do
run_id="${{ steps.return_dispatch.outputs.run_id }}"
if [ -n "$run_id" ]; then
echo "Found run ID: $run_id"
echo "run_id=$run_id" >> $GITHUB_ENV
break
else
echo "Run ID not found, retrying in $delay seconds..."
fi
if [ $i -eq $retries ]; then
echo "Failed to get run ID after $retries attempts."
exit 1
fi
# Wait before retrying
sleep $delay
done
shell: bash
- name: Await Run ID ${{ steps.return_dispatch.outputs.run_id }}
uses: Codex-/await-remote-run@v1
with:
token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }}
run_id: ${{ steps.return_dispatch.outputs.run_id }}
repo: ${{ matrix.repo.name }}
owner: ${{ matrix.repo.owner }}
          run_timeout_seconds: 420 # 7 minutes before giving up on the run
          poll_interval_ms: 120000 # Poll the run status every 2 minutes
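  # Merge the per-repo CSV reports from S3 into one deduplicated enterprise report and publish it.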
combine-fossa-reports:
runs-on: ubuntu-latest
needs: wait-for-fossa-report-generation
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
repository: liquibase/build-logic
ref: DAT-18919
path: build-logic
- name: Set up AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.LIQUIBASEORIGIN_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.LIQUIBASEORIGIN_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: Download reports from S3 and Rearrange CSV files
run: |
# Create a directory to store downloaded reports from S3
mkdir -p /home/runner/work/enterprise/fossa_reports_s3
# Download all files from the specified S3 bucket to the created directory
aws s3 cp --recursive s3://liquibaseorg-origin/enterprise_fossa_report/raw_reports /home/runner/work/enterprise/fossa_reports_s3/
# List the contents of the directory to confirm successful download
ls -l /home/runner/work/enterprise/fossa_reports_s3
# Define an array of CSV file names
csv_files=("DaticalDB-installer" "drivers" "protoclub" "datical-sqlparser" "storedlogic" "AppDBA" "liquibase-bundle" "liquibase")
          # Strip the header row from each report so the combined file ends up with a single header
          for file in "${csv_files[@]}"; do
            tail -n +2 /home/runner/work/enterprise/fossa_reports_s3/${file}.csv >> /home/runner/work/enterprise/fossa_reports_s3/${file}_no_header.csv
          done
# Concatenate all CSV files without headers, sort, and remove duplicates
cat /home/runner/work/enterprise/fossa_reports_s3/*_no_header.csv | sort | uniq > /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv
# Add a header to the final CSV file, placing it above the sorted and unique data
echo 'Title,Version,Declared License,Package Homepage' | cat - /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv > temp && mv temp /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv
ls -l $GITHUB_WORKSPACE
# Read ignored dependencies from a file
ignoredLibsFile=$(cat $GITHUB_WORKSPACE/build-logic/.github/workflows/ignore_dependencies_fossa.txt)
# Split the ignored dependencies into an array
IFS=',' read -r -a ignoredLibs <<< "$ignoredLibsFile"
# Create a temporary file
tempfile=$(mktemp)
# Build the grep command to filter out ignored dependencies
grepCmd="grep -iv"
for lib in "${ignoredLibs[@]}"; do
grepCmd="$grepCmd -e \"$lib\""
done
# Process the FOSSA report to remove ignored dependencies
cat /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv | eval $grepCmd > enterprise_report.csv
- name: Upload CSV to Artifacts
uses: actions/upload-artifact@v3
with:
name: enterprise_report
          path: enterprise_report.csv
- name: Upload merged CSV to S3
if: always()
run: aws s3 cp enterprise_report.csv s3://liquibaseorg-origin/enterprise_fossa_report/${{ inputs.version_number_for_report_generation }}/enterprise_report_${{ inputs.version_number_for_report_generation }}.csv
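  # datical-service generates its FOSSA report through repository_dispatch instead of the matrix above.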
trigger-datical-service:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Dispatch an action for datical-service
uses: peter-evans/repository-dispatch@v3
with:
token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }}
repository: Datical/datical-service
event-type: trigger-fossa-report-generation
client-payload: '{"ref": "master", "version_number_for_report_generation": "${{ github.event.inputs.version_number_for_report_generation }}"}'