From 2227097db796ac1353d7fed44adbf0849906e7d9 Mon Sep 17 00:00:00 2001 From: Adelina Simion <43963729+addetz@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:50:29 +0000 Subject: [PATCH] docs: add packs caching solution DOC-1460 (#4632) * docs: add caching solution DOC-1460 * docs: add upload logic DOC-1460 * docs: update upload artifact DOC-1460 * docs: update build steps * docs: update download logic * docs: fix folder name * docs: fix folder path * docs: add composite action that builds cached packs DOC-1460 * docs: fix composite action reference * docs: fix composite action path * docs: fix composite action shell DOC-1460 * docs: add missing shell * docs: add failure condition * docs: create static/img/packs * docs: fix broken action * docs: add local workflow script * docs: add exit code handling DOC-1460 * ci: auto-formatting prettier issues * docs: add api-repositories-response to clean-packs * docs: add error handling to writeFile * docs: remove plugin from asset and add async file write function * docs: add directory creation to writeResponseFile * docs: remove push condition from post release * docs: add push condition from post release * docs: change name of workflow * docs: add readme documentation * ci: auto-formatting prettier issues * docs: fix reading of exit code * docs: remove the true on npm run build * docs: disable automatic failure to allow the exit code to be set * docs: add intermediary step * docs: adjust workflow * docs: adjust workflow with failure * docs: remove fail condition * docs: remove intermediary step * docs: add caching in other workflows * docs: add extra context in readme * docs: place make build commands back * docs: add build-ci job * docs: adjust the make commands DOC-1460 * docs: remove spaces * docs: remove spaces * docs: update readme * docs: remove clear from build-ci * docs: replace clear and change action * docs: add exit codes section in the readme * docs: add conditional around jq install --------- Co-authored-by: Carolina Delwing Rosa Co-authored-by: addetz --- .../actions/build-cached-packs/action.yaml | 48 ++++++++ .github/workflows/dependabot.yaml | 9 +- .github/workflows/nightly-docker-build.yaml | 11 +- .github/workflows/post_release.yaml | 59 +++++++++ .github/workflows/pull_request.yaml | 9 +- .github/workflows/release-branch-pr.yaml | 11 +- .github/workflows/release-preview.yaml | 14 ++- .github/workflows/release.yaml | 26 +++- .github/workflows/screenshot_capture.yaml | 12 +- .github/workflows/visual-comparison.yaml | 12 +- Makefile | 47 ++++++++ README.md | 37 ++++++ plugins/packs-integrations.js | 114 ++++++++++++------ scripts/get-cached-packs.sh | 49 ++++++++ 14 files changed, 406 insertions(+), 52 deletions(-) create mode 100644 .github/actions/build-cached-packs/action.yaml create mode 100644 .github/workflows/post_release.yaml create mode 100755 scripts/get-cached-packs.sh diff --git a/.github/actions/build-cached-packs/action.yaml b/.github/actions/build-cached-packs/action.yaml new file mode 100644 index 0000000000..a3aa251cc2 --- /dev/null +++ b/.github/actions/build-cached-packs/action.yaml @@ -0,0 +1,48 @@ +name: "Build with cached packs" +inputs: + gh-token: + description: "GitHub Token for authentication" + required: true + +runs: + using: "composite" + steps: + - name: Install jq (JSON processor) if not found + run: | + if ! command -v jq &> /dev/null; then + sudo apt-get update + sudo apt-get install -y jq + else + echo "jq is already installed. Skipping install..." 
+ fi + shell: bash + + - name: Download Packs Data + run: | + # Find the latest packs upload workflow. + run_id=$(gh run list --workflow="post_release.yaml" --limit 1 --json databaseId | jq -r '.[0].databaseId') + # Remove any downloaded artifacts, should they exist. + rm -rf ./downloaded_artifacts + # Download the latest artifact to a new dir. + gh run download ${run_id} --name build-packs --dir ./downloaded_artifacts + shell: bash + env: + GH_TOKEN: ${{ inputs.gh-token }} + + - name: Unpack packs data + run: | + # Ensure the correct folders exist. + mkdir -p .docusaurus/packs-integrations + # Move the files to their correct places in the checked out repository + mv downloaded_artifacts/.docusaurus/packs-integrations/* .docusaurus/packs-integrations + mkdir -p static/img/packs + mv downloaded_artifacts/build/packs/* static/img/packs + # Clean up. + rm -rf downloaded_artifacts + shell: bash + + - name: Build + run: | + rm -rf build + npm run build + shell: bash diff --git a/.github/workflows/dependabot.yaml b/.github/workflows/dependabot.yaml index 5fa0e8dd1a..821cf1a663 100644 --- a/.github/workflows/dependabot.yaml +++ b/.github/workflows/dependabot.yaml @@ -43,4 +43,11 @@ jobs: - name: Build run: | - npm run build + touch .env + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/nightly-docker-build.yaml b/.github/workflows/nightly-docker-build.yaml index 874ec42cad..b76e0e7c76 100644 --- a/.github/workflows/nightly-docker-build.yaml +++ b/.github/workflows/nightly-docker-build.yaml @@ -30,11 +30,16 @@ jobs: - run: npm ci - - name: Compile + - name: Build run: | touch .env - make build - + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/post_release.yaml b/.github/workflows/post_release.yaml new file mode 100644 index 0000000000..55662514d8 --- /dev/null +++ b/.github/workflows/post_release.yaml @@ -0,0 +1,59 @@ +name: Post Release Processing +# This workflow is triggered when a workflow run of the "Release to Production" workflow is completed or when manually triggered. +# The primary purpose of this workflow is to build the site, copy the packs data and upload it as artifacts. +# The packs data can be used as a fallback when the Palette API cannot return a packs list. 
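+# The artifact contains the cached pack API responses under .docusaurus/packs-integrations and the pack images
+# under build/packs. The build-cached-packs composite action downloads the artifact and restores the files into a
+# checkout, placing the images under static/img/packs.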
+ +on: + workflow_run: + workflows: ["Release to Production"] + types: [completed] + workflow_dispatch: + +env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + FULLSTORY_ORGID: ${{ secrets.FULLSTORY_ORGID }} + ALGOLIA_ADMIN_KEY: ${{ secrets.ALGOLIA_ADMIN_KEY }} + ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }} + ALGOLIA_SEARCH_KEY: ${{ secrets.ALGOLIA_SEARCH_KEY }} + ALGOLIA_INDEX_NAME: ${{ secrets.ALGOLIA_INDEX_NAME }} + PALETTE_API_KEY: ${{ secrets.PALETTE_API_KEY }} + DISABLE_PACKS_INTEGRATIONS: ${{ secrets.DISABLE_PACKS_INTEGRATIONS }} + +jobs: + + create-assets: + name: asset-builds + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Setup Node.js Environment + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + + - name: Install Dependencies + run: npm ci + + - name: Build + run: | + touch .env + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload Build Packs + uses: actions/upload-artifact@v4 + with: + name: "build-packs" + path: | + build/packs + .docusaurus/packs-integrations + if-no-files-found: error + retention-days: 7 diff --git a/.github/workflows/pull_request.yaml b/.github/workflows/pull_request.yaml index 83b0b96f12..5b00129d6e 100644 --- a/.github/workflows/pull_request.yaml +++ b/.github/workflows/pull_request.yaml @@ -96,4 +96,11 @@ jobs: - name: Build run: | - npm run build + touch .env + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/release-branch-pr.yaml b/.github/workflows/release-branch-pr.yaml index b25a40dc0a..bbef39f7a0 100644 --- a/.github/workflows/release-branch-pr.yaml +++ b/.github/workflows/release-branch-pr.yaml @@ -61,4 +61,13 @@ jobs: 🤖 Netlify configured to enable preview build for branch: ${{env.GITHUB_BRANCH}} . Subsequent commits will automatically trigger a Netlify build preview. refresh-message-position: false - - run: npm run build + - name: Build + run: | + touch .env + make build + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-preview.yaml b/.github/workflows/release-preview.yaml index 1657c58f8f..c18e2985d2 100644 --- a/.github/workflows/release-preview.yaml +++ b/.github/workflows/release-preview.yaml @@ -48,17 +48,25 @@ jobs: echo "User-agent: *" >> static/robots.txt echo "Disallow: /" >> static/robots.txt echo "Sitemap: https://docs-latest.spectrocloud.com/sitemap.xml" >> static/robots.txt - - name: Ensure noIndex is set id: check_noindex run: node scripts/noindex_docusaurus_config.js $PWD - - + - name: Build run: | + set +e # Disable automatic stop on command failure touch .env make build + exit_code=$? 
+ echo "Build command exit code: $exit_code" + echo "BUILD_EXIT_CODE=$exit_code">> $GITHUB_ENV + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} - name: Deploy Preview run: | diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 58955fe508..668109c70c 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -54,12 +54,18 @@ jobs: cache: "npm" - run: npm ci - - - name: Compile + + - name: Build run: | touch .env make versions-ci - make build + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} - name: Upload to AWS run: | @@ -90,10 +96,20 @@ jobs: - run: npm ci - - name: Compile + - name: Versions + run: | + + - name: Build run: | + touch .env make versions-ci - make build + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} - name: Upload to AWS run: | diff --git a/.github/workflows/screenshot_capture.yaml b/.github/workflows/screenshot_capture.yaml index 9dbf5931e0..6e599cefbd 100644 --- a/.github/workflows/screenshot_capture.yaml +++ b/.github/workflows/screenshot_capture.yaml @@ -44,8 +44,16 @@ jobs: - name: Install Playwright browsers run: npx playwright install --with-deps chromium - - name: Build Website - run: make build + - name: Build + run: | + touch .env + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} - name: Upload Build uses: actions/upload-artifact@v4 diff --git a/.github/workflows/visual-comparison.yaml b/.github/workflows/visual-comparison.yaml index 2bd1e895af..9576b49dcf 100644 --- a/.github/workflows/visual-comparison.yaml +++ b/.github/workflows/visual-comparison.yaml @@ -72,8 +72,16 @@ jobs: attempt_limit: 3 attempt_delay: 60000 # 1 minute - - name: Build Website - run: make build + - name: Build + run: | + touch .env + make build-ci + + - name: Build with cached packs + if: ${{ env.BUILD_EXIT_CODE == '5' }} + uses: ./.github/actions/build-cached-packs + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} - name: Upload Build uses: actions/upload-artifact@v4 diff --git a/Makefile b/Makefile index 6763947968..8c13514301 100644 --- a/Makefile +++ b/Makefile @@ -54,6 +54,7 @@ clean-versions: ## Clean Docusarus content versions clean-packs: ## Clean supplemental packs and pack images rm -rf static/img/packs rm -rf .docusaurus/packs-integrations/api_pack_response.json + rm -rf .docusaurus/packs-integrations/api_repositories_response.json clean-api: ## Clean API docs @echo "cleaning api docs" @@ -81,12 +82,53 @@ start: ## Start a local development server make generate-partials npm run start +start-cached-packs: ## Start a local development server with cached packs retry. + make generate-partials + @{ \ + npm run start; \ + exit_code=$$?; \ + if [ "$$exit_code" = "5" ]; then \ + echo "❌ Start has failed due to missing packs data..."; \ + echo "ℹ️ Initializing fetch cached packs data..."; \ + make get-cached-packs; \ + echo "ℹ️ Retrying start... 
"; \ + npm run start;\ + fi; \ + } + build: ## Run npm build @echo "building site" npm run clear rm -rf build npm run build +build-cached-packs: ## Run npm build with cached packs retry + @echo "building site" + npm run clear + rm -rf build + @{ \ + npm run build; \ + exit_code=$$?; \ + if [ "$$exit_code" = "5" ]; then \ + echo "❌ Build has failed due to missing packs data..."; \ + echo "ℹ️ Initializing fetch cached packs data..."; \ + make get-cached-packs; \ + echo "ℹ️ Retrying build... "; \ + npm run build;\ + fi; \ + } + +build-ci: ## Run npm build in CI environment + @echo "building site" + npm run clear + rm -rf build + @{ \ + npm run build; \ + exit_code=$$?; \ + echo "Build exited with code $$exit_code..."; \ + echo "BUILD_EXIT_CODE=$$exit_code" >> $(GITHUB_ENV); \ + } + versions: ## Create Docusarus content versions @echo "creating versions" ./scripts/versions.sh $(TMPDIR) @@ -230,6 +272,11 @@ find-unused-images: generate-partials: ## Generate ./scripts/generate-partials.sh + +###@ Fetch cached packs assets. + +get-cached-packs: + ./scripts/get-cached-packs.sh ###@ Aloglia Indexing diff --git a/README.md b/README.md index 65eb96dd21..bc4e8e99bc 100644 --- a/README.md +++ b/README.md @@ -839,6 +839,32 @@ Settting the `DISABLE_PACKS_PLUGIN` environment variable to `true` will also hav `integrations/` page is the new behavior. - The Packs list page will display a warning message indicating that the packs data is not available. +#### Cached Packs Data + +All pack related data is saved to a +[GitHub Workflow Artifact](https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/storing-and-sharing-data-from-a-workflow#about-workflow-artifacts) +after every succesful release to production. Check out the [post_release.yaml](.github/workflows/post_release.yaml) for +further details. The cached data enables us to build and start librarium without performing any pack related API +queries. All of our GitHub workflows will use this cached data as a fallback in the case of an API related build +failure. Check out the [build-cached-packs action.yaml](.github/actions/build-cached-packs/action.yaml) to learn how the +cached data is fetched and used. + +Packs data is saved locally in the `.docusaurus/packs-integrations` and `static/img/packs` folders. You can remove the +data using `make clean-packs`. You can use the cached packs artifact locally when you don't have any downloaded packs +data and you want to avoid the pack download time. This flow also helps you when you don't have any local packs data and +we are experiencing an API outage. + +librarium provides the following commands which fetch cached packs to your local environment. + +| **Command** | **Description** | +| ------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | +| `make get-cached-packs` | Fetch the packs data artifact and pace files in the correct places. You can then execute `make start` or `make build` as usual. | +| `make start-cached-packs` | Attempt to start the local development server. If a packs related outage is detected, fetch the packs data artifact and retry the `start` command. | +| `make build-cached-packs` | Attempt to build the application. If a packs related outage is detected, fetch the packs data artifact and retry the `build` command. | + +These scripts will prompt you to install and authenticate the [GitHub CLI](https://cli.github.com/) before you can +proceed. 
+ #### README Content The pack component will display a Pack's README file if it exists. The README content comes from the Palette API. @@ -1107,3 +1133,14 @@ make clean-versions > > The `docusaurus.config.js` file is updated by the [`update_docusaurus_config.js`](./docusaurus.config.js) script. DO > NOT commit this file with the updated changes. + +## Exit Codes + +Librarium provides the following exit codes. These exit codes are returned by both the `npm run start` and +`npm run build` commands. + +| **Exit Code** | **Description** | +| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `0` | The command was executed successfully. | +| `5` | The command failed due to errors received from the API service. These requests are issued by the [Packs Component](#packs-component) and librarium cannot start without loading packs, either from the API service or the [cached packs data](#cached-packs-data) | +| Any other non-zero exit code. | The command failed due to another error. Check the command output. | diff --git a/plugins/packs-integrations.js b/plugins/packs-integrations.js index 9db0724774..6f799a4e8b 100644 --- a/plugins/packs-integrations.js +++ b/plugins/packs-integrations.js @@ -14,7 +14,8 @@ import logger from "@docusaurus/logger"; const filterLimit = 100; //Limit for fetching the packs from the Palette API const dirname = ".docusaurus/packs-integrations/"; const logoDirname = "static/img/packs/"; -const filename = "api_pack_response.json"; +const packs_filename = "api_pack_response.json"; +const repos_filename = "api_repositories_response.json"; const options = { headers: { "Content-Disposition": "attachment", @@ -297,7 +298,13 @@ async function fetchPackListItems(queryParams, packDataArr, counter, mappedRepos } // Provide the registryUids in the payload to fetch the packs ONLY from registries provided in the Docusarus config file const payload = { filter: { type: ["spectro", "oci"], registryUid: registryUids } }; - const response = await callRateLimitAPI(() => api.post(`/v1/packs/search${queryParams}`, payload)); + let response = []; + try { + response = await callRateLimitAPI(() => api.post(`/v1/packs/search${queryParams}`, payload)); + } catch (error) { + logger.error("An error occurred while fetching packs:", error); + process.exit(5); + } const tempPackArr = packDataArr.concat(response?.data?.items); if (response?.data?.listmeta?.continue) { @@ -313,8 +320,15 @@ async function fetchPackListItems(queryParams, packDataArr, counter, mappedRepos } async function mapRepositories(repositories) { - const ociRegistries = await api.get("v1/registries/oci/summary"); - const packRegistries = await api.get("v1/registries/pack"); + let ociRegistries = []; + let packRegistries = []; + try { + ociRegistries = await api.get("v1/registries/oci/summary"); + packRegistries = await api.get("v1/registries/pack"); + } catch (error) { + logger.error("An error occurred while fetching registries:", error); + process.exit(5); + } const mergedRegistries = [ociRegistries.data?.items || [], packRegistries.data?.items || []]; const results = mergedRegistries.flat(); const repoMap = repositories.reduce((acc, repository) => { @@ -326,6 +340,9 @@ async function mapRepositories(repositories) { } return acc; }, []); + + await 
writeResponseFile(`${dirname}${repos_filename}`, repoMap); + return repoMap; } @@ -387,6 +404,30 @@ async function getLogoUrl(packsAllData, logoUrlMap) { } } +async function writeResponseFile(path, apiResponse) { + if (!existsSync(dirname)) { + mkdirSync(dirname, { recursive: true }); + } + open(path, "w+", (err, fd) => { + if (err) { + logger.error("An error occurred while opening the JSON file:", err); + return; + } + try { + writeFile(path, JSON.stringify(apiResponse), (err1) => { + if (err1) { + logger.error("An error occurred while writing the JSON file:", err1); + } + logger.info(`API Response saved to ${path}`); + }); + } finally { + close(fd, (err2) => { + if (err2) logger.error("An error occurred while closing the file:", err2); + }); + } + }); +} + // If the plugin is disabled, then the packs and integrations data will not be fetched. // However, the PDE service packs still need to be loaded. // Otherwise, errors will be thrown when the PDE service packs are accessed. @@ -410,18 +451,29 @@ async function pluginPacksAndIntegrationsData(context, options) { name: "plugin-packs-integrations", async loadContent() { const repositories = options.repositories || []; + let isPackFileExists = false; + let isReposFileExists = false; + let mappedRepos = []; + if (existsSync(dirname) && existsSync(`${dirname}${repos_filename}`)) { + isReposFileExists = true; + } + if (existsSync(dirname) && existsSync(`${dirname}${packs_filename}`)) { + isPackFileExists = true; + } + if (!isReposFileExists) { + mappedRepos = await mapRepositories(repositories); + } else { + try { + const data = await promises.readFile(`${dirname}${repos_filename}`); + mappedRepos = JSON.parse(data); + } catch (e) { + logger.error("An error occurred while reading the JSON file:", e); + } + } - const mappedRepos = await mapRepositories(repositories); let apiPackResponse = {}; - let isFileExists = false; - if (existsSync(dirname) && existsSync(`${dirname}${filename}`)) { - isFileExists = true; - } let logoUrlMap = {}; - if (!isFileExists) { - if (!existsSync(dirname)) { - mkdirSync(dirname, { recursive: true }); - } + if (!isPackFileExists) { logger.info("Fetching the list of packs from the Palette API"); let packDataArr = await fetchPackListItems(`?limit=${filterLimit}`, [], 0, mappedRepos); @@ -448,14 +500,20 @@ async function pluginPacksAndIntegrationsData(context, options) { packMDMap[packData.spec.name] = packData; const cloudType = packData.spec.cloudTypes.includes("all") ? 
"aws" : packData.spec.cloudTypes[0]; const registryPackData = []; - for (const registry of packData.spec.registries) { - const url = `${packUrl}${packData.spec.name}/registries/${registry.uid}?cloudType=${cloudType}&layer=${packData.spec.layer}`; - registryPackData.push( - callRateLimitAPI(() => { - return api.get(url); - }) - ); + try { + for (const registry of packData.spec.registries) { + const url = `${packUrl}${packData.spec.name}/registries/${registry.uid}?cloudType=${cloudType}&layer=${packData.spec.layer}`; + registryPackData.push( + callRateLimitAPI(() => { + return api.get(url); + }) + ); + } + } catch (error) { + logger.error("An error occurred while fetching packs:", error); + process.exit(5); } + return registryPackData; }); const flatted = promisesPackDetails.flat(); @@ -479,22 +537,10 @@ async function pluginPacksAndIntegrationsData(context, options) { apiPackResponse.apiPacksData = apiPacksData; apiPackResponse.packMDMap = packMDMap; apiPackResponse.logoUrlMap = logoUrlMap; - open(`${dirname}${filename}`, "w+", (err, fd) => { - try { - writeFile(`${dirname}${filename}`, JSON.stringify(apiPackResponse), (err) => { - if (err) { - logger.error("An error occurred while writing the JSON file:", err); - } - }); - } finally { - close(fd, (err1) => { - if (err1) logger.error("An error occurred while closing the file:", err1); - }); - } - }); + await writeResponseFile(`${dirname}${packs_filename}`, apiPackResponse); } else { try { - const data = await promises.readFile(`${dirname}${filename}`); + const data = await promises.readFile(`${dirname}${packs_filename}`); apiPackResponse = JSON.parse(data); } catch (e) { logger.error("An error occurred while reading the JSON file:", e); diff --git a/scripts/get-cached-packs.sh b/scripts/get-cached-packs.sh new file mode 100755 index 0000000000..e0e911641d --- /dev/null +++ b/scripts/get-cached-packs.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +# Enable error handling +set -e + +echo "Starting fetch of cached packs..." + +if command -v gh &> /dev/null +then + echo "✅ GitHub CLI is installed." +else + echo "❌ GitHub CLI is not installed." + echo "ℹ️ Use 'brew install gh' to install it with Homebrew." + exit 1 +fi + +if gh auth status &> /dev/null +then + echo "✅ GitHub CLI is authenticated. " +else + echo "❌ GitHub CLI is not authenticated." + echo "ℹ️ Please log in with 'gh auth login'." + exit 1 +fi + + # Find the latest packs upload workflow. +run_id=$(gh run list --workflow="post_release.yaml" --limit 1 --json databaseId | jq -r '.[0].databaseId') + +# Remove any downloaded artifacts, should they exist. +rm -rf ./downloaded_artifacts + +# Download the latest artifact to a new dir. +gh run download ${run_id} --name build-packs --dir ./downloaded_artifacts + +echo "✅ Cached packs artifact downloaded." + +# Ensure the correct folders exist. +mkdir -p .docusaurus/packs-integrations + +# Move the files to their correct places in the checked out repository +mv downloaded_artifacts/.docusaurus/packs-integrations/* .docusaurus/packs-integrations +mkdir -p static/img/packs +mv downloaded_artifacts/build/packs/* static/img/packs + +# Clean up. +rm -rf downloaded_artifacts + +echo "✅ Completed fetch of cached packs." +echo "⏭️ You can now execute 'make start' or 'make build'."