diff --git a/.github/workflows/gateway-conformance.yml b/.github/workflows/gateway-conformance.yml new file mode 100644 index 00000000..8d2f9fe4 --- /dev/null +++ b/.github/workflows/gateway-conformance.yml @@ -0,0 +1,82 @@ +name: Gateway Conformance + +on: + push: + branches: + - main + pull_request: + workflow_dispatch: + +jobs: + gateway-conformance: + runs-on: ubuntu-latest + steps: + # 1. Setup Node, install npm dependencies, and build all packages/* + # see https://github.com/ipdxco/unified-github-workflows/blob/3a1a7870ce5967163d8f5c8210b8ad50b2e659aa/.github/workflows/js-test-and-release.yml#L28-L34 + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: lts/* + - uses: ipfs/aegir/actions/cache-node-modules@master + + # 2. Set up 'go' so we can install the gateway-conformance binary + - name: Setup Go + uses: actions/setup-go@v4 + with: + go-version: 1.21.x + + # 3. Download the gateway-conformance fixtures using ipfs/gateway-conformance action + # This will prevent us from needing to install `docker` on the github runner + - name: Download gateway-conformance fixtures + uses: ipfs/gateway-conformance/.github/actions/extract-fixtures@v0.5.1 + # working-directory: ./packages/gateway-conformance + with: + output: ./packages/gateway-conformance/dist/src/fixtures/data/gateway-conformance-fixtures + + + # 4. Run the tests + - name: Run gateway-conformance tests + run: | + npm run test + working-directory: ./packages/gateway-conformance + + # 5. 
Convert json output to reports similar to how it's done at https://github.com/ipfs/gateway-conformance/blob/main/.github/actions/test/action.yml + # the 'gwc-report-all.json' file is created by the 'has expected total failures and successes' test + # TODO: remove this when we're passing enough tests to use the 'ipfs/gateway-conformance/.github/actions/test' action + - name: Create the XML + if: failure() || success() + uses: pl-strflt/gotest-json-to-junit-xml@v1 + with: + input: ./packages/gateway-conformance/gwc-report-all.json + output: ./packages/gateway-conformance/gwc-report-all.xml + - name: Create the HTML + if: failure() || success() + uses: pl-strflt/junit-xml-to-html@v1 + with: + mode: no-frames + input: ./packages/gateway-conformance/gwc-report-all.xml + output: ./packages/gateway-conformance/gwc-report-all.html + - name: Create the Markdown + if: failure() || success() + uses: pl-strflt/junit-xml-to-html@v1 + with: + mode: summary + input: ./packages/gateway-conformance/gwc-report-all.xml + output: ./packages/gateway-conformance/gwc-report-all.md + + # 6. 
Upload the reports + - name: Upload MD summary + if: failure() || success() + run: cat ./packages/gateway-conformance/gwc-report-all.md >> $GITHUB_STEP_SUMMARY + - name: Upload HTML report + if: failure() || success() + uses: actions/upload-artifact@v4 + with: + name: gateway-conformance.html + path: ./packages/gateway-conformance/gwc-report-all.html + - name: Upload JSON report + if: failure() || success() + uses: actions/upload-artifact@v4 + with: + name: gateway-conformance.json + path: ./packages/gateway-conformance/gwc-report-all.json diff --git a/packages/gateway-conformance/.aegir.js b/packages/gateway-conformance/.aegir.js new file mode 100644 index 00000000..db268668 --- /dev/null +++ b/packages/gateway-conformance/.aegir.js @@ -0,0 +1,65 @@ +// @ts-check +import getPort from 'aegir/get-port' + +/** @type {import('aegir').PartialOptions} */ +export default { + test: { + files: ['./dist/src/*.spec.js'], + before: async (options) => { + if (options.runner !== 'node') { + throw new Error('Only node runner is supported') + } + + const { GWC_IMAGE } = await import('./dist/src/constants.js') + const { loadKuboFixtures, kuboRepoDir } = await import('./dist/src/fixtures/kubo-mgmt.js') + const IPFS_NS_MAP = await loadKuboFixtures() + + const { createKuboNode } = await import('./dist/src/fixtures/create-kubo.js') + const controller = await createKuboNode(await getPort(3440)) + await controller.start() + const kuboGateway = `http://${controller.api.gatewayHost}:${controller.api.gatewayPort}` + + const { startBasicServer } = await import('./dist/src/fixtures/basic-server.js') + const SERVER_PORT = await getPort(3441) + const stopBasicServer = await startBasicServer({ + serverPort: SERVER_PORT, + kuboGateway + }) + + const { startReverseProxy } = await import('./dist/src/fixtures/reverse-proxy.js') + const PROXY_PORT = await getPort(3442) + const KUBO_PORT = controller.api.gatewayPort + const stopReverseProxy = await startReverseProxy({ + backendPort: SERVER_PORT, + 
targetHost: 'localhost', + proxyPort: PROXY_PORT + }) + + const CONFORMANCE_HOST = 'localhost' + + return { + controller, + stopReverseProxy, + stopBasicServer, + env: { + IPFS_NS_MAP, + GWC_IMAGE, + CONFORMANCE_HOST, + KUBO_PORT: `${KUBO_PORT}`, + PROXY_PORT: `${PROXY_PORT}`, + SERVER_PORT: `${SERVER_PORT}`, + KUBO_GATEWAY: kuboGateway, + KUBO_REPO: process.env.KUBO_REPO || kuboRepoDir + } + } + }, + after: async (options, beforeResult) => { + // @ts-expect-error - broken aegir types + await beforeResult.stopReverseProxy() + // @ts-expect-error - broken aegir types + await beforeResult.stopBasicServer() + // @ts-expect-error - broken aegir types + await beforeResult.controller.stop() + } + } +} diff --git a/packages/gateway-conformance/.gitignore b/packages/gateway-conformance/.gitignore new file mode 100644 index 00000000..b7305351 --- /dev/null +++ b/packages/gateway-conformance/.gitignore @@ -0,0 +1 @@ +gwc-report*.json diff --git a/packages/gateway-conformance/CHANGELOG.md b/packages/gateway-conformance/CHANGELOG.md new file mode 100644 index 00000000..e69de29b diff --git a/packages/gateway-conformance/LICENSE b/packages/gateway-conformance/LICENSE new file mode 100644 index 00000000..20ce483c --- /dev/null +++ b/packages/gateway-conformance/LICENSE @@ -0,0 +1,4 @@ +This project is dual licensed under MIT and Apache-2.0. + +MIT: https://www.opensource.org/licenses/mit +Apache-2.0: https://www.apache.org/licenses/license-2.0 diff --git a/packages/gateway-conformance/LICENSE-APACHE b/packages/gateway-conformance/LICENSE-APACHE new file mode 100644 index 00000000..14478a3b --- /dev/null +++ b/packages/gateway-conformance/LICENSE-APACHE @@ -0,0 +1,5 @@ +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. diff --git a/packages/gateway-conformance/LICENSE-MIT b/packages/gateway-conformance/LICENSE-MIT new file mode 100644 index 00000000..72dc60d8 --- /dev/null +++ b/packages/gateway-conformance/LICENSE-MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/packages/gateway-conformance/README.md b/packages/gateway-conformance/README.md new file mode 100644 index 00000000..8829da51 --- /dev/null +++ b/packages/gateway-conformance/README.md @@ -0,0 +1,58 @@ +

+ + Helia logo + +

+ +# @helia/verified-fetch-gateway-conformance + +[![ipfs.tech](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.tech) +[![Discuss](https://img.shields.io/discourse/https/discuss.ipfs.tech/posts.svg?style=flat-square)](https://discuss.ipfs.tech) +[![codecov](https://img.shields.io/codecov/c/github/ipfs/helia-verified-fetch.svg?style=flat-square)](https://codecov.io/gh/ipfs/helia-verified-fetch) +[![CI](https://img.shields.io/github/actions/workflow/status/ipfs/helia-verified-fetch/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/ipfs/helia-verified-fetch/actions/workflows/js-test-and-release.yml?query=branch%3Amain) + +> [Gateway Conformance](https://github.com/ipfs/gateway-conformance) tests for @helia/verified-fetch + +# About + +Runs Gateway Conformance tests against @helia/verified-fetch using Kubo as a backing trustless-gateway. + +## Example - Testing a new Kubo release + +```console +$ npm i @helia/verified-fetch-gateway-conformance +$ KUBO_BINARY=/path/to/kubo verified-fetch-gateway-conformance +``` + +# Install + +```console +$ npm i @helia/verified-fetch-gateway-conformance +``` + +## Browser ` +``` + +# License + +Licensed under either of + +- Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / ) +- MIT ([LICENSE-MIT](LICENSE-MIT) / ) + +# Contribute + +Contributions welcome! Please check out [the issues](https://github.com/ipfs/helia-verified-fetch/issues). + +Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. + +Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). 
+ +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. + +[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) diff --git a/packages/gateway-conformance/package.json b/packages/gateway-conformance/package.json new file mode 100644 index 00000000..a0f9b28f --- /dev/null +++ b/packages/gateway-conformance/package.json @@ -0,0 +1,70 @@ +{ + "name": "@helia/verified-fetch-gateway-conformance", + "version": "1.20.0", + "description": "Gateway conformance tests for @helia/verified-fetch", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/ipfs/helia-verified-fetch/tree/main/packages/gateway-conformance#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/ipfs/helia-verified-fetch.git" + }, + "bugs": { + "url": "https://github.com/ipfs/helia-verified-fetch/issues" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "keywords": [ + "IPFS" + ], + "bin": { + "demo-server": "./dist/src/demo-server.js", + "verified-fetch-gateway-conformance": "./dist/src/bin.js" + }, + "type": "module", + "types": "./dist/src/index.d.ts", + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./dist/src/index.d.ts", + "import": "./dist/src/index.js" + } + }, + "eslintConfig": { + "extends": "ipfs", + "parserOptions": { + "project": true, + "sourceType": "module" + } + }, + "scripts": { + "clean": "aegir clean dist gwc-report-*.json", + "lint": "aegir lint", + "dep-check": "aegir dep-check", + "doc-check": "aegir doc-check", + "build": "aegir build", + "test": "aegir test -t node" + }, + "dependencies": { + "@helia/verified-fetch": "1.3.13", + "@libp2p/logger": "^4.0.11", + "@sgtpooki/file-type": "^1.0.1", + 
"aegir": "^42.2.5", + "execa": "^8.0.1", + "glob": "^10.3.12", + "ipfsd-ctl": "^13.0.0", + "kubo": "^0.27.0", + "kubo-rpc-client": "^3.0.4", + "undici": "^6.15.0" + }, + "browser": { + "./dist/src/fixtures/create-kubo.js": "./dist/src/fixtures/create-kubo.browser.js", + "kubo": false + } +} diff --git a/packages/gateway-conformance/src/bin.ts b/packages/gateway-conformance/src/bin.ts new file mode 100644 index 00000000..d43e8786 --- /dev/null +++ b/packages/gateway-conformance/src/bin.ts @@ -0,0 +1,25 @@ +#! /usr/bin/env node +/* eslint-disable no-console */ + +import { spawn } from 'node:child_process' +import { dirname, resolve } from 'node:path' +import { fileURLToPath } from 'node:url' + +// aegir should be run from `node_modules/@helia/verified-fetch-gateway-conformance` +const cwd = resolve(dirname(fileURLToPath(import.meta.url)), '../../') + +const test = spawn('npx', ['aegir', 'test'], { + cwd +}) + +test.stdout.on('data', (data) => { + process.stdout.write(data) +}) + +test.stderr.on('data', (data) => { + process.stderr.write(data) +}) + +test.on('close', (code) => { + process.exit(code ?? 
0) +}) diff --git a/packages/gateway-conformance/src/conformance.spec.ts b/packages/gateway-conformance/src/conformance.spec.ts new file mode 100644 index 00000000..9e8d4d15 --- /dev/null +++ b/packages/gateway-conformance/src/conformance.spec.ts @@ -0,0 +1,382 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +/* eslint-env mocha */ +import { readFile } from 'node:fs/promises' +import { homedir } from 'node:os' +import { join } from 'node:path' +import { prefixLogger } from '@libp2p/logger' +import { expect } from 'aegir/chai' +import { execa } from 'execa' +import { Agent, setGlobalDispatcher } from 'undici' + +const logger = prefixLogger('conformance-tests') + +interface TestConfig { + name: string + spec?: string + skip?: string[] + run?: string[] + maxFailures: number + minimumSuccesses?: number +} + +function getGatewayConformanceBinaryPath (): string { + const goPath = process.env.GOPATH ?? join(homedir(), 'go', 'bin') + return join(goPath, 'gateway-conformance') +} + +function getConformanceTestArgs (name: string, gwcArgs: string[] = [], goTestArgs: string[] = []): string[] { + return [ + 'test', + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + `--gateway-url=http://${process.env.CONFORMANCE_HOST!}:${process.env.PROXY_PORT!}`, + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + `--subdomain-url=http://${process.env.CONFORMANCE_HOST!}:${process.env.PROXY_PORT!}`, + '--verbose', + '--json', `gwc-report-${name}.json`, + ...gwcArgs, + '--', + '-timeout', '5m', + ...goTestArgs + ] +} + +const tests: TestConfig[] = [ + { + name: 'TestMetadata', + run: ['TestMetadata'], + maxFailures: 0, + minimumSuccesses: 1 + }, + { + name: 'TestDagPbConversion', + run: ['TestDagPbConversion'], + maxFailures: 51, + minimumSuccesses: 14 + }, + { + name: 'TestPlainCodec', + run: ['TestPlainCodec'], + maxFailures: 44, + minimumSuccesses: 15 + }, + { + name: 'TestPathing', + run: ['TestPathing'], + maxFailures: 5, + 
minimumSuccesses: 0 + }, + { + name: 'TestDNSLinkGatewayUnixFSDirectoryListing', + run: ['TestDNSLinkGatewayUnixFSDirectoryListing'], + maxFailures: 20, + minimumSuccesses: 0 + }, + { + name: 'TestCors', + run: ['TestCors'], + maxFailures: 11, + minimumSuccesses: 0 + }, + { + name: 'TestGatewayJsonCbor', + run: ['TestGatewayJsonCbor'], + maxFailures: 9, + minimumSuccesses: 0 + }, + { + name: 'TestNativeDag', + run: ['TestNativeDag'], + maxFailures: 2, + minimumSuccesses: 0 + }, + { + name: 'TestGatewayJSONCborAndIPNS', + run: ['TestGatewayJSONCborAndIPNS'], + maxFailures: 25, + minimumSuccesses: 8 + }, + { + name: 'TestGatewayIPNSPath', + run: ['TestGatewayIPNSPath'], + maxFailures: 8, + minimumSuccesses: 3 + }, + { + name: 'TestRedirectCanonicalIPNS', + run: ['TestRedirectCanonicalIPNS'], + maxFailures: 7, + minimumSuccesses: 0 + }, + { + name: 'TestGatewayBlock', + run: ['TestGatewayBlock'], + maxFailures: 25, + minimumSuccesses: 4 + }, + { + name: 'TestTrustlessRawRanges', + run: ['TestTrustlessRawRanges'], + maxFailures: 5, + minimumSuccesses: 7 + }, + { + name: 'TestTrustlessRaw', + run: ['TestTrustlessRaw'], + maxFailures: 29, + minimumSuccesses: 7 + }, + { + name: 'TestGatewayIPNSRecord', + run: ['TestGatewayIPNSRecord'], + maxFailures: 23, + minimumSuccesses: 0 + }, + { + name: 'TestTrustlessCarOrderAndDuplicates', + run: ['TestTrustlessCarOrderAndDuplicates'], + maxFailures: 26, + minimumSuccesses: 3 + }, + { + name: 'TestTrustlessCarEntityBytes', + run: ['TestTrustlessCarEntityBytes'], + maxFailures: 122, + minimumSuccesses: 55 + }, + { + name: 'TestTrustlessCarDagScopeAll', + run: ['TestTrustlessCarDagScopeAll'], + maxFailures: 23, + minimumSuccesses: 10 + }, + { + name: 'TestTrustlessCarDagScopeEntity', + run: ['TestTrustlessCarDagScopeEntity'], + maxFailures: 56, + minimumSuccesses: 25 + }, + { + name: 'TestTrustlessCarDagScopeBlock', + run: ['TestTrustlessCarDagScopeBlock'], + maxFailures: 34, + minimumSuccesses: 15 + }, + { + name: 
'TestTrustlessCarPathing', + run: ['TestTrustlessCarPathing'], + maxFailures: 45, + minimumSuccesses: 20 + }, + { + name: 'TestSubdomainGatewayDNSLinkInlining', + run: ['TestSubdomainGatewayDNSLinkInlining'], + maxFailures: 41, + minimumSuccesses: 0 + }, + { + name: 'TestGatewaySubdomainAndIPNS', + run: ['TestGatewaySubdomainAndIPNS'], + maxFailures: 95, + minimumSuccesses: 0 + }, + { + name: 'TestGatewaySubdomains', + run: ['TestGatewaySubdomains'], + maxFailures: 279, + minimumSuccesses: 0 + }, + { + name: 'TestUnixFSDirectoryListingOnSubdomainGateway', + run: ['TestUnixFSDirectoryListingOnSubdomainGateway'], + maxFailures: 39, + minimumSuccesses: 0 + }, + { + name: 'TestRedirectsFileWithIfNoneMatchHeader', + run: ['TestRedirectsFileWithIfNoneMatchHeader'], + maxFailures: 15, + minimumSuccesses: 0 + }, + { + name: 'TestRedirectsFileSupportWithDNSLink', + run: ['TestRedirectsFileSupportWithDNSLink'], + maxFailures: 17, + minimumSuccesses: 6 + }, + { + name: 'TestRedirectsFileSupport', + run: ['TestRedirectsFileSupport'], + maxFailures: 252, + minimumSuccesses: 6 + }, + { + name: 'TestPathGatewayMiscellaneous', + run: ['TestPathGatewayMiscellaneous'], + maxFailures: 3, + minimumSuccesses: 0 + }, + { + name: 'TestGatewayUnixFSFileRanges', + run: ['TestGatewayUnixFSFileRanges'], + maxFailures: 10, + minimumSuccesses: 5 + }, + { + name: 'TestGatewaySymlink', + run: ['TestGatewaySymlink'], + maxFailures: 9, + minimumSuccesses: 0 + }, + { + name: 'TestGatewayCacheWithIPNS', + run: ['TestGatewayCacheWithIPNS'], + maxFailures: 27, + minimumSuccesses: 15 + }, + { + name: 'TestGatewayCache', + run: ['TestGatewayCache'], + maxFailures: 71, + minimumSuccesses: 23 + }, + { + name: 'TestUnixFSDirectoryListing', + run: ['TestUnixFSDirectoryListing'], + maxFailures: 50, + minimumSuccesses: 0 + }, + { + name: 'TestTar', + run: ['TestTar'], + maxFailures: 16, + minimumSuccesses: 8 + } +] + +describe('@helia/verified-fetch - gateway conformance', function () { + before(async () => { 
+ if (process.env.KUBO_GATEWAY == null) { + throw new Error('KUBO_GATEWAY env var is required') + } + if (process.env.PROXY_PORT == null) { + throw new Error('PROXY_PORT env var is required') + } + if (process.env.SERVER_PORT == null) { + throw new Error('SERVER_PORT env var is required') + } + if (process.env.CONFORMANCE_HOST == null) { + throw new Error('CONFORMANCE_HOST env var is required') + } + // see https://stackoverflow.com/questions/71074255/use-custom-dns-resolver-for-any-request-in-nodejs + // EVERY undici/fetch request host resolves to local IP. Node.js does not resolve reverse-proxy requests properly + const staticDnsAgent = new Agent({ + connect: { + lookup: (_hostname, _options, callback) => { callback(null, [{ address: '0.0.0.0', family: 4 }]) } + } + }) + setGlobalDispatcher(staticDnsAgent) + }) + + describe('smokeTests', () => { + [ + ['basic server path request works', `http://localhost:${process.env.SERVER_PORT}/ipfs/bafkqabtimvwgy3yk`], + ['proxy server path request works', `http://localhost:${process.env.PROXY_PORT}/ipfs/bafkqabtimvwgy3yk`], + ['basic server subdomain request works', `http://bafkqabtimvwgy3yk.ipfs.localhost:${process.env.SERVER_PORT}`], + ['proxy server subdomain request works', `http://bafkqabtimvwgy3yk.ipfs.localhost:${process.env.PROXY_PORT}`] + ].forEach(([name, url]) => { + it(name, async () => { + const resp = await fetch(url) + expect(resp).to.be.ok() + expect(resp.status).to.equal(200) + const text = await resp.text() + expect(text.trim()).to.equal('hello') + }) + }) + }) + + describe('conformance testing', () => { + const binaryPath = getGatewayConformanceBinaryPath() + before(async () => { + const log = logger.forComponent('before') + const { stdout, stderr } = await execa('go', ['install', 'github.com/ipfs/gateway-conformance/cmd/gateway-conformance@latest'], { reject: true }) + log(stdout) + log.error(stderr) + }) + + after(async () => { + const log = logger.forComponent('after') + try { + await execa('rm', 
[binaryPath]) + log('gateway-conformance binary successfully uninstalled.') + } catch (error) { + log.error(`Error removing "${binaryPath}"`, error) + } + }) + + tests.forEach(({ name, spec, skip, run, maxFailures, minimumSuccesses }) => { + const log = logger.forComponent(name) + + it(`has no more than ${maxFailures} failing tests for ${name}`, async function () { + const { stderr, stdout } = await execa(binaryPath, getConformanceTestArgs(name, + [ + ...(spec != null ? ['--specs', spec] : []) + ], + [ + ...((skip != null) ? ['-skip', `${skip.join('|')}`] : []), + ...((run != null) ? ['-run', `${run.join('|')}`] : []) + ] + ), { reject: false }) + + log(stdout) + log.error(stderr) + + let failureCount = 0 + let successCount = 0 + + // parse the newline delimited JSON report at gwc-report-${name}.json and count the number of "PASS:" and "FAIL:" lines + const report = await readFile(`gwc-report-${name}.json`, 'utf8') + const lines = report.split('\n') + for (const line of lines) { + if (line.includes('--- FAIL:')) { + failureCount++ + } else if (line.includes('--- PASS:')) { + successCount++ + } + } + + expect(failureCount).to.be.lessThanOrEqual(maxFailures) + expect(successCount).to.be.greaterThanOrEqual(minimumSuccesses ?? 0) + }) + }) + + /** + * This test ensures new or existing gateway-conformance tests that fail are caught and addressed appropriately. + * Eventually, we will not need the `tests.forEach` tests and can just run all the recommended tests directly, + * as this test does. 
+ */ + it('has expected total failures and successes', async function () { + const log = logger.forComponent('all') + + const { stderr, stdout } = await execa(binaryPath, getConformanceTestArgs('all'), { reject: false }) + + log(stdout) + log.error(stderr) + let failureCount = 0 + let successCount = 0 + + // parse the newline delimited JSON report at gwc-report-${name}.json and count the number of "PASS:" and "FAIL:" lines + const report = await readFile('gwc-report-all.json', 'utf8') + const lines = report.split('\n') + for (const line of lines) { + if (line.includes('--- FAIL:')) { + failureCount++ + } else if (line.includes('--- PASS:')) { + successCount++ + } + } + + expect(failureCount).to.be.lessThanOrEqual(135) + expect(successCount).to.be.greaterThanOrEqual(30) + }) + }) +}) diff --git a/packages/gateway-conformance/src/constants.ts b/packages/gateway-conformance/src/constants.ts new file mode 100644 index 00000000..2ebdd154 --- /dev/null +++ b/packages/gateway-conformance/src/constants.ts @@ -0,0 +1 @@ +export const GWC_IMAGE = process.env.GWC_IMAGE ?? 'ghcr.io/ipfs/gateway-conformance:v0.5.1' diff --git a/packages/gateway-conformance/src/demo-server.ts b/packages/gateway-conformance/src/demo-server.ts new file mode 100644 index 00000000..db781917 --- /dev/null +++ b/packages/gateway-conformance/src/demo-server.ts @@ -0,0 +1,29 @@ +/** + * Basically copies what .aegir.js does, but without all the env vars and setup.. just so you can run `node src/demo-server.ts` and test queries manually. 
+ */ +import getPort from 'aegir/get-port' + +const { loadKuboFixtures } = await import('./fixtures/kubo-mgmt.js') +await loadKuboFixtures() + +const { createKuboNode } = await import('./fixtures/create-kubo.js') +const controller = await createKuboNode(await getPort(3440)) +await controller.start() +const kuboGateway = `http://${controller.api.gatewayHost}:${controller.api.gatewayPort}` + +const { startBasicServer } = await import('./fixtures/basic-server.js') +const SERVER_PORT = await getPort(3441) +await startBasicServer({ + serverPort: SERVER_PORT, + kuboGateway +}) + +const { startReverseProxy } = await import('./fixtures/reverse-proxy.js') +const PROXY_PORT = await getPort(3442) +await startReverseProxy({ + backendPort: SERVER_PORT, + targetHost: 'localhost', + proxyPort: PROXY_PORT +}) + +export {} diff --git a/packages/gateway-conformance/src/fixtures/basic-server.ts b/packages/gateway-conformance/src/fixtures/basic-server.ts new file mode 100644 index 00000000..210597a9 --- /dev/null +++ b/packages/gateway-conformance/src/fixtures/basic-server.ts @@ -0,0 +1,107 @@ +import { createServer } from 'node:http' +import { logger } from '@libp2p/logger' +import { contentTypeParser } from './content-type-parser.js' +import { createVerifiedFetch } from './create-verified-fetch.js' + +const log = logger('basic-server') +/** + * Create a basic server with native Node.js HTTP server that simply calls verifiedFetch and returns the response. + * + * This server needs to be wrapped by reverse-proxy to work for the gateway conformance tests. + */ + +export interface BasicServerOptions { + kuboGateway?: string + serverPort: number +} + +export async function startBasicServer ({ kuboGateway, serverPort }: BasicServerOptions): Promise<() => Promise> { + kuboGateway = kuboGateway ?? 
process.env.KUBO_GATEWAY + if (kuboGateway == null) { + throw new Error('options.kuboGateway or KUBO_GATEWAY env var is required') + } + + const verifiedFetch = await createVerifiedFetch({ + gateways: [kuboGateway], + routers: [kuboGateway] + }, { + contentTypeParser + }) + + const server = createServer((req, res) => { + if (req.method === 'OPTIONS') { + res.writeHead(200) + res.end() + return + } + + if (req.url == null) { + // this should never happen + res.writeHead(400) + res.end('Bad Request') + return + } + + log('req.headers: %O', req.headers) + const hostname = req.headers.host?.split(':')[0] + const host = req.headers['x-forwarded-for'] ?? `${hostname}:${serverPort}` + + const fullUrlHref = req.headers.referer ?? `http://${host}${req.url}` + log('fetching %s', fullUrlHref) + + const requestController = new AbortController() + // we need to abort the request if the client disconnects + req.on('close', () => { + log('client disconnected, aborting request') + requestController.abort() + }) + + void verifiedFetch(fullUrlHref, { redirect: 'manual', signal: requestController.signal }).then(async (resp) => { + // loop over headers and set them on the response + const headers: Record<string, string> = {} + for (const [key, value] of resp.headers.entries()) { + headers[key] = value + } + + res.writeHead(resp.status, headers) + if (resp.body == null) { + res.write(await resp.arrayBuffer()) + } else { + // read the body of the response and write it to the response from the server + const reader = resp.body.getReader() + while (true) { + const { done, value } = await reader.read() + if (done) { + break + } + res.write(Buffer.from(value)) + } + } + res.end() + }).catch((e) => { + log.error('Problem with request: %s', e.message) + if (!res.headersSent) { + res.writeHead(500) + } + res.end(`Internal Server Error: ${e.message}`) + }).finally(() => { + requestController.abort() + }) + }) + + server.listen(serverPort, () => { + log(`Basic server listening on port ${serverPort}`) + }) + 
return async () => { + await new Promise<void>((resolve, reject) => { + server.close((err: any) => { + if (err != null) { + reject(err) + } else { + resolve() + } + }) + }) + } +} diff --git a/packages/gateway-conformance/src/fixtures/content-type-parser.ts b/packages/gateway-conformance/src/fixtures/content-type-parser.ts new file mode 100644 index 00000000..76f22479 --- /dev/null +++ b/packages/gateway-conformance/src/fixtures/content-type-parser.ts @@ -0,0 +1,53 @@ +import { fileTypeFromBuffer } from '@sgtpooki/file-type' + +// default from verified-fetch is application/octet-stream, which forces a download. This is not what we want for MANY file types. +const defaultMimeType = 'text/html; charset=utf-8' +function checkForSvg (bytes: Uint8Array): string { + return /^(<\?xml[^>]+>)?[^<^\w]+<svg/.test(new TextDecoder().decode(bytes)) ? 'image/svg+xml' : defaultMimeType +} + +export async function contentTypeParser (bytes: Uint8Array, fileName?: string): Promise<string> { + const detectedType = (await fileTypeFromBuffer(bytes))?.mime + if (detectedType != null) { + return detectedType + } + + if (fileName == null) { + // no other way to determine file-type. + return checkForSvg(bytes) + } + + // no need to include file-types listed at https://github.com/SgtPooki/file-type#supported-file-types + switch (fileName.split('.').pop()) { + case 'css': + return 'text/css' + case 'html': + return 'text/html; charset=utf-8' + case 'js': + return 'application/javascript' + case 'json': + return 'application/json' + case 'txt': + return 'text/plain' + case 'woff2': + return 'font/woff2' + // see bottom of https://github.com/SgtPooki/file-type#supported-file-types + case 'svg': + return 'image/svg+xml' + case 'csv': + return 'text/csv' + case 'doc': + return 'application/msword' + case 'xls': + return 'application/vnd.ms-excel' + case 'ppt': + return 'application/vnd.ms-powerpoint' + case 'msi': + return 'application/x-msdownload' + default: + return defaultMimeType + } +} diff --git a/packages/gateway-conformance/src/fixtures/create-kubo.ts b/packages/gateway-conformance/src/fixtures/create-kubo.ts new file mode 100644 index 00000000..14590a14 --- /dev/null 
+++ b/packages/gateway-conformance/src/fixtures/create-kubo.ts @@ -0,0 +1,31 @@ +import { createController, type Controller } from 'ipfsd-ctl' +import { path as kuboPath } from 'kubo' +import * as kuboRpcClient from 'kubo-rpc-client' + +export async function createKuboNode (listenPort?: number): Promise<Controller> { + return createController({ + kuboRpcModule: kuboRpcClient, + ipfsBin: kuboPath(), + test: true, + ipfsOptions: { + config: { + repo: process.env.KUBO_REPO ?? '', + Addresses: { + Swarm: [ + '/ip4/0.0.0.0/tcp/0', + '/ip4/0.0.0.0/tcp/0/ws' + ], + Gateway: `/ip4/127.0.0.1/tcp/${listenPort ?? 0}` + }, + Gateway: { + NoFetch: true, + ExposeRoutingAPI: true, + HTTPHeaders: { + 'Access-Control-Allow-Origin': ['*'], + 'Access-Control-Allow-Methods': ['GET', 'POST', 'PUT', 'OPTIONS'] + } + } + } + } + }) +} diff --git a/packages/gateway-conformance/src/fixtures/create-verified-fetch.ts b/packages/gateway-conformance/src/fixtures/create-verified-fetch.ts new file mode 100644 index 00000000..dccab9e1 --- /dev/null +++ b/packages/gateway-conformance/src/fixtures/create-verified-fetch.ts @@ -0,0 +1,7 @@ +import type { CreateVerifiedFetchInit, CreateVerifiedFetchOptions, VerifiedFetch } from '@helia/verified-fetch' + +export async function createVerifiedFetch (init?: CreateVerifiedFetchInit, options?: CreateVerifiedFetchOptions): Promise<VerifiedFetch> { + const { createVerifiedFetch } = await import(process.env.VERIFIED_FETCH ?? 
'@helia/verified-fetch') + + return createVerifiedFetch(init, options) +} diff --git a/packages/gateway-conformance/src/fixtures/kubo-mgmt.ts b/packages/gateway-conformance/src/fixtures/kubo-mgmt.ts new file mode 100644 index 00000000..b4c07ef1 --- /dev/null +++ b/packages/gateway-conformance/src/fixtures/kubo-mgmt.ts @@ -0,0 +1,137 @@ +/** + * This is required to update gateway-conformance fixtures + * + * Can only be run from node + * + * external command dependencies: + * - `docker` + */ + +import { readFile } from 'node:fs/promises' +import { dirname, relative, resolve, basename } from 'node:path' +import { fileURLToPath } from 'node:url' +import { logger } from '@libp2p/logger' +import { $ } from 'execa' +import { glob } from 'glob' +import { path } from 'kubo' +import { GWC_IMAGE } from '../constants.js' + +// eslint-disable-next-line @typescript-eslint/naming-convention +const __dirname = dirname(fileURLToPath(import.meta.url)) + +const log = logger('kubo-mgmt') + +const kuboBinary = process.env.KUBO_BINARY ?? path() + +// This needs to match the `repo` property provided to `ipfsd-ctl` in `createKuboNode` so our kubo instance in tests uses the same repo +export const kuboRepoDir = process.env.KUBO_REPO ?? resolve(__dirname, 'test-repo') +export const GWC_FIXTURES_PATH = resolve(__dirname, 'gateway-conformance-fixtures') + +export async function loadKuboFixtures (): Promise<string> { + await attemptKuboInit() + + await downloadFixtures() + + return loadFixtures() +} + +function getExecaOptions ({ cwd, ipfsNsMap }: { cwd?: string, ipfsNsMap?: string } = {}): { cwd: string, env: Record<string, string | undefined> } { + return { + cwd: cwd ?? 
__dirname, + env: { + IPFS_PATH: kuboRepoDir, + IPFS_NS_MAP: ipfsNsMap + } + } +} + +async function attemptKuboInit (): Promise { + const execaOptions = getExecaOptions() + try { + await $(execaOptions)`${kuboBinary} init` + log('Kubo initialized at %s', kuboRepoDir) + + await configureKubo() + } catch (e: any) { + if (e.stderr?.includes('ipfs daemon is running') === true) { + log('Kubo is already running') + return + } + if (e.stderr?.includes('already exists!') === true) { + log('Kubo was already initialized at %s', kuboRepoDir) + return + } + + throw e + } +} + +async function configureKubo (): Promise { + const execaOptions = getExecaOptions() + try { + // some of the same things as https://github.com/ipfs/kubo/blob/62eb1439157ea8de385671cb513e8ece10e43baf/config/profile.go#L73 + await $(execaOptions)`${kuboBinary} config Addresses.Gateway /ip4/127.0.0.1/tcp/0` + await $(execaOptions)`${kuboBinary} config Addresses.API /ip4/127.0.0.1/tcp/0` + await $(execaOptions)`${kuboBinary} config --json Bootstrap '[]'` + await $(execaOptions)`${kuboBinary} config --json Swarm.DisableNatPortMap true` + await $(execaOptions)`${kuboBinary} config --json Discovery.MDNS.Enabled false` + await $(execaOptions)`${kuboBinary} config --json Gateway.NoFetch true` + await $(execaOptions)`${kuboBinary} config --json Gateway.ExposeRoutingAPI true` + await $(execaOptions)`${kuboBinary} config --json Gateway.HTTPHeaders.Access-Control-Allow-Origin '["*"]'` + await $(execaOptions)`${kuboBinary} config --json Gateway.HTTPHeaders.Access-Control-Allow-Methods '["GET", "POST", "PUT", "OPTIONS"]'` + log('Kubo configured') + } catch (e) { + log.error('Failed to configure Kubo', e) + } +} + +async function downloadFixtures (force = false): Promise { + if (!force) { + // if the fixtures are already downloaded, we don't need to download them again + const allFixtures = await glob([`${GWC_FIXTURES_PATH}/**/*.car`, `${GWC_FIXTURES_PATH}/**/*.ipns-record`, `${GWC_FIXTURES_PATH}/dnslinks.json`]) + if 
(allFixtures.length > 0) { + log('Fixtures already downloaded') + return + } + } + + log('Downloading fixtures') + try { + await $`docker run --name gateway-conformance-fixture-loader -v ${process.cwd()}:/workspace -w /workspace ${GWC_IMAGE} extract-fixtures --directory ${relative('.', GWC_FIXTURES_PATH)} --merged false` + } catch (e) { + log.error('Error downloading fixtures, assuming current or previous success', e) + } finally { + // ensure the docker container is stopped and removed otherwise it will fail on subsequent runs + await $`docker stop gateway-conformance-fixture-loader` + await $`docker rm gateway-conformance-fixture-loader` + } +} + +async function loadFixtures (): Promise { + const execaOptions = getExecaOptions() + + for (const carFile of await glob([`${resolve(__dirname, 'data')}/**/*.car`])) { + log('Loading *.car fixture %s', carFile) + const { stdout } = await $(execaOptions)`${kuboBinary} dag import --pin-roots=false --offline ${carFile}` + stdout.split('\n').forEach(log) + } + + // TODO: fix in CI. 
See https://github.com/ipfs/helia-verified-fetch/actions/runs/9022946675/job/24793649918?pr=67#step:7:19 + if (process.env.CI == null) { + for (const ipnsRecord of await glob([`${GWC_FIXTURES_PATH}/**/*.ipns-record`])) { + const key = basename(ipnsRecord, '.ipns-record') + const relativePath = relative(GWC_FIXTURES_PATH, ipnsRecord) + log('Loading *.ipns-record fixture %s', relativePath) + const { stdout } = await $(({ ...execaOptions }))`cd ${GWC_FIXTURES_PATH} && ${kuboBinary} routing put --allow-offline "/ipns/${key}" "${relativePath}"` + stdout.split('\n').forEach(log) + } + } + + const json = await readFile(`${GWC_FIXTURES_PATH}/dnslinks.json`, 'utf-8') + const { subdomains, domains } = JSON.parse(json) + const subdomainDnsLinks = Object.entries(subdomains).map(([key, value]) => `${key}.example.com:${value}`).join(',') + const domainDnsLinks = Object.entries(domains).map(([key, value]) => `${key}:${value}`).join(',') + const ipfsNsMap = `${domainDnsLinks},${subdomainDnsLinks}` + + return ipfsNsMap +} diff --git a/packages/gateway-conformance/src/fixtures/reverse-proxy.ts b/packages/gateway-conformance/src/fixtures/reverse-proxy.ts new file mode 100644 index 00000000..d942883a --- /dev/null +++ b/packages/gateway-conformance/src/fixtures/reverse-proxy.ts @@ -0,0 +1,113 @@ +import { request, createServer, type RequestOptions, type IncomingMessage, type ServerResponse } from 'node:http' +import { logger } from '@libp2p/logger' + +const log = logger('reverse-proxy') + +let TARGET_HOST: string +let backendPort: number +let proxyPort: number +let subdomain: undefined | string +let prefixPath: undefined | string +let disableTryFiles: boolean +let X_FORWARDED_HOST: undefined | string + +const makeRequest = (options: RequestOptions, req: IncomingMessage, res: ServerResponse & { req: IncomingMessage }, attemptRootFallback = false): void => { + options.headers = options.headers ?? 
{} + options.headers.Host = TARGET_HOST + const clientIp = req.socket.remoteAddress + options.headers['X-Forwarded-For'] = req.headers.host ?? clientIp + + // override path to include prefixPath if set + if (prefixPath != null) { + options.path = `${prefixPath}${options.path}` + } + if (subdomain != null) { + options.headers.Host = `${subdomain}.${TARGET_HOST}` + } + if (X_FORWARDED_HOST != null) { + options.headers['X-Forwarded-Host'] = X_FORWARDED_HOST + } + + // log where we're making the request to + log('Proxying request to %s:%s%s', options.headers.Host, options.port, options.path) + + const proxyReq = request(options, (proxyRes) => { + if (!disableTryFiles && proxyRes.statusCode === 404) { // poor mans attempt to implement nginx style try_files + if (!attemptRootFallback) { + // Split the path and pop the last segment + const pathSegments = options.path?.split('/') ?? [] + const lastSegment = pathSegments.pop() ?? '' + + // Attempt to request the last segment at the root + makeRequest({ ...options, path: `/${lastSegment}` }, req, res, true) + } else { + // If already attempted a root fallback, serve index.html + makeRequest({ ...options, path: '/index.html' }, req, res) + } + } else { + // setCommonHeaders(res) + if (proxyRes.statusCode == null) { + log.error('No status code received from proxy') + res.writeHead(500) + res.end('Internal Server Error') + return + } + res.writeHead(proxyRes.statusCode, proxyRes.headers) + proxyRes.pipe(res, { end: true }) + } + }) + + req.pipe(proxyReq, { end: true }) + + proxyReq.on('error', (e) => { + log.error(`Problem with request: ${e.message}`) + res.writeHead(500) + res.end(`Internal Server Error: ${e.message}`) + }) +} + +export interface ReverseProxyOptions { + targetHost?: string + backendPort?: number + proxyPort?: number + subdomain?: string + prefixPath?: string + disableTryFiles?: boolean + xForwardedHost?: string +} +export async function startReverseProxy (options?: ReverseProxyOptions): Promise<() => Promise> 
{
  TARGET_HOST = options?.targetHost ?? process.env.TARGET_HOST ?? 'localhost'
  backendPort = options?.backendPort ?? Number(process.env.BACKEND_PORT ?? 3000)
  proxyPort = options?.proxyPort ?? Number(process.env.PROXY_PORT ?? 3333)
  subdomain = options?.subdomain ?? process.env.SUBDOMAIN
  prefixPath = options?.prefixPath ?? process.env.PREFIX_PATH
  disableTryFiles = options?.disableTryFiles ?? process.env.DISABLE_TRY_FILES === 'true'
  X_FORWARDED_HOST = options?.xForwardedHost ?? process.env.X_FORWARDED_HOST

  const proxyServer = createServer((req, res) => {
    // short-circuit CORS preflight requests
    if (req.method === 'OPTIONS') {
      res.writeHead(200)
      res.end()
      return
    }
    log('req.headers: %O', req.headers)

    const options: RequestOptions = {
      hostname: TARGET_HOST,
      port: backendPort,
      path: req.url,
      method: req.method,
      headers: { ...req.headers }
    }

    makeRequest(options, req, res)
  })

  proxyServer.listen(proxyPort, () => {
    log(`Proxy server listening on port ${proxyPort}`)
  })

  return async function stopReverseProxy (): Promise<void> {
    proxyServer?.close()
  }
}
diff --git a/packages/gateway-conformance/src/index.ts b/packages/gateway-conformance/src/index.ts
new file mode 100644
index 00000000..d664081c
--- /dev/null
+++ b/packages/gateway-conformance/src/index.ts
@@ -0,0 +1,51 @@
/**
 * @packageDocumentation
 *
 * Runs Gateway Conformance tests against @helia/verified-fetch using Kubo as a backing trustless-gateway. 
 *
 * @example Testing a new @helia/verified-fetch release
 *
 * ```console
 * $ npm i @helia/verified-fetch-gateway-conformance
 * $ VERIFIED_FETCH=@helia/verified-fetch@1.x.x-6f8c15b verified-fetch-gateway-conformance
 * ```
 *
 * @example Testing with a different Kubo version
 *
 * ```console
 * $ npm i @helia/verified-fetch-gateway-conformance
 * $ KUBO_BINARY=/path/to/kubo verified-fetch-gateway-conformance
 * ```
 *
 * @example Using a different gateway-conformance image
 *
 * ```console
 * $ GWC_IMAGE=ghcr.io/ipfs/gateway-conformance:v0.5.1 verified-fetch-gateway-conformance
 * ```
 *
 * @example Debugging a test run
 *
 * ```console
 * $ DEBUG="-mocha*,*,*:trace" npm run test # very verbose output
 * $ DEBUG="conformance-tests*,conformance-tests*:trace" npm run test # only gateway-conformance test output
 * ```
 *
 * @example Querying the gateway-conformance server directly
 *
 * ```console
 * $ npm run build
 * $ node dist/src/demo-server.js # in terminal 1
 * $ curl -v http://localhost:3442/ipfs/bafkqabtimvwgy3yk/ # in terminal 2
 * ```
 *
 * ### Troubleshooting
 *
 * #### Missing file in gateway-conformance-fixtures folder
 *
 * If you see the following error:
 * > ENOENT: no such file or directory, open '[...]/helia-verified-fetch/packages/gateway-conformance/dist/src/fixtures/data/gateway-conformance-fixtures/dnslinks.json'
 *
 * This likely means the docker container is not executing properly for some reason. 
You can try running the following command to see if there are any errors: `DEBUG="-mocha*,*,*:trace" npm run test` + */ + +export {} diff --git a/packages/gateway-conformance/tsconfig.json b/packages/gateway-conformance/tsconfig.json new file mode 100644 index 00000000..ff70ec08 --- /dev/null +++ b/packages/gateway-conformance/tsconfig.json @@ -0,0 +1,17 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist", + "target": "ES2022", + "module": "ES2022" + }, + "include": [ + "src", + "test" + ], + "references": [ + { + "path": "../verified-fetch" + } + ] +}