diff --git a/.github/workflows/discord-webhook.yml b/.github/workflows/discord-webhook.yml index 787d4242..a6679e83 100644 --- a/.github/workflows/discord-webhook.yml +++ b/.github/workflows/discord-webhook.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: New Discussion - uses: tsickert/discord-webhook@v5.4.0 + uses: tsickert/discord-webhook@v6.0.0 if: ${{ (github.event_name == 'discussion') }} with: webhook-url: ${{ secrets.DISCORD_WEBHOOK_GITHUB }} @@ -25,7 +25,7 @@ jobs: embed-description: A **discussion** has been created in ${{ github.repository }}. - name: New Issue - uses: tsickert/discord-webhook@v5.4.0 + uses: tsickert/discord-webhook@v6.0.0 if: ${{ (github.event_name == 'issues') }} with: webhook-url: ${{ secrets.DISCORD_WEBHOOK_GITHUB }} @@ -38,7 +38,7 @@ jobs: embed-description: An **issue** has been opened in ${{ github.repository }}. - name: New Pull Request - uses: tsickert/discord-webhook@v5.4.0 + uses: tsickert/discord-webhook@v6.0.0 if: ${{ (github.event_name == 'pull_request_target') }} with: webhook-url: ${{ secrets.DISCORD_WEBHOOK_GITHUB }} diff --git a/.github/workflows/scan-pull-request.yml b/.github/workflows/scan-pull-request.yml index 432e5c71..1cfa2817 100644 --- a/.github/workflows/scan-pull-request.yml +++ b/.github/workflows/scan-pull-request.yml @@ -28,7 +28,7 @@ jobs: continue-on-error: false steps: - uses: actions/checkout@v4 - - uses: agilepathway/label-checker@v1.6.13 + - uses: agilepathway/label-checker@v1.6.44 with: any_of: api,bug,build,dependencies,documentation,enhancement,refactoring repo_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/triage-issue.yml b/.github/workflows/triage-issue.yml new file mode 100644 index 00000000..aec1c09b --- /dev/null +++ b/.github/workflows/triage-issue.yml @@ -0,0 +1,20 @@ +name: triage opened issue + +on: + issues: + types: + - reopened + - opened + +jobs: + label-issue: + runs-on: ubuntu-latest + permissions: + issues: write + steps: + - run: gh issue edit "$NUMBER" --add-label "$LABELS" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPO: ${{ github.repository }} + NUMBER: ${{ github.event.issue.number }} + LABELS: triage diff --git a/.github/workflows/verify.yaml b/.github/workflows/verify.yaml index e25e91d6..032e944f 100644 --- a/.github/workflows/verify.yaml +++ b/.github/workflows/verify.yaml @@ -2,6 +2,7 @@ name: Test Code (Style, Tests) on: push: + branches: [ main ] pull_request: branches: [ main ] paths-ignore: diff --git a/README.md b/README.md index f33195c5..b367fe66 100644 --- a/README.md +++ b/README.md @@ -13,8 +13,8 @@ strongly advise reading the [documentation](https://eclipse-edc.github.io/docs/# Also, a working knowledge of **Git**, **Gradle**, **Java** and **HTTP** is presumed. -We'll assume that you've just checked out the samples code base and have **Java 11+** installed on your development -machine. If not, please download and install JDK 11+ for your OS. +We'll assume that you've just checked out the samples code base and have **Java 17+** installed on your development +machine. If not, please download and install JDK 17+ for your OS. Command examples in this document will use the `bash` syntax and use Unix-style paths, but any other shell should be fine as well. If you're using Windows you either need to adapt the paths or use WSL2. @@ -49,6 +49,14 @@ Click the link above to learn about the transfer samples in more detail. All transfer samples are located in the `advanced` directory. 
+### [Policy](./policy/README.md) + +These samples deal with the topic of policies and their evaluation and enforcement. They will teach you what +configurations you need to make to enable the evaluation of specific policy rules and constraint and how to provide +custom code for their enforcement. + +All policy samples are located in the `policy` directory. + ## Contributing See [how to contribute](https://github.com/eclipse-edc/docs/blob/main/CONTRIBUTING.md). diff --git a/advanced/advanced-01-open-telemetry/README.md b/advanced/advanced-01-open-telemetry/README.md index 0ddc29b3..43aad0d6 100644 --- a/advanced/advanced-01-open-telemetry/README.md +++ b/advanced/advanced-01-open-telemetry/README.md @@ -25,29 +25,17 @@ is configured to expose a Prometheus metrics endpoint. To run the consumer, the provider, and Jaeger execute the following commands in the project root folder: +Build the connector ```bash -docker compose -f advanced/advanced-01-open-telemetry/docker-compose.yaml up --abort-on-container-exit +./gradlew :advanced:advanced-01-open-telemetry:open-telemetry-runtime:build ``` -Open a new terminal. - -Register data planes for provider and consumer: - +Start the docker compose ```bash -curl -H 'Content-Type: application/json' \ - -H "X-Api-Key: password" \ - -d @transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-provider.json \ - -X POST "http://localhost:19193/management/v2/dataplanes" \ - -s | jq +docker compose -f advanced/advanced-01-open-telemetry/docker-compose.yaml up --abort-on-container-exit ``` -```bash -curl -H 'Content-Type: application/json' \ - -H "X-Api-Key: password" \ - -d @transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-consumer.json \ - -X POST "http://localhost:29193/management/v2/dataplanes" \ - -s | jq -``` +Open a new terminal. 
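+
+If you want to make sure everything came up before continuing, you can list the compose services from the new
+terminal (optional; the command assumes it is run from the project root, like the commands above). You should see
+the `consumer`, `provider`, `jaeger` and `prometheus` services running:
+
+```bash
+docker compose -f advanced/advanced-01-open-telemetry/docker-compose.yaml ps
+```
+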
Create an asset: @@ -63,7 +51,7 @@ Create a Policy on the provider connector: ```bash curl -H "X-Api-Key: password" \ -d @transfer/transfer-01-negotiation/resources/create-policy.json \ - -H 'content-type: application/json' http://localhost:19193/management/v2/policydefinitions \ + -H 'content-type: application/json' http://localhost:19193/management/v3/policydefinitions \ -s | jq ``` @@ -72,7 +60,7 @@ Follow up with the creation of a contract definition: ```bash curl -H "X-Api-Key: password" \ -d @transfer/transfer-01-negotiation/resources/create-contract-definition.json \ - -H 'content-type: application/json' http://localhost:19193/management/v2/contractdefinitions \ + -H 'content-type: application/json' http://localhost:19193/management/v3/contractdefinitions \ -s | jq ``` @@ -84,7 +72,7 @@ directly with this call: curl -H "X-Api-Key: password" \ -H "Content-Type: application/json" \ -d @advanced/advanced-01-open-telemetry/resources/get-dataset.json \ - -X POST "http://localhost:29193/management/v2/catalog/dataset/request" \ + -X POST "http://localhost:29193/management/v3/catalog/dataset/request" \ -s | jq ``` @@ -95,40 +83,55 @@ The output will be something like: "@id": "assetId", "@type": "dcat:Dataset", "odrl:hasPolicy": { - "@id": "MQ==:YXNzZXRJZA==:YjI5ZDVkZDUtZWU0Mi00NWRiLWE2OTktYjNmMjlmMWNjODk3", - "@type": "odrl:Set", + "@id": "MQ==:YXNzZXRJZA==:NjdlNDFhM2EtYThjMS00YTBmLWFkNmYtMjk5NzkzNTE2OTE3", + "@type": "odrl:Offer", "odrl:permission": [], "odrl:prohibition": [], - "odrl:obligation": [], - "odrl:target": "assetId" + "odrl:obligation": [] }, "dcat:distribution": [ { "@type": "dcat:Distribution", "dct:format": { - "@id": "HttpProxy" + "@id": "HttpData-PULL" }, - "dcat:accessService": "06348bca-6bf0-47fe-8bb5-6741cff7a955" + "dcat:accessService": { + "@id": "cb701b36-48ee-4132-8436-dba7b83c606c", + "@type": "dcat:DataService", + "dcat:endpointDescription": "dspace:connector", + "dcat:endpointUrl": "http://provider:19194/protocol", + "dct:terms": "dspace:connector", + "dct:endpointUrl": "http://provider:19194/protocol" + } }, { "@type": "dcat:Distribution", "dct:format": { - "@id": "HttpData" + "@id": "HttpData-PUSH" }, - "dcat:accessService": "06348bca-6bf0-47fe-8bb5-6741cff7a955" + "dcat:accessService": { + "@id": "cb701b36-48ee-4132-8436-dba7b83c606c", + "@type": "dcat:DataService", + "dcat:endpointDescription": "dspace:connector", + "dcat:endpointUrl": "http://provider:19194/protocol", + "dct:terms": "dspace:connector", + "dct:endpointUrl": "http://provider:19194/protocol" + } } ], - "edc:name": "product description", - "edc:id": "assetId", - "edc:contenttype": "application/json", + "name": "product description", + "id": "assetId", + "contenttype": "application/json", "@context": { - "dct": "https://purl.org/dc/terms/", + "@vocab": "https://w3id.org/edc/v0.0.1/ns/", "edc": "https://w3id.org/edc/v0.0.1/ns/", - "dcat": "https://www.w3.org/ns/dcat/", + "dcat": "http://www.w3.org/ns/dcat#", + "dct": "http://purl.org/dc/terms/", "odrl": "http://www.w3.org/ns/odrl/2/", "dspace": "https://w3id.org/dspace/v0.8/" } } + ``` With the `odrl:hasPolicy/@id` we can now replace it in the [negotiate-contract.json](resources/negotiate-contract.json) file @@ -138,7 +141,7 @@ and request the contract negotiation: curl -H "X-Api-Key: password" \ -H "Content-Type: application/json" \ -d @advanced/advanced-01-open-telemetry/resources/negotiate-contract.json \ - -X POST "http://localhost:29193/management/v2/contractnegotiations" \ + -X POST "http://localhost:29193/management/v3/contractnegotiations" 
\ -s | jq ``` @@ -147,7 +150,7 @@ state with this call, replacing `{{contract-negotiation-id}}` with the id return ```shell curl -H 'X-Api-Key: password' \ - -X GET "http://localhost:29193/management/v2/contractnegotiations/{{contract-negotiation-id}}" \ + -X GET "http://localhost:29193/management/v3/contractnegotiations/{{contract-negotiation-id}}" \ -s | jq ``` @@ -157,7 +160,7 @@ Finally, update the contract agreement id in the [start-transfer.json](resources curl -H "X-Api-Key: password" \ -H "Content-Type: application/json" \ -d @advanced/advanced-01-open-telemetry/resources/start-transfer.json \ - -X POST "http://localhost:29193/management/v2/transferprocesses" \ + -X POST "http://localhost:29193/management/v3/transferprocesses" \ -s | jq ``` @@ -232,11 +235,11 @@ In order to provide your own OpenTelemetry implementation, you have to "deploy a - Add a file in the resource directory META-INF/services. The file should be called `io.opentelemetry.api.OpenTelemetry`. - Add to the file the fully qualified name of your custom OpenTelemetry implementation class. -EDC uses a [ServiceLoader](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/ServiceLoader.html) +EDC uses a [ServiceLoader](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/ServiceLoader.html) to load an implementation of OpenTelemetry. If it finds an OpenTelemetry service provider on the class path it will use it, otherwise it will use the registered global OpenTelemetry. You can look at the section `Deploying service providers on the class path` of the -[ServiceLoader documentation](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/ServiceLoader.html) +[ServiceLoader documentation](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/ServiceLoader.html) to have more information about service providers. ---- \ No newline at end of file +--- diff --git a/advanced/advanced-01-open-telemetry/docker-compose.yaml b/advanced/advanced-01-open-telemetry/docker-compose.yaml index 27d3a792..91d7d413 100644 --- a/advanced/advanced-01-open-telemetry/docker-compose.yaml +++ b/advanced/advanced-01-open-telemetry/docker-compose.yaml @@ -4,8 +4,7 @@ services: consumer: build: - context: ../.. - dockerfile: advanced/advanced-01-open-telemetry/open-telemetry-consumer/Dockerfile + context: open-telemetry-runtime volumes: - ./:/open-telemetry - ../../transfer/transfer-00-prerequisites/:/prerequisites @@ -15,8 +14,9 @@ services: environment: EDC_HOSTNAME: consumer OTEL_SERVICE_NAME: consumer - OTEL_TRACES_EXPORTER: jaeger - OTEL_EXPORTER_JAEGER_ENDPOINT: http://jaeger:14250 + OTEL_LOGS_EXPORTER: none + OTEL_JAVAAGENT_EXTENSIONS: /app/opentelemetry-exporter-otlp.jar + OTEL_EXPORTER_OTLP_ENDPOINT: http://jaeger:4318 OTEL_METRICS_EXPORTER: prometheus WEB_HTTP_PORT: 29191 WEB_HTTP_PATH: /api @@ -34,7 +34,6 @@ services: EDC_API_AUTH_KEY: password EDC_KEYSTORE: /prerequisites/resources/certs/cert.pfx EDC_KEYSTORE_PASSWORD: 123456 - EDC_VAULT: /prerequisites/resources/configuration/provider-vault.properties EDC_FS_CONFIG: /prerequisites/resources/configuration/provider-configuration.properties entrypoint: java -javaagent:/app/opentelemetry-javaagent.jar @@ -43,8 +42,7 @@ services: provider: build: - context: ../.. 
- dockerfile: advanced/advanced-01-open-telemetry/open-telemetry-provider/Dockerfile + context: open-telemetry-runtime volumes: - ./:/open-telemetry - ../../transfer/transfer-00-prerequisites/:/prerequisites @@ -54,8 +52,9 @@ services: environment: EDC_HOSTNAME: provider OTEL_SERVICE_NAME: provider - OTEL_TRACES_EXPORTER: jaeger - OTEL_EXPORTER_JAEGER_ENDPOINT: http://jaeger:14250 + OTEL_LOGS_EXPORTER: none + OTEL_JAVAAGENT_EXTENSIONS: /app/opentelemetry-exporter-otlp.jar + OTEL_EXPORTER_OTLP_ENDPOINT: http://jaeger:4318 WEB_HTTP_PORT: 19191 WEB_HTTP_PATH: /api WEB_HTTP_PUBLIC_PORT: 19291 @@ -72,7 +71,6 @@ services: EDC_API_AUTH_KEY: password EDC_KEYSTORE: /prerequisites/resources/certs/cert.pfx EDC_KEYSTORE_PASSWORD: 123456 - EDC_VAULT: /prerequisites/resources/configuration/consumer-vault.properties EDC_FS_CONFIG: /prerequisites/resources/configuration/consumer-configuration.properties EDC_SAMPLES_TRANSFER_01_ASSET_PATH: /open-telemetry/README.md entrypoint: java @@ -82,7 +80,10 @@ services: jaeger: image: jaegertracing/all-in-one + environment: + - COLLECTOR_OTLP_ENABLED=true ports: + - "4317:4317" - "16686:16686" prometheus: diff --git a/advanced/advanced-01-open-telemetry/open-telemetry-consumer/Dockerfile b/advanced/advanced-01-open-telemetry/open-telemetry-consumer/Dockerfile deleted file mode 100644 index 6641f6fa..00000000 --- a/advanced/advanced-01-open-telemetry/open-telemetry-consumer/Dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM gradle:jdk17 AS build - -WORKDIR /home/gradle/project/ -COPY --chown=gradle:gradle . /home/gradle/project/ -RUN gradle advanced:advanced-01-open-telemetry:open-telemetry-consumer:build - -FROM openjdk:17-slim - -WORKDIR /app -COPY --from=build /home/gradle/project/advanced/advanced-01-open-telemetry/open-telemetry-consumer/build/libs/opentelemetry-javaagent-*.jar /app/opentelemetry-javaagent.jar -COPY --from=build /home/gradle/project/advanced/advanced-01-open-telemetry/open-telemetry-consumer/build/libs/consumer.jar /app/connector.jar \ No newline at end of file diff --git a/advanced/advanced-01-open-telemetry/open-telemetry-provider/Dockerfile b/advanced/advanced-01-open-telemetry/open-telemetry-provider/Dockerfile deleted file mode 100644 index 64bbb8bb..00000000 --- a/advanced/advanced-01-open-telemetry/open-telemetry-provider/Dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM gradle:jdk17 AS build - -WORKDIR /home/gradle/project/ -COPY --chown=gradle:gradle . 
/home/gradle/project/ -RUN gradle advanced:advanced-01-open-telemetry:open-telemetry-provider:build - -FROM openjdk:17-slim - -WORKDIR /app -COPY --from=build /home/gradle/project/advanced/advanced-01-open-telemetry/open-telemetry-provider/build/libs/opentelemetry-javaagent-*.jar /app/opentelemetry-javaagent.jar -COPY --from=build /home/gradle/project/advanced/advanced-01-open-telemetry/open-telemetry-provider/build/libs/provider.jar /app/connector.jar \ No newline at end of file diff --git a/advanced/advanced-01-open-telemetry/open-telemetry-runtime/Dockerfile b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/Dockerfile new file mode 100644 index 00000000..9643d0e0 --- /dev/null +++ b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/Dockerfile @@ -0,0 +1,6 @@ +FROM openjdk:17-slim + +WORKDIR /app +COPY build/libs/opentelemetry-javaagent-2.5.0.jar /app/opentelemetry-javaagent.jar +COPY build/libs/opentelemetry-exporter-otlp-1.39.0.jar /app/opentelemetry-exporter-otlp.jar +COPY build/libs/connector.jar /app/connector.jar diff --git a/advanced/advanced-01-open-telemetry/open-telemetry-consumer/build.gradle.kts b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/build.gradle.kts similarity index 78% rename from advanced/advanced-01-open-telemetry/open-telemetry-consumer/build.gradle.kts rename to advanced/advanced-01-open-telemetry/open-telemetry-runtime/build.gradle.kts index bb8d7e95..6f431789 100644 --- a/advanced/advanced-01-open-telemetry/open-telemetry-consumer/build.gradle.kts +++ b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/build.gradle.kts @@ -25,30 +25,33 @@ plugins { dependencies { + implementation(libs.edc.control.api.configuration) implementation(libs.edc.control.plane.api.client) implementation(libs.edc.control.plane.api) implementation(libs.edc.control.plane.core) implementation(libs.edc.dsp) implementation(libs.edc.configuration.filesystem) - implementation(libs.edc.vault.filesystem) implementation(libs.edc.iam.mock) implementation(libs.edc.management.api) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.edc.transfer.pull.http.receiver) implementation(libs.edc.data.plane.selector.api) implementation(libs.edc.data.plane.selector.core) - implementation(libs.edc.data.plane.selector.client) - implementation(libs.edc.data.plane.api) + implementation(libs.edc.data.plane.self.registration) + implementation(libs.edc.data.plane.control.api) + implementation(libs.edc.data.plane.public.api) implementation(libs.edc.data.plane.core) implementation(libs.edc.data.plane.http) implementation(libs.edc.api.observability) implementation(libs.edc.auth.tokenbased) + implementation(libs.opentelemetry.exporter.otlp) + runtimeOnly(libs.edc.monitor.jdk.logger) } @@ -58,14 +61,15 @@ application { tasks.withType { mergeServiceFiles() - archiveFileName.set("consumer.jar") + archiveFileName.set("connector.jar") } tasks.register("copyOpenTelemetryJar", Copy::class) { val openTelemetry = configurations.create("open-telemetry") dependencies { - openTelemetry(libs.opentelemetry) + openTelemetry(libs.opentelemetry.javaagent) + openTelemetry(libs.opentelemetry.exporter.otlp) } from(openTelemetry) @@ -74,4 +78,4 @@ tasks.register("copyOpenTelemetryJar", Copy::class) { tasks.build { finalizedBy("copyOpenTelemetryJar") -} \ No newline at end of file +} diff --git a/advanced/advanced-01-open-telemetry/open-telemetry-runtime/src/main/java/org/eclipse/edc/sample/runtime/SeedVaultExtension.java 
b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/src/main/java/org/eclipse/edc/sample/runtime/SeedVaultExtension.java new file mode 100644 index 00000000..171d96ff --- /dev/null +++ b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/src/main/java/org/eclipse/edc/sample/runtime/SeedVaultExtension.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2024 Bayerische Motoren Werke Aktiengesellschaft (BMW AG) + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - initial API and implementation + * + */ + +package org.eclipse.edc.sample.runtime; + +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.spi.security.Vault; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; + +public class SeedVaultExtension implements ServiceExtension { + + @Inject + private Vault vault; + + private static final String PUBLIC_KEY = """ + -----BEGIN CERTIFICATE----- + MIIDazCCAlOgAwIBAgIUZ3/sZXYzW4PjmOXKrZn6WBmUJ+4wDQYJKoZIhvcNAQEL + BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM + GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMjAyMjMxNTA2MDNaFw0zMjAy + MjExNTA2MDNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw + HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB + AQUAA4IBDwAwggEKAoIBAQDBl6XaJnXTL+6DWip3aBhU+MzmY4d1V9hbTm1tiZ3g + E0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7EddidN0ITHB9cQNdAfdUJ5njmsGS + PbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7DHacZT/+OztBH1RwkG2ymM94Hf8H + I6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvjX5qASakBtXISKIsOU84N0/2HDN3W + EGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga775bPXN3M+JTSaIKE7dZbKzvx0Zi0 + h5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2YmnneYoVAgMBAAGjUzBRMB0GA1Ud + DgQWBBTvK1wVERwjni4B2vdH7KtEJeVWFzAfBgNVHSMEGDAWgBTvK1wVERwjni4B + 2vdH7KtEJeVWFzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBn + QHiPA7OBYukHd9gS7c0HXE+fsWcS3GZeLqcHfQQnV3pte1vTmu9//IVW71wNCJ1/ + rySRyODPQoPehxEcyHwupNZSzXK//nPlTdSgjMfFxscvt1YndyQLQYCfyOJMixAe + Aqrb14GTFHUUrdor0PyElhkULjkOXUrSIsdBrfWrwLTkelE8NK3tb5ZG8KPzD9Jy + +NwEPPr9d+iHkUkM7EFWw/cl56wka9ryBb97RI7DqbO6/j6OXHMk4GByxKv7DSIR + IvF9/Dw20qytajtaHV0pluFcOBuFc0NfiDvCaQlbTsfjzbc6UmZWbOi9YOJl3VQ/ + g3h+15GuzbsSzOCOEYOT + -----END CERTIFICATE----- + """; + + private static final String PRIVATE_KEY = """ + -----BEGIN PRIVATE KEY----- + MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBl6XaJnXTL+6D + Wip3aBhU+MzmY4d1V9hbTm1tiZ3gE0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7 + EddidN0ITHB9cQNdAfdUJ5njmsGSPbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7 + DHacZT/+OztBH1RwkG2ymM94Hf8HI6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvj + X5qASakBtXISKIsOU84N0/2HDN3WEGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga7 + 75bPXN3M+JTSaIKE7dZbKzvx0Zi0h5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2 + YmnneYoVAgMBAAECggEBAJHXiN6bctAyn+DcoHlsNkhtVw+Jk5bXIutGXjHTJtiU + K//siAGC78IZMyXmi0KndPVCdBwShROVW8xWWIiXuZxy2Zvm872xqX4Ah3JsN7/Q + NrXdVBUDo38zwIGkxqIfIz9crZ4An+J/eq5zaTfRHzCLtswMqjRS2hFeBY5cKrBY + 4bkSDGTP/c5cP7xS/UwaiTR2Ptd41f4zTyd4l5rl30TYHpazQNlbdxcOV4jh2Rnp + E0+cFEvEfeagVq7RmfBScKG5pk4qcRG0q2QHMyK5y00hdYvhdRjSgN7xIDkeO5B8 + s8/tSLU78nCl2gA9IKxTXYLitpISwZ81Q04mEAKRRtECgYEA+6lKnhn//aXerkLo + ZOLOjWQZhh005jHdNxX7DZqLpTrrfxc8v15KWUkAK1H0QHqYvfPrbbsBV1MY1xXt + sKmkeu/k8fJQzCIvFN4K2J5W5kMfq9PSw5d3XPeDaQuXUVaxBVp0gzPEPHmkKRbA + 
AkUqY0oJwA9gMKf8dK+flmLZfbsCgYEAxO4Roj2G46/Oox1GEZGxdLpiMpr9rEdR + JlSZ9kMGfddNLV7sFp6yPXDcyc/AOqeNj7tw1MyoT3Ar454+V0q83EZzCXvs4U6f + jUrfFcoVWIwf9AV/J4KWzMIzfqPIeNwqymZKd6BrZgcXXvAEPWt27mwO4a1GhC4G + oZv0t3lAsm8CgYAQ8C0IhSF4tgBN5Ez19VoHpDQflbmowLRt77nNCZjajyOokyzQ + iI0ig0pSoBp7eITtTAyNfyew8/PZDi3IVTKv35OeQTv08VwP4H4EZGve5aetDf3C + kmBDTpl2qYQOwnH5tUPgTMypcVp+NXzI6lTXB/WuCprjy3qvc96e5ZpT3wKBgQC8 + Xny/k9rTL/eYTwgXBiWYYjBL97VudUlKQOKEjNhIxwkrvQBXIrWbz7lh0Tcu49al + BcaHxru4QLO6pkM7fGHq0fh3ufJ8EZjMrjF1xjdk26Q05o0aXe+hLKHVIRVBhlfo + ArB4fRo+HcpdJXjox0KcDQCvHe+1v9DYBTWvymv4QQKBgBy3YH7hKz35DcXvA2r4 + Kis9a4ycuZqTXockO4rkcIwC6CJp9JbHDIRzig8HYOaRqmZ4a+coqLmddXr2uOF1 + 7+iAxxG1KzdT6uFNd+e/j2cdUjnqcSmz49PRtdDswgyYhoDT+W4yVGNQ4VuKg6a3 + Z3pC+KTdoHSKeA2FyAGnSUpD + -----END PRIVATE KEY----- + """; + + @Override + public void initialize(ServiceExtensionContext context) { + vault.storeSecret("public-key", PUBLIC_KEY); + vault.storeSecret("private-key", PRIVATE_KEY); + } +} diff --git a/advanced/advanced-01-open-telemetry/open-telemetry-runtime/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension new file mode 100644 index 00000000..eaca6845 --- /dev/null +++ b/advanced/advanced-01-open-telemetry/open-telemetry-runtime/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -0,0 +1 @@ +org.eclipse.edc.sample.runtime.SeedVaultExtension diff --git a/advanced/advanced-01-open-telemetry/resources/negotiate-contract.json b/advanced/advanced-01-open-telemetry/resources/negotiate-contract.json index 548d1f5e..44b73204 100644 --- a/advanced/advanced-01-open-telemetry/resources/negotiate-contract.json +++ b/advanced/advanced-01-open-telemetry/resources/negotiate-contract.json @@ -1,21 +1,15 @@ { "@context": { - "@vocab": "https://w3id.org/edc/v0.0.1/ns/", - "odrl": "http://www.w3.org/ns/odrl/2/" + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" }, - "@type": "NegotiationInitiateRequestDto", - "connectorId": "provider", + "@type": "ContractRequest", "counterPartyAddress": "http://provider:19194/protocol", - "consumerId": "consumer", - "providerId": "provider", "protocol": "dataspace-protocol-http", "policy": { "@context": "http://www.w3.org/ns/odrl.jsonld", "@id": "{{contract-offer-id}}", - "@type": "Set", - "permission": [], - "prohibition": [], - "obligation": [], + "@type": "Offer", + "assigner": "provider", "target": "assetId" } } diff --git a/advanced/advanced-01-open-telemetry/resources/start-transfer.json b/advanced/advanced-01-open-telemetry/resources/start-transfer.json index 171cc529..1dc97872 100644 --- a/advanced/advanced-01-open-telemetry/resources/start-transfer.json +++ b/advanced/advanced-01-open-telemetry/resources/start-transfer.json @@ -8,7 +8,5 @@ "contractId": "{{contract-agreement-id}}", "assetId": "assetId", "protocol": "dataspace-protocol-http", - "dataDestination": { - "type": "HttpProxy" - } + "transferType": "HttpData-PULL" } diff --git a/advanced/advanced-02-custom-runtime/src/main/java/org/eclipse/edc/sample/runtime/CustomRuntime.java b/advanced/advanced-02-custom-runtime/src/main/java/org/eclipse/edc/sample/runtime/CustomRuntime.java index f1e23189..a04d7409 100644 --- a/advanced/advanced-02-custom-runtime/src/main/java/org/eclipse/edc/sample/runtime/CustomRuntime.java +++ b/advanced/advanced-02-custom-runtime/src/main/java/org/eclipse/edc/sample/runtime/CustomRuntime.java @@ -17,12 +17,10 @@ 
import org.eclipse.edc.boot.system.DefaultServiceExtensionContext; import org.eclipse.edc.boot.system.runtime.BaseRuntime; import org.eclipse.edc.spi.monitor.Monitor; -import org.eclipse.edc.spi.system.ConfigurationExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.eclipse.edc.spi.system.configuration.Config; import org.jetbrains.annotations.NotNull; -import java.util.List; - public class CustomRuntime extends BaseRuntime { /** @@ -30,22 +28,17 @@ public class CustomRuntime extends BaseRuntime { * instantiate the {@code BaseRuntime}. */ public static void main(String[] args) { - new CustomRuntime().boot(); - } - - @Override - protected String getRuntimeName(ServiceExtensionContext context) { - return "CUSTOM-RUNTIME"; + new CustomRuntime().boot(true); } @Override - protected @NotNull ServiceExtensionContext createContext(Monitor monitor) { + protected @NotNull ServiceExtensionContext createContext(Monitor monitor, Config config) { //override the default service extension context with a super customized one - return new SuperCustomExtensionContext(monitor, loadConfigurationExtensions()); + return new SuperCustomExtensionContext(monitor, config); } @Override - protected void shutdown() { + public void shutdown() { super.shutdown(); //this is the custom part here: @@ -53,8 +46,8 @@ protected void shutdown() { } private static class SuperCustomExtensionContext extends DefaultServiceExtensionContext { - SuperCustomExtensionContext(Monitor monitor, List configurationExtensions) { - super(monitor, configurationExtensions); + SuperCustomExtensionContext(Monitor monitor, Config config) { + super(monitor, config); } } } diff --git a/build.gradle.kts b/build.gradle.kts index 83279cf4..8b773e65 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -35,14 +35,10 @@ allprojects { // configure which version of the annotation processor to use. defaults to the same version as the plugin configure { processorVersion.set(edcVersion) - outputDirectory.set(project.buildDir) + outputDirectory.set(project.layout.buildDirectory.asFile.get()) } configure { - versions { - // override default dependency versions here - metaModel.set(edcVersion) - } publish.set(false) } @@ -51,10 +47,12 @@ allprojects { configDirectory.set(rootProject.file("resources")) } - // EdcRuntimeExtension uses this to determine the runtime classpath of the module to run. 
- tasks.register("printClasspath") { - doLast { - println(sourceSets["main"].runtimeClasspath.asPath) + tasks.test { + testLogging { + showStandardStreams = true } } -} \ No newline at end of file + +} + + diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index fde78cdb..2ff57977 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -2,22 +2,22 @@ format.version = "1.1" [versions] -assertj = "3.25.1" -awaitility = "4.2.0" -edc = "0.4.1" +assertj = "3.26.0" +awaitility = "4.2.1" +edc = "0.7.1" jakarta-json = "2.0.1" junit-pioneer = "2.2.0" -jupiter = "5.10.1" -okhttp-mockwebserver = "5.0.0-alpha.12" -openTelemetry = "1.18.0" +jupiter = "5.10.3" +okhttp-mockwebserver = "5.0.0-alpha.14" restAssured = "5.4.0" -rsApi = "3.1.0" -testcontainers = "1.19.3" -kafkaClients = "3.6.1" +rsApi = "4.0.0" +testcontainers = "1.19.8" +kafkaClients = "3.7.1" [libraries] assertj = { module = "org.assertj:assertj-core", version.ref = "assertj" } awaitility = { module = "org.awaitility:awaitility", version.ref = "awaitility" } +edc-api-control-configuration = { module = "org.eclipse.edc:control-api-configuration", version.ref = "edc" } edc-api-core = { module = "org.eclipse.edc:api-core", version.ref = "edc" } edc-api-observability = { module = "org.eclipse.edc:api-observability", version.ref = "edc" } edc-auth-tokenbased = { module = "org.eclipse.edc:auth-tokenbased", version.ref = "edc" } @@ -25,41 +25,48 @@ edc-boot = { module = "org.eclipse.edc:boot", version.ref = "edc" } edc-build-plugin = { module = "org.eclipse.edc.edc-build:org.eclipse.edc.edc-build.gradle.plugin", version.ref = "edc" } edc-configuration-filesystem = { module = "org.eclipse.edc:configuration-filesystem", version.ref = "edc" } edc-connector-core = { module = "org.eclipse.edc:connector-core", version.ref = "edc" } +edc-control-api-configuration = { module = "org.eclipse.edc:control-api-configuration", version.ref = "edc" } edc-control-plane-api-client = { module = "org.eclipse.edc:control-plane-api-client", version.ref = "edc" } edc-control-plane-api = { module = "org.eclipse.edc:control-plane-api", version.ref = "edc" } edc-control-plane-core = { module = "org.eclipse.edc:control-plane-core", version.ref = "edc" } edc-control-plane-spi = { module = "org.eclipse.edc:control-plane-spi", version.ref = "edc" } -edc-data-plane-api = { module = "org.eclipse.edc:data-plane-api", version.ref = "edc" } -edc-data-plane-aws-s3 = { module = "org.eclipse.edc:data-plane-aws-s3", version.ref = "edc" } -edc-data-plane-azure-storage = { module = "org.eclipse.edc:data-plane-azure-storage", version.ref = "edc" } +edc-data-plane-control-api = { module = "org.eclipse.edc:data-plane-control-api", version.ref = "edc" } +edc-data-plane-public-api = { module = "org.eclipse.edc:data-plane-public-api-v2", version.ref = "edc" } +edc-data-plane-aws-s3 = { module = "org.eclipse.edc.aws:data-plane-aws-s3", version.ref = "edc" } +edc-data-plane-azure-storage = { module = "org.eclipse.edc.azure:data-plane-azure-storage", version.ref = "edc" } edc-data-plane-client = { module = "org.eclipse.edc:data-plane-client", version.ref = "edc" } edc-data-plane-core = { module = "org.eclipse.edc:data-plane-core", version.ref = "edc" } edc-data-plane-http = { module = "org.eclipse.edc:data-plane-http", version.ref = "edc" } edc-data-plane-kafka = { module = "org.eclipse.edc:data-plane-kafka", version.ref = "edc" } edc-data-plane-selector-api = { module = "org.eclipse.edc:data-plane-selector-api", version.ref = "edc" } 
-edc-data-plane-selector-client = { module = "org.eclipse.edc:data-plane-selector-client", version.ref = "edc" } edc-data-plane-selector-core = { module = "org.eclipse.edc:data-plane-selector-core", version.ref = "edc" } +edc-data-plane-self-registration = { module = "org.eclipse.edc:data-plane-self-registration", version.ref = "edc" } edc-data-plane-spi = { module = "org.eclipse.edc:data-plane-spi", version.ref = "edc" } edc-data-plane-util = { module = "org.eclipse.edc:data-plane-util", version.ref = "edc" } edc-dsp = { module = "org.eclipse.edc:dsp", version.ref = "edc" } +edc-edr-cache-api = { module = "org.eclipse.edc:edr-cache-api", version.ref = "edc" } +edc-edr-store-core = { module = "org.eclipse.edc:edr-store-core", version.ref = "edc" } +edc-edr-store-receiver = { module = "org.eclipse.edc:edr-store-receiver", version.ref = "edc" } edc-http = { module = "org.eclipse.edc:http", version.ref = "edc" } edc-iam-mock = { module = "org.eclipse.edc:iam-mock", version.ref = "edc" } edc-jersey-micrometer = { module = "org.eclipse.edc:jersey-micrometer", version.ref = "edc" } edc-jetty-micrometer = { module = "org.eclipse.edc:jetty-micrometer", version.ref = "edc" } -edc-json-ld = { module = "org.eclipse.edc:json-ld", version.ref = "edc" } +edc-json-ld-lib = { module = "org.eclipse.edc:json-ld-lib", version.ref = "edc" } +edc-json-ld-spi = { module = "org.eclipse.edc:json-ld-spi", version.ref = "edc" } edc-junit = { module = "org.eclipse.edc:junit", version.ref = "edc" } edc-management-api = { module = "org.eclipse.edc:management-api", version.ref = "edc" } +edc-management-api-test-fixtures = { module = "org.eclipse.edc:management-api-test-fixtures", version.ref = "edc" } edc-micrometer-core = { module = "org.eclipse.edc:micrometer-core", version.ref = "edc" } edc-monitor-jdk-logger = { module = "org.eclipse.edc:monitor-jdk-logger", version.ref = "edc" } -edc-provision-aws-s3 = { module = "org.eclipse.edc:provision-aws-s3", version.ref = "edc" } +edc-provision-aws-s3 = { module = "org.eclipse.edc.aws:provision-aws-s3", version.ref = "edc" } edc-runtime-metamodel = { module = "org.eclipse.edc:runtime-metamodel", version.ref = "edc" } -edc-transfer-data-plane = { module = "org.eclipse.edc:transfer-data-plane", version.ref = "edc" } +edc-transfer-data-plane-signaling = { module = "org.eclipse.edc:transfer-data-plane-signaling", version.ref = "edc" } edc-transfer-process-api = { module = "org.eclipse.edc:transfer-process-api", version.ref = "edc" } -edc-transfer-pull-http-receiver = { module = "org.eclipse.edc:transfer-pull-http-receiver", version.ref = "edc" } +edc-transfer-pull-http-receiver = { module = "org.eclipse.edc:transfer-pull-http-dynamic-receiver", version.ref = "edc" } edc-transfer-pull-http-dynamic-receiver = { module = "org.eclipse.edc:transfer-pull-http-dynamic-receiver", version.ref = "edc" } edc-util = { module = "org.eclipse.edc:util", version.ref = "edc" } -edc-vault-azure = { module = "org.eclipse.edc:vault-azure", version.ref = "edc" } -edc-vault-filesystem = { module = "org.eclipse.edc:vault-filesystem", version.ref = "edc" } +edc-vault-azure = { module = "org.eclipse.edc.azure:vault-azure", version.ref = "edc" } +edc-validator-data-address-http-data = { module = "org.eclipse.edc:validator-data-address-http-data", version.ref = "edc" } jakarta-rsApi = { module = "jakarta.ws.rs:jakarta.ws.rs-api", version.ref = "rsApi" } jakartaJson = { module = "org.glassfish:jakarta.json", version.ref = "jakarta-json" } junit-jupiter-api = { module = 
"org.junit.jupiter:junit-jupiter-api", version.ref = "jupiter" } @@ -67,14 +74,15 @@ junit-jupiter-engine = { module = "org.junit.jupiter:junit-jupiter-engine", vers junit-jupiter-params = { module = "org.junit.jupiter:junit-jupiter-params", version.ref = "jupiter" } junit-pioneer = { module = "org.junit-pioneer:junit-pioneer", version.ref = "junit-pioneer" } okhttp-mockwebserver = { module = "com.squareup.okhttp3:mockwebserver", version.ref = "okhttp-mockwebserver" } -opentelemetry-annotations = { module = "io.opentelemetry:opentelemetry-extension-annotations", version.ref = "openTelemetry" } +opentelemetry-annotations = { module = "io.opentelemetry:opentelemetry-extension-annotations", version = "1.18.0" } +opentelemetry-exporter-otlp = { module = "io.opentelemetry:opentelemetry-exporter-otlp", version = "1.39.0" } +opentelemetry-javaagent = { module = "io.opentelemetry.javaagent:opentelemetry-javaagent", version = "2.5.0" } restAssured = { module = "io.rest-assured:rest-assured", version.ref = "restAssured" } testcontainers = { module = "org.testcontainers:testcontainers", version.ref = "testcontainers" } testcontainers-junit-jupiter = { module = "org.testcontainers:junit-jupiter", version.ref = "testcontainers" } kafka-clients = { module = "org.apache.kafka:kafka-clients", version.ref = "kafkaClients" } testcontainers-kafka = { module = "org.testcontainers:kafka", version.ref = "testcontainers" } testcontainers-junit = { module = "org.testcontainers:junit-jupiter", version.ref = "testcontainers" } -opentelemetry = "io.opentelemetry.javaagent:opentelemetry-javaagent:2.0.0" [plugins] shadow = { id = "com.github.johnrengelman.shadow", version = "8.1.1" } diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index da1db5f0..17655d0e 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/policy/README.md b/policy/README.md new file mode 100644 index 00000000..d9853f7c --- /dev/null +++ b/policy/README.md @@ -0,0 +1,12 @@ +# Policy samples + +The samples in this scope revolve around the topic of policies and policy evaluation/enforcement. + +> Before starting with these samples, be sure to check out the [basic samples](../basic/README.md)! + +## Samples + +### [Policy sample 01](./policy-01-policy-enforcement/README.md): Policy enforcement + +This sample will teach you the very basics of policy enforcement. It shows the necessary configurations to enable +policy evaluation for certain rules as well as an example on how to enforce a policy with a constraint. diff --git a/policy/policy-01-policy-enforcement/README.md b/policy/policy-01-policy-enforcement/README.md new file mode 100644 index 00000000..cdde3832 --- /dev/null +++ b/policy/policy-01-policy-enforcement/README.md @@ -0,0 +1,405 @@ +# Policy enforcement + +In this sample we'll learn how to enable policy enforcement. As the EDC are a framework, they do not provide any +evaluation out-of-the-box, but instead provide an evaluation system that can be easily configured to suit custom needs. +We'll perform the necessary configurations and implement and register a function for evaluating a policy. 
+
+We will set up two connectors, a provider and a consumer, and let the provider offer an asset with a policy that
+imposes a location restriction. So depending on the consumer's location, the consumer will be able to negotiate a
+contract for requesting the asset or not. The sample consists of multiple modules:
+
+* `policy-functions`: creates the provider's offer and provides the function for policy enforcement
+* `[policy-enforcement-provider|consumer]`: contains the build and config files for the respective connector
+
+## Creating the policy functions extension
+
+In this extension, we'll implement and register a function to evaluate the location-restricted policy we will create
+later.
+
+### Creating rule bindings
+
+In this sample, the provider will offer an asset with a policy that imposes a constraint, but if we were to run the
+sample now, we would not see any policy evaluation happening. This is because the EDC do not regard any rules or
+constraints for evaluation unless we configure it. The EDC use the concept of *policy scopes* to define which rules
+and constraints should be evaluated in certain runtime contexts, as some rules or constraints may only make sense in
+some contexts, but not in others. A simple example is a rule that states *data must be anonymized*. Evaluating this
+during the contract negotiation would not make much sense, as at this point in time no data is being exchanged yet
+and therefore nothing can be anonymized. So we need to define which rules and constraints should be evaluated in which
+scopes. This is done by creating *rule bindings* at the `RuleBindingRegistry`. For our example, we create the following
+rule bindings:
+
+```java
+ruleBindingRegistry.bind("use", ALL_SCOPES);
+ruleBindingRegistry.bind(LOCATION_CONSTRAINT_KEY, NEGOTIATION_SCOPE);
+```
+
+When creating a rule binding, we can bind an action type or constraint to either all scopes or just a specific one.
+Here, we bind the action type `use` to all scopes, so that rules with this action type are always evaluated. For the
+location constraint we choose the negotiation scope, meaning it will only be evaluated during the contract negotiation.
+Information on available scopes can be found
+[here](https://github.com/eclipse-edc/Connector/blob/main/docs/developer/policy-engine.md).
+
+### Implementing the function for evaluation
+
+With the rule bindings in place, the provider will now try to evaluate our policy including the constraint during a
+contract negotiation, but it does not yet know *how* to evaluate this constraint. For this, we need to implement a
+function, for which the EDC offer two interfaces: `AtomicConstraintFunction` and `RuleFunction`. The former is meant
+for evaluating a single constraint of a rule, while the latter is meant for evaluating a complete rule node (including
+constraints as well as duties that may be associated with a permission). For our example, we choose to implement an
+`AtomicConstraintFunction`, as we want to evaluate our location constraint:
+
+```java
+public class LocationConstraintFunction implements AtomicConstraintFunction<Permission> {
+
+    //...
+ + @Override + public boolean evaluate(Operator operator, Object rightValue, Permission rule, PolicyContext context) { + var region = context.getContextData(ParticipantAgent.class).getClaims().get("region"); + + monitor.info(format("Evaluating constraint: location %s %s", operator, rightValue.toString())); + + return switch (operator) { + case EQ -> Objects.equals(region, rightValue); + case NEQ -> !Objects.equals(region, rightValue); + case IN -> ((Collection) rightValue).contains(region); + default -> false; + }; + } +} +``` + +When implementing either of the function interfaces, we have to override the `evaluate` method. For the +`AtomicConstraintFunction` we get the constraint's operator and right value as well as the containing rule node and +a `PolicyContext` as parameters. Using these, we have to determine whether the constraint is fulfilled. Since we want +to check the requesting participant's location, we need to access information about the participant. This is supplied +through the `PolicyContext`. We get the participant's claim with key *region* to obtain information about the +participant's location. We can then compare the location to the expected value depending on the operator used. The +function should return true, if the constraint is fulfilled, and false otherwise. + +**Note**: we can use the *region* claim here because our connectors use the `iam-mock` extension, which always adds +a claim with this exact name to all tokens. Depending on the identity provider used, different claims may be present, +or the same claim may have a different name. + +### Registering the function with the policy engine + +After creating our function for evaluation, the last thing we need to do is register this function at the +`PolicyEngine`, so that it is available for evaluation: + +```java +policyEngine.registerFunction(NEGOTIATION_SCOPE, Permission.class, LOCATION_CONSTRAINT_KEY, new LocationConstraintFunction(monitor)); +``` + +When registering the function, we again have to specify a scope. This allows for evaluating the same rule or +constraint differently in different runtime contexts. Since we bound our constraint to the negotiation scope, we also +register our function for that scope. Next, we need to specify the type of rule our function should be used for. This +is important, as the same constraint may have different implications as part of a permission, prohibition or duty. +When registering an `AtomicConstraintFunction`, we also have to specify a key that the function is associated with. +This has to resolve to exactly the constraint's left operand, so that the correct function for evaluation of a +constraint can be chosen depending on its left operand. So we set the key to the same value we used as our constraint's +left operand. And lastly, we hand over an instance of our function. + +Now, during a contract negotiation, our provider will evaluate our constraint by calling our function's `evaluate` +method. + +## Configuring the connectors + +Next, let's configure the two connectors. For each connector we need a build file and a configuration file. 
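+
+As a reminder of what the `policy-functions` module contributes before we configure the connectors that use it: the
+rule bindings and the function registration shown above typically sit together in one `ServiceExtension` (the
+`PolicyFunctionsExtension` referenced later in this document). The following is only a condensed sketch of that
+wiring, not the module's actual source; the constant values and the package name are assumptions based on this README:
+
+```java
+package org.eclipse.edc.sample.extension.policy;
+
+import org.eclipse.edc.policy.engine.spi.PolicyEngine;
+import org.eclipse.edc.policy.engine.spi.RuleBindingRegistry;
+import org.eclipse.edc.policy.model.Permission;
+import org.eclipse.edc.runtime.metamodel.annotation.Inject;
+import org.eclipse.edc.spi.system.ServiceExtension;
+import org.eclipse.edc.spi.system.ServiceExtensionContext;
+
+public class PolicyFunctionsExtension implements ServiceExtension {
+
+    // left operand of the location constraint (see the catalog output further below)
+    private static final String LOCATION_CONSTRAINT_KEY = "location";
+    // scope id assumed here; the sample imports this constant from the EDC contract SPI
+    private static final String NEGOTIATION_SCOPE = "contract.negotiation";
+
+    @Inject
+    private RuleBindingRegistry ruleBindingRegistry;
+
+    @Inject
+    private PolicyEngine policyEngine;
+
+    @Override
+    public void initialize(ServiceExtensionContext context) {
+        var monitor = context.getMonitor();
+
+        // evaluate the "use" action in all scopes, the location constraint only during contract negotiation
+        ruleBindingRegistry.bind("use", PolicyEngine.ALL_SCOPES);
+        ruleBindingRegistry.bind(LOCATION_CONSTRAINT_KEY, NEGOTIATION_SCOPE);
+
+        // make the evaluation function available for permissions carrying the location constraint
+        policyEngine.registerFunction(NEGOTIATION_SCOPE, Permission.class, LOCATION_CONSTRAINT_KEY,
+                new LocationConstraintFunction(monitor));
+    }
+}
+```
+
+As with any EDC extension, such a class is registered through a
+`META-INF/services/org.eclipse.edc.spi.system.ServiceExtension` file in the module's resources.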
+ +### Build files + +In the build file, we define the following dependencies for both connectors: + +* `libs.edc.control.plane.core`: the core module for the control-plane +* `libs.edc.configuration.filesystem`: enables configuration via a properties file +* `libs.edc.management.api`: provides the API for interacting with the control-plane +* `libs.edc.dsp`: enables connector-to-connector communication via the Dataspace Protocol +* `libs.edc.iam.mock`: mocks an identity provider + +**Note**: we do not include any `data-plane` modules, as we are not going to transfer any data in this sample. To +be able to actually transfer data, additional dependencies are required. More information can be found in the +[documentation](https://github.com/eclipse-edc/Connector/blob/main/docs/developer/build-your-own-connector.md) and in +the [transfer samples](../../transfer/README.md). + +#### Provider + +For the provider, we also add a dependency on our previously created `policy-functions` extension, so that it is able +to enforce a policy rule with a location constraint. + +### Configuration files + +We create the `config.properties` files for both provider and consumer and first define their API bindings. We then +define the DSP callback addresses, which are required for callback during the contract negotiation, as well as their +participant IDs. + +#### Consumer + +For the consumer we also add the following property: + +```properties +edc.mock.region=us +``` + +This defines the value for the consumer's region claim issued by the mock identity provider, which we use for evaluating +the consumer's location. + +## Running the sample + +Now, let's run the sample step by step. + +### 1. Run connectors + +First, we need to build and start both our connectors. Execute the following commands from the project root in two +separate terminal windows (one per connector): + +Provider: +```bash +./gradlew policy:policy-01-policy-enforcement:policy-enforcement-provider:build +java -Dedc.fs.config=policy/policy-01-policy-enforcement/policy-enforcement-provider/config.properties -jar policy/policy-01-policy-enforcement/policy-enforcement-provider/build/libs/provider.jar +``` + +Consumer: +```bash +./gradlew policy:policy-01-policy-enforcement:policy-enforcement-consumer:build +java -Dedc.fs.config=policy/policy-01-policy-enforcement/policy-enforcement-consumer/config.properties -jar policy/policy-01-policy-enforcement/policy-enforcement-consumer/build/libs/consumer.jar +``` + +### 2. Create the provider's offer + +In order for the provider to offer any data, we need to create 3 things: an `Asset` (= what data should be offered), +a `PolicyDefinition` (= under which conditions should data be offered), and a `ContractDefinition`, that links the +`Asset` and `PolicyDefinition`. + +#### 2.1 Create the asset + +We create an `Asset` with a `DataAddress` of type *test*. This asset will **not** work for a data transfer, +as *test* is not an actual transfer type. But, as we're not going to transfer any data in this sample, this is +sufficient for our example. You can view the request body for creating the asset in +[create-asset.json](./resources/create-asset.json). Run the following command to create the asset: + +```bash +curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" \ + -d @policy/policy-01-policy-enforcement/resources/create-asset.json \ + "http://localhost:19193/management/v3/assets" | jq +``` + +#### 2.2 Create the policy definition + +Next. 
we'll create the `PolicyDefinition`, which contains a `Policy` and an ID. Each `Policy` needs to contain +at least one rule describing which actions are allowed, disallowed or required to perform. Each rule can optionally +contain a set of constraints that further refine the actions. For more information on the policy model take a look at +the [documentation](https://github.com/eclipse-edc/Connector/blob/main/docs/developer/architecture/usage-control/policies.md) +or the [policy section in the developer handbook](https://github.com/eclipse-edc/docs/blob/main/developer/handbook.md#policies). + +For our example, we create a `Permission` with action type `use`, as we want to allow the usage of our offered data. +But we only want to allow the usage under the condition that the requesting participant is in a certain location, +therefore we add a constraint to our permission. In that constraint we state that the participant's location has to +be equal to `eu`. You can view the request body for creating the policy definition in +[create-policy.json](./resources/create-policy.json). Run the following command to create the policy definition: + +```bash +curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" \ + -d @policy/policy-01-policy-enforcement/resources/create-policy.json \ + "http://localhost:19193/management/v3/policydefinitions" | jq +``` + +#### 2.3 Create the contract definition + +The last thing we create is a `ContractDefinition`, that references the previously created +policy definition and asset. We will set the policy both as the access and the contract policy in the contract +definition. To read up on the difference between the two, check out the +[developer handbook](https://github.com/eclipse-edc/docs/blob/main/developer/handbook.md#contract-definitions). +You can view the request body for creating the contract definition in +[create-contract-definition.json](./resources/create-contract-definition.json) Run the following command to create +the contract definition: + +```bash +curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" \ + -d @policy/policy-01-policy-enforcement/resources/create-contract-definition.json \ + "http://localhost:19193/management/v3/contractdefinitions" | jq +``` + +With this, the provider now offers the asset under the condition that the requesting participant is located in the EU. + +### 3. Make a catalog request + +After starting both connectors, we'll first make a catalog request from the consumer to the provider to see the +provider's offers. For this, we'll use an endpoint of the consumer's management API, specifying the provider's address +in the request. The request body is prepared in [catalog-request.json](resources/catalog-request.json). + +```bash +curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" \ + -d @policy/policy-01-policy-enforcement/resources/catalog-request.json \ + "http://localhost:29193/management/v3/catalog/request" | jq +``` + +We'll receive the following catalog in the response, where we can see the offer created in the provider's extension. 
+ +```json +{ + "@id": "4462b621-fb77-4e8c-91a5-8cbd85b967c2", + "@type": "dcat:Catalog", + "dcat:dataset": { + "@id": "test-document", + "@type": "dcat:Dataset", + "odrl:hasPolicy": { + "@id": "MQ==:dGVzdC1kb2N1bWVudA==:NjUzNTA5M2QtYTFjMi00YTRmLWE5NjYtYTM0ZjE2NjFjOTYy", + "@type": "odrl:Set", + "odrl:permission": { + "odrl:target": "test-document", + "odrl:action": { + "odrl:type": "use" + }, + "odrl:constraint": { + "odrl:leftOperand": "location", + "odrl:operator": { + "@id": "odrl:eq" + }, + "odrl:rightOperand": "eu" + } + }, + "odrl:prohibition": [], + "odrl:obligation": [], + "odrl:target": { + "@id": "test-document" + } + }, + "dcat:distribution": [], + "id": "test-document" + }, + "dcat:service": { + "@id": "fe9581ee-b4ec-473c-b0b7-96f30d957e87", + "@type": "dcat:DataService", + "dct:terms": "connector", + "dct:endpointUrl": "http://localhost:8282/protocol" + }, + "participantId": "provider", + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/", + "edc": "https://w3id.org/edc/v0.0.1/ns/", + "dcat": "https://www.w3.org/ns/dcat/", + "dct": "https://purl.org/dc/terms/", + "odrl": "http://www.w3.org/ns/odrl/2/", + "dspace": "https://w3id.org/dspace/v0.8/" + } +} + +``` + +But why are we able to see this offer, even though we set the location restricted policy as the access policy and our +consumer is not in the EU? While we did set the restricted policy as the access policy, we only bound the constraint to +the `NEGOTIATION_SCOPE` using the `RuleBindingRegistry`, meaning it will not be regarded for evaluations in the +cataloging phase. + +We can now use the offer details received in the catalog to start a contract negotiation with the provider. + +### 4. Start a contract negotiation + +To start the contract negotiation between provider and consumer, we'll use an endpoint of the consumer's management API. +In the request body for this request, we need to provide information about which connector we want to negotiate with, +which protocol to use and which offer we want to negotiate. The request body is prepared in +[contractoffer.json](resources/contract-request.json). To start the negotiation, run the following command: + +```bash +curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" \ + -d @policy/policy-01-policy-enforcement/resources/contract-request.json \ + "http://localhost:29193/management/v3/contractnegotiations" | jq +``` + +You'll get back a UUID. This is the ID of the contract negotiation process which is being asynchronously executed +in the background. + +### 5. Get the contract negotiation state + +Using the ID received in the previous step, we can now view the state of the negotiation by calling another endpoint +of the consumer's management API: + +```bash +curl -X GET -H "X-Api-Key: password" "http://localhost:29193/management/v3/contractnegotiations/" | jq +``` + +In the response we'll get a description of the negotiation, similar to the following: + +```json +{ + ... + "edc:contractAgreementId": null, + "edc:state": "TERMINATED", + ... +} +``` + +We can see that the negotiation has been declined, and we did not receive a contract agreement. If we now take a look +at the provider's logs, we'll see the following lines: + +```bash +INFO 2024-02-12T11:07:32.954014912 Evaluating constraint: location EQ eu +DEBUG 2024-02-12T11:07:32.9562391 [Provider] Contract offer rejected as invalid: Policy eu-policy not fulfilled +``` + +The consumer was not able to get a contract agreement, because it does not fulfil the location-restricted policy. 
This
+means we have successfully implemented and configured the policy evaluation on provider side. Building up on this
+example, you can now tackle more complex policies, by e.g. defining and combining different constraints and creating
+the respective functions for evaluation.
+
+## Sample variations
+
+You can play around with this sample a bit and run it in different variations, yielding different outcomes. Some
+possible variations are described in the following.
+
+**Note: the following variations do not build up on each other, so make sure to revert any changes done for one
+variation before proceeding with the next!**
+
+### Set consumer region to `eu`
+
+Our policy requires the consumer to be in the EU. Change the property `edc.mock.region` in the consumer's
+`config.properties` to the value `eu` and run the sample again. This time, the negotiation will reach the state
+`FINALIZED` and reference a contract agreement, as our consumer now fulfils the policy.
+
+```properties
+edc.mock.region=eu
+```
+
+### Remove binding of constraint
+
+In our `PolicyFunctionsExtension`, we've created a rule binding so that our constraint would be evaluated during the
+contract negotiation. Remove this binding and run the sample again (while leaving the consumer's property
+`edc.mock.region` with value `us`!). The negotiation will be confirmed and reference a contract agreement, even though
+our consumer is not in the correct location. This happens, as without the binding of the constraint, the provider
+will not regard it during evaluation.
+
+```java
+ruleBindingRegistry.bind("use", ALL_SCOPES);
+//ruleBindingRegistry.bind(LOCATION_CONSTRAINT_KEY, NEGOTIATION_SCOPE);
+```
+
+### Remove binding of action type
+
+In our `PolicyFunctionsExtension`, we've created rule bindings for our permission's action type as well as the
+constraint. In the previous variation, we've removed the binding for the constraint. For this variation, we want to
+leave the binding for the constraint in place, and instead remove the binding for the action type. Run the sample again
+(while leaving the consumer's property `edc.mock.region` with value `us`!) and you will see the negotiation being
+confirmed. Even though the constraint is bound to be evaluated and the consumer does not fulfil it, the constraint
+is not evaluated and our function is never called. This happens because there is no rule binding for the permission
+containing the constraint, and thus the whole permission node is disregarded during evaluation.
+
+```java
+//ruleBindingRegistry.bind("use", ALL_SCOPES);
+ruleBindingRegistry.bind(LOCATION_CONSTRAINT_KEY, NEGOTIATION_SCOPE);
+```
+
+### Bind the constraint to the cataloging scope
+
+In our example, we've bound the constraint to the `NEGOTIATION_SCOPE`. Let's remove this binding and instead bind the
+constraint as well as our function to the `CATALOGING_SCOPE` and rebuild the provider. When running the sample again,
+you will not see the offer in the provider's catalog anymore. As the constraint is now evaluated during cataloging, the
+offer is filtered out because our consumer does not fulfil the location constraint. Since the request body for the
+negotiation is already prepared, you can still try to initiate a negotiation. Even though the constraint is not bound
+to the negotiation scope anymore, the negotiation will be terminated.
When receiving a request for a negotiation, +the provider will still evaluate its contract definitions' access policies using the catalog scope, to ensure that +a consumer cannot negotiate an offer it is not allowed to see. + +```java +ruleBindingRegistry.bind(LOCATION_CONSTRAINT_KEY, CATALOGING_SCOPE); +policyEngine.registerFunction(CATALOGING_SCOPE, Permission.class, LOCATION_CONSTRAINT_KEY, new LocationConstraintFunction(monitor)); +``` diff --git a/policy/policy-01-policy-enforcement/policy-enforcement-consumer/build.gradle.kts b/policy/policy-01-policy-enforcement/policy-enforcement-consumer/build.gradle.kts new file mode 100644 index 00000000..4eeb461a --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-enforcement-consumer/build.gradle.kts @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +plugins { + `java-library` + id("application") + alias(libs.plugins.shadow) +} + +dependencies { + implementation(libs.edc.connector.core) + implementation(libs.edc.control.plane.core) + implementation(libs.edc.configuration.filesystem) + implementation(libs.edc.management.api) + implementation(libs.edc.dsp) + implementation(libs.edc.iam.mock) + implementation(libs.edc.http) +} + +application { + mainClass.set("org.eclipse.edc.boot.system.runtime.BaseRuntime") +} + +tasks.withType { + mergeServiceFiles() + archiveFileName.set("consumer.jar") +} diff --git a/policy/policy-01-policy-enforcement/policy-enforcement-consumer/config.properties b/policy/policy-01-policy-enforcement/policy-enforcement-consumer/config.properties new file mode 100644 index 00000000..f1f61b84 --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-enforcement-consumer/config.properties @@ -0,0 +1,12 @@ +web.http.port=29191 +web.http.path=/api +web.http.management.port=29193 +web.http.management.path=/management +web.http.protocol.port=29194 +web.http.protocol.path=/protocol + +edc.api.auth.key=password +edc.dsp.callback.address=http://localhost:29194/protocol +edc.participant.id=consumer +edc.ids.id=urn:connector:consumer +edc.mock.region=us diff --git a/policy/policy-01-policy-enforcement/policy-enforcement-provider/build.gradle.kts b/policy/policy-01-policy-enforcement/policy-enforcement-provider/build.gradle.kts new file mode 100644 index 00000000..4e750627 --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-enforcement-provider/build.gradle.kts @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2024 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +plugins { + `java-library` + id("application") + alias(libs.plugins.shadow) +} + + +dependencies { + implementation(libs.edc.connector.core) + implementation(libs.edc.control.plane.core) + implementation(libs.edc.configuration.filesystem) + 
implementation(libs.edc.management.api) + implementation(libs.edc.dsp) + implementation(libs.edc.iam.mock) + implementation(libs.edc.http) + + implementation(project(":policy:policy-01-policy-enforcement:policy-functions")) +} + +application { + mainClass.set("org.eclipse.edc.boot.system.runtime.BaseRuntime") +} + +tasks.withType { + mergeServiceFiles() + archiveFileName.set("provider.jar") +} diff --git a/policy/policy-01-policy-enforcement/policy-enforcement-provider/config.properties b/policy/policy-01-policy-enforcement/policy-enforcement-provider/config.properties new file mode 100644 index 00000000..dfb79d74 --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-enforcement-provider/config.properties @@ -0,0 +1,11 @@ +web.http.port=19191 +web.http.path=/api +web.http.management.port=19193 +web.http.management.path=/management +web.http.protocol.port=19194 +web.http.protocol.path=/protocol + +edc.api.auth.key=password +edc.dsp.callback.address=http://localhost:19194/protocol +edc.participant.id=provider +edc.ids.id=urn:connector:provider diff --git a/policy/policy-01-policy-enforcement/policy-functions/build.gradle.kts b/policy/policy-01-policy-enforcement/policy-functions/build.gradle.kts new file mode 100644 index 00000000..3010cd8d --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-functions/build.gradle.kts @@ -0,0 +1,12 @@ +plugins { + `java-library` + id("application") +} + +dependencies { + api(libs.edc.data.plane.spi) + api(libs.edc.json.ld.spi) + + implementation(libs.edc.control.plane.core) + +} diff --git a/policy/policy-01-policy-enforcement/policy-functions/src/main/java/org/eclipse/edc/sample/extension/policy/LocationConstraintFunction.java b/policy/policy-01-policy-enforcement/policy-functions/src/main/java/org/eclipse/edc/sample/extension/policy/LocationConstraintFunction.java new file mode 100644 index 00000000..bd65ae0a --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-functions/src/main/java/org/eclipse/edc/sample/extension/policy/LocationConstraintFunction.java @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.policy; + +import org.eclipse.edc.policy.engine.spi.AtomicConstraintFunction; +import org.eclipse.edc.policy.engine.spi.PolicyContext; +import org.eclipse.edc.policy.model.Operator; +import org.eclipse.edc.policy.model.Permission; +import org.eclipse.edc.spi.agent.ParticipantAgent; +import org.eclipse.edc.spi.monitor.Monitor; + +import java.util.Collection; +import java.util.Objects; + +import static java.lang.String.format; + +public class LocationConstraintFunction implements AtomicConstraintFunction { + + private final Monitor monitor; + + public LocationConstraintFunction(Monitor monitor) { + this.monitor = monitor; + } + + @Override + public boolean evaluate(Operator operator, Object rightValue, Permission rule, PolicyContext context) { + var region = context.getContextData(ParticipantAgent.class).getClaims().get("region"); + + monitor.info(format("Evaluating constraint: location %s %s", operator, rightValue.toString())); + + return switch (operator) { + case EQ -> 
Objects.equals(region, rightValue); + case NEQ -> !Objects.equals(region, rightValue); + case IN -> ((Collection) rightValue).contains(region); + default -> false; + }; + } +} \ No newline at end of file diff --git a/policy/policy-01-policy-enforcement/policy-functions/src/main/java/org/eclipse/edc/sample/extension/policy/PolicyFunctionsExtension.java b/policy/policy-01-policy-enforcement/policy-functions/src/main/java/org/eclipse/edc/sample/extension/policy/PolicyFunctionsExtension.java new file mode 100644 index 00000000..0d1018d7 --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-functions/src/main/java/org/eclipse/edc/sample/extension/policy/PolicyFunctionsExtension.java @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.policy; + +import org.eclipse.edc.policy.engine.spi.PolicyEngine; +import org.eclipse.edc.policy.engine.spi.RuleBindingRegistry; +import org.eclipse.edc.policy.model.Permission; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; + +import static org.eclipse.edc.connector.controlplane.contract.spi.validation.ContractValidationService.NEGOTIATION_SCOPE; +import static org.eclipse.edc.jsonld.spi.PropertyAndTypeNames.ODRL_USE_ACTION_ATTRIBUTE; +import static org.eclipse.edc.policy.engine.spi.PolicyEngine.ALL_SCOPES; +import static org.eclipse.edc.spi.constants.CoreConstants.EDC_NAMESPACE; + +public class PolicyFunctionsExtension implements ServiceExtension { + private static final String LOCATION_CONSTRAINT_KEY = EDC_NAMESPACE + "location"; + + @Inject + private RuleBindingRegistry ruleBindingRegistry; + @Inject + private PolicyEngine policyEngine; + + @Override + public String name() { + return "Sample policy functions"; + } + + @Override + public void initialize(ServiceExtensionContext context) { + var monitor = context.getMonitor(); + + ruleBindingRegistry.bind(ODRL_USE_ACTION_ATTRIBUTE, ALL_SCOPES); + ruleBindingRegistry.bind(LOCATION_CONSTRAINT_KEY, NEGOTIATION_SCOPE); + policyEngine.registerFunction(ALL_SCOPES, Permission.class, LOCATION_CONSTRAINT_KEY, new LocationConstraintFunction(monitor)); + } +} diff --git a/policy/policy-01-policy-enforcement/policy-functions/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/policy/policy-01-policy-enforcement/policy-functions/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension new file mode 100644 index 00000000..1b26f404 --- /dev/null +++ b/policy/policy-01-policy-enforcement/policy-functions/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -0,0 +1 @@ +org.eclipse.edc.sample.extension.policy.PolicyFunctionsExtension \ No newline at end of file diff --git a/policy/policy-01-policy-enforcement/resources/catalog-request.json b/policy/policy-01-policy-enforcement/resources/catalog-request.json new file mode 100644 index 00000000..370b4f2f --- /dev/null +++ b/policy/policy-01-policy-enforcement/resources/catalog-request.json @@ -0,0 +1,7 @@ +{ + 
"@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "counterPartyAddress": "http://localhost:19194/protocol", + "protocol": "dataspace-protocol-http" +} diff --git a/policy/policy-01-policy-enforcement/resources/contract-request.json b/policy/policy-01-policy-enforcement/resources/contract-request.json new file mode 100644 index 00000000..e254def0 --- /dev/null +++ b/policy/policy-01-policy-enforcement/resources/contract-request.json @@ -0,0 +1,27 @@ +{ + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "@type": "ContractRequest", + "counterPartyAddress": "http://localhost:19194/protocol", + "protocol": "dataspace-protocol-http", + "policy": { + "@context": "http://www.w3.org/ns/odrl.jsonld", + "@id": "1:test-document:13dce0f1-52ed-4554-a194-e83e92733ee5", + "@type": "Offer", + "permission": [ + { + "action": "use", + "target": "test-document", + "constraint": { + "@type": "AtomicConstraint", + "leftOperand": "location", + "operator": "eq", + "rightOperand": "eu" + } + } + ], + "assigner": "provider", + "target": "test-document" + } +} diff --git a/policy/policy-01-policy-enforcement/resources/create-asset.json b/policy/policy-01-policy-enforcement/resources/create-asset.json new file mode 100644 index 00000000..b34f9042 --- /dev/null +++ b/policy/policy-01-policy-enforcement/resources/create-asset.json @@ -0,0 +1,13 @@ +{ + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "@id": "test-document", + "properties": { + "name": "product description", + "contenttype": "application/json" + }, + "dataAddress": { + "type": "test" + } +} \ No newline at end of file diff --git a/policy/policy-01-policy-enforcement/resources/create-contract-definition.json b/policy/policy-01-policy-enforcement/resources/create-contract-definition.json new file mode 100644 index 00000000..16d2d4d3 --- /dev/null +++ b/policy/policy-01-policy-enforcement/resources/create-contract-definition.json @@ -0,0 +1,14 @@ +{ + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "@id": "1", + "accessPolicyId": "eu-policy", + "contractPolicyId": "eu-policy", + "assetsSelector": [{ + "@type": "CriterionDto", + "operandLeft": "https://w3id.org/edc/v0.0.1/ns/id", + "operator": "=", + "operandRight": "test-document" + }] +} diff --git a/policy/policy-01-policy-enforcement/resources/create-policy.json b/policy/policy-01-policy-enforcement/resources/create-policy.json new file mode 100644 index 00000000..b31e2801 --- /dev/null +++ b/policy/policy-01-policy-enforcement/resources/create-policy.json @@ -0,0 +1,25 @@ +{ + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "@id": "eu-policy", + "policy": { + "@context": "http://www.w3.org/ns/odrl.jsonld", + "@type": "Set", + "permission": [ + { + "action": "use", + "constraint": { + "@type": "AtomicConstraint", + "leftOperand": "location", + "operator": { + "@id": "odrl:eq" + }, + "rightOperand": "eu" + } + } + ], + "prohibition": [], + "obligation": [] + } +} diff --git a/policy/policy-02-provision/README.md b/policy/policy-02-provision/README.md new file mode 100644 index 00000000..22989fbe --- /dev/null +++ b/policy/policy-02-provision/README.md @@ -0,0 +1,211 @@ +# Create a policy for provisioning + +Now that we know how to transfer a file between two connectors and how to write policies, in this step we will see how we +can use policies to modify the supporting data transfer infrastructure. We will be regulating the file destination in a file transfer process. 
We will use a policy
+defined in the [`provision.manifest.verify`](https://eclipse-edc.github.io/docs/#/submodule/Connector/docs/developer/policy-engine?id=manifest-verification-scope-provisionmanifestverify)
+scope. This scope is used during the provisioning phase to evaluate the resource definitions of a generated resource
+manifest. Policy functions registered in this scope may modify resource definitions so that they comply with the policy.
+
+## Defining the policy for the provider
+
+First, we will create a policy definition for the provider that contains the `regulateFilePathConstraint`. We can
+define this policy the same way we did for the sample [`policy-01-policy-enforcement`](policy/policy-01-policy-enforcement).
+The contract policy is implemented as follows:
+
+```java
+// in PolicyFunctionsExtension.java
+var desiredFilePath = context.getSetting(FILE_PATH, DEFAULT_FILE_PATH);
+var regulateFilePathConstraint = AtomicConstraint.Builder.newInstance()
+        .leftExpression(new LiteralExpression(KEY))
+        .operator(Operator.EQ)
+        .rightExpression(new LiteralExpression(desiredFilePath))
+        .build();
+
+var permission = Permission.Builder.newInstance()
+        .action(Action.Builder.newInstance().type(POLICY_TYPE).build())
+        .constraint(regulateFilePathConstraint)
+        .build();
+
+return PolicyDefinition.Builder.newInstance()
+        .id("use-regulated-path")
+        .policy(Policy.Builder.newInstance()
+                .permission(permission)
+                .build())
+        .build();
+```
+
+We do not need to register any function for this policy on the provider side, because in this sample the
+`regulateFilePathConstraint` is applied to the consumer's `ResourceDefinition`. The corresponding function is therefore
+registered with the consumer's policy functions, as we will see in a moment.
+
+## Modifying the file transfer JSON
+
+In the previous file transfer example, the request body in
+[`transfer-01-file-transfer/filetransfer.json`](transfer/transfer-01-file-transfer/filetransfer.json) specified the
+file destination path and set `managedResources` to `false`. For this sample, we set `managedResources` to `true` and
+do not define a file destination path. The request body for the file transfer is prepared in
+[`filetransfer.json`](policy/policy-02-provision/filetransfer.json).
+
+```json
+{
+  ...
+  "dataDestination": {
+    "type": "File"
+  },
+  ...
+  "managedResources": true,
+  ...
+}
+```
+
+This allows the `ResourceManifestGenerator` to generate a `ResourceManifest` from a `ResourceDefinition` matching the
+destination type `File` specified in [`filetransfer.json`](policy/policy-02-provision/filetransfer.json).
+
+## Defining provisioner and resource definition generator
+
+For simplicity, we perform a local file transfer and have implemented the code required for the resource definition
+generator and the provisioner in the consumer module [`policy-provision-consumer`](policy/policy-02-provision/policy-provision-consumer).
+[`LocalConsumerResourceDefinitionGenerator`](policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalConsumerResourceDefinitionGenerator.java)
+implements `ConsumerResourceDefinitionGenerator` and generates a `ResourceDefinition` for our required file type `File`.
+ +```java +// in LocalConsumerResourceDefinitionGenerator.java + +private static final String TYPE = "File"; + +// this path will get modified during the policy evaluation to notice the change, keep the path different from the path used in policy +private static final String DESTINATION = "any path"; + +@Override +public @Nullable ResourceDefinition generate(DataRequest dataRequest, Policy policy) { + Objects.requireNonNull(dataRequest, "dataRequest must always be provided"); + Objects.requireNonNull(policy, "policy must always be provided"); + + var destination = DESTINATION; + var id = randomUUID().toString(); + + return LocalResourceDefinition.Builder.newInstance() + .id(id) + .pathName(destination) + .build(); +} +@Override +public boolean canGenerate(DataRequest dataRequest, Policy policy) { + Objects.requireNonNull(dataRequest, "dataRequest must always be provided"); + Objects.requireNonNull(policy, "policy must always be provided"); + + return TYPE.equals(dataRequest.getDestinationType()); +} +``` + +[`LocalProvisionExtension`](policy/policy-02-provision/policy-provision/src/main/java/org/eclipse/sample/extension/provision/LocalProvisionExtension.java) +generates a [`LocalResourceProvisioner`](policy/policy-02-provision/policy-provision/src/main/java/org/eclipse/sample/extension/provision/LocalResourceProvisioner.java) +which is our required type of provisioner for local resources. + + +## Creating and registering the policy function for consumer + +Now, as we are willing to modify the data destination according to our policy, we have to define a policy that will +be evaluated in [`provision.menifest.verify`](https://eclipse-edc.github.io/docs/#/submodule/Connector/docs/developer/policy-engine?id=manifest-verification-scope-provisionmanifestverify) +scope. + +As the data destination address is defined in consumer ResourceManifest, we have to write a policy function that will +be used by consumer connector. [`policy-provision-consumer-policy-functions`](policy/policy-02-provision/policy-provision-consumer-policy-functions) +module includes the policy functions for consumer connector. + + +The [`RegulateFilePathFunction `](policy/policy-02-provision/policy-provision-consumer-policy-functions/src/main/java/org/eclipse/sample/extension/provision/consumer/policy/RegulateFilePathFunction.java) +implements the `AtomicConstraintFunction` interface, which contains a single method for evaluating a constraint. +In that method, the `operator` `EQ` and desired `pathname` in the `right value` of the constraint are used for evaluation. +In this example, we updated the `pathName` in `LocalResourceDefinition` to our desired `pathName` which was defined in our policy. + + +Next, we have to register our function with the `PolicyEngine` and bind the desired action as well as the key used to +register our function to the desired scopes using the `RuleBindingRegistry`. This is done in the +[`ConsumerPolicyFunctionsExtension`](policy/policy-02-provision/policy-provision-consumer-policy-functions/src/main/java/org/eclipse/sample/extension/provision/consumer/policy/ConsumerPolicyFunctionsExtension.java): + +```java +private final String policyRegulateFilePath = "POLICY_REGULATE_FILE_PATH"; + +//... + +@Override +public void initialize(ServiceExtensionContext context) { + //... 
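+    // The two bindings below make the "USE" action visible in all scopes and tie the
+    // POLICY_REGULATE_FILE_PATH constraint key to the provision manifest verification scope;
+    // the registered function then rewrites the generated resource definition's path.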
+ + ruleBindingRegistry.bind("USE", ALL_SCOPES); + ruleBindingRegistry.bind(policyRegulateFilePath, MANIFEST_VERIFICATION_SCOPE); + policyEngine.registerFunction(MANIFEST_VERIFICATION_SCOPE, Permission.class, policyRegulateFilePath, new RegulateFilePathFunction(monitor)); + + //... +} +``` + +Here, we do not need to define any policy, as this policy function will be used by consumer connector. + + +## How to run the sample + +Running this sample consists of the same steps done in file transfer sample. + +### Configuration + +Set the desired path address in the provider [`config.properties`](policy/policy-02-provision/policy-provision-provider/config.properties). + +```properties +edc.samples.policy-02.constraint.desired.file.path = path/to/desired/location/transfer.txt +``` + +### Run the sample + +### 1. Build and start the connectors +First, build and run the provider and consumer connector for this sample: + +Build and run the consumer connector: +```shell +./gradlew policy:policy-02-provision:policy-provision-consumer:build + +java -Dedc.fs.config=policy/policy-02-provision/policy-provision-consumer/config.properties -jar policy/policy-02-provision/policy-provision-consumer/build/libs/consumer.jar +# for windows +java -D"edc.fs.config"=policy/policy-02-provision/policy-provision-consumer/config.properties -jar policy/policy-02-provision/policy-provision-consumer/build/libs/consumer.jar +``` +In another terminal, build and run the provider connector: +```shell +./gradlew policy:policy-02-provision:policy-provision-provider:build + +java -Dedc.fs.config=policy/policy-02-provision/policy-provision-provider/config.properties -jar policy/policy-02-provision/policy-provision-provider/build/libs/provider.jar +# for windows +java -D"edc.fs.config"=policy/policy-02-provision/policy-provision-provider/config.properties -jar policy/policy-02-provision/policy-provision-provider/build/libs/provider.jar +``` + +### 2. Initiate a contract negotiation +Next, initiate a contract negotiation. The request body is prepared in [`contractoffer.json`](policy/policy-02-provision/contractoffer.json). +Then run: + +```shell +curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" -d @policy/policy-02-provision/contractoffer.json "http://localhost:9192/management/v2/contractnegotiations" +``` + +### 3. Look up the contract agreement ID + +Look up the contract agreement ID: + +```bash +curl -X GET -H 'X-Api-Key: password' "http://localhost:9192/management/v2/contractnegotiations/" +``` + +### 4. Request the file + +To request a file transfer, you need to use the [`filetransfer.json`](policy/policy-02-provision/filetransfer.json). First, locate the `contractId` field in the [`filetransfer.json`](policy/policy-02-provision/filetransfer.json) file. +Then, replace the `{{contract-agreement-id}}` placeholder with the actual contract agreement ID that you obtained from the previous step: + +```bash +curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" -d @policy/policy-02-provision/filetransfer.json "http://localhost:9192/management/v2/transferprocesses" +``` + +### 5. See transferred file + +After the file transfer is completed, we can check the destination path specified in the policy/[`config.properties`](policy/policy-02-provision/policy-provision-provider/config.properties) +for the file. Here, we'll now find a file with the same content as the original file offered by the provider. 
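+For instance, assuming the default `edc.samples.policy-02.constraint.desired.file.path` value shown above, a quick
+check could look like this:
+
+```bash
+cat path/to/desired/location/transfer.txt
+```
+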
We should notice that even though +`LocalConsumerResourceDefinitionGenerator` defined a different destination for the file, the path is getting modified according to the +policy. + +--- \ No newline at end of file diff --git a/policy/policy-02-provision/contractoffer.json b/policy/policy-02-provision/contractoffer.json new file mode 100644 index 00000000..35a91335 --- /dev/null +++ b/policy/policy-02-provision/contractoffer.json @@ -0,0 +1,31 @@ +{ + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "@type": "ContractRequest", + "counterPartyAddress": "http://localhost:8282/protocol", + "protocol": "dataspace-protocol-http", + "policy": { + "@context": "http://www.w3.org/ns/odrl.jsonld", + "@id": "MQ==:dGVzdC1kb2N1bWVudA==:YjY2YWU2OGQtNjVmMS00ODEyLTg0MzktMjNlYmZjZjY5YTdk", + "@type": "Offer", + "assigner": "provider", + "target": "test-document", + "permission": [ + { + "action": { + "@id": "USE" + }, + "constraint": { + "leftOperand": { + "@id": "POLICY_REGULATE_FILE_PATH" + }, + "operator": { + "@id": "odrl:eq" + }, + "rightOperand": "path/to/desired/location/transfer.txt" + } + } + ] + } +} \ No newline at end of file diff --git a/policy/policy-02-provision/filetransfer.json b/policy/policy-02-provision/filetransfer.json new file mode 100644 index 00000000..42bef426 --- /dev/null +++ b/policy/policy-02-provision/filetransfer.json @@ -0,0 +1,16 @@ +{ + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "@type": "DataRequest", + "protocol": "dataspace-protocol-http", + "assetId": "test-document", + "contractId": "{{contract-agreement-id}}", + "dataDestination": { + "type": "File" + }, + "transferType": "File-PUSH", + "managedResources": true, + "counterPartyAddress": "http://localhost:8282/protocol", + "connectorId": "consumer" +} \ No newline at end of file diff --git a/policy/policy-02-provision/policy-provision-consumer/build.gradle.kts b/policy/policy-02-provision/policy-provision-consumer/build.gradle.kts new file mode 100644 index 00000000..c1c056a0 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/build.gradle.kts @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ +plugins { + `java-library` + id("application") + alias(libs.plugins.shadow) +} + +dependencies { + implementation(libs.edc.control.plane.api.client) + implementation(libs.edc.control.plane.api) + implementation(libs.edc.control.plane.core) + implementation(libs.edc.dsp) + implementation(libs.edc.configuration.filesystem) + implementation(libs.edc.iam.mock) + implementation(libs.edc.management.api) + implementation(libs.edc.transfer.data.plane.signaling) + implementation(libs.edc.transfer.pull.http.receiver) + implementation(libs.edc.validator.data.address.http.data) + implementation(libs.edc.api.control.configuration) + + implementation(libs.edc.edr.cache.api) + implementation(libs.edc.edr.store.core) + implementation(libs.edc.edr.store.receiver) + + implementation(libs.edc.data.plane.selector.api) + implementation(libs.edc.data.plane.selector.core) + + implementation(libs.edc.data.plane.self.registration) + implementation(libs.edc.data.plane.control.api) + 
implementation(libs.edc.data.plane.public.api) + implementation(libs.edc.data.plane.core) + implementation(libs.edc.data.plane.http) +} + +application { + mainClass.set("$group.boot.system.runtime.BaseRuntime") +} + +tasks.withType { + exclude("**/pom.properties", "**/pom.xm") + mergeServiceFiles() + archiveFileName.set("consumer.jar") +} \ No newline at end of file diff --git a/policy/policy-02-provision/policy-provision-consumer/config.properties b/policy/policy-02-provision/policy-provision-consumer/config.properties new file mode 100644 index 00000000..d0939c9c --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/config.properties @@ -0,0 +1,15 @@ +edc.participant.id=consumer +edc.dsp.callback.address=http://localhost:9292/protocol +web.http.port=9191 +web.http.path=/api +web.http.management.port=9192 +web.http.management.path=/management +web.http.protocol.port=9292 +web.http.protocol.path=/protocol +edc.transfer.proxy.token.signer.privatekey.alias=private-key +edc.transfer.proxy.token.verifier.publickey.alias=public-key +web.http.public.port=9195 +web.http.public.path=/public +web.http.control.port=9193 +web.http.control.path=/control + diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/policy/ConsumerPolicyFunctionsExtension.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/policy/ConsumerPolicyFunctionsExtension.java new file mode 100644 index 00000000..96bfc72f --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/policy/ConsumerPolicyFunctionsExtension.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.policy; + +import org.eclipse.edc.policy.engine.spi.PolicyEngine; +import org.eclipse.edc.policy.engine.spi.RuleBindingRegistry; +import org.eclipse.edc.policy.model.Permission; +import org.eclipse.edc.runtime.metamodel.annotation.Extension; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; + +import static org.eclipse.edc.connector.controlplane.transfer.spi.provision.ResourceManifestGenerator.MANIFEST_VERIFICATION_SCOPE; +import static org.eclipse.edc.policy.engine.spi.PolicyEngine.ALL_SCOPES; + +@Extension(value = ConsumerPolicyFunctionsExtension.NAME) +public class ConsumerPolicyFunctionsExtension implements ServiceExtension { + public static final String NAME = "Consumer Policy Functions Extension"; + public static final String KEY = "POLICY_REGULATE_FILE_PATH"; + + @Inject + private Monitor monitor; + @Inject + private RuleBindingRegistry ruleBindingRegistry; + @Inject + private PolicyEngine policyEngine; + + @Override + public void initialize(ServiceExtensionContext context) { + ruleBindingRegistry.bind("USE", ALL_SCOPES); + ruleBindingRegistry.bind(KEY, MANIFEST_VERIFICATION_SCOPE); + policyEngine.registerFunction(MANIFEST_VERIFICATION_SCOPE, 
Permission.class, KEY, new RegulateFilePathFunction(monitor)); + } + + @Override + public String name() { + return NAME; + } + +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/policy/RegulateFilePathFunction.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/policy/RegulateFilePathFunction.java new file mode 100644 index 00000000..0c932673 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/policy/RegulateFilePathFunction.java @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.policy; + +import org.eclipse.edc.connector.controlplane.transfer.spi.provision.ResourceManifestContext; +import org.eclipse.edc.policy.engine.spi.AtomicConstraintFunction; +import org.eclipse.edc.policy.engine.spi.PolicyContext; +import org.eclipse.edc.policy.model.Operator; +import org.eclipse.edc.policy.model.Permission; +import org.eclipse.edc.sample.extension.provision.LocalResourceDefinition; +import org.eclipse.edc.spi.monitor.Monitor; + +import java.util.Objects; + +public class RegulateFilePathFunction implements AtomicConstraintFunction { + private final Monitor monitor; + + public RegulateFilePathFunction(Monitor monitor) { + this.monitor = monitor; + } + + @Override + public boolean evaluate(Operator operator, Object rightValue, Permission rule, PolicyContext context) { + var desiredFilePath = (String) rightValue; + + if (Objects.requireNonNull(operator) == Operator.EQ) { + var manifestContext = context.getContextData(ResourceManifestContext.class); + manifestContext.getDefinitions().stream() + .filter(definition -> definition.getClass().equals(LocalResourceDefinition.class)) + .forEach(definition -> ((LocalResourceDefinition) definition).updatePathName(desiredFilePath)); + return true; + } + + monitor.debug(String.format("Operator expected to be EQ but was %s", operator)); + return false; + } +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalConsumerResourceDefinitionGenerator.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalConsumerResourceDefinitionGenerator.java new file mode 100644 index 00000000..b32d2373 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalConsumerResourceDefinitionGenerator.java @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.provision; + +import 
org.eclipse.edc.connector.controlplane.transfer.spi.provision.ConsumerResourceDefinitionGenerator; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.ResourceDefinition; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcess; +import org.eclipse.edc.policy.model.Policy; +import org.jetbrains.annotations.Nullable; + +import java.util.Objects; + +import static java.util.UUID.randomUUID; + +public class LocalConsumerResourceDefinitionGenerator implements ConsumerResourceDefinitionGenerator { + + private static final String TYPE = "File"; + /** + * This will get modified during the policy evaluation to notice the change, keep the path different from the path used in policy + */ + private static final String DESTINATION = "any path"; + + @Override + public @Nullable ResourceDefinition generate(TransferProcess transferProcess, Policy policy) { + Objects.requireNonNull(transferProcess, "transferProcess must always be provided"); + Objects.requireNonNull(policy, "policy must always be provided"); + + return LocalResourceDefinition.Builder.newInstance() + .id(randomUUID().toString()) + .pathName(DESTINATION) + .build(); + } + + @Override + public boolean canGenerate(TransferProcess transferProcess, Policy policy) { + Objects.requireNonNull(transferProcess, "dataRequest must always be provided"); + Objects.requireNonNull(policy, "policy must always be provided"); + + return TYPE.equals(transferProcess.getDestinationType()); + } + +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalProvisionExtension.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalProvisionExtension.java new file mode 100644 index 00000000..4780387f --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalProvisionExtension.java @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.provision; + +import dev.failsafe.RetryPolicy; +import org.eclipse.edc.connector.controlplane.transfer.spi.provision.ProvisionManager; +import org.eclipse.edc.connector.controlplane.transfer.spi.provision.ResourceManifestGenerator; +import org.eclipse.edc.runtime.metamodel.annotation.Extension; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.runtime.metamodel.annotation.Setting; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; + +@Extension(value = LocalProvisionExtension.NAME) +public class LocalProvisionExtension implements ServiceExtension { + public static final String NAME = "Local Provision Extension"; + @Setting + private static final String PROVISION_MAX_RETRY = "10"; + @Inject + private Monitor monitor; + @Inject + private ProvisionManager provisionManager; + @Inject + private ResourceManifestGenerator manifestGenerator; + + @Override + public String name() { + return NAME; + } + + 
@Override + public void initialize(ServiceExtensionContext context) { + var retryPolicy = (RetryPolicy) context.getService(RetryPolicy.class); + + int maxRetries = context.getSetting(PROVISION_MAX_RETRY, 10); + var provisionerConfiguration = new LocalResourceProvisionerConfiguration(maxRetries); + var localResourceProvisioner = new LocalResourceProvisioner(monitor, retryPolicy, provisionerConfiguration); + provisionManager.register(localResourceProvisioner); + + // register the generator + manifestGenerator.registerGenerator(new LocalConsumerResourceDefinitionGenerator()); + } +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalProvisionedResource.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalProvisionedResource.java new file mode 100644 index 00000000..f0f2b8fb --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalProvisionedResource.java @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.provision; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonTypeName; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.ProvisionedDataDestinationResource; + + +@JsonDeserialize(builder = LocalProvisionedResource.Builder.class) +@JsonTypeName("dataspaceconnector:datarequest") +public class LocalProvisionedResource extends ProvisionedDataDestinationResource { + private static final String PATHNAME = "path"; + private static final String TYPE = "File"; + + @JsonPOJOBuilder(withPrefix = "") + public static class Builder extends ProvisionedDataDestinationResource.Builder { + private Builder() { + super(new LocalProvisionedResource()); + dataAddressBuilder.type(TYPE); + } + + @JsonCreator + public static Builder newInstance() { + return new Builder(); + } + + public Builder pathName(String pathName) { + dataAddressBuilder.property(PATHNAME, pathName); + return this; + } + } +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceDefinition.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceDefinition.java new file mode 100644 index 00000000..caa523e5 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceDefinition.java @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * 
Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.provision; + +import org.eclipse.edc.connector.controlplane.transfer.spi.types.ResourceDefinition; + +import java.util.Objects; + +public class LocalResourceDefinition extends ResourceDefinition { + private String pathName; + + private LocalResourceDefinition() { + } + + public String getPathName() { + return pathName; + } + + private void setPathName(String pathName) { + this.pathName = pathName; + } + + public void updatePathName(String pathName) { + setPathName(pathName); + } + + @Override + public Builder toBuilder() { + return initializeBuilder(new Builder()) + .pathName(pathName); + } + + public static class Builder extends ResourceDefinition.Builder { + private Builder() { + super(new LocalResourceDefinition()); + } + + public static Builder newInstance() { + return new Builder(); + } + + public Builder pathName(String pathName) { + resourceDefinition.pathName = pathName; + return this; + } + + @Override + protected void verify() { + super.verify(); + Objects.requireNonNull(resourceDefinition.pathName, "pathName"); + } + } +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceProvisioner.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceProvisioner.java new file mode 100644 index 00000000..fc85ee9e --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceProvisioner.java @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.provision; + +import dev.failsafe.RetryPolicy; +import org.eclipse.edc.connector.controlplane.transfer.spi.provision.Provisioner; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.DeprovisionedResource; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.ProvisionResponse; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.ProvisionedResource; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.ResourceDefinition; +import org.eclipse.edc.policy.model.Policy; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.response.StatusResult; + +import java.io.File; +import java.io.IOException; +import java.util.concurrent.CompletableFuture; + +import static java.util.concurrent.CompletableFuture.completedFuture; + + +public class LocalResourceProvisioner implements Provisioner { + private final Monitor monitor; + private final RetryPolicy retryPolicy; + private final LocalResourceProvisionerConfiguration configuration; + + public LocalResourceProvisioner(Monitor monitor, RetryPolicy retryPolicy, LocalResourceProvisionerConfiguration configuration) { + this.monitor = monitor; + this.configuration = configuration; + this.retryPolicy = RetryPolicy.builder(retryPolicy.getConfig()) + .withMaxRetries(configuration.maxRetries()) + .build(); + } + + @Override + 
public boolean canProvision(ResourceDefinition resourceDefinition) { + return resourceDefinition instanceof LocalResourceDefinition; + } + + @Override + public boolean canDeprovision(ProvisionedResource resourceDefinition) { + return resourceDefinition instanceof LocalProvisionedResource; + } + + @Override + public CompletableFuture> provision(LocalResourceDefinition resourceDefinition, Policy policy) { + createDestinationFile(resourceDefinition.getPathName()); + StatusResult provisionResponseStatusResult = provisionSucceeded(resourceDefinition); + return completedFuture(provisionResponseStatusResult); + } + + @Override + public CompletableFuture> deprovision(LocalProvisionedResource provisionedResource, Policy policy) { + return null; + } + + + private void createDestinationFile(String pathName) { + var file = new File(pathName.replaceAll("\\.", ".").replaceAll("/", "/")); + if (!file.exists()) { + try { + if (!file.createNewFile()) { + monitor.debug(String.format("File could not be created at path %s", pathName)); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + private StatusResult provisionSucceeded(LocalResourceDefinition resourceDefinition) { + var resource = LocalProvisionedResource.Builder.newInstance() + .id(resourceDefinition.getPathName()) + .resourceDefinitionId(resourceDefinition.getId()) + .hasToken(true) + .pathName(resourceDefinition.getPathName()) + .transferProcessId(resourceDefinition.getTransferProcessId()) + .resourceName(resourceDefinition.getPathName()) + .build(); + + monitor.debug("LocalResourceProvisioner: Resource request submitted: " + resourceDefinition.getPathName()); + + var response = ProvisionResponse.Builder.newInstance().resource(resource).build(); + return StatusResult.success(response); + } +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceProvisionerConfiguration.java b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceProvisionerConfiguration.java new file mode 100644 index 00000000..89731858 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/java/org/eclipse/edc/sample/extension/provision/LocalResourceProvisionerConfiguration.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.provision; + +public record LocalResourceProvisionerConfiguration(int maxRetries) { + +} diff --git a/policy/policy-02-provision/policy-provision-consumer/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/policy/policy-02-provision/policy-provision-consumer/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension new file mode 100644 index 00000000..0662e409 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-consumer/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -0,0 +1,16 @@ +# +# Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering +# +# This program and the 
accompanying materials are made available under the +# terms of the Apache License, Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# +# Contributors: +# Fraunhofer Institute for Software and Systems Engineering +# +# + +org.eclipse.edc.sample.extension.provision.LocalProvisionExtension +org.eclipse.edc.sample.extension.policy.ConsumerPolicyFunctionsExtension \ No newline at end of file diff --git a/advanced/advanced-01-open-telemetry/open-telemetry-provider/build.gradle.kts b/policy/policy-02-provision/policy-provision-provider/build.gradle.kts similarity index 55% rename from advanced/advanced-01-open-telemetry/open-telemetry-provider/build.gradle.kts rename to policy/policy-02-provision/policy-provision-provider/build.gradle.kts index be18514c..50c8c041 100644 --- a/advanced/advanced-01-open-telemetry/open-telemetry-provider/build.gradle.kts +++ b/policy/policy-02-provision/policy-provision-provider/build.gradle.kts @@ -1,9 +1,5 @@ -import java.nio.file.Files -import java.nio.file.Paths -import java.nio.file.StandardCopyOption - /* - * Copyright (c) 2022 Microsoft Corporation + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at @@ -12,7 +8,7 @@ import java.nio.file.StandardCopyOption * SPDX-License-Identifier: Apache-2.0 * * Contributors: - * Microsoft Corporation - initial implementation + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation * */ @@ -23,54 +19,41 @@ plugins { } dependencies { - implementation(libs.edc.control.plane.api.client) implementation(libs.edc.control.plane.api) implementation(libs.edc.control.plane.core) - implementation(libs.edc.dsp) implementation(libs.edc.configuration.filesystem) - implementation(libs.edc.vault.filesystem) - implementation(libs.edc.iam.mock) implementation(libs.edc.management.api) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.edc.transfer.pull.http.receiver) + implementation(libs.edc.validator.data.address.http.data) + implementation(libs.edc.api.control.configuration) + + implementation(libs.edc.edr.cache.api) + implementation(libs.edc.edr.store.core) + implementation(libs.edc.edr.store.receiver) implementation(libs.edc.data.plane.selector.api) implementation(libs.edc.data.plane.selector.core) - implementation(libs.edc.data.plane.selector.client) + implementation(libs.edc.data.plane.util) - implementation(libs.edc.data.plane.api) + implementation(libs.edc.data.plane.self.registration) + implementation(libs.edc.data.plane.control.api) + implementation(libs.edc.data.plane.public.api) implementation(libs.edc.data.plane.core) implementation(libs.edc.data.plane.http) - implementation(libs.edc.api.observability) - implementation(libs.edc.auth.tokenbased) - - runtimeOnly(libs.edc.monitor.jdk.logger) + //implementation(project(":transfer:transfer-01-file-transfer:transfer-file-local")) } application { - mainClass.set("org.eclipse.edc.boot.system.runtime.BaseRuntime") + mainClass.set("$group.boot.system.runtime.BaseRuntime") } tasks.withType { + exclude("**/pom.properties", "**/pom.xm") mergeServiceFiles() archiveFileName.set("provider.jar") -} - -tasks.register("copyOpenTelemetryJar", Copy::class) { - val openTelemetry = configurations.create("open-telemetry") - - dependencies { - 
openTelemetry(libs.opentelemetry) - } - - from(openTelemetry) - into("build/libs") -} - -tasks.build { - finalizedBy("copyOpenTelemetryJar") -} +} \ No newline at end of file diff --git a/policy/policy-02-provision/policy-provision-provider/config.properties b/policy/policy-02-provision/policy-provision-provider/config.properties new file mode 100644 index 00000000..67341053 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/config.properties @@ -0,0 +1,17 @@ +edc.participant.id=provider +edc.dsp.callback.address=http://localhost:8282/protocol +web.http.port=8181 +web.http.path=/api +web.http.management.port=8182 +web.http.management.path=/management +web.http.protocol.port=8282 +web.http.protocol.path=/protocol +edc.transfer.proxy.token.signer.privatekey.alias=private-key +edc.transfer.proxy.token.verifier.publickey.alias=public-key +web.http.public.port=8185 +web.http.public.path=/public +web.http.control.port=8183 +web.http.control.path=/control +edc.samples.policy-02.asset.path=path/to/file +edc.samples.policy-02.constraint.desired.file.path=path/to/desired/location/transfer.txt +edc.dataplane.api.public.baseurl=http://localhost:8185/public \ No newline at end of file diff --git a/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/policy/PolicyFunctionsExtension.java b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/policy/PolicyFunctionsExtension.java new file mode 100644 index 00000000..6bbb8be2 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/policy/PolicyFunctionsExtension.java @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.policy; + +import org.eclipse.edc.connector.controlplane.asset.spi.domain.Asset; +import org.eclipse.edc.connector.controlplane.contract.spi.offer.store.ContractDefinitionStore; +import org.eclipse.edc.connector.controlplane.contract.spi.types.offer.ContractDefinition; +import org.eclipse.edc.connector.controlplane.policy.spi.PolicyDefinition; +import org.eclipse.edc.connector.controlplane.policy.spi.store.PolicyDefinitionStore; +import org.eclipse.edc.policy.engine.spi.RuleBindingRegistry; +import org.eclipse.edc.policy.model.Action; +import org.eclipse.edc.policy.model.AtomicConstraint; +import org.eclipse.edc.policy.model.LiteralExpression; +import org.eclipse.edc.policy.model.Operator; +import org.eclipse.edc.policy.model.Permission; +import org.eclipse.edc.policy.model.Policy; +import org.eclipse.edc.runtime.metamodel.annotation.Extension; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.spi.query.Criterion; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; + +import java.util.List; + +import static org.eclipse.edc.policy.engine.spi.PolicyEngine.ALL_SCOPES; + +@Extension(value = PolicyFunctionsExtension.NAME) +public class PolicyFunctionsExtension implements ServiceExtension { + private static final String 
FILE_PATH = "edc.samples.policy-02.constraint.desired.file.path"; + private static final String KEY = "POLICY_REGULATE_FILE_PATH"; + public static final String NAME = "Policy Functions Extension"; + public static final String POLICY_TYPE = "USE"; + public static final String RIGHT_OPERAND = "test-document"; + public static final String DEFAULT_FILE_PATH = "/tmp/desired/path/transfer.txt"; + + @Inject + private RuleBindingRegistry ruleBindingRegistry; + @Inject + private PolicyDefinitionStore policyStore; + @Inject + private ContractDefinitionStore contractDefinitionStore; + + @Override + public String name() { + return NAME; + } + + @Override + public void initialize(ServiceExtensionContext context) { + ruleBindingRegistry.bind(POLICY_TYPE, ALL_SCOPES); + + registerContractDefinition(context); + } + + private PolicyDefinition createAccessPolicy() { + var usePermission = Permission.Builder.newInstance() + .action(Action.Builder.newInstance().type("USE").build()) + .build(); + + return PolicyDefinition.Builder.newInstance() + .id("use") + .policy(Policy.Builder.newInstance() + .permission(usePermission) + .build()) + .build(); + } + + private PolicyDefinition createContractPolicy(ServiceExtensionContext context) { + var desiredFilePath = context.getSetting(FILE_PATH, DEFAULT_FILE_PATH); + var regulateFilePathConstraint = AtomicConstraint.Builder.newInstance() + .leftExpression(new LiteralExpression(KEY)) + .operator(Operator.EQ) + .rightExpression(new LiteralExpression(desiredFilePath)) + .build(); + + + var permission = Permission.Builder.newInstance() + .action(Action.Builder.newInstance().type(POLICY_TYPE).build()) + .constraint(regulateFilePathConstraint) + .build(); + + + return PolicyDefinition.Builder.newInstance() + .id("use-regulated-path") + .policy(Policy.Builder.newInstance() + .permission(permission) + .build()) + .build(); + } + + private void registerContractDefinition(ServiceExtensionContext context) { + var accessPolicy = createAccessPolicy(); + policyStore.create(accessPolicy); + + var contractPolicy = createContractPolicy(context); + policyStore.create(contractPolicy); + + var contractDefinition = ContractDefinition.Builder.newInstance() + .id("1") + .accessPolicyId(accessPolicy.getId()) + .contractPolicyId(contractPolicy.getId()) + .assetsSelector(List.of(Criterion.Builder.newInstance() + .operandLeft(Asset.PROPERTY_ID) + .operator("=") // TODO changed to EQ? 
+ .operandRight(RIGHT_OPERAND) + .build())) + .build(); + contractDefinitionStore.save(contractDefinition); + } + +} diff --git a/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSink.java b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSink.java new file mode 100644 index 00000000..89e0418a --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSink.java @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2022 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.transfer; + +import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSource; +import org.eclipse.edc.connector.dataplane.spi.pipeline.StreamFailure; +import org.eclipse.edc.connector.dataplane.spi.pipeline.StreamResult; +import org.eclipse.edc.connector.dataplane.util.sink.ParallelSink; + +import java.io.File; +import java.io.FileOutputStream; +import java.util.List; +import java.util.Objects; + +import static java.lang.String.format; +import static org.eclipse.edc.connector.dataplane.spi.pipeline.StreamFailure.Reason.GENERAL_ERROR; + +class FileTransferDataSink extends ParallelSink { + private File file; + + @Override + protected StreamResult transferParts(List parts) { + for (DataSource.Part part : parts) { + var fileName = part.name(); + try (var input = part.openStream()) { + try (var output = new FileOutputStream(file)) { + try { + input.transferTo(output); + } catch (Exception e) { + return getTransferResult(e, "Error transferring file %s", fileName); + } + } catch (Exception e) { + return getTransferResult(e, "Error creating file %s", fileName); + } + } catch (Exception e) { + return getTransferResult(e, "Error reading file %s", fileName); + } + } + return StreamResult.success(null); + } + + private StreamResult getTransferResult(Exception e, String logMessage, Object... 
args) { + var message = format(logMessage, args); + monitor.severe(message, e); + return StreamResult.failure(new StreamFailure(List.of(message), GENERAL_ERROR)); + } + + public static class Builder extends ParallelSink.Builder { + + public static Builder newInstance() { + return new Builder(); + } + + public Builder file(File file) { + sink.file = file; + return this; + } + + @Override + protected void validate() { + Objects.requireNonNull(sink.file, "file"); + } + + private Builder() { + super(new FileTransferDataSink()); + } + } +} diff --git a/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSinkFactory.java b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSinkFactory.java new file mode 100644 index 00000000..98e77124 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSinkFactory.java @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2022 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.transfer; + +import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSink; +import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSinkFactory; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.result.Result; +import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage; +import org.jetbrains.annotations.NotNull; + +import java.io.File; +import java.util.concurrent.ExecutorService; + +public class FileTransferDataSinkFactory implements DataSinkFactory { + private final Monitor monitor; + private final ExecutorService executorService; + private final int partitionSize; + + public FileTransferDataSinkFactory(Monitor monitor, ExecutorService executorService, + int partitionSize) { + this.monitor = monitor; + this.executorService = executorService; + this.partitionSize = partitionSize; + } + + @Override + public String supportedType() { + return "File"; + } + + @Override + public boolean canHandle(DataFlowStartMessage request) { + return "File".equalsIgnoreCase(request.getDestinationDataAddress().getType()); + } + + @Override + public DataSink createSink(DataFlowStartMessage request) { + var destination = request.getDestinationDataAddress(); + + // verify destination path + var path = destination.getStringProperty("path"); + // As this is a controlled test input below is to avoid path-injection warning by CodeQL + var destinationFile = new File(path.replaceAll("\\.", ".").replaceAll("/", "/")); + + return FileTransferDataSink.Builder.newInstance() + .file(destinationFile) + .requestId(request.getId()) + .partitionSize(partitionSize) + .executorService(executorService) + .monitor(monitor) + .build(); + } + + @Override + public @NotNull Result validateRequest(DataFlowStartMessage request) { + return Result.success(); + } +} diff --git a/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSource.java 
b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSource.java new file mode 100644 index 00000000..85ef937e --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSource.java @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2022 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.transfer; + +import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSource; +import org.eclipse.edc.connector.dataplane.spi.pipeline.StreamResult; +import org.eclipse.edc.spi.EdcException; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.InputStream; +import java.util.stream.Stream; + +class FileTransferDataSource implements DataSource { + + private final File file; + + FileTransferDataSource(File file) { + this.file = file; + } + + @Override + public StreamResult> openPartStream() { + var part = new Part() { + @Override + public String name() { + return file.getName(); + } + + @Override + public InputStream openStream() { + try { + return new FileInputStream(file); + } catch (FileNotFoundException e) { + throw new EdcException(e); + } + } + }; + return StreamResult.success(Stream.of(part)); + } + + @Override + public void close() throws Exception { + + } +} diff --git a/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSourceFactory.java b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSourceFactory.java new file mode 100644 index 00000000..7a4fe144 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferDataSourceFactory.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2022 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.transfer; + +import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSource; +import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSourceFactory; +import org.eclipse.edc.spi.result.Result; +import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage; +import org.jetbrains.annotations.NotNull; + +import java.io.File; + +public class FileTransferDataSourceFactory implements DataSourceFactory { + @Override + public String supportedType() { + return "File"; + } + + @Override + public boolean canHandle(DataFlowStartMessage dataRequest) { + return "File".equalsIgnoreCase(dataRequest.getSourceDataAddress().getType()); + } + + @Override + public DataSource createSource(DataFlowStartMessage request) { + var source = getFile(request); + return new FileTransferDataSource(source); + } + + @Override + public @NotNull Result validateRequest(DataFlowStartMessage 
request) { + var source = getFile(request); + if (!source.exists()) { + return Result.failure("Source file " + source.getName() + " does not exist at " + source.getAbsolutePath()); + } + + return Result.success(); + } + + @NotNull + private File getFile(DataFlowStartMessage request) { + var dataAddress = request.getSourceDataAddress(); + // verify source path + var sourceFileName = dataAddress.getStringProperty("filename"); + var path = dataAddress.getStringProperty("path"); + // As this is a controlled test input below is to avoid path-injection warning by CodeQL + sourceFileName = sourceFileName.replaceAll("\\.", ".").replaceAll("/", "/"); + path = path.replaceAll("\\.", ".").replaceAll("/", "/"); + return new File(path, sourceFileName); + } +} diff --git a/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferExtension.java b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferExtension.java new file mode 100644 index 00000000..60916992 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/src/main/java/org/eclipse/edc/sample/extension/transfer/FileTransferExtension.java @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2021 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.sample.extension.transfer; + +import org.eclipse.edc.connector.controlplane.asset.spi.domain.Asset; +import org.eclipse.edc.connector.controlplane.asset.spi.index.AssetIndex; +import org.eclipse.edc.connector.controlplane.transfer.spi.flow.DataFlowManager; +import org.eclipse.edc.connector.dataplane.spi.pipeline.DataTransferExecutorServiceContainer; +import org.eclipse.edc.connector.dataplane.spi.pipeline.PipelineService; +import org.eclipse.edc.runtime.metamodel.annotation.Extension; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.eclipse.edc.spi.types.domain.DataAddress; + +import java.nio.file.Path; + +@Extension(FileTransferExtension.NAME) +public class FileTransferExtension implements ServiceExtension { + public static final String NAME = "File Transfer Extension"; + private static final String EDC_ASSET_PATH = "edc.samples.policy-02.asset.path"; + private static final String DEFAULT_PATH = "/tmp/provider/test-document.txt"; + + @Inject + private AssetIndex assetIndex; + @Inject + private PipelineService pipelineService; + @Inject + private DataTransferExecutorServiceContainer executorContainer; + @Inject + private Monitor monitor; + @Inject + private DataFlowManager dataFlowManager; + + @Override + public void initialize(ServiceExtensionContext context) { + pipelineService.registerFactory(new FileTransferDataSourceFactory()); + + var sinkFactory = new FileTransferDataSinkFactory(monitor, executorContainer.getExecutorService(), 5); + pipelineService.registerFactory(sinkFactory); + + registerDataEntries(context); + } + + private void registerDataEntries(ServiceExtensionContext context) { + var 
assetPathSetting = context.getSetting(EDC_ASSET_PATH, DEFAULT_PATH); + var assetPath = Path.of(assetPathSetting); + var filename = assetPath.getFileName().toString(); + var path = assetPath.getParent().toString(); + + var dataAddress = DataAddress.Builder.newInstance() + .property("type", "File") + .property("filename", filename) + .property("path", path) + .build(); + + + var asset = Asset.Builder.newInstance() + .id("test-document") + .dataAddress(dataAddress) + .build(); + + assetIndex.create(asset); + } +} diff --git a/policy/policy-02-provision/policy-provision-provider/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/policy/policy-02-provision/policy-provision-provider/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension new file mode 100644 index 00000000..9b4a0fd1 --- /dev/null +++ b/policy/policy-02-provision/policy-provision-provider/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -0,0 +1,16 @@ +# +# Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering +# +# This program and the accompanying materials are made available under the +# terms of the Apache License, Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# +# Contributors: +# Fraunhofer Institute for Software and Systems Engineering +# +# + +org.eclipse.edc.sample.extension.policy.PolicyFunctionsExtension +org.eclipse.edc.sample.extension.transfer.FileTransferExtension \ No newline at end of file diff --git a/settings.gradle.kts b/settings.gradle.kts index 20ed91e6..f1c25d55 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -45,9 +45,20 @@ include(":transfer:streaming:streaming-01-http-to-http:streaming-01-runtime") include(":transfer:streaming:streaming-02-kafka-to-http:streaming-02-runtime") include(":transfer:streaming:streaming-03-kafka-broker:streaming-03-runtime") -include(":advanced:advanced-01-open-telemetry:open-telemetry-consumer") -include(":advanced:advanced-01-open-telemetry:open-telemetry-provider") +include(":advanced:advanced-01-open-telemetry:open-telemetry-runtime") +//policy +include(":policy:policy-01-policy-enforcement:policy-enforcement-provider") +include(":policy:policy-01-policy-enforcement:policy-enforcement-consumer") +include(":policy:policy-01-policy-enforcement:policy-functions") + + +//policy + +include(":policy:policy-02-provision:policy-provision-consumer") +include(":policy:policy-02-provision:policy-provision-provider") + +// modules for code samples ------------------------------------------------------------------------ include(":advanced:advanced-02-custom-runtime") include(":util:http-request-logger") diff --git a/system-tests/build.gradle.kts b/system-tests/build.gradle.kts index b80be01a..66d28aba 100644 --- a/system-tests/build.gradle.kts +++ b/system-tests/build.gradle.kts @@ -18,8 +18,10 @@ plugins { dependencies { testImplementation(libs.edc.junit) - testImplementation(libs.edc.json.ld) + testImplementation(libs.edc.json.ld.lib) + testImplementation(libs.edc.json.ld.spi) testImplementation(libs.edc.control.plane.spi) + testImplementation(testFixtures(libs.edc.management.api.test.fixtures)) testImplementation(libs.awaitility) testImplementation(libs.okhttp.mockwebserver) testImplementation(libs.restAssured) @@ -40,6 +42,17 @@ dependencies { testCompileOnly(project(":transfer:streaming:streaming-02-kafka-to-http:streaming-02-runtime")) 
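Note on the provider-side policy wiring above: `PolicyFunctionsExtension` only binds the `USE` action to all policy scopes and stores a contract policy whose permission carries a `POLICY_REGULATE_FILE_PATH` constraint, and both provider extensions are picked up through the `META-INF/services` entries shown above. The constraint itself is only enforced if a function is registered with the policy engine for that key, which happens outside the hunks shown here. Purely as an illustration, and assuming the policy-engine SPI of this EDC version still exposes `AtomicConstraintFunction` and `PolicyEngine#registerFunction` in the shapes used below (the class name and constructor are hypothetical), such a function could look roughly like this:

```java
// Hypothetical sketch only, not part of this change set: a function that evaluates the
// POLICY_REGULATE_FILE_PATH constraint built in PolicyFunctionsExtension.
import org.eclipse.edc.policy.engine.spi.AtomicConstraintFunction;
import org.eclipse.edc.policy.engine.spi.PolicyContext;
import org.eclipse.edc.policy.model.Operator;
import org.eclipse.edc.policy.model.Permission;

import java.util.Objects;

public class RegulateFilePathFunction implements AtomicConstraintFunction<Permission> {

    private final String actualDestinationPath;

    public RegulateFilePathFunction(String actualDestinationPath) {
        this.actualDestinationPath = actualDestinationPath;
    }

    @Override
    public boolean evaluate(Operator operator, Object rightValue, Permission rule, PolicyContext context) {
        // The provider-side constraint is built with Operator.EQ, so only equality is meaningful here.
        if (operator != Operator.EQ) {
            return false;
        }
        // rightValue carries the desired path configured via
        // edc.samples.policy-02.constraint.desired.file.path.
        return Objects.equals(rightValue, actualDestinationPath);
    }
}
```

Registration would then typically happen in an extension's `initialize` method, for example `policyEngine.registerFunction(scope, Permission.class, "POLICY_REGULATE_FILE_PATH", new RegulateFilePathFunction(path))`, again assuming that SPI method is available in this version. The actual policy-02-provision enforcement additionally redirects the provisioned destination, which this sketch does not attempt to reproduce.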
testCompileOnly(project(":transfer:streaming:streaming-03-kafka-broker:streaming-03-runtime")) - testCompileOnly(project(":advanced:advanced-01-open-telemetry:open-telemetry-provider")) - testCompileOnly(project(":advanced:advanced-01-open-telemetry:open-telemetry-consumer")) + testCompileOnly(project(":advanced:advanced-01-open-telemetry:open-telemetry-runtime")) + + testCompileOnly(project(":policy:policy-01-policy-enforcement:policy-enforcement-provider")) + testCompileOnly(project(":policy:policy-01-policy-enforcement:policy-enforcement-consumer")) + testCompileOnly(project(":policy:policy-01-policy-enforcement:policy-functions")) + + testCompileOnly(project(":policy:policy-02-provision:policy-provision-consumer")) + testCompileOnly(project(":policy:policy-02-provision:policy-provision-provider")) + +} + +tasks.compileJava { + dependsOn(":advanced:advanced-01-open-telemetry:open-telemetry-runtime:build") } diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/advanced/Advanced01openTelemetryTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/advanced/Advanced01openTelemetryTest.java index d13488dc..2c43a224 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/advanced/Advanced01openTelemetryTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/advanced/Advanced01openTelemetryTest.java @@ -15,22 +15,15 @@ package org.eclipse.edc.samples.advanced; -import org.apache.http.HttpStatus; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates; import org.eclipse.edc.junit.annotations.EndToEndTest; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.testcontainers.containers.ComposeContainer; import org.testcontainers.containers.wait.strategy.Wait; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.URL; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; +import static io.restassured.RestAssured.given; +import static org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates.STARTED; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileContentFromRelativePath; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileFromRelativePath; import static org.eclipse.edc.samples.common.NegotiationCommon.createAsset; @@ -39,7 +32,6 @@ import static org.eclipse.edc.samples.common.NegotiationCommon.fetchDatasetFromCatalog; import static org.eclipse.edc.samples.common.NegotiationCommon.getContractAgreementId; import static org.eclipse.edc.samples.common.NegotiationCommon.negotiateContract; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.runPrerequisites; import static org.eclipse.edc.samples.util.TransferUtil.checkTransferStatus; import static org.eclipse.edc.samples.util.TransferUtil.startTransfer; @@ -47,44 +39,35 @@ @Testcontainers public class Advanced01openTelemetryTest { - private static final String DOCKER_COMPOSE_YAML = "advanced/advanced-01-open-telemetry/docker-compose.yaml"; - private static final String FETCH_DATASET_FROM_CATALOG_FILE_PATH = "advanced/advanced-01-open-telemetry/resources/get-dataset.json"; - private static final String NEGOTIATE_CONTRACT_FILE_PATH = "advanced/advanced-01-open-telemetry/resources/negotiate-contract.json"; - private static final String START_TRANSFER_FILE_PATH = 
"advanced/advanced-01-open-telemetry/resources/start-transfer.json"; + private static final String SAMPLE_FOLDER = "advanced/advanced-01-open-telemetry"; + private static final String DOCKER_COMPOSE_YAML = SAMPLE_FOLDER + "/docker-compose.yaml"; + private static final String FETCH_DATASET_FROM_CATALOG_FILE_PATH = SAMPLE_FOLDER + "/resources/get-dataset.json"; + private static final String NEGOTIATE_CONTRACT_FILE_PATH = SAMPLE_FOLDER + "/resources/negotiate-contract.json"; + private static final String START_TRANSFER_FILE_PATH = SAMPLE_FOLDER + "/resources/start-transfer.json"; private static final String JAEGER_URL = "http://localhost:16686"; @Container - public static ComposeContainer environment = + public ComposeContainer environment = new ComposeContainer(getFileFromRelativePath(DOCKER_COMPOSE_YAML)) .withLocalCompose(true) .waitingFor("consumer", Wait.forLogMessage(".*ready.*", 1)); - @BeforeAll - static void setUp() { - environment.start(); - } - @Test void runSampleSteps() { - runPrerequisites(); createAsset(); createPolicy(); createContractDefinition(); var catalogDatasetId = fetchDatasetFromCatalog(FETCH_DATASET_FROM_CATALOG_FILE_PATH); var contractNegotiationId = negotiateContract(NEGOTIATE_CONTRACT_FILE_PATH, catalogDatasetId); var contractAgreementId = getContractAgreementId(contractNegotiationId); - var transferProcessId = startTransfer(getFileContentFromRelativePath(START_TRANSFER_FILE_PATH), contractAgreementId); - checkTransferStatus(transferProcessId, TransferProcessStates.STARTED); - assertJaegerState(); - } + var transferRequest = getFileContentFromRelativePath(START_TRANSFER_FILE_PATH); + var transferProcessId = startTransfer(transferRequest, contractAgreementId); + checkTransferStatus(transferProcessId, STARTED); - private void assertJaegerState() { - try { - var url = new URL(JAEGER_URL); - var huc = (HttpURLConnection) url.openConnection(); - assertThat(huc.getResponseCode()).isEqualTo(HttpStatus.SC_OK); - } catch (IOException e) { - fail("Unable to assert Jaeger state", e); - } + given() + .baseUri(JAEGER_URL) + .get() + .then() + .statusCode(200); } } diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/basic/Basic01basicConnectorTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/basic/Basic01basicConnectorTest.java index 4d426312..57600e75 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/basic/Basic01basicConnectorTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/basic/Basic01basicConnectorTest.java @@ -15,7 +15,9 @@ package org.eclipse.edc.samples.basic; import org.eclipse.edc.junit.annotations.EndToEndTest; -import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; +import org.eclipse.edc.junit.extensions.EmbeddedRuntime; +import org.eclipse.edc.junit.extensions.RuntimeExtension; +import org.eclipse.edc.junit.extensions.RuntimePerClassExtension; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -28,14 +30,14 @@ class Basic01basicConnectorTest { @RegisterExtension - static EdcRuntimeExtension connector = new EdcRuntimeExtension( - ":basic:basic-01-basic-connector", + static RuntimeExtension connector = new RuntimePerClassExtension(new EmbeddedRuntime( "connector", - emptyMap() - ); + emptyMap(), + ":basic:basic-01-basic-connector" + )); @Test void shouldStartConnector() { - assertThat(connector.getContext().getService(Clock.class)).isNotNull(); + assertThat(connector.getService(Clock.class)).isNotNull(); } } diff --git 
a/system-tests/src/test/java/org/eclipse/edc/samples/common/NegotiationCommon.java b/system-tests/src/test/java/org/eclipse/edc/samples/common/NegotiationCommon.java index 3c66574b..acbf75bb 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/common/NegotiationCommon.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/common/NegotiationCommon.java @@ -31,14 +31,14 @@ public class NegotiationCommon { private static final String CREATE_ASSET_FILE_PATH = "transfer/transfer-01-negotiation/resources/create-asset.json"; private static final String V3_ASSETS_PATH = "/v3/assets"; private static final String CREATE_POLICY_FILE_PATH = "transfer/transfer-01-negotiation/resources/create-policy.json"; - private static final String V2_POLICY_DEFINITIONS_PATH = "/v2/policydefinitions"; + private static final String V2_POLICY_DEFINITIONS_PATH = "/v3/policydefinitions"; private static final String CREATE_CONTRACT_DEFINITION_FILE_PATH = "transfer/transfer-01-negotiation/resources/create-contract-definition.json"; - private static final String V2_CONTRACT_DEFINITIONS_PATH = "/v2/contractdefinitions"; - private static final String V2_CATALOG_DATASET_REQUEST_PATH = "/v2/catalog/dataset/request"; + private static final String V2_CONTRACT_DEFINITIONS_PATH = "/v3/contractdefinitions"; + private static final String V2_CATALOG_DATASET_REQUEST_PATH = "/v3/catalog/dataset/request"; private static final String FETCH_DATASET_FROM_CATALOG_FILE_PATH = "transfer/transfer-01-negotiation/resources/get-dataset.json"; private static final String CATALOG_DATASET_ID = "\"odrl:hasPolicy\".'@id'"; private static final String NEGOTIATE_CONTRACT_FILE_PATH = "transfer/transfer-01-negotiation/resources/negotiate-contract.json"; - private static final String V2_CONTRACT_NEGOTIATIONS_PATH = "/v2/contractnegotiations/"; + private static final String V2_CONTRACT_NEGOTIATIONS_PATH = "/v3/contractnegotiations/"; private static final String CONTRACT_NEGOTIATION_ID = "@id"; private static final String CONTRACT_AGREEMENT_ID = "contractAgreementId"; private static final String CONTRACT_OFFER_ID_KEY = "{{contract-offer-id}}"; @@ -78,13 +78,18 @@ public static String negotiateContract(String negotiateContractFilePath, String } public static String getContractAgreementId(String contractNegotiationId) { - String url = PrerequisitesCommon.CONSUMER_MANAGEMENT_URL + V2_CONTRACT_NEGOTIATIONS_PATH + contractNegotiationId; + var url = PrerequisitesCommon.CONSUMER_MANAGEMENT_URL + V2_CONTRACT_NEGOTIATIONS_PATH + contractNegotiationId; return await() .atMost(TIMEOUT) .pollInterval(POLL_INTERVAL) .until(() -> get(url, CONTRACT_AGREEMENT_ID), Objects::nonNull); } + public static String getContractNegotiationState(String contractNegotiationId) { + var url = PrerequisitesCommon.CONSUMER_MANAGEMENT_URL + V2_CONTRACT_NEGOTIATIONS_PATH + contractNegotiationId; + return get(url, "state"); + } + public static String runNegotiation() { createAsset(); createPolicy(); diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/common/PolicyCommon.java b/system-tests/src/test/java/org/eclipse/edc/samples/common/PolicyCommon.java new file mode 100644 index 00000000..77411b80 --- /dev/null +++ b/system-tests/src/test/java/org/eclipse/edc/samples/common/PolicyCommon.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2024 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * 
https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.samples.common; + +import static org.eclipse.edc.samples.common.FileTransferCommon.getFileContentFromRelativePath; +import static org.eclipse.edc.samples.util.TransferUtil.post; + +public class PolicyCommon { + + private static final String V3_ASSETS_PATH = "/v3/assets"; + private static final String V2_POLICY_DEFINITIONS_PATH = "/v3/policydefinitions"; + private static final String V2_CONTRACT_DEFINITIONS_PATH = "/v3/contractdefinitions"; + + public static void createAsset(String createAssetFilePath) { + post(PrerequisitesCommon.PROVIDER_MANAGEMENT_URL + V3_ASSETS_PATH, getFileContentFromRelativePath(createAssetFilePath)); + } + + public static void createPolicy(String createPolicyFilePath) { + post(PrerequisitesCommon.PROVIDER_MANAGEMENT_URL + V2_POLICY_DEFINITIONS_PATH, getFileContentFromRelativePath(createPolicyFilePath)); + } + + public static void createContractDefinition(String createContractDefinitionFilePath) { + post(PrerequisitesCommon.PROVIDER_MANAGEMENT_URL + V2_CONTRACT_DEFINITIONS_PATH, getFileContentFromRelativePath(createContractDefinitionFilePath)); + } + +} diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/common/PrerequisitesCommon.java b/system-tests/src/test/java/org/eclipse/edc/samples/common/PrerequisitesCommon.java index a799da83..79055b21 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/common/PrerequisitesCommon.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/common/PrerequisitesCommon.java @@ -14,13 +14,10 @@ package org.eclipse.edc.samples.common; -import io.restassured.http.ContentType; -import org.apache.http.HttpStatus; import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; import java.util.Map; -import static io.restassured.RestAssured.given; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileFromRelativePath; public class PrerequisitesCommon { @@ -28,51 +25,34 @@ public class PrerequisitesCommon { public static final String API_KEY_HEADER_VALUE = "password"; public static final String PROVIDER_MANAGEMENT_URL = "http://localhost:19193/management"; public static final String CONSUMER_MANAGEMENT_URL = "http://localhost:29193/management"; - public static final String CONSUMER_PUBLIC_URL = "http://localhost:29291/public"; private static final String CONNECTOR_MODULE_PATH = ":transfer:transfer-00-prerequisites:connector"; private static final String PROVIDER = "provider"; private static final String CONSUMER = "consumer"; private static final String EDC_KEYSTORE = "edc.keystore"; private static final String EDC_KEYSTORE_PASSWORD = "edc.keystore.password"; - private static final String EDC_VAULT = "edc.vault"; private static final String EDC_FS_CONFIG = "edc.fs.config"; private static final String CERT_PFX_FILE_PATH = "transfer/transfer-00-prerequisites/resources/certs/cert.pfx"; private static final String KEYSTORE_PASSWORD = "123456"; private static final String PROVIDER_CONFIG_PROPERTIES_FILE_PATH = "transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties"; private static final String CONSUMER_CONFIG_PROPERTIES_FILE_PATH = "transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties"; - private static final String PROVIDER_VAULT_PROPERTIES_FILE_PATH = 
"transfer/transfer-00-prerequisites/resources/configuration/provider-vault.properties"; - private static final String CONSUMER_VAULT_PROPERTIES_FILE_PATH = "transfer/transfer-00-prerequisites/resources/configuration/consumer-vault.properties"; - private static final String REGISTER_DATA_PLANE_PROVIDER_JSON = "transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-provider.json"; - private static final String REGISTER_DATA_PLANE_CONSUMER_JSON = "transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-consumer.json"; - private static final String V2_DATAPLANES_PATH = "/v2/dataplanes"; public static EdcRuntimeExtension getProvider() { - return getConnector(CONNECTOR_MODULE_PATH, PROVIDER, PROVIDER_VAULT_PROPERTIES_FILE_PATH, PROVIDER_CONFIG_PROPERTIES_FILE_PATH); - + return getConnector(CONNECTOR_MODULE_PATH, PROVIDER, PROVIDER_CONFIG_PROPERTIES_FILE_PATH); } public static EdcRuntimeExtension getConsumer() { - return getConnector(CONNECTOR_MODULE_PATH, CONSUMER, CONSUMER_VAULT_PROPERTIES_FILE_PATH, CONSUMER_CONFIG_PROPERTIES_FILE_PATH); + return getConnector(CONNECTOR_MODULE_PATH, CONSUMER, CONSUMER_CONFIG_PROPERTIES_FILE_PATH); } public static EdcRuntimeExtension getConsumer(String modulePath) { - return getConnector(modulePath, CONSUMER, CONSUMER_VAULT_PROPERTIES_FILE_PATH, CONSUMER_CONFIG_PROPERTIES_FILE_PATH); - } - - public static void registerDataPlaneProvider() { - registerDataPlane(PROVIDER_MANAGEMENT_URL, REGISTER_DATA_PLANE_PROVIDER_JSON); - } - - public static void runPrerequisites() { - registerDataPlaneProvider(); + return getConnector(modulePath, CONSUMER, CONSUMER_CONFIG_PROPERTIES_FILE_PATH); } private static EdcRuntimeExtension getConnector( String modulePath, String moduleName, - String vaultPropertiesFilePath, String configPropertiesFilePath ) { return new EdcRuntimeExtension( @@ -81,24 +61,8 @@ private static EdcRuntimeExtension getConnector( Map.of( EDC_KEYSTORE, getFileFromRelativePath(CERT_PFX_FILE_PATH).getAbsolutePath(), EDC_KEYSTORE_PASSWORD, KEYSTORE_PASSWORD, - EDC_VAULT, getFileFromRelativePath(vaultPropertiesFilePath).getAbsolutePath(), EDC_FS_CONFIG, getFileFromRelativePath(configPropertiesFilePath).getAbsolutePath() ) ); } - - private static void registerDataPlane(String host, String payloadPath) { - var requestBody = getFileFromRelativePath(payloadPath); - - given() - .headers(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE) - .contentType(ContentType.JSON) - .body(requestBody) - .when() - .post(host + V2_DATAPLANES_PATH) - .then() - .log() - .ifError() - .statusCode(HttpStatus.SC_OK); - } } diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/policy/Policy01BasicTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/policy/Policy01BasicTest.java new file mode 100644 index 00000000..55c5c65d --- /dev/null +++ b/system-tests/src/test/java/org/eclipse/edc/samples/policy/Policy01BasicTest.java @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2024 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.samples.policy; + +import org.eclipse.edc.junit.annotations.EndToEndTest; +import org.eclipse.edc.junit.extensions.EmbeddedRuntime; 
+import org.eclipse.edc.junit.extensions.RuntimeExtension; +import org.eclipse.edc.junit.extensions.RuntimePerClassExtension; +import org.eclipse.edc.samples.common.NegotiationCommon; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import java.util.Map; + +import static org.awaitility.Awaitility.await; +import static org.eclipse.edc.samples.common.FileTransferCommon.getFileFromRelativePath; +import static org.eclipse.edc.samples.common.NegotiationCommon.getContractNegotiationState; +import static org.eclipse.edc.samples.common.NegotiationCommon.negotiateContract; +import static org.eclipse.edc.samples.common.PolicyCommon.createAsset; +import static org.eclipse.edc.samples.common.PolicyCommon.createContractDefinition; +import static org.eclipse.edc.samples.common.PolicyCommon.createPolicy; +import static org.eclipse.edc.samples.util.TransferUtil.POLL_INTERVAL; +import static org.eclipse.edc.samples.util.TransferUtil.TIMEOUT; + +@EndToEndTest +class Policy01BasicTest { + + private static final String SAMPLE_FOLDER = "policy/policy-01-policy-enforcement"; + private static final String CREATE_ASSET_FILE_PATH = SAMPLE_FOLDER + "/resources/create-asset.json"; + private static final String CREATE_POLICY_FILE_PATH = SAMPLE_FOLDER + "/resources/create-policy.json"; + private static final String CREATE_CONTRACT_DEFINITION_FILE_PATH = SAMPLE_FOLDER + "/resources/create-contract-definition.json"; + private static final String CONTRACT_OFFER_FILE_PATH = SAMPLE_FOLDER + "/resources/contract-request.json"; + + @Nested + class Terminated { + + @RegisterExtension + static final RuntimeExtension PROVIDER_RUNTIME = provider(); + + @RegisterExtension + static final RuntimeExtension CONSUMER_RUNTIME = consumer("system-tests/src/test/resources/policy/config-us.properties"); + + @Test + void runSampleSteps() { + createAsset(CREATE_ASSET_FILE_PATH); + createPolicy(CREATE_POLICY_FILE_PATH); + createContractDefinition(CREATE_CONTRACT_DEFINITION_FILE_PATH); + var negotiationId = negotiateContract(CONTRACT_OFFER_FILE_PATH, ""); + + await() + .atMost(TIMEOUT) + .pollInterval(POLL_INTERVAL) + .until(() -> NegotiationCommon.getContractNegotiationState(negotiationId), s -> s.equals("TERMINATED")); + } + + } + + @Nested + class Finalized { + + @RegisterExtension + static final RuntimeExtension PROVIDER_RUNTIME = provider(); + + @RegisterExtension + static final RuntimeExtension CONSUMER_RUNTIME = consumer("system-tests/src/test/resources/policy/config-eu.properties"); + + @Test + void runSampleSteps() { + createAsset(CREATE_ASSET_FILE_PATH); + createPolicy(CREATE_POLICY_FILE_PATH); + createContractDefinition(CREATE_CONTRACT_DEFINITION_FILE_PATH); + var negotiationId = negotiateContract(CONTRACT_OFFER_FILE_PATH, ""); + + await().atMost(TIMEOUT).pollInterval(POLL_INTERVAL) + .until(() -> getContractNegotiationState(negotiationId), s -> s.equals("FINALIZED")); + } + + } + + private static RuntimeExtension provider() { + return new RuntimePerClassExtension(new EmbeddedRuntime( + "provider", + Map.of("edc.fs.config", getFileFromRelativePath(SAMPLE_FOLDER + "/policy-enforcement-provider/config.properties").getAbsolutePath()), + ":policy:policy-01-policy-enforcement:policy-enforcement-provider" + )); + } + + private static RuntimeExtension consumer(String configurationFilePath) { + return new RuntimePerClassExtension(new EmbeddedRuntime( + "consumer", + Map.of("edc.fs.config", getFileFromRelativePath(configurationFilePath).getAbsolutePath()), 
+ ":policy:policy-01-policy-enforcement:policy-enforcement-consumer" + )); + } + +} diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/policy/PolicyProvisionSampleTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/policy/PolicyProvisionSampleTest.java new file mode 100644 index 00000000..467e1757 --- /dev/null +++ b/system-tests/src/test/java/org/eclipse/edc/samples/policy/PolicyProvisionSampleTest.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.samples.policy; + +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates; +import org.eclipse.edc.junit.annotations.EndToEndTest; +import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import java.util.Map; + +import static org.eclipse.edc.samples.policy.PolicyProvisionSampleTestCommon.getFileFromRelativePath; + +/** + * This test class runs test for checking if the policy defined in {@code provision.manifest.verify} scope is regulating the destination location properly. + * We will be providing a destination file path, as we have used in previous samples for file transfers. + * However, because of our policy, the file will be stored in the desired location (NOT in the provided destination file path) that is defined in the policy. 
+ */ +@EndToEndTest +public class PolicyProvisionSampleTest { + static final String CONSUMER_CONFIG_PROPERTIES_FILE_PATH = "policy/policy-02-provision/policy-provision-consumer/config.properties"; + static final String PROVIDER_CONFIG_PROPERTIES_FILE_PATH = "policy/policy-02-provision/policy-provision-provider/config.properties"; + static final String SAMPLE_ASSET_FILE_PATH = "policy/policy-02-provision/README.md"; + static final String DESTINATION_FILE_PATH = "policy/policy-02-provision/requested_file.txt"; + static final String DESIRED_DESTINATION_FILE_PATH = "policy/policy-02-provision/transfer.txt"; + static final String TRANSFER_FILE_PATH = "policy/policy-02-provision/filetransfer.json"; + static final String CONTRACT_OFFER_FILE_PATH = "policy/policy-02-provision/contractoffer.json"; + + @RegisterExtension + static EdcRuntimeExtension provider = new EdcRuntimeExtension( + ":policy:policy-02-provision:policy-provision-provider", + "provider", + Map.of( + "edc.samples.policy-02.asset.path", getFileFromRelativePath(SAMPLE_ASSET_FILE_PATH).getAbsolutePath(), + "edc.samples.policy-02.constraint.desired.file.path", getFileFromRelativePath(DESIRED_DESTINATION_FILE_PATH).getAbsolutePath(), + "edc.fs.config", getFileFromRelativePath(PROVIDER_CONFIG_PROPERTIES_FILE_PATH).getAbsolutePath() + ) + ); + + @RegisterExtension + static EdcRuntimeExtension consumer = new EdcRuntimeExtension( + ":policy:policy-02-provision:policy-provision-consumer", + "consumer", + Map.of( + "edc.fs.config", getFileFromRelativePath(CONSUMER_CONFIG_PROPERTIES_FILE_PATH).getAbsolutePath() + ) + ); + + final PolicyProvisionSampleTestCommon testUtils = new PolicyProvisionSampleTestCommon(SAMPLE_ASSET_FILE_PATH, DESTINATION_FILE_PATH, DESIRED_DESTINATION_FILE_PATH); + + @Test + void runSampleSteps() throws Exception { + testUtils.assertTestPrerequisites(); + + testUtils.initiateContractNegotiation(); + testUtils.lookUpContractAgreementId(); + var transferProcessId = testUtils.requestTransferFile(); + testUtils.assertDestinationFileContent(); + testUtils.assertFileDoesNotExist(); + testUtils.checkTransferStatus(transferProcessId, TransferProcessStates.COMPLETED); + } + + @AfterEach + protected void tearDown() { + testUtils.cleanTemporaryTestFiles(); + } +} diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/policy/PolicyProvisionSampleTestCommon.java b/system-tests/src/test/java/org/eclipse/edc/samples/policy/PolicyProvisionSampleTestCommon.java new file mode 100644 index 00000000..cf9f6e8d --- /dev/null +++ b/system-tests/src/test/java/org/eclipse/edc/samples/policy/PolicyProvisionSampleTestCommon.java @@ -0,0 +1,294 @@ +/* + * Copyright (c) 2023 Fraunhofer Institute for Software and Systems Engineering + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Fraunhofer Institute for Software and Systems Engineering - initial API and implementation + * + */ + +package org.eclipse.edc.samples.policy; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.restassured.RestAssured; +import io.restassured.http.ContentType; +import org.apache.http.HttpStatus; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcess; + +import 
org.eclipse.edc.junit.testfixtures.TestUtils; +import org.eclipse.edc.policy.model.Action; +import org.eclipse.edc.policy.model.AtomicConstraint; +import org.eclipse.edc.policy.model.LiteralExpression; +import org.eclipse.edc.policy.model.Operator; +import org.eclipse.edc.policy.model.Permission; +import org.eclipse.edc.policy.model.Policy; +import org.jetbrains.annotations.NotNull; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.time.Duration; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; +import static org.hamcrest.Matchers.*; + +public class PolicyProvisionSampleTestCommon { + static final ObjectMapper MAPPER = new ObjectMapper(); + //region constant test settings + static final String INITIATE_CONTRACT_NEGOTIATION_URI = "http://localhost:9192/management/v2/contractnegotiations"; + static final String LOOK_UP_CONTRACT_AGREEMENT_URI = "http://localhost:9192/management/v2/contractnegotiations/%s"; + static final String TRANSFER_PROCESS_URI = "http://localhost:9192/management/v2/transferprocesses"; + static final String API_KEY_HEADER_KEY = "X-Api-Key"; + static final String API_KEY_HEADER_VALUE = "password"; + public static final Duration TIMEOUT = Duration.ofSeconds(30); + public static final Duration POLL_DELAY = Duration.ofMillis(1000); + public static final Duration POLL_INTERVAL = Duration.ofMillis(500); + private static final String EDC_STATE = "state"; + //endregion + + //region changeable test settings + final String sampleAssetFilePath; + final File sampleAssetFile; + final String destinationFilePath; + final File destinationFile; + final String desiredDestinationFilePath; + final File desiredDestinationFile; + Duration timeout = Duration.ofSeconds(60); + Duration pollInterval = Duration.ofMillis(500); + //endregion + + String contractNegotiationId; + String contractAgreementId; + + /** + * Creates a new {@link PolicyProvisionSampleTestCommon} instance. + */ + public PolicyProvisionSampleTestCommon(@NotNull String sampleAssetFilePath, @NotNull String destinationFilePath, @NotNull String desiredDestinationFilePath) { + this.sampleAssetFilePath = sampleAssetFilePath; + sampleAssetFile = getFileFromRelativePath(sampleAssetFilePath); + + this.destinationFilePath = destinationFilePath; + destinationFile = getFileFromRelativePath(destinationFilePath); + + this.desiredDestinationFilePath = desiredDestinationFilePath; + desiredDestinationFile = getFileFromRelativePath(desiredDestinationFilePath); + } + + /** + * Resolves a {@link File} instance from a relative path. + */ + @NotNull + public static File getFileFromRelativePath(String relativePath) { + return new File(TestUtils.findBuildRoot(), relativePath); + } + + /** + * Assert that prerequisites are fulfilled before running the test. + * This assertion checks only whether the file to be copied is not existing already. + */ + void assertTestPrerequisites() { + assertThat(destinationFile).doesNotExist(); + assertThat(desiredDestinationFile).doesNotExist(); + } + + /** + * Remove files created while running the tests. + * The copied file will be deleted. + */ + void cleanTemporaryTestFiles() { + destinationFile.delete(); + desiredDestinationFile.delete(); + } + + /** + * Assert that the file to be copied exists at the expected location. + * This method waits a duration which is defined in {@link PolicyProvisionSampleTestCommon#timeout}. 
+ */ + void assertDestinationFileContent() { + await().atMost(timeout).pollInterval(pollInterval).untilAsserted(() + -> assertThat(desiredDestinationFile).hasSameBinaryContentAs(sampleAssetFile)); + } + + /** + * Assert that there is no file in location {@link PolicyProvisionSampleTestCommon#destinationFilePath} + * This method waits a duration which is defined in {@link PolicyProvisionSampleTestCommon#timeout}. + */ + void assertFileDoesNotExist() { + await().atMost(timeout).pollInterval(pollInterval).untilAsserted(() + -> assertThat(destinationFile).doesNotExist()); + } + + /** + * Assert that the transfer process state on the consumer is completed. + */ + void assertTransferProcessStatusConsumerSide(String transferProcessId) { + await().atMost(timeout).pollInterval(pollInterval).untilAsserted(() + -> { + var transferProcess = getTransferProcessById(transferProcessId); + + //as policy sample file transfer does not use any status checker yet, it will not update to 'COMPLETED' state. + //for now we will just check if the state is 'IN_PROGRESS' + // TODO: should be changed to 'COMPLETED' once the status checker is implemented in the module + assertThat(transferProcess).extracting(TransferProcess::getState).isEqualTo(TransferProcessStates.COMPLETED.toString()); + }); + } + + /** + * Gets the transfer process by ID. + * + * @return The transfer process. + */ + public TransferProcess getTransferProcessById(String processId) { + return RestAssured.given() + .headers(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE) + .when() + .get(String.format("%s/%s", TRANSFER_PROCESS_URI, processId)) + .then() + .statusCode(HttpStatus.SC_OK) + .extract().body().as(TransferProcess.class); + } + + /** + * Creates a policy that matches the policy used by provider connector. + * + * @return The suitable {@link Policy}. + */ + private Policy createContractPolicy() { + + var regulateFilePathConstraint = AtomicConstraint.Builder.newInstance() + .leftExpression(new LiteralExpression("POLICY_REGULATE_FILE_PATH")) + .operator(Operator.EQ) + .rightExpression(new LiteralExpression(getFileFromRelativePath(desiredDestinationFilePath).getAbsolutePath())) + .build(); + + + var permission = Permission.Builder.newInstance() + .action(Action.Builder.newInstance().type("USE").build()) + .constraint(regulateFilePathConstraint) + .build(); + + + return Policy.Builder.newInstance() + .permission(permission) + .build(); + } + + /** + * Assert that a POST request to initiate a contract negotiation is successful. 
+ * This method corresponds to the command in the sample: {@code curl -X POST -H "Content-Type: application/json" -H "X-Api-Key: password" -d @policy/policy-02-provision/contractoffer.json "http://localhost:9192/management/v2/contractnegotiations"} + */ + void initiateContractNegotiation() throws IOException { + var contractOfferFile = new File(TestUtils.findBuildRoot(), PolicyProvisionSampleTest.CONTRACT_OFFER_FILE_PATH); + ObjectNode contractOfferJsonRootNode = MAPPER.readValue(contractOfferFile, ObjectNode.class); + + + + + var response = RestAssured + .given() + .headers(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE) + .contentType(ContentType.JSON) + .body(contractOfferJsonRootNode) + .when() + .post(INITIATE_CONTRACT_NEGOTIATION_URI) + .then() + .statusCode(HttpStatus.SC_OK) + .extract() + .response(); + + System.out.println("Response from contract negotiation initiation: " + response.asString()); + + contractNegotiationId = response.jsonPath().getString("@id"); + + if (contractNegotiationId == null || contractNegotiationId.isEmpty()) { + throw new IllegalStateException("Contract negotiation ID is null or empty after initiation"); + } + + System.out.println("Contract negotiation ID: " + contractNegotiationId); + } + + /** + * Assert that a GET request to look up a contract agreement is successful. + * This method corresponds to the command in the sample: {@code curl -X GET -H 'X-Api-Key: password' "http://localhost:9192/api/v1/management/contractnegotiations/{UUID}"} + */ + void lookUpContractAgreementId() { + if (contractNegotiationId == null || contractNegotiationId.isEmpty()) { + throw new IllegalArgumentException("Contract negotiation ID must not be null or empty"); + } + + // Wait for transfer to be completed. + await().atMost(timeout).pollInterval(pollInterval).untilAsserted(() -> { + String url = String.format(LOOK_UP_CONTRACT_AGREEMENT_URI, contractNegotiationId); + System.out.println("Looking up contract agreement ID using URL: " + url); + + contractAgreementId = RestAssured + .given() + .headers(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE) + .when() + .get(url) + .then() + .statusCode(HttpStatus.SC_OK) + .body("state", equalTo("FINALIZED")) + .body("contractAgreementId", not(emptyString())) + .extract().body().jsonPath().getString("contractAgreementId"); + + if (contractAgreementId == null || contractAgreementId.isEmpty()) { + throw new IllegalStateException("Contract agreement ID is null or empty after lookup"); + } + }); + } + + String requestTransferFile() throws IOException { + var fileTransferFile = new File(TestUtils.findBuildRoot(), PolicyProvisionSampleTest.TRANSFER_FILE_PATH); + String fileTransferJson = new String(Files.readAllBytes(Paths.get(fileTransferFile.getPath()))); + + fileTransferJson = fileTransferJson.replace("{{contract-agreement-id}}", contractAgreementId); + + var response = RestAssured + .given() + .headers(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE) + .contentType(ContentType.JSON) + .body(fileTransferJson) + .when() + .log().all() + .post(TRANSFER_PROCESS_URI) + .then() + .log().all() + .statusCode(HttpStatus.SC_OK) + .extract() + .response(); + return response.jsonPath().getString("@id"); + + + } + void checkTransferStatus(String transferProcessId, TransferProcessStates status) { + await() + .atMost(TIMEOUT) + .pollDelay(POLL_DELAY) + .pollInterval(POLL_INTERVAL) + .untilAsserted(() -> { + var state = get(transferProcessId); + assertThat(state).isEqualTo(status.name()); + }); + } + + private String get(String transferProcessId) { + return RestAssured + 
.given() + .headers(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE) + .when() + .get(String.format("%s/%s", TRANSFER_PROCESS_URI, transferProcessId)) + .then() + .statusCode(HttpStatus.SC_OK) + .extract().jsonPath().getString(EDC_STATE); + } +} \ No newline at end of file diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer00prerequisitesTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer00prerequisitesTest.java deleted file mode 100644 index b05bb404..00000000 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer00prerequisitesTest.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2023 Mercedes-Benz Tech Innovation GmbH - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Mercedes-Benz Tech Innovation GmbH - Initial implementation - * - */ - -package org.eclipse.edc.samples.transfer; - -import org.eclipse.edc.junit.annotations.EndToEndTest; -import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; - -import static org.eclipse.edc.samples.common.PrerequisitesCommon.getConsumer; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.getProvider; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.registerDataPlaneProvider; - -@EndToEndTest -public class Transfer00prerequisitesTest { - - @RegisterExtension - static EdcRuntimeExtension provider = getProvider(); - - @RegisterExtension - static EdcRuntimeExtension consumer = getConsumer(); - - @Test - void runSampleSteps() { - registerDataPlaneProvider(); - } -} diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer01negotiationTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer01negotiationTest.java index 2e3bce76..1acc054f 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer01negotiationTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer01negotiationTest.java @@ -29,7 +29,6 @@ import static org.eclipse.edc.samples.common.NegotiationCommon.negotiateContract; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getConsumer; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getProvider; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.runPrerequisites; @EndToEndTest public class Transfer01negotiationTest { @@ -45,7 +44,6 @@ public class Transfer01negotiationTest { @Test void runSampleSteps() { - runPrerequisites(); createAsset(); createPolicy(); createContractDefinition(); diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer02consumerPullTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer02consumerPullTest.java index 49523807..1ae29039 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer02consumerPullTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer02consumerPullTest.java @@ -15,18 +15,17 @@ package org.eclipse.edc.samples.transfer; +import io.restassured.common.mapper.TypeRef; import org.apache.http.HttpStatus; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates; +import 
org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates; import org.eclipse.edc.junit.annotations.EndToEndTest; import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; -import org.eclipse.edc.samples.util.HttpRequestLoggerConsumer; -import org.eclipse.edc.samples.util.HttpRequestLoggerContainer; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; -import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; +import java.util.Map; + import static io.restassured.RestAssured.given; import static org.apache.http.HttpHeaders.AUTHORIZATION; import static org.assertj.core.api.Assertions.assertThat; @@ -34,10 +33,9 @@ import static org.eclipse.edc.samples.common.NegotiationCommon.runNegotiation; import static org.eclipse.edc.samples.common.PrerequisitesCommon.API_KEY_HEADER_KEY; import static org.eclipse.edc.samples.common.PrerequisitesCommon.API_KEY_HEADER_VALUE; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.CONSUMER_PUBLIC_URL; +import static org.eclipse.edc.samples.common.PrerequisitesCommon.CONSUMER_MANAGEMENT_URL; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getConsumer; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getProvider; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.runPrerequisites; import static org.eclipse.edc.samples.util.TransferUtil.checkTransferStatus; import static org.eclipse.edc.samples.util.TransferUtil.startTransfer; import static org.hamcrest.Matchers.emptyString; @@ -47,9 +45,7 @@ @Testcontainers public class Transfer02consumerPullTest { - private static final HttpRequestLoggerConsumer LOG_CONSUMER = new HttpRequestLoggerConsumer(); private static final String START_TRANSFER_FILE_PATH = "transfer/transfer-02-consumer-pull/resources/start-transfer.json"; - private static final String AUTH_CODE_KEY = "authCode"; @RegisterExtension static EdcRuntimeExtension provider = getProvider(); @@ -57,34 +53,30 @@ public class Transfer02consumerPullTest { @RegisterExtension static EdcRuntimeExtension consumer = getConsumer(); - @Container - public static HttpRequestLoggerContainer httpRequestLoggerContainer = new HttpRequestLoggerContainer(LOG_CONSUMER); - - @BeforeAll - static void setUp() { - httpRequestLoggerContainer.start(); - } - @Test void runSampleSteps() { - runPrerequisites(); var requestBody = getFileContentFromRelativePath(START_TRANSFER_FILE_PATH); var contractAgreementId = runNegotiation(); var transferProcessId = startTransfer(requestBody, contractAgreementId); checkTransferStatus(transferProcessId, TransferProcessStates.STARTED); - var authCode = LOG_CONSUMER.getJsonValue(AUTH_CODE_KEY); - checkData(authCode); - } - private static void checkData(String authCode) { + var edr = given() + .when() + .get(CONSUMER_MANAGEMENT_URL + "/v3/edrs/{id}/dataaddress", transferProcessId) + .then() + .log().ifValidationFails() + .statusCode(200) + .extract().body().as(new TypeRef>() { + }); + var result = given() - .headers(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE, AUTHORIZATION, authCode) + .header(API_KEY_HEADER_KEY, API_KEY_HEADER_VALUE) + .header(AUTHORIZATION, edr.get("authorization")) .when() - .get(CONSUMER_PUBLIC_URL) + .get(edr.get("endpoint").toString()) .then() .statusCode(HttpStatus.SC_OK) - .log() - .ifError() + .log().ifValidationFails() .body("[0].name", not(emptyString())) .extract() .jsonPath() @@ -92,4 +84,5 @@ private static void 
checkData(String authCode) { assertThat(result).isEqualTo("Leanne Graham"); } + } diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer03providerPushTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer03providerPushTest.java index a607c159..68c030a8 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer03providerPushTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer03providerPushTest.java @@ -15,7 +15,7 @@ package org.eclipse.edc.samples.transfer; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates; import org.eclipse.edc.junit.annotations.EndToEndTest; import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; import org.eclipse.edc.samples.util.HttpRequestLoggerConsumer; @@ -31,7 +31,6 @@ import static org.eclipse.edc.samples.common.NegotiationCommon.runNegotiation; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getConsumer; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getProvider; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.runPrerequisites; import static org.eclipse.edc.samples.util.TransferUtil.checkTransferStatus; import static org.eclipse.edc.samples.util.TransferUtil.startTransfer; @@ -58,7 +57,6 @@ static void setUp() { @Test void runSampleSteps() { - runPrerequisites(); var contractAgreementId = runNegotiation(); var requestBody = getFileContentFromRelativePath(START_TRANSFER_FILE_PATH); var transferProcessId = startTransfer(requestBody, contractAgreementId); diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer04eventConsumerTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer04eventConsumerTest.java index 60f42781..8c77f665 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer04eventConsumerTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/Transfer04eventConsumerTest.java @@ -15,7 +15,7 @@ package org.eclipse.edc.samples.transfer; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates; import org.eclipse.edc.junit.annotations.EndToEndTest; import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; import org.eclipse.edc.samples.util.HttpRequestLoggerContainer; @@ -32,7 +32,6 @@ import static org.eclipse.edc.samples.common.NegotiationCommon.runNegotiation; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getConsumer; import static org.eclipse.edc.samples.common.PrerequisitesCommon.getProvider; -import static org.eclipse.edc.samples.common.PrerequisitesCommon.runPrerequisites; import static org.eclipse.edc.samples.util.TransferUtil.checkTransferStatus; import static org.eclipse.edc.samples.util.TransferUtil.startTransfer; @@ -59,7 +58,6 @@ static void setUp() { void runSampleSteps() { var standardOutputStream = new ByteArrayOutputStream(); System.setOut(new PrintStream(standardOutputStream)); - runPrerequisites(); var requestBody = getFileContentFromRelativePath(START_TRANSFER_FILE_PATH); var contractAgreementId = runNegotiation(); var transferProcessId = startTransfer(requestBody, contractAgreementId); diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Participant.java 
b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Participant.java deleted file mode 100644 index 53f14490..00000000 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Participant.java +++ /dev/null @@ -1,443 +0,0 @@ -/* - * Copyright (c) 2023 Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - initial API and implementation - * - */ - -package org.eclipse.edc.samples.transfer.streaming; - -import com.fasterxml.jackson.databind.ObjectMapper; -import io.restassured.specification.RequestSpecification; -import jakarta.json.Json; -import jakarta.json.JsonArray; -import jakarta.json.JsonObject; -import org.eclipse.edc.connector.contract.spi.ContractOfferId; -import org.eclipse.edc.jsonld.TitaniumJsonLd; -import org.eclipse.edc.jsonld.spi.JsonLd; -import org.eclipse.edc.jsonld.util.JacksonJsonLd; -import org.eclipse.edc.spi.EdcException; -import org.eclipse.edc.spi.monitor.ConsoleMonitor; - -import java.net.URI; -import java.time.Duration; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicReference; - -import static io.restassured.RestAssured.given; -import static io.restassured.http.ContentType.JSON; -import static jakarta.json.Json.createArrayBuilder; -import static jakarta.json.Json.createObjectBuilder; -import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; -import static org.eclipse.edc.connector.contract.spi.types.negotiation.ContractNegotiationStates.FINALIZED; -import static org.eclipse.edc.jsonld.spi.JsonLdKeywords.CONTEXT; -import static org.eclipse.edc.jsonld.spi.JsonLdKeywords.ID; -import static org.eclipse.edc.jsonld.spi.JsonLdKeywords.TYPE; -import static org.eclipse.edc.jsonld.spi.PropertyAndTypeNames.DCAT_DATASET_ATTRIBUTE; -import static org.eclipse.edc.jsonld.spi.PropertyAndTypeNames.ODRL_POLICY_ATTRIBUTE; -import static org.eclipse.edc.spi.CoreConstants.EDC_NAMESPACE; -import static org.eclipse.edc.spi.CoreConstants.EDC_PREFIX; - -/** - * Essentially a wrapper around the management API enabling to test interactions with other participants, eg. catalog, transfer... 
- */ -public class Participant { - - private static final String DSP_PROTOCOL = "dataspace-protocol-http"; - private static final Duration TIMEOUT = Duration.ofSeconds(30); - - protected String id; - protected String name; - protected Endpoint managementEndpoint; - protected Endpoint protocolEndpoint; - protected Endpoint controlEndpoint; - protected JsonLd jsonLd; - protected ObjectMapper objectMapper; - - protected Participant() { - } - - public String getName() { - return name; - } - - public void registerDataPlane(List sourceTypes, List destinationTypes) { - var jsonObject = Json.createObjectBuilder() - .add(CONTEXT, createObjectBuilder().add(EDC_PREFIX, EDC_NAMESPACE)) - .add(ID, UUID.randomUUID().toString()) - .add(EDC_NAMESPACE + "url", controlEndpoint.url + "/transfer") - .add(EDC_NAMESPACE + "allowedSourceTypes", createArrayBuilder(sourceTypes)) - .add(EDC_NAMESPACE + "allowedDestTypes", createArrayBuilder(destinationTypes)) - .build(); - - managementEndpoint.baseRequest() - .contentType(JSON) - .body(jsonObject.toString()) - .when() - .post("/v2/dataplanes") - .then() - .statusCode(200); - } - - public String createAsset(String requestBody) { - return managementEndpoint.baseRequest() - .contentType(JSON) - .body(requestBody) - .when() - .post("/v3/assets") - .then() - .statusCode(200) - .contentType(JSON) - .extract().jsonPath().getString(ID); - } - - public String createPolicyDefinition(String requestBody) { - return managementEndpoint.baseRequest() - .contentType(JSON) - .body(requestBody) - .when() - .post("/v2/policydefinitions") - .then() - .statusCode(200) - .contentType(JSON) - .extract().jsonPath().getString(ID); - } - - public String createContractDefinition(String requestBody) { - return managementEndpoint.baseRequest() - .contentType(JSON) - .body(requestBody) - .when() - .post("/v2/contractdefinitions") - .then() - .statusCode(200) - .extract().jsonPath().getString(ID); - } - - /** - * Request provider catalog. - * - * @param provider data provider - * @return list of {@link org.eclipse.edc.catalog.spi.Dataset}. - */ - public JsonArray getCatalogDatasets(Participant provider) { - var datasetReference = new AtomicReference(); - var requestBody = createObjectBuilder() - .add(CONTEXT, createObjectBuilder().add(EDC_PREFIX, EDC_NAMESPACE)) - .add(TYPE, "CatalogRequest") - .add("counterPartyAddress", provider.protocolEndpoint.url.toString()) - .add("protocol", DSP_PROTOCOL) - .build(); - - await().atMost(TIMEOUT).untilAsserted(() -> { - var response = managementEndpoint.baseRequest() - .contentType(JSON) - .when() - .body(requestBody) - .post("/v2/catalog/request") - .then() - .log().all() - .statusCode(200) - .extract().body().asString(); - - var responseBody = objectMapper.readValue(response, JsonObject.class); - - var catalog = jsonLd.expand(responseBody).orElseThrow(f -> new EdcException(f.getFailureDetail())); - - var datasets = catalog.getJsonArray(DCAT_DATASET_ATTRIBUTE); - assertThat(datasets).hasSizeGreaterThan(0); - - datasetReference.set(datasets); - }); - - return datasetReference.get(); - } - - /** - * Get first {@link org.eclipse.edc.catalog.spi.Dataset} from provider matching the given asset id. - * - * @param provider data provider - * @param assetId asset id - * @return dataset. 
- */ - public JsonObject getDatasetForAsset(Participant provider, String assetId) { - var datasetReference = new AtomicReference(); - var requestBody = createObjectBuilder() - .add(CONTEXT, createObjectBuilder().add(EDC_PREFIX, EDC_NAMESPACE)) - .add(TYPE, "DatasetRequest") - .add(ID, assetId) - .add("counterPartyAddress", provider.protocolEndpoint.url.toString()) - .add("protocol", DSP_PROTOCOL) - .build(); - - await().atMost(TIMEOUT).untilAsserted(() -> { - var response = managementEndpoint.baseRequest() - .contentType(JSON) - .when() - .body(requestBody) - .post("/v2/catalog/dataset/request") - .then() - .log().all() - .statusCode(200) - .extract().body().asString(); - - var compacted = objectMapper.readValue(response, JsonObject.class); - - var dataset = jsonLd.expand(compacted).orElseThrow(f -> new EdcException(f.getFailureDetail())); - - datasetReference.set(dataset); - }); - - return datasetReference.get(); - } - - /** - * Initiate negotiation with a provider. - * - * @param provider data provider - * @param offerId contract definition id - * @param assetId asset id - * @param policy policy - * @return id of the contract agreement. - */ - public String negotiateContract(Participant provider, String offerId, String assetId, JsonObject policy) { - var requestBody = createObjectBuilder() - .add(CONTEXT, createObjectBuilder().add(EDC_PREFIX, EDC_NAMESPACE)) - .add(TYPE, "ContractRequestDto") - .add("providerId", provider.id) - .add("counterPartyAddress", provider.protocolEndpoint.url.toString()) - .add("protocol", DSP_PROTOCOL) - .add("policy", jsonLd.compact(policy).getContent()) - .build(); - - var negotiationId = managementEndpoint.baseRequest() - .contentType(JSON) - .body(requestBody) - .when() - .post("/v2/contractnegotiations") - .then() - .statusCode(200) - .extract().body().jsonPath().getString(ID); - - await().atMost(TIMEOUT).untilAsserted(() -> { - var state = getContractNegotiationState(negotiationId); - assertThat(state).isEqualTo(FINALIZED.name()); - }); - - return getContractAgreementId(negotiationId); - } - - /** - * Initiate data transfer. - * - * @param provider data provider - * @param contractAgreementId contract agreement id - * @param assetId asset id - * @param privateProperties private properties - * @param destination data destination address - * @return id of the transfer process. - */ - public String initiateTransfer(Participant provider, String contractAgreementId, String assetId, JsonObject privateProperties, JsonObject destination) { - var requestBody = createObjectBuilder() - .add(CONTEXT, createObjectBuilder().add(EDC_PREFIX, EDC_NAMESPACE)) - .add(TYPE, "TransferRequest") - .add("dataDestination", destination) - .add("protocol", DSP_PROTOCOL) - .add("assetId", assetId) - .add("contractId", contractAgreementId) - .add("connectorId", provider.id) - .add("counterPartyAddress", provider.protocolEndpoint.url.toString()) - .add("privateProperties", privateProperties) - .build(); - - return managementEndpoint.baseRequest() - .contentType(JSON) - .body(requestBody) - .when() - .post("/v2/transferprocesses") - .then() - .log().ifError() - .statusCode(200) - .extract().body().jsonPath().getString(ID); - } - - /** - * Request a provider asset: - * - retrieves the contract definition associated with the asset, - * - handles the contract negotiation, - * - initiate the data transfer. 
- * - * @param provider data provider - * @param assetId asset id - * @param privateProperties private properties of the data request - * @param destination data destination - * @return transfer process id. - */ - public String requestAsset(Participant provider, String assetId, JsonObject privateProperties, JsonObject destination) { - var dataset = getDatasetForAsset(provider, assetId); - var policy = dataset.getJsonArray(ODRL_POLICY_ATTRIBUTE).get(0).asJsonObject(); - var contractDefinitionId = ContractOfferId.parseId(policy.getString(ID)) - .orElseThrow(failure -> new RuntimeException(failure.getFailureDetail())); - var contractAgreementId = negotiateContract(provider, contractDefinitionId.toString(), assetId, policy); - var transferProcessId = initiateTransfer(provider, contractAgreementId, assetId, privateProperties, destination); - assertThat(transferProcessId).isNotNull(); - return transferProcessId; - } - - /** - * Get current state of a transfer process. - * - * @param id transfer process id - * @return state of the transfer process. - */ - public String getTransferProcessState(String id) { - return managementEndpoint.baseRequest() - .contentType(JSON) - .when() - .get("/v2/transferprocesses/{id}/state", id) - .then() - .statusCode(200) - .extract().body().jsonPath().getString("state"); - } - - private String getContractNegotiationState(String id) { - return managementEndpoint.baseRequest() - .contentType(JSON) - .when() - .get("/v2/contractnegotiations/{id}/state", id) - .then() - .statusCode(200) - .extract().body().jsonPath().getString("state"); - } - - - private String getContractAgreementId(String negotiationId) { - var contractAgreementIdAtomic = new AtomicReference(); - - await().atMost(TIMEOUT).untilAsserted(() -> { - var agreementId = getContractNegotiationField(negotiationId, "contractAgreementId"); - assertThat(agreementId).isNotNull().isInstanceOf(String.class); - - contractAgreementIdAtomic.set(agreementId); - }); - - var contractAgreementId = contractAgreementIdAtomic.get(); - assertThat(id).isNotEmpty(); - return contractAgreementId; - } - - private String getContractNegotiationField(String negotiationId, String fieldName) { - return managementEndpoint.baseRequest() - .contentType(JSON) - .when() - .get("/v2/contractnegotiations/{id}", negotiationId) - .then() - .statusCode(200) - .extract().body().jsonPath() - .getString(fieldName); - } - - /** - * Represent an endpoint exposed by a {@link Participant}. - */ - public static class Endpoint { - private final URI url; - private final Map headers; - - public Endpoint(URI url) { - this.url = url; - this.headers = new HashMap<>(); - } - - public Endpoint(URI url, Map headers) { - this.url = url; - this.headers = headers; - } - - public RequestSpecification baseRequest() { - return given().baseUri(url.toString()).headers(headers); - } - - public URI getUrl() { - return url; - } - } - - public static class Builder
<P extends Participant, B extends Builder<P, B>
> { - protected final P participant; - - protected Builder(P participant) { - this.participant = participant; - } - - public static > Builder newInstance() { - return new Builder<>(new Participant()); - } - - public B id(String id) { - participant.id = id; - return self(); - } - - public B name(String name) { - participant.name = name; - return self(); - } - - public B managementEndpoint(Endpoint managementEndpoint) { - participant.managementEndpoint = managementEndpoint; - return self(); - } - - public B protocolEndpoint(Endpoint protocolEndpoint) { - participant.protocolEndpoint = protocolEndpoint; - return self(); - } - - public B controlEndpoint(Endpoint controlEndpoint) { - participant.controlEndpoint = controlEndpoint; - return self(); - } - - public B jsonLd(JsonLd jsonLd) { - participant.jsonLd = jsonLd; - return self(); - } - - public B objectMapper(ObjectMapper objectMapper) { - participant.objectMapper = objectMapper; - return self(); - } - - public Participant build() { - Objects.requireNonNull(participant.id, "id"); - Objects.requireNonNull(participant.name, "name"); - Objects.requireNonNull(participant.managementEndpoint, "managementEndpoint"); - Objects.requireNonNull(participant.protocolEndpoint, "protocolEndpoint"); - if (participant.jsonLd == null) { - participant.jsonLd = new TitaniumJsonLd(new ConsoleMonitor()); - } - if (participant.objectMapper == null) { - participant.objectMapper = JacksonJsonLd.createObjectMapper(); - } - return participant; - } - - @SuppressWarnings("unchecked") - private B self() { - return (B) this; - } - } -} diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming01httpToHttpTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming01httpToHttpTest.java index 12b70552..f4c92958 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming01httpToHttpTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming01httpToHttpTest.java @@ -18,7 +18,7 @@ import okhttp3.mockwebserver.MockWebServer; import org.eclipse.edc.junit.annotations.EndToEndTest; import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; -import org.eclipse.edc.junit.testfixtures.TestUtils; +import org.eclipse.edc.util.io.Ports; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -28,7 +28,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; -import java.util.List; import java.util.Map; import java.util.UUID; @@ -36,7 +35,7 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; -import static org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates.STARTED; +import static org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates.STARTED; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileContentFromRelativePath; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileFromRelativePath; @@ -46,20 +45,20 @@ public class Streaming01httpToHttpTest { private static final String SAMPLE_FOLDER = "transfer/streaming/streaming-01-http-to-http"; private static final Duration TIMEOUT = Duration.ofSeconds(30); - private static final Participant PROVIDER = Participant.Builder.newInstance() + private static final StreamingParticipant PROVIDER = 
StreamingParticipant.Builder.newStreamingInstance() .name("provider") .id("provider") - .managementEndpoint(new Participant.Endpoint(URI.create("http://localhost:18181/management"))) - .protocolEndpoint(new Participant.Endpoint(URI.create("http://localhost:18182/protocol"))) - .controlEndpoint(new Participant.Endpoint(URI.create("http://localhost:18183/control"))) + .managementEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18181/management"))) + .protocolEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18182/protocol"))) + .controlEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18183/control"))) .build(); - private static final Participant CONSUMER = Participant.Builder.newInstance() + private static final StreamingParticipant CONSUMER = StreamingParticipant.Builder.newStreamingInstance() .name("consumer") .id("consumer") - .managementEndpoint(new Participant.Endpoint(URI.create("http://localhost:28181/management"))) - .protocolEndpoint(new Participant.Endpoint(URI.create("http://localhost:28182/protocol"))) - .controlEndpoint(new Participant.Endpoint(URI.create("http://localhost:28183/control"))) + .managementEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28181/management"))) + .protocolEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28182/protocol"))) + .controlEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28183/control"))) .build(); @RegisterExtension @@ -79,7 +78,7 @@ public class Streaming01httpToHttpTest { "edc.fs.config", getFileFromRelativePath(SAMPLE_FOLDER + "/streaming-01-runtime/consumer.properties").getAbsolutePath() ) ); - private final int httpReceiverPort = TestUtils.getFreePort(); + private final int httpReceiverPort = Ports.getFreePort(); private final MockWebServer consumerReceiverServer = new MockWebServer(); @BeforeEach @@ -90,7 +89,6 @@ void setUp() throws IOException { @Test void streamData() throws IOException { var source = Files.createTempDirectory("source"); - PROVIDER.registerDataPlane(List.of("HttpStreaming"), List.of("HttpData")); PROVIDER.createAsset(getFileContentFromRelativePath(SAMPLE_FOLDER + "/asset.json") .replace("{{sourceFolder}}", source.toString())); @@ -101,10 +99,13 @@ void streamData() throws IOException { .add("type", "HttpData") .add("baseUrl", "http://localhost:" + httpReceiverPort) .build(); - var transferProcessId = CONSUMER.requestAsset(PROVIDER, "stream-asset", Json.createObjectBuilder().build(), destination); + var transferProcessId = CONSUMER.requestAssetFrom("stream-asset", PROVIDER) + .withDestination(destination) + .withTransferType("HttpData-PUSH") + .execute(); await().atMost(TIMEOUT).untilAsserted(() -> { - String state = CONSUMER.getTransferProcessState(transferProcessId); + var state = CONSUMER.getTransferProcessState(transferProcessId); assertThat(state).isEqualTo(STARTED.name()); }); diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming02KafkaToHttpTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming02KafkaToHttpTest.java index de876440..8d03d0bd 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming02KafkaToHttpTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming02KafkaToHttpTest.java @@ -23,7 +23,7 @@ import org.apache.kafka.common.serialization.StringSerializer; import org.eclipse.edc.junit.annotations.EndToEndTest; import 
org.eclipse.edc.junit.extensions.EdcRuntimeExtension; -import org.eclipse.edc.junit.testfixtures.TestUtils; +import org.eclipse.edc.util.io.Ports; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -35,7 +35,6 @@ import java.io.IOException; import java.net.URI; import java.time.Duration; -import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Executors; @@ -43,7 +42,7 @@ import static java.util.concurrent.TimeUnit.MICROSECONDS; import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; -import static org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates.STARTED; +import static org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates.STARTED; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileContentFromRelativePath; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileFromRelativePath; @@ -56,19 +55,19 @@ public class Streaming02KafkaToHttpTest { private static final String MAX_DURATION = "PT30S"; private static final String SAMPLE_FOLDER = "transfer/streaming/streaming-02-kafka-to-http"; private static final Duration TIMEOUT = Duration.ofSeconds(30); - private static final Participant PROVIDER = Participant.Builder.newInstance() + private static final StreamingParticipant PROVIDER = StreamingParticipant.Builder.newStreamingInstance() .name("provider") .id("provider") - .managementEndpoint(new Participant.Endpoint(URI.create("http://localhost:18181/management"))) - .protocolEndpoint(new Participant.Endpoint(URI.create("http://localhost:18182/protocol"))) - .controlEndpoint(new Participant.Endpoint(URI.create("http://localhost:18183/control"))) + .managementEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18181/management"))) + .protocolEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18182/protocol"))) + .controlEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18183/control"))) .build(); - private static final Participant CONSUMER = Participant.Builder.newInstance() + private static final StreamingParticipant CONSUMER = StreamingParticipant.Builder.newStreamingInstance() .name("consumer") .id("consumer") - .managementEndpoint(new Participant.Endpoint(URI.create("http://localhost:28181/management"))) - .protocolEndpoint(new Participant.Endpoint(URI.create("http://localhost:28182/protocol"))) - .controlEndpoint(new Participant.Endpoint(URI.create("http://localhost:28183/control"))) + .managementEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28181/management"))) + .protocolEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28182/protocol"))) + .controlEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28183/control"))) .build(); @Container @@ -98,7 +97,7 @@ public class Streaming02KafkaToHttpTest { .getAbsolutePath() ) ); - private final int httpReceiverPort = TestUtils.getFreePort(); + private final int httpReceiverPort = Ports.getFreePort(); private final MockWebServer consumerReceiverServer = new MockWebServer(); @BeforeEach @@ -108,9 +107,6 @@ void setUp() throws IOException { @Test void streamData() { - - PROVIDER.registerDataPlane(List.of("Kafka"), List.of("HttpData")); - PROVIDER.createAsset(getFileContentFromRelativePath(SAMPLE_FOLDER + "/1-asset.json") .replace("{{bootstrap.servers}}", 
kafkaContainer.getBootstrapServers()) .replace("{{max.duration}}", MAX_DURATION) @@ -124,11 +120,13 @@ void streamData() { .add("baseUrl", "http://localhost:" + httpReceiverPort) .build(); - var transferProcessId = CONSUMER.requestAsset(PROVIDER, "kafka-stream-asset", - Json.createObjectBuilder().build(), destination); + var transferProcessId = CONSUMER.requestAssetFrom("kafka-stream-asset", PROVIDER) + .withDestination(destination) + .withTransferType("HttpData-PUSH") + .execute(); await().atMost(TIMEOUT).untilAsserted(() -> { - String state = CONSUMER.getTransferProcessState(transferProcessId); + var state = CONSUMER.getTransferProcessState(transferProcessId); assertThat(state).isEqualTo(STARTED.name()); }); diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming03KafkaToKafkaTest.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming03KafkaToKafkaTest.java index 2416592f..0b8bf86a 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming03KafkaToKafkaTest.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/Streaming03KafkaToKafkaTest.java @@ -40,8 +40,8 @@ import org.apache.kafka.common.serialization.StringSerializer; import org.eclipse.edc.junit.annotations.EndToEndTest; import org.eclipse.edc.junit.extensions.EdcRuntimeExtension; -import org.eclipse.edc.junit.testfixtures.TestUtils; import org.eclipse.edc.spi.types.domain.edr.EndpointDataReference; +import org.eclipse.edc.util.io.Ports; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.jupiter.api.BeforeEach; @@ -69,10 +69,10 @@ import static java.util.concurrent.TimeUnit.SECONDS; import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; -import static org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates.STARTED; +import static org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates.STARTED; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileContentFromRelativePath; import static org.eclipse.edc.samples.common.FileTransferCommon.getFileFromRelativePath; -import static org.eclipse.edc.spi.CoreConstants.EDC_NAMESPACE; +import static org.eclipse.edc.spi.constants.CoreConstants.EDC_NAMESPACE; @Testcontainers @EndToEndTest @@ -84,19 +84,19 @@ public class Streaming03KafkaToKafkaTest { private static final Path SAMPLE_FOLDER = Path.of("transfer", "streaming", SAMPLE_NAME); private static final Path RUNTIME_PATH = SAMPLE_FOLDER.resolve(RUNTIME_NAME); private static final Duration TIMEOUT = Duration.ofSeconds(60); - private static final Participant PROVIDER = Participant.Builder.newInstance() + private static final StreamingParticipant PROVIDER = StreamingParticipant.Builder.newStreamingInstance() .name("provider") .id("provider") - .managementEndpoint(new Participant.Endpoint(URI.create("http://localhost:18181/management"))) - .protocolEndpoint(new Participant.Endpoint(URI.create("http://localhost:18182/protocol"))) - .controlEndpoint(new Participant.Endpoint(URI.create("http://localhost:18183/control"))) + .managementEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18181/management"))) + .protocolEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18182/protocol"))) + .controlEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:18183/control"))) .build(); - private static final Participant 
CONSUMER = Participant.Builder.newInstance() + private static final StreamingParticipant CONSUMER = StreamingParticipant.Builder.newStreamingInstance() .name("consumer") .id("consumer") - .managementEndpoint(new Participant.Endpoint(URI.create("http://localhost:28181/management"))) - .protocolEndpoint(new Participant.Endpoint(URI.create("http://localhost:28182/protocol"))) - .controlEndpoint(new Participant.Endpoint(URI.create("http://localhost:28183/control"))) + .managementEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28181/management"))) + .protocolEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28182/protocol"))) + .controlEndpoint(new StreamingParticipant.Endpoint(URI.create("http://localhost:28183/control"))) .build(); private static final String GROUP_ID = "group_id"; @@ -126,7 +126,7 @@ public class Streaming03KafkaToKafkaTest { ) ); - private final int httpReceiverPort = TestUtils.getFreePort(); + private final int httpReceiverPort = Ports.getFreePort(); private final MockWebServer edrReceiverServer = new MockWebServer(); @BeforeEach @@ -155,7 +155,11 @@ void streamData() throws InterruptedException, JsonProcessingException { var transferProcessPrivateProperties = Json.createObjectBuilder() .add("receiverHttpEndpoint", "http://localhost:" + httpReceiverPort) .build(); - var transferProcessId = CONSUMER.requestAsset(PROVIDER, "kafka-stream-asset", transferProcessPrivateProperties, destination); + var transferProcessId = CONSUMER.requestAssetFrom("kafka-stream-asset", PROVIDER) + .withPrivateProperties(transferProcessPrivateProperties) + .withDestination(destination) + .withTransferType("KafkaBroker-PULL") + .execute(); await().atMost(TIMEOUT).untilAsserted(() -> { var state = CONSUMER.getTransferProcessState(transferProcessId); diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/StreamingParticipant.java b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/StreamingParticipant.java new file mode 100644 index 00000000..6b8de79a --- /dev/null +++ b/system-tests/src/test/java/org/eclipse/edc/samples/transfer/streaming/StreamingParticipant.java @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2023 Bayerische Motoren Werke Aktiengesellschaft (BMW AG) + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - initial API and implementation + * + */ + +package org.eclipse.edc.samples.transfer.streaming; + +import org.eclipse.edc.connector.controlplane.test.system.utils.Participant; + +import static io.restassured.http.ContentType.JSON; +import static org.eclipse.edc.jsonld.spi.JsonLdKeywords.ID; + +/** + * Essentially a wrapper around the management API enabling to test interactions with other participants, eg. catalog, transfer... 
+ */ +public class StreamingParticipant extends Participant { + + protected Endpoint controlEndpoint; + + protected StreamingParticipant() { + } + + public String getName() { + return name; + } + + public String createAsset(String requestBody) { + return managementEndpoint.baseRequest() + .contentType(JSON) + .body(requestBody) + .when() + .post("/v3/assets") + .then() + .statusCode(200) + .contentType(JSON) + .extract().jsonPath().getString(ID); + } + + public String createPolicyDefinition(String requestBody) { + return managementEndpoint.baseRequest() + .contentType(JSON) + .body(requestBody) + .when() + .post("/v3/policydefinitions") + .then() + .statusCode(200) + .contentType(JSON) + .extract().jsonPath().getString(ID); + } + + public String createContractDefinition(String requestBody) { + return managementEndpoint.baseRequest() + .contentType(JSON) + .body(requestBody) + .when() + .post("/v3/contractdefinitions") + .then() + .statusCode(200) + .extract().jsonPath().getString(ID); + } + + public static class Builder
<P extends StreamingParticipant, B extends Builder<P, B>
> extends Participant.Builder { + + protected Builder(P participant) { + super(participant); + } + + public static > Builder newStreamingInstance() { + return new Builder<>(new StreamingParticipant()); + } + + public B controlEndpoint(Endpoint controlEndpoint) { + participant.controlEndpoint = controlEndpoint; + return self(); + } + + @Override + public StreamingParticipant build() { + return (StreamingParticipant) super.build(); + } + } +} diff --git a/system-tests/src/test/java/org/eclipse/edc/samples/util/TransferUtil.java b/system-tests/src/test/java/org/eclipse/edc/samples/util/TransferUtil.java index 769ed50c..e0a1ed6a 100644 --- a/system-tests/src/test/java/org/eclipse/edc/samples/util/TransferUtil.java +++ b/system-tests/src/test/java/org/eclipse/edc/samples/util/TransferUtil.java @@ -17,11 +17,12 @@ import io.restassured.http.ContentType; import org.apache.http.HttpStatus; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcessStates; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcessStates; import java.time.Duration; import static io.restassured.RestAssured.given; +import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; import static org.eclipse.edc.jsonld.spi.JsonLdKeywords.ID; import static org.eclipse.edc.samples.common.PrerequisitesCommon.API_KEY_HEADER_KEY; @@ -37,7 +38,7 @@ public class TransferUtil { public static final Duration POLL_INTERVAL = Duration.ofMillis(500); private static final String CONTRACT_AGREEMENT_ID_KEY = "{{contract-agreement-id}}"; - private static final String V2_TRANSFER_PROCESSES_PATH = "/v2/transferprocesses/"; + private static final String V2_TRANSFER_PROCESSES_PATH = "/v3/transferprocesses/"; private static final String EDC_STATE = "state"; public static void get(String url) { @@ -104,9 +105,9 @@ public static void checkTransferStatus(String transferProcessId, TransferProcess .atMost(TIMEOUT) .pollDelay(POLL_DELAY) .pollInterval(POLL_INTERVAL) - .until( - () -> get(CONSUMER_MANAGEMENT_URL + V2_TRANSFER_PROCESSES_PATH + transferProcessId, EDC_STATE), - (result) -> status.name().equals(result) - ); + .untilAsserted(() -> { + var state = get(CONSUMER_MANAGEMENT_URL + V2_TRANSFER_PROCESSES_PATH + transferProcessId, EDC_STATE); + assertThat(state).isEqualTo(status.name()); + }); } } diff --git a/system-tests/src/test/resources/policy/config-eu.properties b/system-tests/src/test/resources/policy/config-eu.properties new file mode 100644 index 00000000..73efd16e --- /dev/null +++ b/system-tests/src/test/resources/policy/config-eu.properties @@ -0,0 +1,12 @@ +web.http.port=29191 +web.http.path=/api +web.http.management.port=29193 +web.http.management.path=/management +web.http.protocol.port=29194 +web.http.protocol.path=/protocol + +edc.api.auth.key=password +edc.dsp.callback.address=http://localhost:29194/protocol +edc.participant.id=consumer +edc.ids.id=urn:connector:consumer +edc.mock.region=eu diff --git a/system-tests/src/test/resources/policy/config-us.properties b/system-tests/src/test/resources/policy/config-us.properties new file mode 100644 index 00000000..f1f61b84 --- /dev/null +++ b/system-tests/src/test/resources/policy/config-us.properties @@ -0,0 +1,12 @@ +web.http.port=29191 +web.http.path=/api +web.http.management.port=29193 +web.http.management.path=/management +web.http.protocol.port=29194 +web.http.protocol.path=/protocol + +edc.api.auth.key=password +edc.dsp.callback.address=http://localhost:29194/protocol +edc.participant.id=consumer 
+edc.ids.id=urn:connector:consumer +edc.mock.region=us diff --git a/transfer/streaming/streaming-01-http-to-http/README.md b/transfer/streaming/streaming-01-http-to-http/README.md index 66331433..9d71bab4 100644 --- a/transfer/streaming/streaming-01-http-to-http/README.md +++ b/transfer/streaming/streaming-01-http-to-http/README.md @@ -26,15 +26,6 @@ export EDC_FS_CONFIG=transfer/streaming/streaming-01-http-to-http/streaming-01-r java -jar transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/build/libs/connector.jar ``` -#### Register Data Plane on provider -The provider connector needs to be aware of the streaming capabilities of the embedded dataplane, which can be registered with -this call: -```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/dataplane.json -X POST "http://localhost:18181/management/v2/dataplanes" -``` - -If you look at the `dataplane.json` you'll notice that the supported source is `HttpStreaming` and the supported sink is `HttpData`. - #### Register Asset, Policy Definition and Contract Definition on provider A "source" folder must first be created where the data plane will get the messages to be sent to the consumers. To do this, create a temp folder: @@ -58,11 +49,11 @@ curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-htt ``` ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/policy-definition.json -X POST "http://localhost:18181/management/v2/policydefinitions" +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/policy-definition.json -X POST "http://localhost:18181/management/v3/policydefinitions" ``` ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/contract-definition.json -X POST "http://localhost:18181/management/v2/contractdefinitions" +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/contract-definition.json -X POST "http://localhost:18181/management/v3/contractdefinitions" ``` #### Negotiate the contract @@ -70,7 +61,7 @@ The typical flow requires fetching the catalog from the consumer side and using However, in this sample case, we already have the provider asset (`"stream-asset"`) so we can get the related dataset directly with this call: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/get-dataset.json -X POST "http://localhost:28181/management/v2/catalog/dataset/request" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/get-dataset.json -X POST "http://localhost:28181/management/v3/catalog/dataset/request" -s | jq ``` The output will be something like: @@ -107,7 +98,7 @@ The output will be something like: With the `odrl:hasPolicy/@id` we can now replace it in the [negotiate-contract.json](negotiate-contract.json) file and request the contract negotiation: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/negotiate-contract.json -X POST "http://localhost:28181/management/v2/contractnegotiations" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/negotiate-contract.json -X POST "http://localhost:28181/management/v3/contractnegotiations" -s | jq ``` ### Start the transfer @@ -122,13 +113,13 @@ It will run on port 4000. 
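The logging webserver's only job is to print whatever it receives on port 4000. If you want a quick stand-in instead of the sample's own logger, a minimal sketch using the JDK's built-in HTTP server does the same thing — this is a hypothetical helper, not the sample's actual implementation:

```java
import com.sun.net.httpserver.HttpServer;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;

// Hypothetical stand-in for the request logger on port 4000: it prints method,
// path and body of every incoming request and replies with an empty 200.
public class RequestLogger {
    public static void main(String[] args) throws IOException {
        var server = HttpServer.create(new InetSocketAddress(4000), 0);
        server.createContext("/", exchange -> {
            var body = new String(exchange.getRequestBody().readAllBytes(), StandardCharsets.UTF_8);
            System.out.printf("%s %s%n%s%n", exchange.getRequestMethod(), exchange.getRequestURI(), body);
            exchange.sendResponseHeaders(200, -1); // 200, no response body
            exchange.close();
        });
        server.start();
    }
}
```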
At this point the contract agreement should already been issued, to verify that, please check the contract negotiation state with this call, replacing `{{contract-negotiation-id}}` with the id returned by the negotiate contract call. ```shell -curl "http://localhost:28181/management/v2/contractnegotiations/{{contract-negotiation-id}}" -s | jq +curl "http://localhost:28181/management/v3/contractnegotiations/{{contract-negotiation-id}}" -s | jq ``` If the `edc:contractAgreementId` is valued, it can be used to start the transfer, replacing it in the [transfer.json](transfer.json) file to `{{contract-agreement-id}}` and then calling the connector with this command: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/transfer.json -X POST "http://localhost:28181/management/v2/transferprocesses" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-http-to-http/transfer.json -X POST "http://localhost:28181/management/v3/transferprocesses" -s | jq ``` > Note that the destination address is `localhost:4000`, this because is where our logging webserver is listening. @@ -136,7 +127,7 @@ curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-01-htt Let's wait until the transfer state is `STARTED` state executing this call, replacing to `{{transfer-process-id}}` the id returned by the start transfer call: ```shell -curl "http://localhost:28181/management/v2/transferprocesses/{{transfer-process-id}}" -s | jq +curl "http://localhost:28181/management/v3/transferprocesses/{{transfer-process-id}}" -s | jq ``` Here we can test the transfer creating a file into the `source` folder that we configured before, e.g. copying the `README.md` diff --git a/transfer/streaming/streaming-01-http-to-http/dataplane.json b/transfer/streaming/streaming-01-http-to-http/dataplane.json deleted file mode 100644 index a7980a42..00000000 --- a/transfer/streaming/streaming-01-http-to-http/dataplane.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "@context": { - "@vocab": "https://w3id.org/edc/v0.0.1/ns/" - }, - "@id": "http-pull-provider-dataplane", - "url": "http://localhost:19192/control/transfer", - "allowedSourceTypes": [ "HttpStreaming" ], - "allowedDestTypes": [ "HttpData" ] -} diff --git a/transfer/streaming/streaming-01-http-to-http/negotiate-contract.json b/transfer/streaming/streaming-01-http-to-http/negotiate-contract.json index 5b905364..43c42a2e 100644 --- a/transfer/streaming/streaming-01-http-to-http/negotiate-contract.json +++ b/transfer/streaming/streaming-01-http-to-http/negotiate-contract.json @@ -3,13 +3,13 @@ "@vocab": "https://w3id.org/edc/v0.0.1/ns/", "odrl": "http://www.w3.org/ns/odrl/2/" }, - "@type": "NegotiationInitiateRequestDto", + "@type": "ContractRequest", "counterPartyAddress": "http://localhost:18182/protocol", "providerId": "provider", "protocol": "dataspace-protocol-http", "policy": { "@id": "{{offerId}}", - "@type": "use", + "@type": "Offer", "odrl:permission": [], "odrl:prohibition": [], "odrl:obligation": [], diff --git a/transfer/streaming/streaming-01-http-to-http/policy-definition.json b/transfer/streaming/streaming-01-http-to-http/policy-definition.json index e1bdd6af..4ef2a1ec 100644 --- a/transfer/streaming/streaming-01-http-to-http/policy-definition.json +++ b/transfer/streaming/streaming-01-http-to-http/policy-definition.json @@ -3,6 +3,6 @@ "@id": "no-constraint-policy", "policy": { "@context": "http://www.w3.org/ns/odrl.jsonld", - "@type": "use" + "@type": "Set" } } diff --git 
a/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/build.gradle.kts b/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/build.gradle.kts index 4679f6c3..7557a2b3 100644 --- a/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/build.gradle.kts +++ b/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/build.gradle.kts @@ -21,6 +21,7 @@ plugins { } dependencies { + implementation(libs.edc.control.api.configuration) implementation(libs.edc.control.plane.api.client) implementation(libs.edc.control.plane.core) implementation(libs.edc.data.plane.selector.core) @@ -30,10 +31,10 @@ dependencies { implementation(libs.edc.management.api) implementation(libs.edc.dsp) implementation(libs.edc.data.plane.selector.api) - implementation(libs.edc.data.plane.selector.client) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.edc.data.plane.spi) implementation(libs.edc.data.plane.core) + implementation(libs.edc.data.plane.self.registration) implementation(libs.edc.data.plane.http) } diff --git a/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/src/main/java/org/eclipse/edc/samples/transfer/streaming/http/HttpStreamingDataSourceFactory.java b/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/src/main/java/org/eclipse/edc/samples/transfer/streaming/http/HttpStreamingDataSourceFactory.java index 45c2af8c..93dc0a4d 100644 --- a/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/src/main/java/org/eclipse/edc/samples/transfer/streaming/http/HttpStreamingDataSourceFactory.java +++ b/transfer/streaming/streaming-01-http-to-http/streaming-01-runtime/src/main/java/org/eclipse/edc/samples/transfer/streaming/http/HttpStreamingDataSourceFactory.java @@ -14,11 +14,11 @@ package org.eclipse.edc.samples.transfer.streaming.http; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcess; import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSource; import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSourceFactory; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcess; import org.eclipse.edc.spi.result.Result; -import org.eclipse.edc.spi.types.domain.transfer.DataFlowRequest; +import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage; import org.jetbrains.annotations.NotNull; import java.io.File; @@ -31,28 +31,27 @@ public class HttpStreamingDataSourceFactory implements DataSourceFactory { @Override - public boolean canHandle(DataFlowRequest request) { - return request.getSourceDataAddress().getType().equals("HttpStreaming"); + public String supportedType() { + return "HttpStreaming"; } @Override - public DataSource createSource(DataFlowRequest request) { - return new HttpStreamingDataSource(sourceFolder(request).get()); + public DataSource createSource(DataFlowStartMessage dataFlowStartMessage) { + return new HttpStreamingDataSource(sourceFolder(dataFlowStartMessage).get()); } @Override - public @NotNull Result validateRequest(DataFlowRequest request) { - return sourceFolder(request) + public @NotNull Result validateRequest(DataFlowStartMessage dataFlowStartMessage) { + return sourceFolder(dataFlowStartMessage) .map(it -> Result.success()) .orElseGet(() -> Result.failure("sourceFolder is not found or it does not exist")); } - private Optional sourceFolder(DataFlowRequest request) { + private Optional sourceFolder(DataFlowStartMessage request) { return Optional.of(request) - 
.map(DataFlowRequest::getSourceDataAddress) + .map(DataFlowStartMessage::getSourceDataAddress) .map(it -> it.getStringProperty("sourceFolder")) .map(File::new) .filter(File::exists); } - } diff --git a/transfer/streaming/streaming-02-kafka-to-http/2-policy-definition.json b/transfer/streaming/streaming-02-kafka-to-http/2-policy-definition.json index 4919c71a..5ff4e910 100644 --- a/transfer/streaming/streaming-02-kafka-to-http/2-policy-definition.json +++ b/transfer/streaming/streaming-02-kafka-to-http/2-policy-definition.json @@ -5,6 +5,6 @@ }, "@id": "no-constraint-policy", "policy": { - "@type": "odrl:use" + "@type": "odrl:Set" } } diff --git a/transfer/streaming/streaming-02-kafka-to-http/5-negotiate-contract.json b/transfer/streaming/streaming-02-kafka-to-http/5-negotiate-contract.json index 5aa6655e..0307f917 100644 --- a/transfer/streaming/streaming-02-kafka-to-http/5-negotiate-contract.json +++ b/transfer/streaming/streaming-02-kafka-to-http/5-negotiate-contract.json @@ -3,16 +3,16 @@ "@vocab": "https://w3id.org/edc/v0.0.1/ns/", "odrl": "http://www.w3.org/ns/odrl/2/" }, - "@type": "NegotiationInitiateRequestDto", + "@type": "ContractRequest", "counterPartyAddress": "http://localhost:18182/protocol", - "providerId": "provider", "protocol": "dataspace-protocol-http", "policy": { "@id": "{{offerId}}", - "@type": "use", + "@type": "Offer", "odrl:permission": [], "odrl:prohibition": [], "odrl:obligation": [], + "odrl:assigner": "provider", "odrl:target": "kafka-stream-asset" } } diff --git a/transfer/streaming/streaming-02-kafka-to-http/README.md b/transfer/streaming/streaming-02-kafka-to-http/README.md index c404fef6..a41a9b68 100644 --- a/transfer/streaming/streaming-02-kafka-to-http/README.md +++ b/transfer/streaming/streaming-02-kafka-to-http/README.md @@ -27,16 +27,6 @@ export EDC_FS_CONFIG=transfer/streaming/streaming-02-kafka-to-http/streaming-02- java -jar transfer/streaming/streaming-02-kafka-to-http/streaming-02-runtime/build/libs/connector.jar ``` -### Register Data Plane on provider - -The provider connector needs to be aware of the kafka streaming capabilities of the embedded dataplane, which can be registered with -this call: -```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/0-dataplane.json -X POST "http://localhost:18181/management/v2/dataplanes" -s | jq -``` - -If you look at the `0-dataplane.json` you'll notice that the supported source is `Kafka` and the supported sink is `HttpData`. - ### Register Asset, Policy Definition and Contract Definition on provider A "source" kafka topic must first be created where the data plane will get the event records to be sent to the consumers. 
@@ -63,11 +53,11 @@ curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kaf ``` ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/2-policy-definition.json -X POST "http://localhost:18181/management/v2/policydefinitions" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/2-policy-definition.json -X POST "http://localhost:18181/management/v3/policydefinitions" -s | jq ``` ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/3-contract-definition.json -X POST "http://localhost:18181/management/v2/contractdefinitions" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/3-contract-definition.json -X POST "http://localhost:18181/management/v3/contractdefinitions" -s | jq ``` ### Negotiate the contract @@ -76,7 +66,7 @@ The typical flow requires fetching the catalog from the consumer side and using However, in this sample case, we already have the provider asset (`"kafka-stream-asset"`) so we can get the related dataset directly with this call: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/4-get-dataset.json -X POST "http://localhost:28181/management/v2/catalog/dataset/request" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/4-get-dataset.json -X POST "http://localhost:28181/management/v3/catalog/dataset/request" -s | jq ``` The output will be something like: @@ -113,7 +103,7 @@ The output will be something like: With the `odrl:hasPolicy/@id` we can now replace it in the [negotiate-contract.json](5-negotiate-contract.json) file and request the contract negotiation: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/5-negotiate-contract.json -X POST "http://localhost:28181/management/v2/contractnegotiations" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/5-negotiate-contract.json -X POST "http://localhost:28181/management/v3/contractnegotiations" -s | jq ``` ### Start the transfer @@ -129,20 +119,20 @@ It will run on port 4000. At this point the contract agreement should already been issued, to verify that, please check the contract negotiation state with this call, replacing `{{contract-negotiation-id}}` with the id returned by the negotiate contract call. ```shell -curl "http://localhost:28181/management/v2/contractnegotiations/{{contract-negotiation-id}}" -s | jq +curl "http://localhost:28181/management/v3/contractnegotiations/{{contract-negotiation-id}}" -s | jq ``` If the `edc:contractAgreementId` is valued, it can be used to start the transfer, replacing it in the [6-transfer.json](6-transfer.json) file to `{{contract-agreement-id}}` and then calling the connector with this command: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/6-transfer.json -X POST "http://localhost:28181/management/v2/transferprocesses" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-02-kafka-to-http/6-transfer.json -X POST "http://localhost:28181/management/v3/transferprocesses" -s | jq ``` > Note that the destination address is `localhost:4000`, this because is where our logging webserver is listening. 
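Before moving on, the transfer has to reach the `STARTED` state. Besides checking it manually with the curl call below, the check can be scripted the way the system tests in this change do it, with REST Assured and Awaitility. A minimal sketch, assuming the consumer management API of this sample at `http://localhost:28181/management` (enable the `X-Api-Key` header only if your runtime has management API auth configured):

```java
import java.time.Duration;

import static io.restassured.RestAssured.given;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;

public class TransferStatusCheck {

    // Assumed consumer management base URL for this sample.
    private static final String MANAGEMENT_URL = "http://localhost:28181/management";

    // Polls GET /v3/transferprocesses/{id} until the "state" field matches the expected value,
    // mirroring TransferUtil.checkTransferStatus from the system tests.
    static void awaitState(String transferProcessId, String expectedState) {
        await().atMost(Duration.ofSeconds(30)).pollInterval(Duration.ofMillis(500))
                .untilAsserted(() -> {
                    var state = given()
                            // .header("X-Api-Key", "password") // only if management API auth is enabled
                            .when()
                            .get(MANAGEMENT_URL + "/v3/transferprocesses/{id}", transferProcessId)
                            .then()
                            .statusCode(200)
                            .extract().jsonPath().getString("state");
                    assertThat(state).isEqualTo(expectedState);
                });
    }
}
```

Calling `awaitState(transferProcessId, "STARTED")` corresponds to the polling loop the tests use.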
Let's wait until the transfer state is `STARTED` state executing this call, replacing to `{{transfer-process-id}}` the id returned by the start transfer call: ```shell -curl "http://localhost:28181/management/v2/transferprocesses/{{transfer-process-id}}" -s | jq +curl "http://localhost:28181/management/v3/transferprocesses/{{transfer-process-id}}" -s | jq ``` ### Produce events diff --git a/transfer/streaming/streaming-02-kafka-to-http/streaming-02-runtime/build.gradle.kts b/transfer/streaming/streaming-02-kafka-to-http/streaming-02-runtime/build.gradle.kts index 745aa1ae..5e6c8c53 100644 --- a/transfer/streaming/streaming-02-kafka-to-http/streaming-02-runtime/build.gradle.kts +++ b/transfer/streaming/streaming-02-kafka-to-http/streaming-02-runtime/build.gradle.kts @@ -21,6 +21,7 @@ plugins { } dependencies { + implementation(libs.edc.control.api.configuration) implementation(libs.edc.control.plane.api.client) implementation(libs.edc.control.plane.core) implementation(libs.edc.data.plane.selector.core) @@ -30,10 +31,10 @@ dependencies { implementation(libs.edc.management.api) implementation(libs.edc.dsp) implementation(libs.edc.data.plane.selector.api) - implementation(libs.edc.data.plane.selector.client) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.edc.data.plane.spi) implementation(libs.edc.data.plane.core) + implementation(libs.edc.data.plane.self.registration) implementation(libs.edc.data.plane.http) implementation(libs.edc.data.plane.kafka) diff --git a/transfer/streaming/streaming-03-kafka-broker/2-policy-definition.json b/transfer/streaming/streaming-03-kafka-broker/2-policy-definition.json index 4919c71a..5ff4e910 100644 --- a/transfer/streaming/streaming-03-kafka-broker/2-policy-definition.json +++ b/transfer/streaming/streaming-03-kafka-broker/2-policy-definition.json @@ -5,6 +5,6 @@ }, "@id": "no-constraint-policy", "policy": { - "@type": "odrl:use" + "@type": "odrl:Set" } } diff --git a/transfer/streaming/streaming-03-kafka-broker/5-negotiate-contract.json b/transfer/streaming/streaming-03-kafka-broker/5-negotiate-contract.json index b525b894..a5803296 100644 --- a/transfer/streaming/streaming-03-kafka-broker/5-negotiate-contract.json +++ b/transfer/streaming/streaming-03-kafka-broker/5-negotiate-contract.json @@ -3,21 +3,16 @@ "edc": "https://w3id.org/edc/v0.0.1/ns/", "odrl": "http://www.w3.org/ns/odrl/2/" }, - "@type": "NegotiationInitiateRequestDto", - "connectorAddress": "http://localhost:18182/protocol", + "@type": "ContractRequest", "counterPartyAddress": "http://localhost:18182/protocol", - "providerId": "provider", "protocol": "dataspace-protocol-http", - "offer": { - "offerId": "{{offerId}}", - "assetId": "kafka-stream-asset", - "policy": { - "@id": "{{offerId}}", - "@type": "use", - "odrl:permission": [], - "odrl:prohibition": [], - "odrl:obligation": [], - "odrl:target": "kafka-stream-asset" - } + "policy": { + "@id": "{{offerId}}", + "@type": "Offer", + "odrl:permission": [], + "odrl:prohibition": [], + "odrl:obligation": [], + "odrl:assigner": "provider", + "odrl:target": "kafka-stream-asset" } } diff --git a/transfer/streaming/streaming-03-kafka-broker/README.md b/transfer/streaming/streaming-03-kafka-broker/README.md index 7273196a..db80aef7 100644 --- a/transfer/streaming/streaming-03-kafka-broker/README.md +++ b/transfer/streaming/streaming-03-kafka-broker/README.md @@ -95,11 +95,11 @@ curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kaf ``` ```shell -curl 
-H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/2-policy-definition.json -X POST "http://localhost:18181/management/v2/policydefinitions" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/2-policy-definition.json -X POST "http://localhost:18181/management/v3/policydefinitions" -s | jq ``` ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/3-contract-definition.json -X POST "http://localhost:18181/management/v2/contractdefinitions" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/3-contract-definition.json -X POST "http://localhost:18181/management/v3/contractdefinitions" -s | jq ``` ### Negotiate the contract @@ -108,7 +108,7 @@ The typical flow requires fetching the catalog from the consumer side and using However, in this sample case, we already have the provider asset (`"kafka-stream-asset"`) so we can get the related dataset directly with this call: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/4-get-dataset.json -X POST "http://localhost:28181/management/v2/catalog/dataset/request" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/4-get-dataset.json -X POST "http://localhost:28181/management/v3/catalog/dataset/request" -s | jq ``` The output will be something like: @@ -139,7 +139,7 @@ The output will be something like: With the `odrl:hasPolicy/@id` we can now replace it in the [negotiate-contract.json](5-negotiate-contract.json) file and negotiate the contract: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/5-negotiate-contract.json -X POST "http://localhost:28181/management/v2/contractnegotiations" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/5-negotiate-contract.json -X POST "http://localhost:28181/management/v3/contractnegotiations" -s | jq ``` ### Start the transfer @@ -156,20 +156,20 @@ It will run on port 4000. At this point the contract agreement should already been issued, to verify that, please check the contract negotiation state with this call, replacing `{{contract-negotiation-id}}` with the id returned by the negotiate contract call. ```shell -curl "http://localhost:28181/management/v2/contractnegotiations/{{contract-negotiation-id}}" -s | jq +curl "http://localhost:28181/management/v3/contractnegotiations/{{contract-negotiation-id}}" -s | jq ``` If the `edc:contractAgreementId` is valued, it can be used to start the transfer, replacing it in the [6-transfer.json](6-transfer.json) file to `{{contract-agreement-id}}` and then calling the connector with this command: ```shell -curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/6-transfer.json -X POST "http://localhost:28181/management/v2/transferprocesses" -s | jq +curl -H 'Content-Type: application/json' -d @transfer/streaming/streaming-03-kafka-broker/6-transfer.json -X POST "http://localhost:28181/management/v3/transferprocesses" -s | jq ``` > Note that the destination address is `localhost:4000`, this because is where our logging webserver is listening. 
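The next step is to check the transfer status on the consumer side. If you prefer not to repeat that request by hand, the small loop below polls the management API until the transfer reaches `STARTED`. This is only a convenience sketch, not part of the sample: it assumes `jq` is available (as in the other examples) and that the compacted JSON response exposes the state as a top-level `state` property; depending on the runtime version it may instead appear as `edc:state`, which the sketch also checks.

```shell
# Convenience sketch (not part of the sample): poll the consumer management API
# until the transfer process reaches the STARTED state.
# Replace {{transfer-process-id}} with the id returned by the start transfer call.
TRANSFER_ID="{{transfer-process-id}}"
while true; do
  STATE=$(curl -s "http://localhost:28181/management/v3/transferprocesses/$TRANSFER_ID" | jq -r '.state // .["edc:state"]')
  echo "Transfer state: $STATE"
  [ "$STATE" = "STARTED" ] && break
  sleep 2
done
```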
Let's wait until the transfer state is `STARTED` state executing this call, replacing to `{{transfer-process-id}}` the id returned by the start transfer call: ```shell -curl "http://localhost:28181/management/v2/transferprocesses/{{transfer-process-id}}" -s | jq +curl "http://localhost:28181/management/v3/transferprocesses/{{transfer-process-id}}" -s | jq ``` ### Consume events diff --git a/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/build.gradle.kts b/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/build.gradle.kts index bb7726d1..537c3e85 100644 --- a/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/build.gradle.kts +++ b/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/build.gradle.kts @@ -28,8 +28,7 @@ dependencies { implementation(libs.edc.management.api) implementation(libs.edc.dsp) implementation(libs.edc.data.plane.selector.api) - implementation(libs.edc.data.plane.selector.client) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.edc.transfer.pull.http.dynamic.receiver) implementation(libs.edc.data.plane.spi) implementation(libs.edc.data.plane.core) diff --git a/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaExtension.java b/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaExtension.java index 7657cfb2..4a93b703 100644 --- a/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaExtension.java +++ b/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaExtension.java @@ -14,7 +14,7 @@ package org.eclipse.edc.samples.streaming; -import org.eclipse.edc.connector.transfer.spi.flow.DataFlowManager; +import org.eclipse.edc.connector.controlplane.transfer.spi.flow.DataFlowManager; import org.eclipse.edc.runtime.metamodel.annotation.Inject; import org.eclipse.edc.spi.system.ServiceExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; diff --git a/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaToKafkaDataFlowController.java b/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaToKafkaDataFlowController.java index a78e268f..fb8a7ce9 100644 --- a/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaToKafkaDataFlowController.java +++ b/transfer/streaming/streaming-03-kafka-broker/streaming-03-runtime/src/main/java/org/eclipse/edc/samples/streaming/KafkaToKafkaDataFlowController.java @@ -14,9 +14,10 @@ package org.eclipse.edc.samples.streaming; -import org.eclipse.edc.connector.transfer.spi.flow.DataFlowController; -import org.eclipse.edc.connector.transfer.spi.types.DataFlowResponse; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcess; +import org.eclipse.edc.connector.controlplane.asset.spi.domain.Asset; +import org.eclipse.edc.connector.controlplane.transfer.spi.flow.DataFlowController; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.DataFlowResponse; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcess; import org.eclipse.edc.dataaddress.kafka.spi.KafkaDataAddressSchema; import 
org.eclipse.edc.policy.model.Policy; import org.eclipse.edc.spi.response.StatusResult; @@ -24,18 +25,19 @@ import org.eclipse.edc.spi.types.domain.edr.EndpointDataReference; import org.jetbrains.annotations.NotNull; +import java.util.Set; + import static org.eclipse.edc.dataaddress.kafka.spi.KafkaDataAddressSchema.KAFKA_TYPE; -import static org.eclipse.edc.spi.CoreConstants.EDC_NAMESPACE; class KafkaToKafkaDataFlowController implements DataFlowController { @Override public boolean canHandle(TransferProcess transferProcess) { - return KAFKA_TYPE.equals(transferProcess.getContentDataAddress().getType()) && "KafkaBroker".equals(transferProcess.getDestinationType()); + return KAFKA_TYPE.equals(transferProcess.getContentDataAddress().getType()) && "KafkaBroker-PULL".equals(transferProcess.getTransferType()); } @Override - public @NotNull StatusResult initiateFlow(TransferProcess transferProcess, Policy policy) { + public @NotNull StatusResult start(TransferProcess transferProcess, Policy policy) { // static credentials, in a production case these should be created dynamically and an ACLs entry should be added var username = "alice"; var password = "alice-secret"; @@ -48,16 +50,27 @@ public boolean canHandle(TransferProcess transferProcess) { .property(EndpointDataReference.AUTH_KEY, username) .property(EndpointDataReference.AUTH_CODE, password) .property(EndpointDataReference.CONTRACT_ID, transferProcess.getContractId()) - .property(EDC_NAMESPACE + KafkaDataAddressSchema.TOPIC, contentDataAddress.getStringProperty(KafkaDataAddressSchema.TOPIC)) + .property(KafkaDataAddressSchema.TOPIC, contentDataAddress.getStringProperty(KafkaDataAddressSchema.TOPIC)) .build(); return StatusResult.success(DataFlowResponse.Builder.newInstance().dataAddress(kafkaDataAddress).build()); } + @Override + public StatusResult suspend(TransferProcess transferProcess) { + // here the flow can be suspended, not something covered in this sample + return StatusResult.success(); + } + @Override public StatusResult terminate(TransferProcess transferProcess) { // here the flow can be terminated, not something covered in this sample return StatusResult.success(); } + @Override + public Set transferTypesFor(Asset asset) { + return Set.of("Kafka-PULL"); + } + } diff --git a/transfer/transfer-00-prerequisites/README.md b/transfer/transfer-00-prerequisites/README.md index b79ddad0..e0820e92 100644 --- a/transfer/transfer-00-prerequisites/README.md +++ b/transfer/transfer-00-prerequisites/README.md @@ -40,57 +40,24 @@ Inspect the different configuration files below: * [provider-configuration.properties](resources/configuration/provider-configuration.properties) * [consumer-configuration.properties](resources/configuration/consumer-configuration.properties) -The section bellow will show you some explanation about some of the properties that you can find in -the configuration files. - -#### 1. edc.receiver.http.endpoint - -This property is used to define the endpoint where the connector consumer will send the -EndpointDataReference. - -#### 2. edc.dataplane.token.validation.endpoint - -This property is used to define the endpoint exposed by the control plane to validate the token. - ### 2. 
Run the connectors To run the provider, just run the following command ```bash -java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.vault=transfer/transfer-00-prerequisites/resources/configuration/provider-vault.properties -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties -jar transfer/transfer-00-prerequisites/connector/build/libs/connector.jar +java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties -jar transfer/transfer-00-prerequisites/connector/build/libs/connector.jar ``` To run the consumer, just run the following command (different terminal) ```bash -java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.vault=transfer/transfer-00-prerequisites/resources/configuration/consumer-vault.properties -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties -jar transfer/transfer-00-prerequisites/connector/build/libs/connector.jar +java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties -jar transfer/transfer-00-prerequisites/connector/build/libs/connector.jar ``` Assuming you didn't change the ports in config files, the consumer will listen on the ports `29191`, `29192` (management API) and `29292` (DSP API) and the provider will listen on the ports `12181`, `19182` (management API) and `19282` (DSP API). -Running this sample consists of multiple steps, that are executed one by one and following the same -order. - -### 3. Register data plane instance for provider - -Before a consumer can start talking to a provider, it is necessary to register the data plane -instance of a connector. This is done by sending a POST request to the management API of the -provider connector. The [request body](resources/dataplane/register-data-plane-provider.json) should contain the data plane instance of the consumer -connector. - -The registration of the provider data plane instance is done by sending a POST -request to the management API of the connector. - -Open a new terminal and execute: - -```bash -curl -H 'Content-Type: application/json' \ - -d @transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-provider.json \ - -X POST "http://localhost:19193/management/v2/dataplanes" -s | jq -``` - The connectors have been configured successfully and are ready to be used. 
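If you would rather keep a single terminal free, the same two commands can be launched in the background with their output redirected to log files. This is purely an optional convenience sketch: the commands are identical to the ones above, and the log file names `provider.log` and `consumer.log` are arbitrary.

```bash
# Optional: start provider and consumer in the background, writing their logs to files.
java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties -jar transfer/transfer-00-prerequisites/connector/build/libs/connector.jar > provider.log 2>&1 &

java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties -jar transfer/transfer-00-prerequisites/connector/build/libs/connector.jar > consumer.log 2>&1 &
```

Use `tail -f provider.log consumer.log` to follow both logs, and `jobs` / `kill %1 %2` to stop the connectors when you are done.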
[Next Chapter](../transfer-01-negotiation/README.md) diff --git a/transfer/transfer-00-prerequisites/connector/build.gradle.kts b/transfer/transfer-00-prerequisites/connector/build.gradle.kts index 216f0f6e..5018d8b4 100644 --- a/transfer/transfer-00-prerequisites/connector/build.gradle.kts +++ b/transfer/transfer-00-prerequisites/connector/build.gradle.kts @@ -19,22 +19,28 @@ plugins { } dependencies { + implementation(libs.edc.control.api.configuration) implementation(libs.edc.control.plane.api.client) implementation(libs.edc.control.plane.api) implementation(libs.edc.control.plane.core) implementation(libs.edc.dsp) implementation(libs.edc.configuration.filesystem) - implementation(libs.edc.vault.filesystem) implementation(libs.edc.iam.mock) implementation(libs.edc.management.api) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.edc.transfer.pull.http.receiver) + implementation(libs.edc.validator.data.address.http.data) + + implementation(libs.edc.edr.cache.api) + implementation(libs.edc.edr.store.core) + implementation(libs.edc.edr.store.receiver) implementation(libs.edc.data.plane.selector.api) implementation(libs.edc.data.plane.selector.core) - implementation(libs.edc.data.plane.selector.client) - implementation(libs.edc.data.plane.api) + implementation(libs.edc.data.plane.self.registration) + implementation(libs.edc.data.plane.control.api) + implementation(libs.edc.data.plane.public.api) implementation(libs.edc.data.plane.core) implementation(libs.edc.data.plane.http) } diff --git a/transfer/transfer-00-prerequisites/connector/src/main/java/org/eclipse/edc/sample/runtime/SeedVaultExtension.java b/transfer/transfer-00-prerequisites/connector/src/main/java/org/eclipse/edc/sample/runtime/SeedVaultExtension.java new file mode 100644 index 00000000..171d96ff --- /dev/null +++ b/transfer/transfer-00-prerequisites/connector/src/main/java/org/eclipse/edc/sample/runtime/SeedVaultExtension.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2024 Bayerische Motoren Werke Aktiengesellschaft (BMW AG) + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - initial API and implementation + * + */ + +package org.eclipse.edc.sample.runtime; + +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.spi.security.Vault; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; + +public class SeedVaultExtension implements ServiceExtension { + + @Inject + private Vault vault; + + private static final String PUBLIC_KEY = """ + -----BEGIN CERTIFICATE----- + MIIDazCCAlOgAwIBAgIUZ3/sZXYzW4PjmOXKrZn6WBmUJ+4wDQYJKoZIhvcNAQEL + BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM + GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMjAyMjMxNTA2MDNaFw0zMjAy + MjExNTA2MDNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw + HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB + AQUAA4IBDwAwggEKAoIBAQDBl6XaJnXTL+6DWip3aBhU+MzmY4d1V9hbTm1tiZ3g + E0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7EddidN0ITHB9cQNdAfdUJ5njmsGS + PbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7DHacZT/+OztBH1RwkG2ymM94Hf8H + I6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvjX5qASakBtXISKIsOU84N0/2HDN3W + 
EGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga775bPXN3M+JTSaIKE7dZbKzvx0Zi0 + h5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2YmnneYoVAgMBAAGjUzBRMB0GA1Ud + DgQWBBTvK1wVERwjni4B2vdH7KtEJeVWFzAfBgNVHSMEGDAWgBTvK1wVERwjni4B + 2vdH7KtEJeVWFzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBn + QHiPA7OBYukHd9gS7c0HXE+fsWcS3GZeLqcHfQQnV3pte1vTmu9//IVW71wNCJ1/ + rySRyODPQoPehxEcyHwupNZSzXK//nPlTdSgjMfFxscvt1YndyQLQYCfyOJMixAe + Aqrb14GTFHUUrdor0PyElhkULjkOXUrSIsdBrfWrwLTkelE8NK3tb5ZG8KPzD9Jy + +NwEPPr9d+iHkUkM7EFWw/cl56wka9ryBb97RI7DqbO6/j6OXHMk4GByxKv7DSIR + IvF9/Dw20qytajtaHV0pluFcOBuFc0NfiDvCaQlbTsfjzbc6UmZWbOi9YOJl3VQ/ + g3h+15GuzbsSzOCOEYOT + -----END CERTIFICATE----- + """; + + private static final String PRIVATE_KEY = """ + -----BEGIN PRIVATE KEY----- + MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBl6XaJnXTL+6D + Wip3aBhU+MzmY4d1V9hbTm1tiZ3gE0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7 + EddidN0ITHB9cQNdAfdUJ5njmsGSPbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7 + DHacZT/+OztBH1RwkG2ymM94Hf8HI6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvj + X5qASakBtXISKIsOU84N0/2HDN3WEGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga7 + 75bPXN3M+JTSaIKE7dZbKzvx0Zi0h5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2 + YmnneYoVAgMBAAECggEBAJHXiN6bctAyn+DcoHlsNkhtVw+Jk5bXIutGXjHTJtiU + K//siAGC78IZMyXmi0KndPVCdBwShROVW8xWWIiXuZxy2Zvm872xqX4Ah3JsN7/Q + NrXdVBUDo38zwIGkxqIfIz9crZ4An+J/eq5zaTfRHzCLtswMqjRS2hFeBY5cKrBY + 4bkSDGTP/c5cP7xS/UwaiTR2Ptd41f4zTyd4l5rl30TYHpazQNlbdxcOV4jh2Rnp + E0+cFEvEfeagVq7RmfBScKG5pk4qcRG0q2QHMyK5y00hdYvhdRjSgN7xIDkeO5B8 + s8/tSLU78nCl2gA9IKxTXYLitpISwZ81Q04mEAKRRtECgYEA+6lKnhn//aXerkLo + ZOLOjWQZhh005jHdNxX7DZqLpTrrfxc8v15KWUkAK1H0QHqYvfPrbbsBV1MY1xXt + sKmkeu/k8fJQzCIvFN4K2J5W5kMfq9PSw5d3XPeDaQuXUVaxBVp0gzPEPHmkKRbA + AkUqY0oJwA9gMKf8dK+flmLZfbsCgYEAxO4Roj2G46/Oox1GEZGxdLpiMpr9rEdR + JlSZ9kMGfddNLV7sFp6yPXDcyc/AOqeNj7tw1MyoT3Ar454+V0q83EZzCXvs4U6f + jUrfFcoVWIwf9AV/J4KWzMIzfqPIeNwqymZKd6BrZgcXXvAEPWt27mwO4a1GhC4G + oZv0t3lAsm8CgYAQ8C0IhSF4tgBN5Ez19VoHpDQflbmowLRt77nNCZjajyOokyzQ + iI0ig0pSoBp7eITtTAyNfyew8/PZDi3IVTKv35OeQTv08VwP4H4EZGve5aetDf3C + kmBDTpl2qYQOwnH5tUPgTMypcVp+NXzI6lTXB/WuCprjy3qvc96e5ZpT3wKBgQC8 + Xny/k9rTL/eYTwgXBiWYYjBL97VudUlKQOKEjNhIxwkrvQBXIrWbz7lh0Tcu49al + BcaHxru4QLO6pkM7fGHq0fh3ufJ8EZjMrjF1xjdk26Q05o0aXe+hLKHVIRVBhlfo + ArB4fRo+HcpdJXjox0KcDQCvHe+1v9DYBTWvymv4QQKBgBy3YH7hKz35DcXvA2r4 + Kis9a4ycuZqTXockO4rkcIwC6CJp9JbHDIRzig8HYOaRqmZ4a+coqLmddXr2uOF1 + 7+iAxxG1KzdT6uFNd+e/j2cdUjnqcSmz49PRtdDswgyYhoDT+W4yVGNQ4VuKg6a3 + Z3pC+KTdoHSKeA2FyAGnSUpD + -----END PRIVATE KEY----- + """; + + @Override + public void initialize(ServiceExtensionContext context) { + vault.storeSecret("public-key", PUBLIC_KEY); + vault.storeSecret("private-key", PRIVATE_KEY); + } +} diff --git a/transfer/transfer-00-prerequisites/connector/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/transfer/transfer-00-prerequisites/connector/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension new file mode 100644 index 00000000..eaca6845 --- /dev/null +++ b/transfer/transfer-00-prerequisites/connector/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -0,0 +1 @@ +org.eclipse.edc.sample.runtime.SeedVaultExtension diff --git a/transfer/transfer-00-prerequisites/resources/certs/cert.pem b/transfer/transfer-00-prerequisites/resources/certs/cert.pem deleted file mode 100644 index c7dc26fa..00000000 --- a/transfer/transfer-00-prerequisites/resources/certs/cert.pem +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDazCCAlOgAwIBAgIUZ3/sZXYzW4PjmOXKrZn6WBmUJ+4wDQYJKoZIhvcNAQEL 
-BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM -GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMjAyMjMxNTA2MDNaFw0zMjAy -MjExNTA2MDNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw -HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDBl6XaJnXTL+6DWip3aBhU+MzmY4d1V9hbTm1tiZ3g -E0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7EddidN0ITHB9cQNdAfdUJ5njmsGS -PbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7DHacZT/+OztBH1RwkG2ymM94Hf8H -I6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvjX5qASakBtXISKIsOU84N0/2HDN3W -EGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga775bPXN3M+JTSaIKE7dZbKzvx0Zi0 -h5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2YmnneYoVAgMBAAGjUzBRMB0GA1Ud -DgQWBBTvK1wVERwjni4B2vdH7KtEJeVWFzAfBgNVHSMEGDAWgBTvK1wVERwjni4B -2vdH7KtEJeVWFzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBn -QHiPA7OBYukHd9gS7c0HXE+fsWcS3GZeLqcHfQQnV3pte1vTmu9//IVW71wNCJ1/ -rySRyODPQoPehxEcyHwupNZSzXK//nPlTdSgjMfFxscvt1YndyQLQYCfyOJMixAe -Aqrb14GTFHUUrdor0PyElhkULjkOXUrSIsdBrfWrwLTkelE8NK3tb5ZG8KPzD9Jy -+NwEPPr9d+iHkUkM7EFWw/cl56wka9ryBb97RI7DqbO6/j6OXHMk4GByxKv7DSIR -IvF9/Dw20qytajtaHV0pluFcOBuFc0NfiDvCaQlbTsfjzbc6UmZWbOi9YOJl3VQ/ -g3h+15GuzbsSzOCOEYOT ------END CERTIFICATE----- diff --git a/transfer/transfer-00-prerequisites/resources/certs/key.pem b/transfer/transfer-00-prerequisites/resources/certs/key.pem deleted file mode 100644 index e72229e8..00000000 --- a/transfer/transfer-00-prerequisites/resources/certs/key.pem +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDBl6XaJnXTL+6D -Wip3aBhU+MzmY4d1V9hbTm1tiZ3gE0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7 -EddidN0ITHB9cQNdAfdUJ5njmsGSPbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7 -DHacZT/+OztBH1RwkG2ymM94Hf8HI6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvj -X5qASakBtXISKIsOU84N0/2HDN3WEGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga7 -75bPXN3M+JTSaIKE7dZbKzvx0Zi0h5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2 -YmnneYoVAgMBAAECggEBAJHXiN6bctAyn+DcoHlsNkhtVw+Jk5bXIutGXjHTJtiU -K//siAGC78IZMyXmi0KndPVCdBwShROVW8xWWIiXuZxy2Zvm872xqX4Ah3JsN7/Q -NrXdVBUDo38zwIGkxqIfIz9crZ4An+J/eq5zaTfRHzCLtswMqjRS2hFeBY5cKrBY -4bkSDGTP/c5cP7xS/UwaiTR2Ptd41f4zTyd4l5rl30TYHpazQNlbdxcOV4jh2Rnp -E0+cFEvEfeagVq7RmfBScKG5pk4qcRG0q2QHMyK5y00hdYvhdRjSgN7xIDkeO5B8 -s8/tSLU78nCl2gA9IKxTXYLitpISwZ81Q04mEAKRRtECgYEA+6lKnhn//aXerkLo -ZOLOjWQZhh005jHdNxX7DZqLpTrrfxc8v15KWUkAK1H0QHqYvfPrbbsBV1MY1xXt -sKmkeu/k8fJQzCIvFN4K2J5W5kMfq9PSw5d3XPeDaQuXUVaxBVp0gzPEPHmkKRbA -AkUqY0oJwA9gMKf8dK+flmLZfbsCgYEAxO4Roj2G46/Oox1GEZGxdLpiMpr9rEdR -JlSZ9kMGfddNLV7sFp6yPXDcyc/AOqeNj7tw1MyoT3Ar454+V0q83EZzCXvs4U6f -jUrfFcoVWIwf9AV/J4KWzMIzfqPIeNwqymZKd6BrZgcXXvAEPWt27mwO4a1GhC4G -oZv0t3lAsm8CgYAQ8C0IhSF4tgBN5Ez19VoHpDQflbmowLRt77nNCZjajyOokyzQ -iI0ig0pSoBp7eITtTAyNfyew8/PZDi3IVTKv35OeQTv08VwP4H4EZGve5aetDf3C -kmBDTpl2qYQOwnH5tUPgTMypcVp+NXzI6lTXB/WuCprjy3qvc96e5ZpT3wKBgQC8 -Xny/k9rTL/eYTwgXBiWYYjBL97VudUlKQOKEjNhIxwkrvQBXIrWbz7lh0Tcu49al -BcaHxru4QLO6pkM7fGHq0fh3ufJ8EZjMrjF1xjdk26Q05o0aXe+hLKHVIRVBhlfo -ArB4fRo+HcpdJXjox0KcDQCvHe+1v9DYBTWvymv4QQKBgBy3YH7hKz35DcXvA2r4 -Kis9a4ycuZqTXockO4rkcIwC6CJp9JbHDIRzig8HYOaRqmZ4a+coqLmddXr2uOF1 -7+iAxxG1KzdT6uFNd+e/j2cdUjnqcSmz49PRtdDswgyYhoDT+W4yVGNQ4VuKg6a3 -Z3pC+KTdoHSKeA2FyAGnSUpD ------END PRIVATE KEY----- diff --git a/transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties b/transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties index bb7df005..5b2059ad 100644 --- a/transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties +++ 
b/transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties @@ -6,13 +6,9 @@ web.http.management.port=29193 web.http.management.path=/management web.http.protocol.port=29194 web.http.protocol.path=/protocol -edc.receiver.http.endpoint=http://localhost:4000/receiver/urn:connector:provider/callback -edc.public.key.alias=public-key -edc.transfer.dataplane.token.signer.privatekey.alias=1 -edc.transfer.proxy.token.signer.privatekey.alias=1 +edc.transfer.proxy.token.signer.privatekey.alias=private-key edc.transfer.proxy.token.verifier.publickey.alias=public-key web.http.public.port=29291 web.http.public.path=/public web.http.control.port=29192 web.http.control.path=/control -edc.dataplane.token.validation.endpoint=http://localhost:29192/control/token diff --git a/transfer/transfer-00-prerequisites/resources/configuration/consumer-vault.properties b/transfer/transfer-00-prerequisites/resources/configuration/consumer-vault.properties deleted file mode 100644 index 6ebdebd5..00000000 --- a/transfer/transfer-00-prerequisites/resources/configuration/consumer-vault.properties +++ /dev/null @@ -1 +0,0 @@ -public-key=-----BEGIN CERTIFICATE-----\r\nMIIDazCCAlOgAwIBAgIUZ3/sZXYzW4PjmOXKrZn6WBmUJ+4wDQYJKoZIhvcNAQEL\r\nBQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM\r\nGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMjAyMjMxNTA2MDNaFw0zMjAy\r\nMjExNTA2MDNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw\r\nHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB\r\nAQUAA4IBDwAwggEKAoIBAQDBl6XaJnXTL+6DWip3aBhU+MzmY4d1V9hbTm1tiZ3g\r\nE0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7EddidN0ITHB9cQNdAfdUJ5njmsGS\r\nPbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7DHacZT/+OztBH1RwkG2ymM94Hf8H\r\nI6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvjX5qASakBtXISKIsOU84N0/2HDN3W\r\nEGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga775bPXN3M+JTSaIKE7dZbKzvx0Zi0\r\nh5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2YmnneYoVAgMBAAGjUzBRMB0GA1Ud\r\nDgQWBBTvK1wVERwjni4B2vdH7KtEJeVWFzAfBgNVHSMEGDAWgBTvK1wVERwjni4B\r\n2vdH7KtEJeVWFzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBn\r\nQHiPA7OBYukHd9gS7c0HXE+fsWcS3GZeLqcHfQQnV3pte1vTmu9//IVW71wNCJ1/\r\nrySRyODPQoPehxEcyHwupNZSzXK//nPlTdSgjMfFxscvt1YndyQLQYCfyOJMixAe\r\nAqrb14GTFHUUrdor0PyElhkULjkOXUrSIsdBrfWrwLTkelE8NK3tb5ZG8KPzD9Jy\r\n+NwEPPr9d+iHkUkM7EFWw/cl56wka9ryBb97RI7DqbO6/j6OXHMk4GByxKv7DSIR\r\nIvF9/Dw20qytajtaHV0pluFcOBuFc0NfiDvCaQlbTsfjzbc6UmZWbOi9YOJl3VQ/\r\ng3h+15GuzbsSzOCOEYOT\r\n-----END CERTIFICATE----- diff --git a/transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties b/transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties index 4679ff53..733d735a 100644 --- a/transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties +++ b/transfer/transfer-00-prerequisites/resources/configuration/provider-configuration.properties @@ -6,13 +6,10 @@ web.http.management.port=19193 web.http.management.path=/management web.http.protocol.port=19194 web.http.protocol.path=/protocol -edc.receiver.http.endpoint=http://localhost:4000/receiver/urn:connector:provider/callback -edc.public.key.alias=public-key -edc.transfer.dataplane.token.signer.privatekey.alias=1 -edc.transfer.proxy.token.signer.privatekey.alias=1 +edc.transfer.proxy.token.signer.privatekey.alias=private-key edc.transfer.proxy.token.verifier.publickey.alias=public-key web.http.public.port=19291 web.http.public.path=/public web.http.control.port=19192 web.http.control.path=/control 
-edc.dataplane.token.validation.endpoint=http://localhost:19192/control/token +edc.dataplane.api.public.baseurl=http://localhost:19291/public diff --git a/transfer/transfer-00-prerequisites/resources/configuration/provider-vault.properties b/transfer/transfer-00-prerequisites/resources/configuration/provider-vault.properties deleted file mode 100644 index 6ebdebd5..00000000 --- a/transfer/transfer-00-prerequisites/resources/configuration/provider-vault.properties +++ /dev/null @@ -1 +0,0 @@ -public-key=-----BEGIN CERTIFICATE-----\r\nMIIDazCCAlOgAwIBAgIUZ3/sZXYzW4PjmOXKrZn6WBmUJ+4wDQYJKoZIhvcNAQEL\r\nBQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM\r\nGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMjAyMjMxNTA2MDNaFw0zMjAy\r\nMjExNTA2MDNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw\r\nHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB\r\nAQUAA4IBDwAwggEKAoIBAQDBl6XaJnXTL+6DWip3aBhU+MzmY4d1V9hbTm1tiZ3g\r\nE0VbUrvGO3LoYaxpPv6zFmsg3uJv6JxVAde7EddidN0ITHB9cQNdAfdUJ5njmsGS\r\nPbdQuOQTHw0aG7/QvTI/nsvfEE6e0lbV/0e7DHacZT/+OztBH1RwkG2ymM94Hf8H\r\nI6x7q6yfRTAZOqeOMrPCYTcluAgE9NskoPvjX5qASakBtXISKIsOU84N0/2HDN3W\r\nEGMXvoHUQu6vrij6BwiwxKaw1AKwWENKoga775bPXN3M+JTSaIKE7dZbKzvx0Zi0\r\nh5X+bxc3BJi3Z/CsUBCzE+Y0SFetOiYmyl/2YmnneYoVAgMBAAGjUzBRMB0GA1Ud\r\nDgQWBBTvK1wVERwjni4B2vdH7KtEJeVWFzAfBgNVHSMEGDAWgBTvK1wVERwjni4B\r\n2vdH7KtEJeVWFzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBn\r\nQHiPA7OBYukHd9gS7c0HXE+fsWcS3GZeLqcHfQQnV3pte1vTmu9//IVW71wNCJ1/\r\nrySRyODPQoPehxEcyHwupNZSzXK//nPlTdSgjMfFxscvt1YndyQLQYCfyOJMixAe\r\nAqrb14GTFHUUrdor0PyElhkULjkOXUrSIsdBrfWrwLTkelE8NK3tb5ZG8KPzD9Jy\r\n+NwEPPr9d+iHkUkM7EFWw/cl56wka9ryBb97RI7DqbO6/j6OXHMk4GByxKv7DSIR\r\nIvF9/Dw20qytajtaHV0pluFcOBuFc0NfiDvCaQlbTsfjzbc6UmZWbOi9YOJl3VQ/\r\ng3h+15GuzbsSzOCOEYOT\r\n-----END CERTIFICATE----- diff --git a/transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-consumer.json b/transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-consumer.json deleted file mode 100644 index 7ca6ff1a..00000000 --- a/transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-consumer.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "@context": { - "@vocab": "https://w3id.org/edc/v0.0.1/ns/" - }, - "@id": "http-pull-consumer-dataplane", - "url": "http://localhost:29192/control/transfer", - "allowedSourceTypes": [ - "HttpData" - ], - "allowedDestTypes": [ - "HttpProxy", - "HttpData" - ], - "properties": { - "https://w3id.org/edc/v0.0.1/ns/publicApiUrl/publicApiUrl": "http://localhost:29291/public/" - } -} diff --git a/transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-provider.json b/transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-provider.json deleted file mode 100644 index 0ee24de4..00000000 --- a/transfer/transfer-00-prerequisites/resources/dataplane/register-data-plane-provider.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "@context": { - "@vocab": "https://w3id.org/edc/v0.0.1/ns/" - }, - "@id": "http-pull-provider-dataplane", - "url": "http://localhost:19192/control/transfer", - "allowedSourceTypes": [ - "HttpData" - ], - "allowedDestTypes": [ - "HttpProxy", - "HttpData" - ], - "properties": { - "https://w3id.org/edc/v0.0.1/ns/publicApiUrl": "http://localhost:19291/public/" - } -} diff --git a/transfer/transfer-01-negotiation/README.md b/transfer/transfer-01-negotiation/README.md index 01dbebf2..bed0c671 100644 --- a/transfer/transfer-01-negotiation/README.md +++ b/transfer/transfer-01-negotiation/README.md @@ -67,7 +67,7 @@ 
This means that the consumer connector can request any asset from the provider c ```bash curl -d @transfer/transfer-01-negotiation/resources/create-policy.json \ - -H 'content-type: application/json' http://localhost:19193/management/v2/policydefinitions \ + -H 'content-type: application/json' http://localhost:19193/management/v3/policydefinitions \ -s | jq ``` @@ -80,7 +80,7 @@ catalog. In this case, the selection is empty, so every asset is attached to the ```bash curl -d @transfer/transfer-01-negotiation/resources/create-contract-definition.json \ - -H 'content-type: application/json' http://localhost:19193/management/v2/contractdefinitions \ + -H 'content-type: application/json' http://localhost:19193/management/v3/contractdefinitions \ -s | jq ``` @@ -104,7 +104,7 @@ offer, the so-called "catalog". To get the catalog from the consumer side, you c request: ```bash -curl -X POST "http://localhost:29193/management/v2/catalog/request" \ +curl -X POST "http://localhost:29193/management/v3/catalog/request" \ -H 'Content-Type: application/json' \ -d @transfer/transfer-01-negotiation/resources/fetch-catalog.json -s | jq ``` @@ -186,7 +186,7 @@ file with the contract offer id you found in the catalog at the path `dcat:datas ```bash curl -d @transfer/transfer-01-negotiation/resources/negotiate-contract.json \ - -X POST -H 'content-type: application/json' http://localhost:29193/management/v2/contractnegotiations \ + -X POST -H 'content-type: application/json' http://localhost:29193/management/v3/contractnegotiations \ -s | jq ``` @@ -211,7 +211,7 @@ state, the negotiation is finished. We can now use the UUID to check the current negotiation using an endpoint on the consumer side. ```bash -curl -X GET "http://localhost:29193/management/v2/contractnegotiations/{{contract-negotiation-id}}" \ +curl -X GET "http://localhost:29193/management/v3/contractnegotiations/{{contract-negotiation-id}}" \ --header 'Content-Type: application/json' \ -s | jq ``` diff --git a/transfer/transfer-01-negotiation/resources/create-policy.json b/transfer/transfer-01-negotiation/resources/create-policy.json index 5047f001..cf7a4703 100644 --- a/transfer/transfer-01-negotiation/resources/create-policy.json +++ b/transfer/transfer-01-negotiation/resources/create-policy.json @@ -5,9 +5,10 @@ }, "@id": "aPolicy", "policy": { - "@type": "set", - "odrl:permission": [], - "odrl:prohibition": [], - "odrl:obligation": [] + "@context": "http://www.w3.org/ns/odrl.jsonld", + "@type": "Set", + "permission": [], + "prohibition": [], + "obligation": [] } } diff --git a/transfer/transfer-01-negotiation/resources/negotiate-contract.json b/transfer/transfer-01-negotiation/resources/negotiate-contract.json index 85762a75..82fe8036 100644 --- a/transfer/transfer-01-negotiation/resources/negotiate-contract.json +++ b/transfer/transfer-01-negotiation/resources/negotiate-contract.json @@ -2,19 +2,14 @@ "@context": { "@vocab": "https://w3id.org/edc/v0.0.1/ns/" }, - "@type": "NegotiationInitiateRequestDto", - "connectorId": "provider", + "@type": "ContractRequest", "counterPartyAddress": "http://localhost:19194/protocol", - "consumerId": "consumer", - "providerId": "provider", "protocol": "dataspace-protocol-http", "policy": { "@context": "http://www.w3.org/ns/odrl.jsonld", "@id": "{{contract-offer-id}}", - "@type": "Set", - "permission": [], - "prohibition": [], - "obligation": [], + "@type": "Offer", + "assigner": "provider", "target": "assetId" } } diff --git a/transfer/transfer-02-consumer-pull/README.md 
b/transfer/transfer-02-consumer-pull/README.md index c63847a0..86dda9ed 100644 --- a/transfer/transfer-02-consumer-pull/README.md +++ b/transfer/transfer-02-consumer-pull/README.md @@ -23,33 +23,22 @@ and [Negotiation](../transfer-01-negotiation/README.md) chapters. Running this sample consists of multiple steps, that are executed one by one and following the same order. -### 1. Start a http server - -As a pre-requisite, you need to have a logging webserver that runs on port 4000 and logs all the incoming requests, it will -be mandatory to get the EndpointDataReference that will be used to get the data. - -```bash -docker build -t http-request-logger util/http-request-logger -docker run -p 4000:4000 http-request-logger -``` - -### 2. Start the transfer +### 1. Start the transfer In the [request body](resources/start-transfer.json), we need to specify which asset we want transferred, the ID of the contract agreement, the address of the provider connector and where we want the file transferred. Before executing the request, insert the `contractAgreementId` from the previous chapter. Then run: ```bash -curl -X POST "http://localhost:29193/management/v2/transferprocesses" \ +curl -X POST "http://localhost:29193/management/v3/transferprocesses" \ -H "Content-Type: application/json" \ -d @transfer/transfer-02-consumer-pull/resources/start-transfer.json \ -s | jq ``` -> the "HttpProxy" method is used for the consumer pull method, and it means that it will be up to -> the consumer to request the data to the provider and that the request will be a proxy for the -> datasource +> the "HttpData-PULL" transfer type is used for the consumer pull method, and it means that it will be up to +> the consumer to request the data to the provider and that the request will be a proxy for the datasource Then, we will get a UUID in the response. This time, this is the ID of the `TransferProcess` ( process id) created on the consumer @@ -67,13 +56,13 @@ Sample output: } ``` -### 3. Check the transfer status +### 2. Check the transfer status Due to the nature of the transfer, it will be very fast and most likely already done by the time you read the UUID. ```bash -curl http://localhost:29193/management/v2/transferprocesses/ +curl http://localhost:29193/management/v3/transferprocesses/ ``` You should see the Transfer Process in `STARTED` state: @@ -91,28 +80,36 @@ You should see the Transfer Process in `STARTED` state: > Note that for the consumer pull scenario the TP will stay in STARTED state after the data has been transferred successfully. > It might get eventually get shifted to TERMINATED or DEPROVISIONED by other resources, but this is not scope of this sample. -### 4. Check the data +### 3. Check the data -At this step, if you look at the http server logs, you will find a json representing the EndpointDataReference, needed -to get the data from the provider: +At this step, an EndpointDataReference would have been generated by the provider and sent to the consumer. 
The latter +stored it in a cache, so we can obtain it using the transfer process id: +```bash +curl http://localhost:29193/management/v3/edrs//dataaddress | jq +``` ```json { - "id": "591bb609-1edb-4a6b-babe-50f1eca3e1e9", - "endpoint": "http://localhost:29291/public/", - "authKey": "Authorization", - "authCode": "{{auth-code}}", - "properties": { - "cid": "1:1e47895f-9025-49e1-971d-eb865c38d540" + "@type": "DataAddress", + "type": "https://w3id.org/idsa/v4.1/HTTP", + "endpoint": "http://localhost:19291/public", + "authType": "bearer", + "endpointType": "https://w3id.org/idsa/v4.1/HTTP", + "authorization": "eyJraWQiOiJwdWJsaWMta2V5IiwiYWxnIjoiUlMyNTYifQ.eyJpc3MiOiJwcm92aWRlciIsImF1ZCI6ImNvbnN1bWVyIiwic3ViIjoicHJvdmlkZXIiLCJpYXQiOjE3MTc3NjkyMzEyOTYsImp0aSI6IjM2M2RhMGU4LWZmOGItNDY1My05YjQwLWY4MjdlMWMzOGMzYyJ9.WOVPz6m7XzIrbiMTfLqOXacGYz8Xk_-iQu7gmxoIgDFYsgo0da2Iv51EsugIpqbodPsmB0kK7zkyrmsFOfAASAq7fjsy4gQF-u5egYwoGpcxjYaJJdQa5lkwjC0fRxdVFVwZwrOaT5Mg-vGA9HssTEnlA64q-O0ae_aTH5ToflmPDM3FhAgL55I3odM5ysM2POEJY6pgOxIV9XjuhZFl_i_iTiUCZy__oQUZiYk58wKoqfK758Sy1WzpH-eyZCDUi_Z3n6cJB80_0ZThoPhtiFH7Tl9DfStnjsCoaeqMLFnTXp0s8h4ZGFmjfBc-72aAdRQqqLDT8WXNg3Csv5B56Q", + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/", + "edc": "https://w3id.org/edc/v0.0.1/ns/", + "odrl": "http://www.w3.org/ns/odrl/2/" } } + ``` Once this json is read, use a tool like postman or curl to execute the following query, to read the data ```bash -curl --location --request GET 'http://localhost:29291/public/' --header 'Authorization: ' +curl --location --request GET 'http://localhost:19291/public/' --header 'Authorization: ' ``` At the end, and to be sure that you correctly achieved the pull, you can check if the data you get @@ -122,7 +119,7 @@ is the same as the one you can get at https://jsonplaceholder.typicode.com/users Since we configured the `HttpData` with `proxyPath`, we could also ask for a specific user with: ```bash -curl --location --request GET 'http://localhost:29291/public/1' --header 'Authorization: ' +curl --location --request GET 'http://localhost:19291/public/1' --header 'Authorization: ' ``` And the data returned will be the same as in https://jsonplaceholder.typicode.com/users/1 diff --git a/transfer/transfer-02-consumer-pull/resources/start-transfer.json b/transfer/transfer-02-consumer-pull/resources/start-transfer.json index 48a2cc31..c9959b7b 100644 --- a/transfer/transfer-02-consumer-pull/resources/start-transfer.json +++ b/transfer/transfer-02-consumer-pull/resources/start-transfer.json @@ -8,7 +8,5 @@ "contractId": "{{contract-agreement-id}}", "assetId": "assetId", "protocol": "dataspace-protocol-http", - "dataDestination": { - "type": "HttpProxy" - } + "transferType": "HttpData-PULL" } diff --git a/transfer/transfer-03-provider-push/README.md b/transfer/transfer-03-provider-push/README.md index 495c3fef..f3c7adab 100644 --- a/transfer/transfer-03-provider-push/README.md +++ b/transfer/transfer-03-provider-push/README.md @@ -24,14 +24,24 @@ If not, re-visit the [Prerequisites](../transfer-00-prerequisites/README.md) Running this sample consists of multiple steps, that are executed one by one and following the same order. -### 1. Start the transfer +### 1. Start a http server + +As a pre-requisite, you need to have a logging webserver that runs on port 4000 and logs all the incoming requests, the +data will be sent to this server. + +```bash +docker build -t http-request-logger util/http-request-logger +docker run -p 4000:4000 http-request-logger +``` + +### 2. 
Start the transfer Before executing the request, modify the [request body](resources/start-transfer.json) by inserting the contract agreement ID from the [Negotiation](../transfer-01-negotiation/README.md) chapter. You can re-use the same asset, policies and contract negotiation from before. ```bash -curl -X POST "http://localhost:29193/management/v2/transferprocesses" \ +curl -X POST "http://localhost:29193/management/v3/transferprocesses" \ -H "Content-Type: application/json" \ -d @transfer/transfer-03-provider-push/resources/start-transfer.json \ -s | jq @@ -50,18 +60,18 @@ Sample output: } ``` -### 2. Check the transfer status +### 3. Check the transfer status Due to the nature of the transfer, it will be very fast and most likely already done by the time you read the UUID. ```bash -curl http://localhost:29193/management/v2/transferprocesses/ +curl http://localhost:29193/management/v3/transferprocesses/ ``` Notice the transfer COMPLETED state -### 3. Check the data +### 4. Check the data At this step, you can check the data by checking the log of the http server exposed on port 4000, you should see a log that shows the same data that you can get from https://jsonplaceholder.typicode.com/users. diff --git a/transfer/transfer-03-provider-push/resources/start-transfer.json b/transfer/transfer-03-provider-push/resources/start-transfer.json index 65fe1541..51e6706b 100644 --- a/transfer/transfer-03-provider-push/resources/start-transfer.json +++ b/transfer/transfer-03-provider-push/resources/start-transfer.json @@ -8,6 +8,7 @@ "contractId": "{{contract-agreement-id}}", "assetId": "assetId", "protocol": "dataspace-protocol-http", + "transferType": "HttpData-PUSH", "dataDestination": { "type": "HttpData", "baseUrl": "http://localhost:4000/api/consumer/store" diff --git a/transfer/transfer-04-event-consumer/README.md b/transfer/transfer-04-event-consumer/README.md index 3358da19..0f3d9cc3 100644 --- a/transfer/transfer-04-event-consumer/README.md +++ b/transfer/transfer-04-event-consumer/README.md @@ -64,21 +64,21 @@ Run this to build and launch the consumer with listener extension: ```bash ./gradlew transfer:transfer-04-event-consumer:consumer-with-listener:build -java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.vault=transfer/transfer-00-prerequisites/resources/configuration/consumer-vault.properties -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties -jar transfer/transfer-04-event-consumer/consumer-with-listener/build/libs/connector.jar +java -Dedc.keystore=transfer/transfer-00-prerequisites/resources/certs/cert.pfx -Dedc.keystore.password=123456 -Dedc.fs.config=transfer/transfer-00-prerequisites/resources/configuration/consumer-configuration.properties -jar transfer/transfer-04-event-consumer/consumer-with-listener/build/libs/connector.jar ```` ### 2. Negotiate a new contract ```bash curl -d @transfer/transfer-01-negotiation/resources/negotiate-contract.json \ - -X POST -H 'content-type: application/json' http://localhost:29193/management/v2/contractnegotiations \ + -X POST -H 'content-type: application/json' http://localhost:29193/management/v3/contractnegotiations \ -s | jq ``` ### 3. 
Get the contract agreement id ```bash -curl -X GET "http://localhost:29193/management/v2/contractnegotiations/{{contract-negotiation-id}}" \ +curl -X GET "http://localhost:29193/management/v3/contractnegotiations/{{contract-negotiation-id}}" \ --header 'Content-Type: application/json' \ -s | jq ``` @@ -89,7 +89,7 @@ Replace the `contractId` property inside the [request body](../transfer-02-consu Afterward run: ```bash -curl -X POST "http://localhost:29193/management/v2/transferprocesses" \ +curl -X POST "http://localhost:29193/management/v3/transferprocesses" \ -H "Content-Type: application/json" \ -d @transfer/transfer-02-consumer-pull/resources/start-transfer.json \ -s | jq @@ -101,7 +101,7 @@ The consumer should spew out logs similar to: ```bash DEBUG 2023-10-16T09:29:45.316908 [TransferProcessManagerImpl] TransferProcess 762b5a0c-43fb-4b8b-8022-669043c8fa81 is now in state REQUESTED -DEBUG 2023-10-16T09:29:46.269998 DSP: Incoming TransferStartMessage for class org.eclipse.edc.connector.transfer.spi.types.TransferProcess process: 762b5a0c-43fb-4b8b-8022-669043c8fa81 +DEBUG 2023-10-16T09:29:46.269998 DSP: Incoming TransferStartMessage for class org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcess process: 762b5a0c-43fb-4b8b-8022-669043c8fa81 DEBUG 2023-10-16T09:29:46.271592 TransferProcessStartedListener received STARTED event <---------------------------- DEBUG 2023-10-16T09:29:46.27174 TransferProcess 762b5a0c-43fb-4b8b-8022-669043c8fa81 is now in state STARTED ``` diff --git a/transfer/transfer-04-event-consumer/consumer-with-listener/build.gradle.kts b/transfer/transfer-04-event-consumer/consumer-with-listener/build.gradle.kts index 805fe5f3..2e116f41 100644 --- a/transfer/transfer-04-event-consumer/consumer-with-listener/build.gradle.kts +++ b/transfer/transfer-04-event-consumer/consumer-with-listener/build.gradle.kts @@ -21,21 +21,20 @@ plugins { dependencies { implementation(libs.edc.control.plane.api.client) + implementation(libs.edc.control.plane.api) implementation(libs.edc.control.plane.core) - implementation(libs.edc.control.plane.api.client) implementation(libs.edc.dsp) implementation(libs.edc.configuration.filesystem) - implementation(libs.edc.vault.filesystem) implementation(libs.edc.iam.mock) implementation(libs.edc.management.api) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.edc.transfer.pull.http.receiver) implementation(libs.edc.data.plane.selector.api) implementation(libs.edc.data.plane.selector.core) - implementation(libs.edc.data.plane.selector.client) - implementation(libs.edc.data.plane.api) + implementation(libs.edc.data.plane.control.api) + implementation(libs.edc.data.plane.public.api) implementation(libs.edc.data.plane.core) implementation(libs.edc.data.plane.http) diff --git a/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListener.java b/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListener.java index 515a9e26..acc94405 100644 --- a/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListener.java +++ b/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListener.java @@ -14,8 +14,8 @@ package org.eclipse.edc.sample.extension.listener; -import 
org.eclipse.edc.connector.transfer.spi.observe.TransferProcessListener; -import org.eclipse.edc.connector.transfer.spi.types.TransferProcess; +import org.eclipse.edc.connector.controlplane.transfer.spi.observe.TransferProcessListener; +import org.eclipse.edc.connector.controlplane.transfer.spi.types.TransferProcess; import org.eclipse.edc.spi.monitor.Monitor; public class TransferProcessStartedListener implements TransferProcessListener { diff --git a/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListenerExtension.java b/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListenerExtension.java index a2e5f47e..96ab60d1 100644 --- a/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListenerExtension.java +++ b/transfer/transfer-04-event-consumer/listener/src/main/java/org/eclipse/edc/sample/extension/listener/TransferProcessStartedListenerExtension.java @@ -14,7 +14,7 @@ package org.eclipse.edc.sample.extension.listener; -import org.eclipse.edc.connector.transfer.spi.observe.TransferProcessObservable; +import org.eclipse.edc.connector.controlplane.transfer.spi.observe.TransferProcessObservable; import org.eclipse.edc.spi.system.ServiceExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; diff --git a/transfer/transfer-05-file-transfer-cloud/README.md b/transfer/transfer-05-file-transfer-cloud/README.md index 460605b1..353d0f8e 100644 --- a/transfer/transfer-05-file-transfer-cloud/README.md +++ b/transfer/transfer-05-file-transfer-cloud/README.md @@ -99,11 +99,15 @@ java -Dedc.fs.config=transfer/transfer-05-file-transfer-cloud/cloud-transfer-pro To request data offers from the provider, run: ```bash -curl -X POST "http://localhost:9192/management/catalog/request" \ +curl -X POST "http://localhost:9192/management/v3/catalog/request" \ --header 'X-Api-Key: password' \ --header 'Content-Type: application/json' \ --data-raw '{ - "counterPartyAddress": "http://localhost:8282/protocol" + "@context": { + "@vocab": "https://w3id.org/edc/v0.0.1/ns/" + }, + "counterPartyAddress": "http://localhost:8282/protocol", + "protocol": "dataspace-protocol-http" }' ``` @@ -113,7 +117,7 @@ To negotiate a contract copy one of the contract offers into the statement below it is only possible to negotiate an _unchanged_ contract, so counter offers are not supported. ```bash -curl --location --request POST 'http://localhost:9192/management/v2/contractnegotiations' \ +curl --location --request POST 'http://localhost:9192/management/v3/contractnegotiations' \ --header 'X-API-Key: password' \ --header 'Content-Type: application/json' \ --data-raw '{ @@ -131,7 +135,7 @@ The EDC will answer with the contract negotiation id. This id will be used in st To get the contract agreement id insert the negotiation id into the following statement end execute it. ```bash -curl -X GET -H 'X-Api-Key: password' "http://localhost:9192/management/v2/contractnegotiations/{negotiationId}" +curl -X GET -H 'X-Api-Key: password' "http://localhost:9192/management/v3/contractnegotiations/{negotiationId}" ``` The EDC will return the current state of the contract negotiation. When the negotiation is completed successfully @@ -143,7 +147,7 @@ To initiate the data transfer, execute the statement below. Please take care of obtained at previous step as well as a unique bucket name. 
```bash -curl --location --request POST 'http://localhost:9192/management/v2/transferprocesses' \ +curl --location --request POST 'http://localhost:9192/management/v3/transferprocesses' \ --header 'X-API-Key: password' \ --header 'Content-Type: application/json' \ --data-raw ' @@ -173,7 +177,7 @@ Deprovisioning is not necessary per se, but it will do some cleanup, delete the it's generally advisable to do it. ```bash -curl -X POST -H 'X-Api-Key: password' "http://localhost:9192/management/v2/transferprocesses/{transferProcessId}/deprovision" +curl -X POST -H 'X-Api-Key: password' "http://localhost:9192/management/v3/transferprocesses/{transferProcessId}/deprovision" ``` Finally, run terraform to clean-up the vault and other remaining stuffs: diff --git a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/build.gradle.kts b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/build.gradle.kts index 334eb169..a2b98a41 100644 --- a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/build.gradle.kts +++ b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/build.gradle.kts @@ -36,6 +36,18 @@ dependencies { implementation(libs.edc.dsp) implementation(libs.edc.data.plane.selector.core) + + implementation(libs.edc.control.plane.api.client) + implementation(libs.edc.control.plane.api) + implementation(libs.edc.transfer.data.plane.signaling) + implementation(libs.edc.transfer.pull.http.receiver) + + implementation(libs.edc.data.plane.selector.api) + + implementation(libs.edc.data.plane.control.api) + implementation(libs.edc.data.plane.public.api) + implementation(libs.edc.data.plane.core) + implementation(libs.edc.data.plane.http) } application { diff --git a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/config.properties b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/config.properties index d729fdc4..ae19e14c 100644 --- a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/config.properties +++ b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-consumer/config.properties @@ -10,3 +10,15 @@ edc.vault.tenantid= edc.vault.certificate= edc.vault.name= edc.dsp.callback.address=http://localhost:9292/protocol + +#configuration from earlier examples +edc.participant.id=consumer +edc.receiver.http.endpoint=http://localhost:4000/receiver/urn:connector:provider/callback +edc.transfer.dataplane.token.signer.privatekey.alias=1 +edc.transfer.proxy.token.signer.privatekey.alias=1 +edc.transfer.proxy.token.verifier.publickey.alias=public-key +web.http.public.port=29291 +web.http.public.path=/public +web.http.control.port=29192 +web.http.control.path=/control +edc.dataplane.token.validation.endpoint=http://localhost:29192/control/token diff --git a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/build.gradle.kts b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/build.gradle.kts index 4c77fc5d..dc5b397a 100644 --- a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/build.gradle.kts +++ b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/build.gradle.kts @@ -36,6 +36,18 @@ dependencies { implementation(libs.edc.dsp) implementation(project(":transfer:transfer-05-file-transfer-cloud:transfer-file-cloud")) + + implementation(libs.edc.control.plane.api.client) + implementation(libs.edc.control.plane.api) + implementation(libs.edc.transfer.data.plane.signaling) + implementation(libs.edc.transfer.pull.http.receiver) + + 
implementation(libs.edc.data.plane.selector.api) + + implementation(libs.edc.data.plane.control.api) + implementation(libs.edc.data.plane.public.api) + implementation(libs.edc.data.plane.core) + implementation(libs.edc.data.plane.http) } application { diff --git a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/config.properties b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/config.properties index d6385145..a9f7dd05 100644 --- a/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/config.properties +++ b/transfer/transfer-05-file-transfer-cloud/cloud-transfer-provider/config.properties @@ -2,8 +2,23 @@ web.http.port=8181 web.http.path=/api web.http.management.port=8182 web.http.management.path=/management +web.http.protocol.port=8282 +web.http.protocol.path=/protocol +edc.api.auth.key=password edc.vault.clientid= edc.vault.tenantid= edc.vault.certificate= edc.vault.name= edc.dsp.callback.address=http://localhost:8282/protocol + +#configuration from earlier examples +edc.participant.id=provider +edc.receiver.http.endpoint=http://localhost:4000/receiver/urn:connector:provider/callback +edc.transfer.dataplane.token.signer.privatekey.alias=1 +edc.transfer.proxy.token.signer.privatekey.alias=1 +edc.transfer.proxy.token.verifier.publickey.alias=public-key +web.http.public.port=19291 +web.http.public.path=/public +web.http.control.port=19192 +web.http.control.path=/control +edc.dataplane.token.validation.endpoint=http://localhost:19192/control/token diff --git a/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/build.gradle.kts b/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/build.gradle.kts index 0f870bcc..e717775e 100644 --- a/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/build.gradle.kts +++ b/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/build.gradle.kts @@ -23,9 +23,8 @@ dependencies { implementation(libs.edc.data.plane.azure.storage) implementation(libs.edc.data.plane.aws.s3) implementation(libs.edc.data.plane.client) - implementation(libs.edc.data.plane.selector.client) implementation(libs.edc.data.plane.selector.core) - implementation(libs.edc.transfer.data.plane) + implementation(libs.edc.transfer.data.plane.signaling) implementation(libs.opentelemetry.annotations) } diff --git a/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/src/main/java/org/eclipse/edc/sample/extension/transfer/CloudTransferExtension.java b/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/src/main/java/org/eclipse/edc/sample/extension/transfer/CloudTransferExtension.java index accb343d..78d29cda 100644 --- a/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/src/main/java/org/eclipse/edc/sample/extension/transfer/CloudTransferExtension.java +++ b/transfer/transfer-05-file-transfer-cloud/transfer-file-cloud/src/main/java/org/eclipse/edc/sample/extension/transfer/CloudTransferExtension.java @@ -14,19 +14,19 @@ package org.eclipse.edc.sample.extension.transfer; -import org.eclipse.edc.connector.contract.spi.offer.store.ContractDefinitionStore; -import org.eclipse.edc.connector.contract.spi.types.offer.ContractDefinition; -import org.eclipse.edc.connector.policy.spi.PolicyDefinition; -import org.eclipse.edc.connector.policy.spi.store.PolicyDefinitionStore; +import org.eclipse.edc.connector.controlplane.asset.spi.domain.Asset; +import org.eclipse.edc.connector.controlplane.asset.spi.index.AssetIndex; +import 
org.eclipse.edc.connector.controlplane.contract.spi.offer.store.ContractDefinitionStore; +import org.eclipse.edc.connector.controlplane.contract.spi.types.offer.ContractDefinition; +import org.eclipse.edc.connector.controlplane.policy.spi.PolicyDefinition; +import org.eclipse.edc.connector.controlplane.policy.spi.store.PolicyDefinitionStore; import org.eclipse.edc.policy.model.Action; import org.eclipse.edc.policy.model.Permission; import org.eclipse.edc.policy.model.Policy; import org.eclipse.edc.runtime.metamodel.annotation.Inject; -import org.eclipse.edc.spi.asset.AssetIndex; import org.eclipse.edc.spi.system.ServiceExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; import org.eclipse.edc.spi.types.domain.DataAddress; -import org.eclipse.edc.spi.types.domain.asset.Asset; import static org.eclipse.edc.spi.query.Criterion.criterion; @@ -49,7 +49,7 @@ public void initialize(ServiceExtensionContext context) { policyDefinitionStore.create(policy); registerDataEntries(); - registerContractDefinition(policy.getUid()); + registerContractDefinition(policy.getId()); } public void registerDataEntries() {