Merge pull request #229 from drcgjung/fix/e2e-patches
fix|chore: e2e test issues
almadigabor authored Jul 30, 2024
2 parents a0c4bdc + b4371e4 commit 0d2d0be
Showing 35 changed files with 264 additions and 136 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build.yml
@@ -142,7 +142,7 @@ jobs:
type=semver,pattern={{version}}
type=semver,pattern={{major}}
type=semver,pattern={{major}}.{{minor}}
type=raw,value=1.13.21-SNAPSHOT,enable=${{ github.event.inputs.deploy_docker == 'true' || github.ref == format('refs/heads/{0}', 'main') }}
type=raw,value=1.13.22-SNAPSHOT,enable=${{ github.event.inputs.deploy_docker == 'true' || github.ref == format('refs/heads/{0}', 'main') }}
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
- name: Agent Plane Hashicorp Container Build and push
@@ -180,7 +180,7 @@ jobs:
type=semver,pattern={{version}}
type=semver,pattern={{major}}
type=semver,pattern={{major}}.{{minor}}
type=raw,value=1.13.21-SNAPSHOT,enable=${{ github.event.inputs.deploy_docker == 'true' || github.ref == format('refs/heads/{0}', 'main') }}
type=raw,value=1.13.22-SNAPSHOT,enable=${{ github.event.inputs.deploy_docker == 'true' || github.ref == format('refs/heads/{0}', 'main') }}
type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}
- name: Agent Plane Azure Vault Container Build and push
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -24,13 +24,14 @@ All notable changes to this product will be documented in this file.

# Released

## [1.13.21] - 2024-07-15
## [1.13.22] - 2024-07-29

### Added

### Changed

- Adapted to Tractus-X EDC 0.7.3
- Add connector url allowance patterns to the chart values for easier configuration.

## [1.12.19] - 2024-05-17

4 changes: 2 additions & 2 deletions DEPENDENCIES
@@ -267,8 +267,8 @@ maven/mavencentral/org.eclipse.jetty/jetty-util/11.0.21, EPL-2.0 OR Apache-2.0,
maven/mavencentral/org.eclipse.jetty/jetty-webapp/11.0.21, EPL-2.0 OR Apache-2.0, approved, rt.jetty
maven/mavencentral/org.eclipse.jetty/jetty-xml/11.0.21, EPL-2.0 OR Apache-2.0, approved, rt.jetty
maven/mavencentral/org.eclipse.parsson/parsson/1.1.6, EPL-2.0, approved, ee4j.parsson
maven/mavencentral/org.eclipse.tractusx.agents.edc.agent-plane/agent-plane-protocol/1.13.21-SNAPSHOT, Apache-2.0, approved, automotive.tractusx
maven/mavencentral/org.eclipse.tractusx.edc/auth-jwt/1.13.21-SNAPSHOT, Apache-2.0, approved, automotive.tractusx
maven/mavencentral/org.eclipse.tractusx.agents.edc.agent-plane/agent-plane-protocol/1.13.22-SNAPSHOT, Apache-2.0, approved, automotive.tractusx
maven/mavencentral/org.eclipse.tractusx.edc/auth-jwt/1.13.22-SNAPSHOT, Apache-2.0, approved, automotive.tractusx
maven/mavencentral/org.eclipse.tractusx.edc/core-spi/0.7.3, Apache-2.0, approved, automotive.tractusx
maven/mavencentral/org.eclipse.tractusx.edc/core-utils/0.7.3, Apache-2.0, approved, automotive.tractusx
maven/mavencentral/org.eclipse.tractusx.edc/data-plane-migration/0.7.3, Apache-2.0, approved, automotive.tractusx
4 changes: 2 additions & 2 deletions README.md
@@ -120,8 +120,8 @@ kubectl wait --namespace ingress-nginx \
--selector=app.kubernetes.io/component=controller \
--timeout=90s
# transfer images
kind load docker-image docker.io/tractusx/agentplane-hashicorp:1.13.21-SNAPSHOT --name ka
kind load docker-image docker.io/tractusx/agentplane-azure-vault:1.13.21-SNAPSHOT --name ka
kind load docker-image docker.io/tractusx/agentplane-hashicorp:1.13.22-SNAPSHOT --name ka
kind load docker-image docker.io/tractusx/agentplane-azure-vault:1.13.22-SNAPSHOT --name ka
# run chart testing
ct install --charts charts/agent-plane
ct install --charts charts/agent-plane-azure-vault
4 changes: 2 additions & 2 deletions agent-plane/README.md
@@ -66,10 +66,10 @@ mvn package -Pwith-docker-image
Alternatively, after a successful build, you can invoke docker yourself

```console
docker build -t tractusx/agentplane-azure-vault:1.13.21-SNAPSHOT -f agentplane-azure-vault/src/main/docker/Dockerfile .
docker build -t tractusx/agentplane-azure-vault:1.13.22-SNAPSHOT -f agentplane-azure-vault/src/main/docker/Dockerfile .
```

```console
docker build -t tractusx/agentplane-hashicorp:1.13.21-SNAPSHOT -f agentplane-hashicorp/src/main/docker/Dockerfile .
docker build -t tractusx/agentplane-hashicorp:1.13.22-SNAPSHOT -f agentplane-hashicorp/src/main/docker/Dockerfile .
```

2 changes: 1 addition & 1 deletion agent-plane/agent-plane-protocol/README.md
@@ -64,7 +64,7 @@ Add the following dependency to your data-plane artifact pom:
<dependency>
<groupId>org.eclipse.tractusx.agents.edc</groupId>
<artifactId>agent-plane-protocol</artifactId>
<version>1.13.21-SNAPSHOT</version>
<version>1.13.22-SNAPSHOT</version>
</dependency>
```

2 changes: 1 addition & 1 deletion agent-plane/agent-plane-protocol/pom.xml
@@ -25,7 +25,7 @@
<parent>
<groupId>org.eclipse.tractusx.agents.edc</groupId>
<artifactId>agent-plane</artifactId>
<version>1.13.21-SNAPSHOT</version>
<version>1.13.22-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -169,14 +169,16 @@ public void initialize(ServiceExtensionContext context) {
monitor,
httpRequestFactory,
processor,
skillStore);
skillStore,
typeManager);
AgentSourceFactory skillSourceFactory = new AgentSourceFactory(AgentProtocol.SKILL_HTTP.getProtocolId(),
edcHttpClient,
new AgentSourceRequestParamsSupplier(vault, typeManager, config, monitor),
monitor,
httpRequestFactory,
processor,
skillStore);
skillStore,
typeManager);
pipelineService.registerFactory(sparqlSourceFactory);
pipelineService.registerFactory(skillSourceFactory);

@@ -19,14 +19,19 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import jakarta.ws.rs.BadRequestException;
import org.apache.http.HttpStatus;
import org.apache.jena.fuseki.servlets.HttpAction;
import org.apache.jena.fuseki.system.ActionCategory;
import org.eclipse.tractusx.agents.edc.Tuple;
import org.eclipse.tractusx.agents.edc.TupleSet;
import org.slf4j.Logger;

import java.net.URLDecoder;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -37,10 +42,11 @@


/**
* HttpAction which may either contain
* a query or a predefined skill. In each case
* HttpAction is a wrapper around a request/response
* which may either contain a query or a predefined skill. In each case
* the parameterization/input binding can be done either by
* url parameters, by a binding set body or both.
* It also contains helper code to bind parameterized queries.
*/
public class AgentHttpAction extends HttpAction {
final String skill;
@@ -116,25 +122,35 @@ protected void parseArgs(HttpServletRequest request, HttpServletResponse respons
}

/**
* parses the body
* parses a given binding into a tupleset
*
* @param resultSet new binding spec
* @param tuples existing bindings
*/
public static void parseBinding(JsonNode resultSet, TupleSet tuples) throws Exception {
ArrayNode bindings = ((ArrayNode) resultSet.get("results").get("bindings"));
for (int count = 0; count < bindings.size(); count++) {
TupleSet ts = new TupleSet();
JsonNode binding = bindings.get(count);
Iterator<String> vars = binding.fieldNames();
while (vars.hasNext()) {
String var = vars.next();
JsonNode value = binding.get(var).get("value");
ts.add(var, value.textValue());
}
tuples.merge(ts);
}
}

/**
* parses the body of the request as an input binding, if
* the content type is hinting to a sparql resultset
*/
protected void parseBody(HttpServletRequest request, HttpServletResponse response) {
if (RESULTSET_CONTENT_TYPE.equals(request.getContentType())) {
ObjectMapper om = new ObjectMapper();
try {
JsonNode bindingSet = om.readTree(request.getInputStream());
ArrayNode bindings = ((ArrayNode) bindingSet.get("results").get("bindings"));
for (int count = 0; count < bindings.size(); count++) {
TupleSet ts = new TupleSet();
JsonNode binding = bindings.get(count);
Iterator<String> vars = binding.fieldNames();
while (vars.hasNext()) {
String var = vars.next();
JsonNode value = binding.get(var).get("value");
ts.add(var, value.textValue());
}
tupleSet.merge(ts);
}
parseBinding(om.readTree(request.getInputStream()), tupleSet);
} catch (Exception e) {
response.setStatus(HttpStatus.SC_BAD_REQUEST);
}
@@ -167,4 +183,83 @@ public String getGraphs() {
public TupleSet getInputBindings() {
return tupleSet;
}

/**
* helper method to bind a given tupleset to a parameterized query
*
* @param query the parameterized query
* @param bindings the tupleset to bind
* @return bound query
*/
public static String bind(String query, TupleSet bindings) throws Exception {
Pattern tuplePattern = Pattern.compile("\\([^()]*\\)");
Pattern variablePattern = Pattern.compile("@(?<name>[a-zA-Z0-9]+)");
Matcher tupleMatcher = tuplePattern.matcher(query);
StringBuilder replaceQuery = new StringBuilder();
int lastStart = 0;

//
// First find parameterized tuple appearances. Each tuple appearance is
// cloned for each bound "row"
//
while (tupleMatcher.find()) {
replaceQuery.append(query.substring(lastStart, tupleMatcher.start()));
String otuple = tupleMatcher.group(0);
Matcher variableMatcher = variablePattern.matcher(otuple);
List<String> variables = new java.util.ArrayList<>();
while (variableMatcher.find()) {
variables.add(variableMatcher.group("name"));
}
if (variables.size() > 0) {
boolean isFirst = true;
Collection<Tuple> tuples = bindings.getTuples(variables.toArray(new String[0]));
for (Tuple rtuple : tuples) {
if (isFirst) {
isFirst = false;
} else {
replaceQuery.append(" ");
}
String newTuple = otuple;
for (String key : rtuple.getVariables()) {
newTuple = newTuple.replace("@" + key, rtuple.get(key));
}
replaceQuery.append(newTuple);
}
} else {
replaceQuery.append(otuple);
}
lastStart = tupleMatcher.end();
}
replaceQuery.append(query.substring(lastStart));

//
// Replacing "global" variables appearing not in a tuple expression.
// This cannot be done for all bindings, but only the
// very first one
//
String queryString = replaceQuery.toString();

Matcher variableMatcher = variablePattern.matcher(queryString);
List<String> variables = new java.util.ArrayList<>();
while (variableMatcher.find()) {
variables.add(variableMatcher.group("name"));
}
try {
Collection<Tuple> tuples = bindings.getTuples(variables.toArray(new String[0]));
if (tuples.size() == 0 && variables.size() > 0) {
throw new BadRequestException(String.format("Error: Got variables %s on top-level but no bindings.", Arrays.toString(variables.toArray())));
} else if (tuples.size() > 1) {
System.err.println(String.format("Warning: Got %s tuples for top-level bindings of variables %s. Using only the first one.", tuples.size(), Arrays.toString(variables.toArray())));
}
if (tuples.size() > 0) {
Tuple rtuple = tuples.iterator().next();
for (String key : rtuple.getVariables()) {
queryString = queryString.replace("@" + key, rtuple.get(key));
}
}
} catch (Exception e) {
throw new BadRequestException(String.format("Error: Could not bind variables"), e);
}
return queryString;
}
}
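
The two new static helpers introduced above, `parseBinding` and `bind`, can be exercised together: a SPARQL result set (the binding-set body format that `parseBody` accepts) is first folded into a `TupleSet`, which is then spliced into the `@variable` placeholders of a parameterized query. The following sketch is illustrative only and not part of this change; the class name, the query text and the binding values are invented for the example, and the result-set JSON merely follows the `results`/`bindings`/`value` structure that `parseBinding` reads.

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import org.eclipse.tractusx.agents.edc.TupleSet;
import org.eclipse.tractusx.agents.edc.http.AgentHttpAction;

public class BindingSketch {
    public static void main(String[] args) throws Exception {
        // A SPARQL result set carrying two input rows for the variable "vin" (values are made up).
        String resultSet = "{\"results\":{\"bindings\":["
                + "{\"vin\":{\"type\":\"literal\",\"value\":\"WBA111\"}},"
                + "{\"vin\":{\"type\":\"literal\",\"value\":\"WBA222\"}}]}}";

        // parseBinding merges every binding row of the result set into one TupleSet.
        TupleSet bindings = new TupleSet();
        AgentHttpAction.parseBinding(new ObjectMapper().readTree(resultSet), bindings);

        // A parameterized skill: the parenthesized tuple ("@vin") is cloned once per binding row,
        // so the VALUES block ends up enumerating both VINs after binding.
        String skill = "SELECT ?vin WHERE { VALUES (?vin) { (\"@vin\") } }";
        String bound = AgentHttpAction.bind(skill, bindings);
        System.out.println(bound);
    }
}
```

Placeholders that appear outside any parenthesized tuple are, as the in-code comment in `bind` notes, only filled from the first binding row.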
@@ -20,15 +20,19 @@
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okio.Buffer;
import org.eclipse.edc.connector.dataplane.http.params.HttpRequestFactory;
import org.eclipse.edc.connector.dataplane.http.spi.HttpRequestParams;
import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSource;
import org.eclipse.edc.connector.dataplane.spi.pipeline.StreamResult;
import org.eclipse.edc.http.spi.EdcHttpClient;
import org.eclipse.edc.spi.types.TypeManager;
import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage;
import org.eclipse.tractusx.agents.edc.AgentConfig;
import org.eclipse.tractusx.agents.edc.SkillDistribution;
import org.eclipse.tractusx.agents.edc.SkillStore;
import org.eclipse.tractusx.agents.edc.TupleSet;
import org.eclipse.tractusx.agents.edc.http.AgentHttpAction;
import org.eclipse.tractusx.agents.edc.sparql.SparqlQueryProcessor;
import org.jetbrains.annotations.NotNull;

@@ -71,6 +75,8 @@ public class AgentSource implements DataSource {

protected DataFlowStartMessage request;

protected TypeManager typeManager;

protected String matchmakingAgentUrl;

public static final String AGENT_BOUNDARY = "--";
@@ -114,6 +120,13 @@ protected StreamResult<Stream<Part>> openMatchmakingInternal() {
SkillDistribution distribution = skillStore.getDistribution(asset);
String params = request.getProperties().get(AgentSourceHttpParamsDecorator.QUERY_PARAMS);
SkillDistribution runMode = SkillDistribution.ALL;
// if we have no distribution constraints, let the runMode decide
if (distribution == SkillDistribution.ALL) {
distribution = runMode;
} else if (runMode == SkillDistribution.ALL) {
// if we have no runMode constraints, let the distribution decide
runMode = distribution;
}
if (params.contains("runMode=provider") || params.contains("runMode=PROVIDER")) {
runMode = SkillDistribution.PROVIDER;
} else if (params.contains("runMode=consumer") || params.contains("runMode=CONSUMER")) {
@@ -123,7 +136,20 @@
if (distribution == SkillDistribution.PROVIDER) {
return StreamResult.error(String.format("Run distribution of skill %s should be consumer, but was set to provider only.", asset));
}
return StreamResult.success(Stream.of(new AgentPart("application/sparql-query", skillText.get().getBytes())));
String query = skillText.get();
okhttp3.Request tempRequest = this.requestFactory.toRequest(this.params);
if (tempRequest.body() != null && AgentHttpAction.RESULTSET_CONTENT_TYPE.equals(tempRequest.body().contentType().toString())) {
TupleSet bindings = new TupleSet();
try {
Buffer buffer = new Buffer();
tempRequest.body().writeTo(buffer);
AgentHttpAction.parseBinding(typeManager.getMapper().readTree(buffer.readByteArray()), bindings);
query = AgentHttpAction.bind(query, bindings);
} catch (Exception e) {
return StreamResult.error(String.format("The query could not be bound to the given input tupleset.", e));
}
}
return StreamResult.success(Stream.of(new AgentPart("application/sparql-query", query.getBytes())));
} else if (runMode == SkillDistribution.PROVIDER && distribution == SkillDistribution.CONSUMER) {
return StreamResult.error(String.format("Run distribution of skill %s should be provider, but was set to consumer only.", asset));
}
@@ -183,11 +209,31 @@ protected StreamResult<Stream<Part>> openMatchmakingRest() {
} else if (params.contains("runMode=consumer") || params.contains("runMode=CONSUMER")) {
runMode = SkillDistribution.CONSUMER;
}
// if we have no distribution constraints, let the runMode decide
if (distribution == SkillDistribution.ALL) {
distribution = runMode;
} else if (runMode == SkillDistribution.ALL) {
// if we have no runMode constraints, let the distribution decide
runMode = distribution;
}
if (runMode == SkillDistribution.CONSUMER) {
if (distribution == SkillDistribution.PROVIDER) {
return StreamResult.error(String.format("Run distribution of skill %s should be consumer, but was set to provider only.", asset));
}
return StreamResult.success(Stream.of(new AgentPart("application/sparql-query", skillText.get().getBytes())));
String query = skillText.get();
okhttp3.Request tempRequest = this.requestFactory.toRequest(this.params);
if (tempRequest.body() != null && AgentHttpAction.RESULTSET_CONTENT_TYPE.equals(tempRequest.body().contentType().toString())) {
TupleSet bindings = new TupleSet();
try {
Buffer buffer = new Buffer();
tempRequest.body().writeTo(buffer);
AgentHttpAction.parseBinding(typeManager.getMapper().readTree(buffer.readByteArray()), bindings);
query = AgentHttpAction.bind(query, bindings);
} catch (Exception e) {
return StreamResult.error(String.format("The query could not be bound to the given input tupleset.", e));
}
}
return StreamResult.success(Stream.of(new AgentPart("application/sparql-query", query.getBytes())));
} else if (runMode == SkillDistribution.PROVIDER && distribution == SkillDistribution.CONSUMER) {
return StreamResult.error(String.format("Run distribution of skill %s should be provider, but was set to consumer only.", asset));
}
@@ -301,6 +347,11 @@ public AgentSource.Builder matchmakingAgentUrl(String matchmakingAgentUrl) {
return this;
}

public AgentSource.Builder typeManager(TypeManager typeManager) {
dataSource.typeManager = typeManager;
return this;
}

public AgentSource build() {
Objects.requireNonNull(dataSource.requestId, "requestId");
Objects.requireNonNull(dataSource.httpClient, "httpClient");
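
The repeated distribution/run-mode handling in `openMatchmakingInternal` and `openMatchmakingRest` boils down to a small reconciliation rule: an unconstrained side (`ALL`) adopts the other side's value, and a hard conflict between a consumer-only run mode and a provider-only distribution (or vice versa) is rejected. The helper below is a distilled sketch of that rule rather than code from this commit; the class and method names and the exception type are invented, only the `SkillDistribution` constants come from the diff.

```java
import org.eclipse.tractusx.agents.edc.SkillDistribution;

final class RunModeReconciliation {

    /**
     * Resolves the effective run mode of a skill invocation, mirroring the checks
     * AgentSource performs before streaming the skill text.
     */
    static SkillDistribution resolve(SkillDistribution distribution, SkillDistribution runMode) {
        if (distribution == SkillDistribution.ALL) {
            return runMode;       // no distribution constraint: the requested run mode decides
        }
        if (runMode == SkillDistribution.ALL) {
            return distribution;  // no run-mode constraint: the skill's distribution decides
        }
        if (runMode != distribution) {
            // e.g. runMode=CONSUMER while the skill is distributed as PROVIDER only
            throw new IllegalArgumentException(
                    "Run mode " + runMode + " conflicts with skill distribution " + distribution);
        }
        return runMode;
    }
}
```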