Skip to content

Commit

Permalink
fix: jackson + slf4j updates
Browse files Browse the repository at this point in the history
  • Loading branch information
berezovskyi committed Oct 19, 2024
1 parent bd590d4 commit 7398ead
Show file tree
Hide file tree
Showing 10 changed files with 19 additions and 25 deletions.
2 changes: 1 addition & 1 deletion backend/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math3</artifactId>
<version>3.2</version>
<version>3.6.1</version>
</dependency>
</dependencies>
<repositories>
Expand Down
4 changes: 1 addition & 3 deletions backend/src/main/java/sparqles/analytics/AAnalyser.java
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package sparqles.analytics;

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sparqles.avro.Endpoint;
Expand Down Expand Up @@ -35,7 +34,6 @@ static void setDateCalculator(DateCalculator calc) {
/**
 * Computes the aggregated statistics for the Availability task.
 *
 * @param ares the availability result to fold into the aggregated view
 */
public boolean analyse(AResult ares) {
try {
Expand Down Expand Up @@ -171,7 +169,7 @@ private AvailabilityView getView(Endpoint ep) {
AvailabilityView view = null;
List<AvailabilityView> views = _db.getResults(ep, AvailabilityView.class, AvailabilityView.SCHEMA$);
if (views.size() != 1) {
Log.warn("We have {} AvailabilityView, expected was 1", views.size());
log.warn("We have {} AvailabilityView, expected was 1", views.size());
}
if (views.size() == 0) {
view = new AvailabilityView();
Expand Down
4 changes: 3 additions & 1 deletion backend/src/main/java/sparqles/analytics/Analytics.java
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
package sparqles.analytics;

import org.apache.avro.specific.SpecificRecordBase;
import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sparqles.avro.Endpoint;
import sparqles.avro.analytics.EPView;
import sparqles.avro.analytics.EPViewAvailability;
Expand All @@ -28,6 +29,7 @@
*/
public abstract class Analytics<V extends SpecificRecordBase> {

private static Logger Log = LoggerFactory.getLogger(Analytics.class);

protected final MongoDBManager _db;

Expand Down
3 changes: 1 addition & 2 deletions backend/src/main/java/sparqles/analytics/DAnalyser.java
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package sparqles.analytics;

import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sparqles.avro.Endpoint;
Expand Down Expand Up @@ -183,7 +182,7 @@ private DiscoverabilityView getView(Endpoint ep) {
views = _db.getResults(ep, DiscoverabilityView.class, DiscoverabilityView.SCHEMA$);
}
if (views.size() != 1) {
Log.warn("We have {} FeatureView, expected was 1", views.size());
log.warn("We have {} FeatureView, expected was 1", views.size());
}
if (views.size() == 0) {
view = new DiscoverabilityView();
Expand Down
3 changes: 1 addition & 2 deletions backend/src/main/java/sparqles/analytics/FAnalyser.java
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package sparqles.analytics;

import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sparqles.avro.Endpoint;
Expand Down Expand Up @@ -80,7 +79,7 @@ private InteroperabilityView getView(Endpoint ep) {
InteroperabilityView view = null;
List<InteroperabilityView> views = _db.getResults(ep, InteroperabilityView.class, InteroperabilityView.SCHEMA$);
if (views.size() != 1) {
Log.warn("We have {} FeatureView, expected was 1", views.size());
log.warn("We have {} FeatureView, expected was 1", views.size());
}
if (views.size() == 0) {
view = new InteroperabilityView();
Expand Down
3 changes: 1 addition & 2 deletions backend/src/main/java/sparqles/analytics/PAnalyser.java
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package sparqles.analytics;

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.mortbay.log.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sparqles.avro.Endpoint;
Expand Down Expand Up @@ -123,7 +122,7 @@ private PerformanceView getView(Endpoint ep) {
PerformanceView view = null;
List<PerformanceView> views = _db.getResults(ep, PerformanceView.class, PerformanceView.SCHEMA$);
if (views.size() != 1) {
Log.warn("We have {} AvailabilityView, expected was 1", views.size());
log.warn("We have {} AvailabilityView, expected was 1", views.size());
}
if (views.size() == 0) {
view = new PerformanceView();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package sparqles.analytics;

import org.apache.commons.math.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import sparqles.avro.EndpointResult;
import sparqles.avro.analytics.Index;
import sparqles.avro.availability.AResult;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import org.mortbay.log.Log;

import sparqles.core.CONSTANTS;

Expand Down
20 changes: 10 additions & 10 deletions backend/src/main/java/sparqles/utils/DatahubAccess.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
package sparqles.utils;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
Expand All @@ -12,9 +16,6 @@
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.SingleClientConnManager;
import org.apache.http.util.EntityUtils;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sparqles.avro.Dataset;
Expand Down Expand Up @@ -45,7 +46,6 @@ public class DatahubAccess {
/**
 * Fetches the SPARQL endpoint list from datahub using the datahub API.
 *
 * @return the collection of endpoints discovered via the datahub API
 **/
public static Collection<Endpoint> checkEndpointList() {
Map<String, Endpoint> results = new HashMap<String, Endpoint>();
Expand Down Expand Up @@ -84,18 +84,18 @@ public static Collection<Endpoint> checkEndpointList() {
ObjectMapper mapper = new ObjectMapper(factory);
JsonNode rootNode = mapper.readTree(respString);

JsonNode res = rootNode.get("result");
res = res.get("results");
var resNode = rootNode.get("result");
var res = (ArrayNode) resNode.get("results");
log.info("We found {} datasets", res.size());
Iterator<JsonNode> iter = res.getElements();
Iterator<JsonNode> iter = res.elements();
int c = 1;


Map<String, Set<String>> map = new HashMap<String, Set<String>>();
while (iter.hasNext()) {
JsonNode node = iter.next();
String endpointURL = node.findPath("url").getTextValue().trim();
String datasetId = node.findPath("package_id").getTextValue().trim();
String endpointURL = node.findPath("url").asText().trim();
String datasetId = node.findPath("package_id").asText().trim();

Set<String> s = map.get(endpointURL);
if (s == null) {
Expand Down Expand Up @@ -161,7 +161,7 @@ private static Endpoint checkForDataset(Endpoint ep, String datasetId, HttpClien
JsonNode res = rootNode.get("result");

// System.out.println(rootNode);
String ckan_url = res.findPath("url").getTextValue();
String ckan_url = res.findPath("url").asText();
List<JsonNode> titles = res.findValues("title");
String title = null;
for (JsonNode s : titles) {
Expand Down
2 changes: 0 additions & 2 deletions backend/src/test/java/sparqles/core/ARQRequestTEST.java
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
package sparqles.core;

import org.apache.jena.http.HttpOp;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.ResultSet;
import org.junit.After;
import org.junit.Before;
Expand Down

0 comments on commit 7398ead

Please sign in to comment.