diff --git a/code-formatting/eclipse-formatter.xml b/code-formatting/eclipse-formatter.xml
new file mode 100644
index 0000000..e7cfaba
--- /dev/null
+++ b/code-formatting/eclipse-formatter.xml
@@ -0,0 +1,402 @@
+<!-- Eclipse JDT code formatter profile: 402 lines of formatter settings (XML content not preserved) -->
diff --git a/code-formatting/pre-commit.sh b/code-formatting/pre-commit.sh
new file mode 100755
index 0000000..ed051d1
--- /dev/null
+++ b/code-formatting/pre-commit.sh
@@ -0,0 +1,28 @@
+#!/bin/sh -e
+CWD=$(pwd)
+cd "$(git rev-parse --show-toplevel)"
+format_cmd=""
+
+# skip if NO_VERIFY env var set
+if [ "$NO_VERIFY" ]; then
+ echo 'code formatting skipped' 1>&2
+ exit 0
+fi
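+
+# Note: git does not run hooks straight from the repository; to enable this one locally,
+# copy or symlink it to .git/hooks/pre-commit
+# (e.g. ln -s ../../code-formatting/pre-commit.sh .git/hooks/pre-commit).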
+
+# This pipeline is a bit ugly, so here is what each stage does:
+# 1. List all staged files
+# 2. Keep only the .java files
+# 3. Replace newlines with commas (tr handles this more simply than sed)
+# 4. Replace each comma with $,^.*
+# 5. Trim the trailing ,^.* (the last 4 characters)
+# The result looks like foo.java$,^.*bar.java$,^.*baz.java$
+# A leading ^.* is prepended when the regex is passed to spotless below.
+STAGED_JAVA_FILES_AS_REGEX=$(git diff --staged --name-only --diff-filter=ACMR | grep '\.java$' | tr '\n' ',' | sed -e 's/,/$,^.*/g' | sed 's/.\{4\}$//')
+FILES_TO_RESTAGE=$(git diff --staged --name-only --diff-filter=ACMR)
+if [ -n "$STAGED_JAVA_FILES_AS_REGEX" ]; then
+ echo "Found the following staged java files to format: $STAGED_JAVA_FILES_AS_REGEX"
+ mvn spotless:apply -DspotlessFiles=^.*$STAGED_JAVA_FILES_AS_REGEX
+ git add $FILES_TO_RESTAGE
+fi
+
+cd "$CWD"
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 5580bba..1849179 100644
--- a/pom.xml
+++ b/pom.xml
@@ -15,6 +15,7 @@
<description>Data Dictionary API</description>
<java.version>21</java.version>
+ <spotless.version>2.41.1</spotless.version>
@@ -74,6 +75,20 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
+<plugin>
+ <groupId>com.diffplug.spotless</groupId>
+ <artifactId>spotless-maven-plugin</artifactId>
+ <version>${spotless.version}</version>
+ <configuration>
+  <java>
+   <eclipse>
+    <version>4.26</version>
+    <file>code-formatting/eclipse-formatter.xml</file>
+   </eclipse>
+  </java>
+ </configuration>
+</plugin>
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorService.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorService.java
index 08a56a3..d9bc0ca 100644
--- a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorService.java
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorService.java
@@ -1,6 +1,8 @@
package edu.harvard.dbmi.avillach.dictionary.concept;
import edu.harvard.dbmi.avillach.dictionary.concept.model.Concept;
+import edu.harvard.dbmi.avillach.dictionary.dataset.Dataset;
+import edu.harvard.dbmi.avillach.dictionary.dataset.DatasetService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -9,6 +11,7 @@
import org.springframework.stereotype.Service;
import java.util.List;
+import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Stream;
@@ -18,16 +21,18 @@ public class ConceptDecoratorService {
private static final Logger LOG = LoggerFactory.getLogger(ConceptDecoratorService.class);
private final boolean enabled;
private final ConceptService conceptService;
+ private final DatasetService datasetService;
private static final int COMPLIANT = 4, NON_COMPLIANT_TABLED = 3, NON_COMPLIANT_UNTABLED = 2;
@Autowired
public ConceptDecoratorService(
- @Value("${dashboard.enable.extra_details}") boolean enabled,
- @Lazy ConceptService conceptService // circular dep
+ @Value("${dashboard.enable.extra_details}") boolean enabled, @Lazy ConceptService conceptService, DatasetService datasetService // circular
+ // dep
) {
this.enabled = enabled;
this.conceptService = conceptService;
+ this.datasetService = datasetService;
}
@@ -37,8 +42,20 @@ public Concept populateParentConcepts(Concept concept) {
}
// In some environments, certain parent concepts have critical details that we need to add to the detailed response
- List<String> conceptNodes = Stream.of(concept.conceptPath()
- .split("\\\\")).filter(Predicate.not(String::isBlank)).toList(); // you have to double escape the slash. Once for strings, and once for regex
+ List<String> conceptNodes = Stream.of(concept.conceptPath().split("\\\\")).filter(Predicate.not(String::isBlank)).toList(); // you
+ // have
+ // to
+ // double
+ // escape
+ // the
+ // slash.
+ // Once
+ // for
+ // strings,
+ // and
+ // once
+ // for
+ // regex
return switch (conceptNodes.size()) {
case COMPLIANT, NON_COMPLIANT_TABLED -> populateTabledConcept(concept, conceptNodes);
@@ -51,16 +68,14 @@ public Concept populateParentConcepts(Concept concept) {
}
private Concept populateTabledConcept(Concept concept, List<String> conceptNodes) {
- String studyPath = "\\" + String.join("\\", conceptNodes.subList(0, 1)) + "\\";
String tablePath = "\\" + String.join("\\", conceptNodes.subList(0, 2)) + "\\";
- Concept study = conceptService.conceptDetailWithoutAncestors(concept.dataset(), studyPath).orElse(null);
+ Dataset study = datasetService.getDataset(concept.dataset()).orElse(null);
Concept table = conceptService.conceptDetailWithoutAncestors(concept.dataset(), tablePath).orElse(null);
return concept.withStudy(study).withTable(table);
}
private Concept populateNonCompliantTabledConcept(Concept concept, List<String> conceptNodes) {
- String studyPath = String.join("\\", conceptNodes.subList(0, 1));
- Concept study = conceptService.conceptDetail(concept.dataset(), studyPath).orElse(null);
+ Dataset study = datasetService.getDataset(concept.dataset()).orElse(null);
return concept.withStudy(study);
}
}
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/CategoricalConcept.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/CategoricalConcept.java
index 7c4ee16..135a43a 100644
--- a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/CategoricalConcept.java
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/CategoricalConcept.java
@@ -1,6 +1,7 @@
package edu.harvard.dbmi.avillach.dictionary.concept.model;
import com.fasterxml.jackson.annotation.JsonProperty;
+import edu.harvard.dbmi.avillach.dictionary.dataset.Dataset;
import jakarta.annotation.Nullable;
import java.util.List;
@@ -12,31 +13,27 @@ public record CategoricalConcept(
List<String> values, boolean allowFiltering, String studyAcronym,
- @Nullable
- List<Concept> children,
+ @Nullable List<Concept> children,
- @Nullable
- Map<String, String> meta,
+ @Nullable Map<String, String> meta,
- @Nullable
- Concept table,
+ @Nullable Concept table,
- @Nullable
- Concept study
+ @Nullable Dataset study
) implements Concept {
public CategoricalConcept(
- String conceptPath, String name, String display, String dataset, String description, List<String> values,
- boolean allowFiltering, String studyAcronym, @Nullable List<Concept> children, @Nullable Map<String, String> meta
+ String conceptPath, String name, String display, String dataset, String description, List<String> values, boolean allowFiltering,
+ String studyAcronym, @Nullable List<Concept> children, @Nullable Map<String, String> meta
) {
this(conceptPath, name, display, dataset, description, values, allowFiltering, studyAcronym, children, meta, null, null);
}
public CategoricalConcept(CategoricalConcept core, Map<String, String> meta) {
this(
- core.conceptPath, core.name, core.display, core.dataset, core.description, core.values,
- core.allowFiltering, core.studyAcronym, core.children, meta
+ core.conceptPath, core.name, core.display, core.dataset, core.description, core.values, core.allowFiltering, core.studyAcronym,
+ core.children, meta
);
}
@@ -66,7 +63,7 @@ public Concept withTable(Concept table) {
}
@Override
- public Concept withStudy(Concept study) {
+ public Concept withStudy(Dataset study) {
return new CategoricalConcept(
conceptPath, name, display, dataset, description, values, allowFiltering, studyAcronym, children, meta, table, study
);
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/Concept.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/Concept.java
index c57f222..17443bc 100644
--- a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/Concept.java
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/Concept.java
@@ -3,6 +3,7 @@
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import edu.harvard.dbmi.avillach.dictionary.dataset.Dataset;
import jakarta.annotation.Nullable;
import java.util.List;
@@ -16,12 +17,11 @@
// - The name is set in the 'type' property
// - For each possible Concept type, here is what the 'type' property will be
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
-@JsonSubTypes({
- @JsonSubTypes.Type(value = ContinuousConcept.class, name = "Continuous"),
- @JsonSubTypes.Type(value = CategoricalConcept.class, name = "Categorical"),
-})
-public sealed interface Concept
- permits CategoricalConcept, ConceptShell, ContinuousConcept {
+@JsonSubTypes(
+ {@JsonSubTypes.Type(value = ContinuousConcept.class, name = "Continuous"),
+ @JsonSubTypes.Type(value = CategoricalConcept.class, name = "Categorical"),}
+)
+public sealed interface Concept permits CategoricalConcept, ConceptShell, ContinuousConcept {
/**
* @return The complete concept path for this concept (// delimited)
@@ -53,7 +53,7 @@ public sealed interface Concept
Concept table();
- Concept study();
+ Dataset study();
Map<String, String> meta();
@@ -68,7 +68,7 @@ default boolean allowFiltering() {
Concept withTable(Concept table);
- Concept withStudy(Concept study);
+ Concept withStudy(Dataset study);
default boolean conceptEquals(Object object) {
if (this == object) return true;
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ConceptShell.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ConceptShell.java
index c75e8ee..164b953 100644
--- a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ConceptShell.java
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ConceptShell.java
@@ -1,5 +1,6 @@
package edu.harvard.dbmi.avillach.dictionary.concept.model;
+import edu.harvard.dbmi.avillach.dictionary.dataset.Dataset;
import jakarta.annotation.Nullable;
import java.util.List;
@@ -33,7 +34,7 @@ public Concept table() {
}
@Override
- public Concept study() {
+ public Dataset study() {
return null;
}
@@ -58,7 +59,7 @@ public Concept withTable(Concept table) {
}
@Override
- public Concept withStudy(Concept study) {
+ public Concept withStudy(Dataset study) {
return this;
}
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ContinuousConcept.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ContinuousConcept.java
index aadc689..8b465d3 100644
--- a/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ContinuousConcept.java
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/concept/model/ContinuousConcept.java
@@ -1,6 +1,7 @@
package edu.harvard.dbmi.avillach.dictionary.concept.model;
import com.fasterxml.jackson.annotation.JsonProperty;
+import edu.harvard.dbmi.avillach.dictionary.dataset.Dataset;
import jakarta.annotation.Nullable;
import java.util.ArrayList;
@@ -11,32 +12,24 @@
public record ContinuousConcept(
String conceptPath, String name, String display, String dataset, String description, boolean allowFiltering,
- @Nullable Float min, @Nullable Float max, String studyAcronym,
- Map<String, String> meta,
- @Nullable
- List<Concept> children,
+ @Nullable Float min, @Nullable Float max, String studyAcronym, Map<String, String> meta, @Nullable List<Concept> children,
- @Nullable
- Concept table,
+ @Nullable Concept table,
- @Nullable
- Concept study
+ @Nullable Dataset study
) implements Concept {
public ContinuousConcept(
- String conceptPath, String name, String display, String dataset, String description, boolean allowFiltering,
- @Nullable Float min, @Nullable Float max, String studyAcronym, Map<String, String> meta, @Nullable List<Concept> children
+ String conceptPath, String name, String display, String dataset, String description, boolean allowFiltering, @Nullable Float min,
+ @Nullable Float max, String studyAcronym, Map<String, String> meta, @Nullable List<Concept> children
) {
- this(
- conceptPath, name, display, dataset, description, allowFiltering,
- min, max, studyAcronym, meta, children, null, null
- );
+ this(conceptPath, name, display, dataset, description, allowFiltering, min, max, studyAcronym, meta, children, null, null);
}
public ContinuousConcept(ContinuousConcept core, Map<String, String> meta) {
this(
- core.conceptPath, core.name, core.display, core.dataset, core.description, core.allowFiltering,
- core.min, core.max, core.studyAcronym, meta, core.children
+ core.conceptPath, core.name, core.display, core.dataset, core.description, core.allowFiltering, core.min, core.max,
+ core.studyAcronym, meta, core.children
);
}
@@ -45,8 +38,8 @@ public ContinuousConcept(String conceptPath, String dataset) {
}
public ContinuousConcept(
- String conceptPath, String name, String display, String dataset, String description, boolean allowFiltering,
- @Nullable Float min, @Nullable Float max, String studyAcronym, Map<String, String> meta
+ String conceptPath, String name, String display, String dataset, String description, boolean allowFiltering, @Nullable Float min,
+ @Nullable Float max, String studyAcronym, Map<String, String> meta
) {
this(conceptPath, name, display, dataset, description, allowFiltering, min, max, studyAcronym, meta, null);
}
@@ -67,16 +60,14 @@ public ContinuousConcept withChildren(List children) {
@Override
public Concept withTable(Concept table) {
return new ContinuousConcept(
- conceptPath, name, display, dataset, description, allowFiltering,
- min, max, studyAcronym, meta, children, table, study
+ conceptPath, name, display, dataset, description, allowFiltering, min, max, studyAcronym, meta, children, table, study
);
}
@Override
- public Concept withStudy(Concept study) {
+ public Concept withStudy(Dataset study) {
return new ContinuousConcept(
- conceptPath, name, display, dataset, description, allowFiltering,
- min, max, studyAcronym, meta, children, table, study
+ conceptPath, name, display, dataset, description, allowFiltering, min, max, studyAcronym, meta, children, table, study
);
}
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/Dataset.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/Dataset.java
new file mode 100644
index 0000000..b6f1b64
--- /dev/null
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/Dataset.java
@@ -0,0 +1,16 @@
+package edu.harvard.dbmi.avillach.dictionary.dataset;
+
+import jakarta.annotation.Nullable;
+
+import java.util.Map;
+
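+/**
+ * A dataset (study) as stored in the dictionary database. The optional meta map holds
+ * the dataset's key/value metadata and stays null until populated via withMeta.
+ */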
+public record Dataset(String ref, String fullName, String abbreviation, String description, @Nullable Map<String, String> meta) {
+
+ public Dataset(String ref, String fullName, String abbreviation, String description) {
+ this(ref, fullName, abbreviation, description, null);
+ }
+
+ public Dataset withMeta(Map<String, String> meta) {
+ return new Dataset(ref, fullName, abbreviation, description, meta);
+ }
+}
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetMapper.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetMapper.java
new file mode 100644
index 0000000..a6488c7
--- /dev/null
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetMapper.java
@@ -0,0 +1,15 @@
+package edu.harvard.dbmi.avillach.dictionary.dataset;
+
+import org.springframework.jdbc.core.RowMapper;
+import org.springframework.stereotype.Component;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
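+/**
+ * Maps a row of the dataset table (ref, full_name, abbreviation, description) onto a Dataset record.
+ */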
+@Component
+public class DatasetMapper implements RowMapper<Dataset> {
+ @Override
+ public Dataset mapRow(ResultSet rs, int rowNum) throws SQLException {
+ return new Dataset(rs.getString("ref"), rs.getString("full_name"), rs.getString("abbreviation"), rs.getString("description"));
+ }
+}
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetRepository.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetRepository.java
new file mode 100644
index 0000000..79333bb
--- /dev/null
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetRepository.java
@@ -0,0 +1,53 @@
+package edu.harvard.dbmi.avillach.dictionary.dataset;
+
+import edu.harvard.dbmi.avillach.dictionary.concept.model.Concept;
+import edu.harvard.dbmi.avillach.dictionary.util.MapExtractor;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
+import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
+import org.springframework.stereotype.Repository;
+
+import java.util.Map;
+import java.util.Optional;
+
+@Repository
+public class DatasetRepository {
+ private final NamedParameterJdbcTemplate template;
+ private final DatasetMapper mapper;
+ private final MapExtractor metaExtractor = new MapExtractor("key", "value");
+
+ @Autowired
+ public DatasetRepository(NamedParameterJdbcTemplate template, DatasetMapper mapper) {
+ this.template = template;
+ this.mapper = mapper;
+ }
+
+ public Optional<Dataset> getDataset(String ref) {
+ String sql = """
+ SELECT
+ ref, full_name, abbreviation, description
+ FROM
+ dataset
+ WHERE
+ dataset.REF = :ref
+ """;
+
+ MapSqlParameterSource params = new MapSqlParameterSource().addValue("ref", ref);
+
+ return template.query(sql, params, mapper).stream().findAny();
+ }
+
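+ /**
+ * Fetches the key/value metadata rows for the given dataset ref and collapses them
+ * into a single map via the shared "key"/"value" MapExtractor.
+ */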
+ public Map<String, String> getDatasetMeta(String ref) {
+ String sql = """
+ SELECT
+ key, value
+ FROM
+ dataset_meta
+ LEFT JOIN dataset ON dataset_meta.dataset_id = dataset.dataset_id
+ WHERE
+ dataset.REF = :ref
+ """;
+ MapSqlParameterSource params = new MapSqlParameterSource().addValue("ref", ref);
+ return template.query(sql, params, metaExtractor);
+ }
+}
diff --git a/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetService.java b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetService.java
new file mode 100644
index 0000000..ddaf782
--- /dev/null
+++ b/src/main/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetService.java
@@ -0,0 +1,24 @@
+package edu.harvard.dbmi.avillach.dictionary.dataset;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.util.Map;
+import java.util.Optional;
+
+@Service
+public class DatasetService {
+
+ private final DatasetRepository repository;
+
+
+ @Autowired
+ public DatasetService(DatasetRepository repository) {
+ this.repository = repository;
+ }
+
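+ /**
+ * Looks up a dataset by ref and attaches its metadata map; empty if no dataset matches.
+ */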
+ public Optional<Dataset> getDataset(String ref) {
+ Map<String, String> meta = repository.getDatasetMeta(ref);
+ return repository.getDataset(ref).map(ds -> ds.withMeta(meta));
+ }
+}
diff --git a/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorServiceTest.java b/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorServiceTest.java
index 435aefb..6800f57 100644
--- a/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorServiceTest.java
+++ b/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptDecoratorServiceTest.java
@@ -2,6 +2,8 @@
import edu.harvard.dbmi.avillach.dictionary.concept.model.CategoricalConcept;
import edu.harvard.dbmi.avillach.dictionary.concept.model.Concept;
+import edu.harvard.dbmi.avillach.dictionary.dataset.Dataset;
+import edu.harvard.dbmi.avillach.dictionary.dataset.DatasetService;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
@@ -9,6 +11,7 @@
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
+import javax.print.attribute.DocAttributeSet;
import java.util.Optional;
@@ -18,6 +21,9 @@ class ConceptDecoratorServiceTest {
@MockBean
ConceptService conceptService;
+ @MockBean
+ DatasetService datasetService;
+
@Autowired
ConceptDecoratorService subject;
@@ -25,12 +31,10 @@ class ConceptDecoratorServiceTest {
void shouldPopulateCompliantStudy() {
CategoricalConcept concept = new CategoricalConcept("\\study\\table\\idk\\concept\\", "dataset");
CategoricalConcept table = new CategoricalConcept("\\study\\table\\", "dataset");
- CategoricalConcept study = new CategoricalConcept("\\study\\", "dataset");
+ Dataset study = new Dataset("dataset", "", "", "");
- Mockito.when(conceptService.conceptDetail("dataset", table.dataset()))
- .thenReturn(Optional.of(table));
- Mockito.when(conceptService.conceptDetail("dataset", study.dataset()))
- .thenReturn(Optional.of(study));
+ Mockito.when(conceptService.conceptDetail("dataset", table.dataset())).thenReturn(Optional.of(table));
+ Mockito.when(datasetService.getDataset("dataset")).thenReturn(Optional.of(study));
Concept actual = subject.populateParentConcepts(concept);
Concept expected = concept.withStudy(study).withTable(table);
@@ -42,12 +46,10 @@ void shouldPopulateCompliantStudy() {
void shouldPopulateNonCompliantTabledStudy() {
CategoricalConcept concept = new CategoricalConcept("\\study\\table\\concept\\", "dataset");
CategoricalConcept table = new CategoricalConcept("\\study\\table\\", "dataset");
- CategoricalConcept study = new CategoricalConcept("\\study\\", "dataset");
+ Dataset study = new Dataset("dataset", "", "", "");
- Mockito.when(conceptService.conceptDetail("dataset", table.dataset()))
- .thenReturn(Optional.of(table));
- Mockito.when(conceptService.conceptDetail("dataset", study.dataset()))
- .thenReturn(Optional.of(study));
+ Mockito.when(conceptService.conceptDetail("dataset", table.dataset())).thenReturn(Optional.of(table));
+ Mockito.when(datasetService.getDataset("dataset")).thenReturn(Optional.of(study));
Concept actual = subject.populateParentConcepts(concept);
Concept expected = concept.withStudy(study).withTable(table);
@@ -58,10 +60,9 @@ void shouldPopulateNonCompliantTabledStudy() {
@Test
void shouldPopulateNonCompliantUnTabledStudy() {
CategoricalConcept concept = new CategoricalConcept("\\study\\concept\\", "dataset");
- CategoricalConcept study = new CategoricalConcept("\\study\\", "dataset");
+ Dataset study = new Dataset("dataset", "", "", "");
- Mockito.when(conceptService.conceptDetail("dataset", study.dataset()))
- .thenReturn(Optional.of(study));
+ Mockito.when(datasetService.getDataset("dataset")).thenReturn(Optional.of(study));
Concept actual = subject.populateParentConcepts(concept);
Concept expected = concept.withStudy(study);
@@ -76,4 +77,4 @@ void shouldNotPopulateWeirdConcept() {
Assertions.assertEquals(concept, actual);
}
-}
\ No newline at end of file
+}
diff --git a/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptRepositoryTest.java b/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptRepositoryTest.java
index bca46b3..f371012 100644
--- a/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptRepositoryTest.java
+++ b/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptRepositoryTest.java
@@ -30,12 +30,8 @@ class ConceptRepositoryTest {
ConceptRepository subject;
@Container
- static final PostgreSQLContainer<?> databaseContainer =
- new PostgreSQLContainer<>("postgres:16")
- .withReuse(true)
- .withCopyFileToContainer(
- MountableFile.forClasspathResource("seed.sql"), "/docker-entrypoint-initdb.d/seed.sql"
- );
+ static final PostgreSQLContainer<?> databaseContainer = new PostgreSQLContainer<>("postgres:16").withReuse(true)
+ .withCopyFileToContainer(MountableFile.forClasspathResource("seed.sql"), "/docker-entrypoint-initdb.d/seed.sql");
@DynamicPropertySource
static void mySQLProperties(DynamicPropertyRegistry registry) {
@@ -48,7 +44,7 @@ static void mySQLProperties(DynamicPropertyRegistry registry) {
@Test
void shouldListAllConcepts() {
List actual = subject.getConcepts(new Filter(List.of(), "", List.of()), Pageable.unpaged());
-
+
Assertions.assertEquals(30, actual.size());
}
@@ -76,12 +72,23 @@ void shouldListNextTwoConcepts() {
@Test
void shouldFilterConceptsByFacet() {
- List actual =
- subject.getConcepts(new Filter(List.of(new Facet("phs000007", "", "", "", 1, null, "study_ids_dataset_ids", null)), "", List.of()), Pageable.unpaged());
+ List actual = subject.getConcepts(
+ new Filter(List.of(new Facet("phs000007", "", "", "", 1, null, "study_ids_dataset_ids", null)), "", List.of()),
+ Pageable.unpaged()
+ );
List<? extends Record> expected = List.of(
- new ContinuousConcept("\\phs000007\\pht000022\\phv00004260\\FM219\\", "phv00004260", "FM219", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 1F, "FHS", null),
- new ContinuousConcept("\\phs000007\\pht000021\\phv00003844\\FL200\\", "phv00003844", "FL200", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 3F, "FHS", null),
- new ContinuousConcept("\\phs000007\\pht000033\\phv00008849\\D080\\", "phv00008849", "D080", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA/DAY", true, 0F, 5F, "FHS", null)
+ new ContinuousConcept(
+ "\\phs000007\\pht000021\\phv00003844\\FL200\\", "phv00003844", "FL200", "phs000007",
+ "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 3F, "FHS", null
+ ),
+ new ContinuousConcept(
+ "\\phs000007\\pht000022\\phv00004260\\FM219\\", "phv00004260", "FM219", "phs000007",
+ "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 1F, "FHS", null
+ ),
+ new ContinuousConcept(
+ "\\phs000007\\pht000033\\phv00008849\\D080\\", "phv00008849", "D080", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA/DAY",
+ true, 0F, 5F, "FHS", null
+ )
);
Assertions.assertEquals(expected, actual);
@@ -91,9 +98,18 @@ void shouldFilterConceptsByFacet() {
void shouldFilterBySearch() {
List actual = subject.getConcepts(new Filter(List.of(), "COLA", List.of()), Pageable.unpaged());
List<? extends Record> expected = List.of(
- new ContinuousConcept("\\phs000007\\pht000022\\phv00004260\\FM219\\", "phv00004260", "FM219", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 1F, "FHS", null),
- new ContinuousConcept("\\phs000007\\pht000021\\phv00003844\\FL200\\", "phv00003844", "FL200", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 3F, "FHS", null),
- new ContinuousConcept("\\phs000007\\pht000033\\phv00008849\\D080\\", "phv00008849", "D080", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA/DAY", true, 0F, 5F, "FHS", null)
+ new ContinuousConcept(
+ "\\phs000007\\pht000021\\phv00003844\\FL200\\", "phv00003844", "FL200", "phs000007",
+ "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 3F, "FHS", null
+ ),
+ new ContinuousConcept(
+ "\\phs000007\\pht000022\\phv00004260\\FM219\\", "phv00004260", "FM219", "phs000007",
+ "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 1F, "FHS", null
+ ),
+ new ContinuousConcept(
+ "\\phs000007\\pht000033\\phv00008849\\D080\\", "phv00008849", "D080", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA/DAY",
+ true, 0F, 5F, "FHS", null
+ )
);
Assertions.assertEquals(expected, actual);
@@ -101,11 +117,19 @@ void shouldFilterBySearch() {
@Test
void shouldFilterByBothSearchAndFacet() {
- List actual =
- subject.getConcepts(new Filter(List.of(new Facet("phs002715", "", "", "", 1, null, "study_ids_dataset_ids", null)), "phs002715", List.of()), Pageable.unpaged());
+ List actual = subject.getConcepts(
+ new Filter(List.of(new Facet("phs002715", "", "", "", 1, null, "study_ids_dataset_ids", null)), "phs002715", List.of()),
+ Pageable.unpaged()
+ );
List<? extends Record> expected = List.of(
- new CategoricalConcept("\\phs002715\\age\\", "AGE_CATEGORY", "age", "phs002715", "Participant's age (category)", List.of("21"), true, "NSRR CFS", null, null),
- new CategoricalConcept("\\phs002715\\nsrr_ever_smoker\\", "nsrr_ever_smoker", "nsrr_ever_smoker", "phs002715", "Smoker status", List.of("yes"), true, "NSRR CFS", null, null)
+ new CategoricalConcept(
+ "\\phs002715\\age\\", "AGE_CATEGORY", "age", "phs002715", "Participant's age (category)", List.of("21"), true, "NSRR CFS",
+ null, null
+ ),
+ new CategoricalConcept(
+ "\\phs002715\\nsrr_ever_smoker\\", "nsrr_ever_smoker", "nsrr_ever_smoker", "phs002715", "Smoker status", List.of("yes"),
+ true, "NSRR CFS", null, null
+ )
);
Assertions.assertEquals(expected, actual);
@@ -120,14 +144,17 @@ void shouldGetCount() {
@Test
void shouldGetCountWithFilter() {
- Long actual = subject.countConcepts(new Filter(List.of(new Facet("phs002715", "", "", "", 1, null, "study_ids_dataset_ids", null)), "", List.of()));
+ Long actual = subject
+ .countConcepts(new Filter(List.of(new Facet("phs002715", "", "", "", 1, null, "study_ids_dataset_ids", null)), "", List.of()));
Assertions.assertEquals(2L, actual);
}
@Test
void shouldGetDetailForConcept() {
- ContinuousConcept expected =
- new ContinuousConcept("\\phs000007\\pht000033\\phv00008849\\D080\\", "phv00008849", "D080", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA/DAY", true, 0F, 5F, "FHS", null);
+ ContinuousConcept expected = new ContinuousConcept(
+ "\\phs000007\\pht000033\\phv00008849\\D080\\", "phv00008849", "D080", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA/DAY", true,
+ 0F, 5F, "FHS", null
+ );
Optional actual = subject.getConcept("phs000007", "\\phs000007\\pht000033\\phv00008849\\D080\\");
Assertions.assertEquals(Optional.of(expected), actual);
@@ -161,27 +188,22 @@ void shouldGetStigmatizedConcept() {
@Test
void shouldGetMetaForMultipleConcepts() {
List concepts = List.of(
- new ContinuousConcept("\\phs000007\\pht000022\\phv00004260\\FM219\\", "", "", "phs000007", "", true, null, null, "FHS", Map.of()),
+ new ContinuousConcept(
+ "\\phs000007\\pht000022\\phv00004260\\FM219\\", "", "", "phs000007", "", true, null, null, "FHS", Map.of()
+ ),
new ContinuousConcept("\\phs000007\\pht000033\\phv00008849\\D080\\", "", "", "phs000007", "", true, null, null, "FHS", Map.of())
);
Map<Concept, Map<String, String>> actual = subject.getConceptMetaForConcepts(concepts);
Map<Concept, Map<String, String>> expected = Map.of(
- new ConceptShell("\\phs000007\\pht000033\\phv00008849\\D080\\", "phs000007"), Map.of(
- "unique_identifier", "false",
- "stigmatized", "false",
- "bdc_open_access", "true",
- "values", "[0.57,6.77]",
- "description", "# 12 OZ CUPS OF CAFFEINATED COLA/DAY",
- "free_text", "false"
- ),
- new ConceptShell("\\phs000007\\pht000022\\phv00004260\\FM219\\", "phs000007"), Map.of(
- "unique_identifier", "false",
- "stigmatized", "false",
- "bdc_open_access", "true",
- "values", "[0, 1]",
- "description", "# 12 OZ CUPS OF CAFFEINATED COLA / DAY",
- "free_text", "false"
+ new ConceptShell("\\phs000007\\pht000033\\phv00008849\\D080\\", "phs000007"),
+ Map.of(
+ "unique_identifier", "false", "stigmatized", "false", "bdc_open_access", "true", "values", "[0.57,6.77]", "description",
+ "# 12 OZ CUPS OF CAFFEINATED COLA/DAY", "free_text", "false"
+ ), new ConceptShell("\\phs000007\\pht000022\\phv00004260\\FM219\\", "phs000007"),
+ Map.of(
+ "unique_identifier", "false", "stigmatized", "false", "bdc_open_access", "true", "values", "[0, 1]", "description",
+ "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", "free_text", "false"
)
);
Assertions.assertEquals(expected, actual);
@@ -191,16 +213,29 @@ void shouldGetMetaForMultipleConcepts() {
void shouldGetTree() {
Concept d0 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\", "1");
Concept d1 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", "1");
- Concept d2 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\", "1");
- Concept d3 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\", "1");
- Concept d4A = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.5 Severe persistent asthma\\", "1");
- Concept d4B = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.9 Other and unspecified );asthma\\", "1");
+ Concept d2 = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\",
+ "1"
+ );
+ Concept d3 = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\",
+ "1"
+ );
+ Concept d4A = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.5 Severe persistent asthma\\",
+ "1"
+ );
+ Concept d4B = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.9 Other and unspecified );asthma\\",
+ "1"
+ );
d3 = d3.withChildren(List.of(d4A, d4B));
d2.withChildren(List.of(d3));
d1.withChildren(List.of(d2));
d0.withChildren(List.of(d1));
- Optional actual = subject.getConceptTree("1", "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", 3);
+ Optional actual =
+ subject.getConceptTree("1", "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", 3);
Optional expected = Optional.of(d0);
Assertions.assertEquals(expected, actual);
@@ -210,16 +245,29 @@ void shouldGetTree() {
void shouldGetTreeForDepthThatExceedsOntology() {
Concept d0 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\", "1");
Concept d1 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", "1");
- Concept d2 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\", "1");
- Concept d3 = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\", "1");
- Concept d4A = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.5 Severe persistent asthma\\", "1");
- Concept d4B = new CategoricalConcept("\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.9 Other and unspecified );asthma\\", "1");
+ Concept d2 = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\",
+ "1"
+ );
+ Concept d3 = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\",
+ "1"
+ );
+ Concept d4A = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.5 Severe persistent asthma\\",
+ "1"
+ );
+ Concept d4B = new CategoricalConcept(
+ "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\J40-J47 Chronic lower respiratory diseases (J40-J47)\\J45 Asthma\\J45.9 Other and unspecified );asthma\\",
+ "1"
+ );
d3 = d3.withChildren(List.of(d4A, d4B));
d2.withChildren(List.of(d3));
d1.withChildren(List.of(d2));
d0.withChildren(List.of(d1));
- Optional actual = subject.getConceptTree("1", "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", 30);
+ Optional actual =
+ subject.getConceptTree("1", "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", 30);
Optional expected = Optional.of(d0);
Assertions.assertEquals(expected, actual);
@@ -235,7 +283,8 @@ void shouldReturnEmptyTreeForDNE() {
@Test
void shouldReturnEmptyForNegativeDepth() {
- Optional actual = subject.getConceptTree("1", "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", -1);
+ Optional actual =
+ subject.getConceptTree("1", "\\ACT Diagnosis ICD-10\\J00-J99 Diseases of the respiratory system (J00-J99)\\", -1);
Optional expected = Optional.empty();
Assertions.assertEquals(expected, actual);
@@ -244,7 +293,9 @@ void shouldReturnEmptyForNegativeDepth() {
@Test
void shouldGetStigmatizingConcept() {
Optional actual = subject.getConcept("phs002385", "\\phs002385\\TXNUM\\");
- ContinuousConcept expected = new ContinuousConcept("\\phs002385\\TXNUM\\", "TXNUM", "TXNUM", "phs002385", "Transplant Number", false, 0F, 0F, "HCT_for_SCD", Map.of());
+ ContinuousConcept expected = new ContinuousConcept(
+ "\\phs002385\\TXNUM\\", "TXNUM", "TXNUM", "phs002385", "Transplant Number", false, 0F, 0F, "HCT_for_SCD", Map.of()
+ );
Assertions.assertTrue(actual.isPresent());
Assertions.assertEquals(expected, actual.get());
@@ -272,4 +323,4 @@ void shouldGetContConceptWithDecimalNotation() {
Assertions.assertEquals(0.57f, concept.min());
Assertions.assertEquals(6.77f, concept.max());
}
-}
\ No newline at end of file
+}
diff --git a/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptServiceIntegrationTest.java b/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptServiceIntegrationTest.java
index 398d037..e51ac7b 100644
--- a/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptServiceIntegrationTest.java
+++ b/src/test/java/edu/harvard/dbmi/avillach/dictionary/concept/ConceptServiceIntegrationTest.java
@@ -3,6 +3,7 @@
import edu.harvard.dbmi.avillach.dictionary.concept.model.CategoricalConcept;
import edu.harvard.dbmi.avillach.dictionary.concept.model.Concept;
import edu.harvard.dbmi.avillach.dictionary.concept.model.ContinuousConcept;
+import edu.harvard.dbmi.avillach.dictionary.dataset.Dataset;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
@@ -26,12 +27,8 @@ class ConceptServiceIntegrationTest {
ConceptService subject;
@Container
- static final PostgreSQLContainer<?> databaseContainer =
- new PostgreSQLContainer<>("postgres:16")
- .withReuse(true)
- .withCopyFileToContainer(
- MountableFile.forClasspathResource("seed.sql"), "/docker-entrypoint-initdb.d/seed.sql"
- );
+ static final PostgreSQLContainer<?> databaseContainer = new PostgreSQLContainer<>("postgres:16").withReuse(true)
+ .withCopyFileToContainer(MountableFile.forClasspathResource("seed.sql"), "/docker-entrypoint-initdb.d/seed.sql");
@DynamicPropertySource
static void mySQLProperties(DynamicPropertyRegistry registry) {
@@ -46,28 +43,23 @@ void shouldGetDetails() {
Optional actual = subject.conceptDetail("phs000007", "\\phs000007\\pht000021\\phv00003844\\FL200\\");
CategoricalConcept table = new CategoricalConcept(
- "\\phs000007\\pht000021\\", "pht000021", "ex0_19s", "phs000007",
- "Clinic Exam, Original Cohort Exam 19",
- List.of(), true, "FHS", null, Map.of("description", "Clinic Exam, Original Cohort Exam 19"), null, null
+ "\\phs000007\\pht000021\\", "pht000021", "ex0_19s", "phs000007", "Clinic Exam, Original Cohort Exam 19", List.of(), true, "FHS",
+ null, Map.of("description", "Clinic Exam, Original Cohort Exam 19"), null, null
);
- CategoricalConcept study = new CategoricalConcept(
- "\\phs000007\\", "", "", "phs000007", null, List.of(), true, "FHS", null, Map.of(), null, null
+ Dataset study = new Dataset(
+ "phs000007", "Framingham Cohort", "FHS",
+ "Startup of Framingham Heart Study. Cardiovascular disease (CVD) is the leading cause of death and serious illness in the United States. In 1948, the Framingham Heart Study (FHS) -- under the direction of the National Heart Institute (now known as the National Heart, Lung, and Blood Institute, NHLBI) -- embarked on a novel and ambitious project in health research. At the time, little was known about the general causes of heart disease and stroke, but the death rates for CVD had been increasing steadily since the beginning of the century and had become an American epidemic.\\n\\nThe objective of the FHS was to identify the common factors or characteristics that contribute to CVD by following its development over a long period of time in a large group of participants who had not yet developed overt symptoms of CVD or suffered a heart attack or stroke.\\n\\nDesign of Framingham Heart Study. In 1948, the researchers recruited 5,209 men and women between the ages of 30 and 62 from the town of Framingham, Massachusetts, and began the first round of extensive physical examinations and lifestyle interviews that they would later analyze for common patterns related to CVD development. Since 1948, the subjects have returned to the study every two years for an examination consisting of a detailed medical history, physical examination, and laboratory tests, and in 1971, the study enrolled a second-generation cohort -- 5,124 of the original participants' adult children and their spouses -- to participate in similar examinations. The second examination of the Offspring cohort occurred eight years after the first examination, and subsequent examinations have occurred approximately every four years thereafter. In April 2002 the Study entered a new phase: the enrollment of a third generation of participants, the grandchildren of the original cohort. The first examination of the Third Generation Study was completed in July 2005 and involved 4,095 participants. Thus, the FHS has evolved into a prospective, community-based, three generation family study. The FHS is a joint project of the National Heart, Lung and Blood Institute and Boston University.\\n\\nResearch Areas in the Framingham Heart Study. Over the years, careful monitoring of the FHS population has led to the identification of the major CVD risk factors -- high blood pressure, high blood cholesterol, smoking, obesity, diabetes, and physical inactivity -- as well as a great deal of valuable information on the effects of related factors such as blood triglyceride and HDL cholesterol levels, age, gender, and psychosocial issues. Risk factors have been identified for the major components of CVD, including coronary heart disease, stroke, intermittent claudication, and heart failure. It is also clear from research in the FHS and other studies that substantial subclinical vascular disease occurs in the blood vessels, heart and brain that precedes clinical CVD. With recent advances in technology, the FHS has enhanced its research capabilities and capitalized on its inherent resources by the conduct of high resolution imaging to detect and quantify subclinical vascular disease in the major blood vessels, heart and brain. These studies have included ultrasound studies of the heart (echocardiography) and carotid arteries, computed tomography studies of the heart and aorta, and magnetic resonance imaging studies of the brain, heart, and aorta. 
Although the Framingham cohort is primarily white, the importance of the major CVD risk factors identified in this group have been shown in other studies to apply almost universally among racial and ethnic groups, even though the patterns of distribution may vary from group to group. In the past half century, the Study has produced approximately 1,200 articles in leading medical journals. The concept of CVD risk factors has become an integral part of the modern medical curriculum and has led to the development of effective treatment and preventive strategies in clinical practice.\\n\\nIn addition to research studies focused on risk factors, subclinical CVD and clinically apparent CVD, Framingham investigators have also collaborated with leading researchers from around the country and throughout the world on projects involving some of the major chronic illnesses in men and women, including dementia, osteoporosis and arthritis, nutritional deficiencies, eye diseases, hearing disorders, and chronic obstructive lung diseases.\\n\\nGenetic Research in the Framingham Heart Study. While pursuing the Study's established research goals, the NHLBI and the Framingham investigators has expanded its research mission into the study of genetic factors underlying CVD and other disorders. Over the past two decades, DNA has been collected from blood samples and from immortalized cell lines obtained from Original Cohort participants, members of the Offspring Cohort and the Third Generation Cohort. Several large-scale genotyping projects have been conducted in the past decade. Genome-wide linkage analysis has been conducted using genotypes of approximately 400 microsatellite markers that have been completed in over 9,300 subjects in all three generations. Analyses using microsatellite markers completed in the original cohort and offspring cohorts have resulted in over 100 publications, including many publications from the Genetics Analysis Workshop 13. Several other recent collaborative projects have completed thousands of SNP genotypes for candidate gene regions in subsets of FHS subjects with available DNA. These projects include the Cardiogenomics Program of the NHLBI's Programs for Genomics Applications, the genotyping of ~3000 SNPs in inflammation genes, and the completion of a genome-wide scan of 100,000 SNPs using the Affymetrix 100K Genechip.\\n\\nFramingham Cohort Phenotype Data. The phenotype database contains a vast array of phenotype information available in all three generations. These will include the quantitative measures of the major risk factors such as systolic blood pressure, total and HDL cholesterol, fasting glucose, and cigarette use, as well as anthropomorphic measures such as body mass index, biomarkers such as fibrinogen and CRP, and electrocardiography measures such as the QT interval. Many of these measures have been collected repeatedly in the original and offspring cohorts. Also included in the SHARe database will be an array of recently collected biomarkers, subclinical disease imaging measures, clinical CVD outcomes as well as an array of ancillary studies. The phenotype data is located here in the top-level study phs000007 Framingham Cohort. To view the phenotype variables collected from the Framingham Cohort, please click on the Variables tab above."
);
ContinuousConcept expected = new ContinuousConcept(
- "\\phs000007\\pht000021\\phv00003844\\FL200\\", "phv00003844", "FL200", "phs000007",
- "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", true, 0F, 3F, "FHS",
+ "\\phs000007\\pht000021\\phv00003844\\FL200\\", "phv00003844", "FL200", "phs000007", "# 12 OZ CUPS OF CAFFEINATED COLA / DAY",
+ true, 0F, 3F, "FHS",
Map.of(
- "unique_identifier", "no",
- "stigmatizing", "no",
- "bdc_open_access", "yes",
- "values", "[0, 3]",
- "description", "# 12 OZ CUPS OF CAFFEINATED COLA / DAY",
- "free_text", "no"
- ),
- null, table, study
+ "unique_identifier", "no", "stigmatizing", "no", "bdc_open_access", "yes", "values", "[0, 3]", "description",
+ "# 12 OZ CUPS OF CAFFEINATED COLA / DAY", "free_text", "no"
+ ), null, table, study
);
Assertions.assertTrue(actual.isPresent());
Assertions.assertEquals(expected, actual.get());
}
-}
\ No newline at end of file
+}
diff --git a/src/test/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetRepositoryTest.java b/src/test/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetRepositoryTest.java
new file mode 100644
index 0000000..036a36d
--- /dev/null
+++ b/src/test/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetRepositoryTest.java
@@ -0,0 +1,66 @@
+package edu.harvard.dbmi.avillach.dictionary.dataset;
+
+import edu.harvard.dbmi.avillach.dictionary.facet.FacetRepository;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.context.DynamicPropertyRegistry;
+import org.springframework.test.context.DynamicPropertySource;
+import org.testcontainers.containers.PostgreSQLContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.MountableFile;
+
+import java.util.Map;
+import java.util.Optional;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+@Testcontainers
+@SpringBootTest
+class DatasetRepositoryTest {
+
+ @Autowired
+ DatasetRepository subject;
+
+ @Container
+ static final PostgreSQLContainer<?> databaseContainer = new PostgreSQLContainer<>("postgres:16").withReuse(true)
+ .withCopyFileToContainer(MountableFile.forClasspathResource("seed.sql"), "/docker-entrypoint-initdb.d/seed.sql");
+
+ @DynamicPropertySource
+ static void mySQLProperties(DynamicPropertyRegistry registry) {
+ registry.add("spring.datasource.url", databaseContainer::getJdbcUrl);
+ registry.add("spring.datasource.username", databaseContainer::getUsername);
+ registry.add("spring.datasource.password", databaseContainer::getPassword);
+ registry.add("spring.datasource.db", databaseContainer::getDatabaseName);
+ }
+
+ @Test
+ void shouldGetDataset() {
+ Optional<Dataset> actual = subject.getDataset("1");
+ Dataset expected = new Dataset(
+ "1", "Genomic Information Commons", "GIC",
+ "The GIC utilizes the ACT ontology to ensure data alignment across the sites. This project also includes other variables of interest as defined by the Governance Committee, such as biosamples, consents, etc."
+ );
+
+ Assertions.assertTrue(actual.isPresent());
+ Assertions.assertEquals(expected, actual.get());
+ }
+
+ @Test
+ void shouldNotGetDatasetThatDNE() {
+ Optional<Dataset> actual = subject.getDataset(":)");
+
+ Assertions.assertFalse(actual.isPresent());
+ }
+
+ @Test
+ void shouldGetDatasetMeta() {
+ Map<String, String> actual = subject.getDatasetMeta("phs002715");
+ Map<String, String> expected = Map
+ .of("focus", "Sleep Apnea Syndromes", "design", "Prospective Longitudinal Cohort", "clinvars", "500", "participants", "23432");
+
+ Assertions.assertEquals(expected, actual);
+ }
+}
diff --git a/src/test/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetServiceTest.java b/src/test/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetServiceTest.java
new file mode 100644
index 0000000..22b8f12
--- /dev/null
+++ b/src/test/java/edu/harvard/dbmi/avillach/dictionary/dataset/DatasetServiceTest.java
@@ -0,0 +1,43 @@
+package edu.harvard.dbmi.avillach.dictionary.dataset;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.mock.mockito.MockBean;
+
+import java.util.Map;
+import java.util.Optional;
+
+@SpringBootTest
+class DatasetServiceTest {
+
+ @MockBean
+ DatasetRepository repository;
+
+ @Autowired
+ DatasetService subject;
+
+ @Test
+ void shouldGetDataset() {
+ Mockito.when(repository.getDataset("foo")).thenReturn(Optional.of(new Dataset("foo", "1", "asdf", "idk")));
+ Mockito.when(repository.getDatasetMeta("foo")).thenReturn(Map.of("key1", "val1", "key2", "val2"));
+
+ Optional<Dataset> actual = subject.getDataset("foo");
+ Dataset expected = new Dataset("foo", "1", "asdf", "idk", Map.of("key1", "val1", "key2", "val2"));
+
+ Assertions.assertTrue(actual.isPresent());
+ Assertions.assertEquals(expected, actual.get());
+ }
+
+ @Test
+ void shouldNotGetDatasetThatDNE() {
+ Mockito.when(repository.getDataset("foo")).thenReturn(Optional.empty());
+ Mockito.when(repository.getDatasetMeta("foo")).thenReturn(Map.of());
+
+ Optional<Dataset> actual = subject.getDataset("foo");
+
+ Assertions.assertFalse(actual.isPresent());
+ }
+}
diff --git a/src/test/java/edu/harvard/dbmi/avillach/dictionary/facet/FacetRepositoryTest.java b/src/test/java/edu/harvard/dbmi/avillach/dictionary/facet/FacetRepositoryTest.java
index 4978476..93a2b00 100644
--- a/src/test/java/edu/harvard/dbmi/avillach/dictionary/facet/FacetRepositoryTest.java
+++ b/src/test/java/edu/harvard/dbmi/avillach/dictionary/facet/FacetRepositoryTest.java
@@ -22,54 +22,60 @@
@SpringBootTest
class FacetRepositoryTest {
- @Autowired
- FacetRepository subject;
-
- @Container
- static final PostgreSQLContainer<?> databaseContainer =
- new PostgreSQLContainer<>("postgres:16")
- .withReuse(true)
- .withCopyFileToContainer(
- MountableFile.forClasspathResource("seed.sql"), "/docker-entrypoint-initdb.d/seed.sql"
- );
-
- @DynamicPropertySource
- static void mySQLProperties(DynamicPropertyRegistry registry) {
- registry.add("spring.datasource.url", databaseContainer::getJdbcUrl);
- registry.add("spring.datasource.username", databaseContainer::getUsername);
- registry.add("spring.datasource.password", databaseContainer::getPassword);
- registry.add("spring.datasource.db", databaseContainer::getDatabaseName);
- }
+ @Autowired
+ FacetRepository subject;
+
+ @Container
+ static final PostgreSQLContainer<?> databaseContainer = new PostgreSQLContainer<>("postgres:16").withReuse(true)
+ .withCopyFileToContainer(MountableFile.forClasspathResource("seed.sql"), "/docker-entrypoint-initdb.d/seed.sql");
+
+ @DynamicPropertySource
+ static void mySQLProperties(DynamicPropertyRegistry registry) {
+ registry.add("spring.datasource.url", databaseContainer::getJdbcUrl);
+ registry.add("spring.datasource.username", databaseContainer::getUsername);
+ registry.add("spring.datasource.password", databaseContainer::getPassword);
+ registry.add("spring.datasource.db", databaseContainer::getDatabaseName);
+ }
@Test
void shouldGetAllFacets() {
Filter filter = new Filter(List.of(), "", List.of());
List actual = subject.getFacets(filter);
List expected = List.of(
- new FacetCategory("study_ids_dataset_ids", "Study IDs/Dataset IDs", "",
+ new FacetCategory(
+ "study_ids_dataset_ids", "Study IDs/Dataset IDs", "",
List.of(
new Facet("1", "GIC", null, null, 13, List.of(), "study_ids_dataset_ids", null),
new Facet("phs000284", "CFS", null, "Chronic Fatigue Syndrome", 3, List.of(), "study_ids_dataset_ids", null),
new Facet("phs000007", "FHS", null, "Framingham Heart Study", 3, List.of(), "study_ids_dataset_ids", null),
new Facet("phs002385", "HCT_for_SCD", null, null, 3, List.of(), "study_ids_dataset_ids", null),
new Facet("phs002808", "nuMoM2b", null, null, 3, List.of(), "study_ids_dataset_ids", null),
- new Facet("2", "National Health and Nutrition Examination Survey", null, null, 2, List.of(), "study_ids_dataset_ids", null),
- new Facet("phs002715", "NSRR CFS", null, "National Sleep Research Resource", 2, List.of(), "study_ids_dataset_ids", null),
- new Facet("3", "1000 Genomes Project", null, null, 0, List.of(), "study_ids_dataset_ids", null),
+ new Facet(
+ "2", "National Health and Nutrition Examination Survey", null, null, 2, List.of(), "study_ids_dataset_ids", null
+ ),
+ new Facet(
+ "phs002715", "NSRR CFS", null, "National Sleep Research Resource", 2, List.of(), "study_ids_dataset_ids", null
+ ), new Facet("3", "1000 Genomes Project", null, null, 0, List.of(), "study_ids_dataset_ids", null),
new Facet("phs003463", "RECOVER_Adult", null, null, 0, List.of(), "study_ids_dataset_ids", null),
new Facet("phs003543", "NSRR_HSHC", null, null, 0, List.of(), "study_ids_dataset_ids", null),
new Facet("phs003566", "SPRINT", null, null, 0, List.of(), "study_ids_dataset_ids", null),
- new Facet("phs001963", "DEMENTIA-SEQ", null, null, 0, List.of(
- new Facet("NEST_1", "My Nested Facet 1", null, null, 0, List.of(), "study_ids_dataset_ids", null),
- new Facet("NEST_2", "My Nested Facet 2", null, null, 0, List.of(), "study_ids_dataset_ids", null)
- ), "study_ids_dataset_ids", null)
+ new Facet(
+ "phs001963", "DEMENTIA-SEQ", null, null, 0,
+ List.of(
+ new Facet("NEST_1", "My Nested Facet 1", null, null, 0, List.of(), "study_ids_dataset_ids", null),
+ new Facet("NEST_2", "My Nested Facet 2", null, null, 0, List.of(), "study_ids_dataset_ids", null)
+ ), "study_ids_dataset_ids", null
+ )
)
),
- new FacetCategory("nsrr_harmonized", "Common Data Element Collection", "",
+ new FacetCategory(
+ "nsrr_harmonized", "Common Data Element Collection", "",
List.of(
new Facet("LOINC", "LOINC", null, null, 1, List.of(), "nsrr_harmonized", null),
new Facet("PhenX", "PhenX", null, null, 1, List.of(), "nsrr_harmonized", null),
- new Facet("gad_7", "Generalized Anxiety Disorder Assessment (GAD-7)", null, null, 0, List.of(), "nsrr_harmonized", null),
+ new Facet(
+ "gad_7", "Generalized Anxiety Disorder Assessment (GAD-7)", null, null, 0, List.of(), "nsrr_harmonized", null
+ ),
new Facet("taps_tool", "NIDA CTN Common Data Elements = TAPS Tool", null, null, 0, List.of(), "nsrr_harmonized", null)
)
)
@@ -81,10 +87,13 @@ void shouldGetAllFacets() {
@Test
void shouldGetFacetWithChildren() {
Optional actual = subject.getFacet("study_ids_dataset_ids", "phs001963");
- Facet expected = new Facet("phs001963", "DEMENTIA-SEQ", null, null, null, List.of(
- new Facet("NEST_1", "My Nested Facet 1", null, null, null, List.of(), "study_ids_dataset_ids", null),
- new Facet("NEST_2", "My Nested Facet 2", null, null, null, List.of(), "study_ids_dataset_ids", null)
- ), "study_ids_dataset_ids", null);
+ Facet expected = new Facet(
+ "phs001963", "DEMENTIA-SEQ", null, null, null,
+ List.of(
+ new Facet("NEST_1", "My Nested Facet 1", null, null, null, List.of(), "study_ids_dataset_ids", null),
+ new Facet("NEST_2", "My Nested Facet 2", null, null, null, List.of(), "study_ids_dataset_ids", null)
+ ), "study_ids_dataset_ids", null
+ );
Assertions.assertTrue(actual.isPresent());
Assertions.assertEquals(expected, actual.get());
@@ -93,7 +102,8 @@ void shouldGetFacetWithChildren() {
@Test
void shouldGetFacet() {
Optional actual = subject.getFacet("study_ids_dataset_ids", "phs000007");
- Optional expected = Optional.of(new Facet("phs000007", "FHS", null, "Framingham Heart Study", null, List.of(), "study_ids_dataset_ids", null));
+ Optional expected =
+ Optional.of(new Facet("phs000007", "FHS", null, "Framingham Heart Study", null, List.of(), "study_ids_dataset_ids", null));
Assertions.assertEquals(expected, actual);
}
@@ -121,4 +131,4 @@ void shouldGetEmptyMeta() {
Assertions.assertEquals(expected, actual);
}
-}
\ No newline at end of file
+}