Skip to content

Commit

Permalink
Merge branch 'develop' into sts_AN-225_batch_api_workspace_setting
Browse files Browse the repository at this point in the history
  • Loading branch information
sam-schu authored Dec 9, 2024
2 parents 1968a10 + 5eee80c commit c96cbb8
Show file tree
Hide file tree
Showing 19 changed files with 98 additions and 425 deletions.
4 changes: 2 additions & 2 deletions automation/project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ import sbt._
object Dependencies {
val scalaV = "2.13"

val jacksonV = "2.18.1"
val jacksonHotfixV = "2.18.1" // for when only some of the Jackson libs have hotfix releases
val jacksonV = "2.18.2"
val jacksonHotfixV = "2.18.2" // for when only some of the Jackson libs have hotfix releases
val akkaV = "2.6.19"
val akkaHttpV = "10.2.10"
val workbenchLibsHash = "3e0cf25"
Expand Down
2 changes: 1 addition & 1 deletion automation/project/build.properties
Original file line number Diff line number Diff line change
@@ -1 +1 @@
sbt.version = 1.10.5
sbt.version = 1.10.6
10 changes: 5 additions & 5 deletions project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ import sbt._
object Dependencies {
val akkaV = "2.9.3"
val akkaHttpV = "10.6.3"
val jacksonV = "2.18.1"
val jacksonHotfixV = "2.18.1" // for when only some of the Jackson libs have hotfix releases
val jacksonV = "2.18.2"
val jacksonHotfixV = "2.18.2" // for when only some of the Jackson libs have hotfix releases
val nettyV = "4.1.115.Final"
val workbenchLibsHash = "3e0cf25" // see https://github.com/broadinstitute/workbench-libs readme for hash values

Expand All @@ -29,7 +29,7 @@ object Dependencies {
"org.yaml" % "snakeyaml" % "2.3",
"org.apache.commons" % "commons-compress" % "1.27.1", // workbench-libs libraries pull this in
"com.google.apis" % "google-api-services-pubsub" % "v1-rev20240918-2.0.0", // from workbench-google2
"com.google.apis" % "google-api-services-admin-directory" % "directory_v1-rev20241113-2.0.0" // from workbench-google2
"com.google.apis" % "google-api-services-admin-directory" % "directory_v1-rev20241126-2.0.0" // from workbench-google2
)

val rootDependencies: Seq[ModuleID] = Seq(
Expand All @@ -44,7 +44,7 @@ object Dependencies {
// elasticsearch requires log4j, but we redirect log4j to logback
"org.apache.logging.log4j" % "log4j-to-slf4j" % "2.24.2",
"ch.qos.logback" % "logback-classic" % "1.5.12",
"io.sentry" % "sentry-logback" % "7.18.0",
"io.sentry" % "sentry-logback" % "7.18.1",
"com.typesafe.scala-logging" %% "scala-logging" % "3.9.5",

"org.parboiled" % "parboiled-core" % "1.4.1",
Expand All @@ -56,7 +56,7 @@ object Dependencies {
excludeGuava("org.broadinstitute.dsde.workbench" %% "workbench-util" % s"0.10-$workbenchLibsHash"),
"org.broadinstitute.dsde.workbench" %% "workbench-google2" % s"0.36-$workbenchLibsHash",
"org.broadinstitute.dsde.workbench" %% "workbench-oauth2" % s"0.8-$workbenchLibsHash",
"org.broadinstitute.dsde.workbench" %% "sam-client" % "v0.0.319",
"org.broadinstitute.dsde.workbench" %% "sam-client" % "v0.0.325",
"org.broadinstitute.dsde.workbench" %% "workbench-notifications" %s"0.8-$workbenchLibsHash",
"org.databiosphere" % "workspacedataservice-client-okhttp-jakarta" % "0.2.167-SNAPSHOT",
"bio.terra" % "externalcreds-client-resttemplate" % "1.44.0-20240725.201427-1" excludeAll(excludeSpring, excludeSpringBoot),
Expand Down
2 changes: 1 addition & 1 deletion project/build.properties
Original file line number Diff line number Diff line change
@@ -1 +1 @@
sbt.version=1.10.5
sbt.version=1.10.6
Original file line number Diff line number Diff line change
Expand Up @@ -283,14 +283,15 @@ class EntityService(rawlsDAO: RawlsDAO,
deleteEmptyValues: Boolean = false
): Future[PerRequestMessage] = {

/**
 * Derives an entity type from a TSV first-column header by removing a required "_id" suffix.
 * Unlike a plain stripSuffix, this version treats a missing "_id" suffix as a client error:
 * if stripping changes nothing (i.e. the header did not end in "_id"), it rejects the request.
 * @param entityTypeString the first-column header of the uploaded TSV
 * @return the header with its trailing "_id" removed
 * @throws FireCloudExceptionWithErrorReport with BadRequest when the header lacks the "_id" suffix
 */
def stripEntityType(entityTypeString: String): String = {
// stripSuffix returns the string unchanged when the suffix is absent, so equality
// with the input detects the missing-"_id" case.
val entityType = entityTypeString.stripSuffix("_id")
if (entityType == entityTypeString)
throw new FireCloudExceptionWithErrorReport(errorReport =
ErrorReport(StatusCodes.BadRequest, "Invalid first column header, entity type should end in _id")
)
entityType
}
/**
 * Converts a TSV column header into an entity type name. A trailing "_id" suffix,
 * when present, is dropped; headers without the suffix pass through untouched.
 *  - "myvalue_id" becomes "myvalue"
 *  - "myvalue" stays "myvalue"
 * @param entityTypeString the column header to convert
 * @return the derived entity type
 */
def stripEntityType(entityTypeString: String): String =
  // Equivalent to stripSuffix("_id"): only remove the final three characters
  // when they are exactly the "_id" marker.
  if (entityTypeString.endsWith("_id")) entityTypeString.dropRight(3)
  else entityTypeString

withTSVFile(tsvString) { tsv =>
val (tsvType, entityType) = tsv.firstColumnHeader.split(":") match {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,20 +71,18 @@ trait FireCloudApiService
with RegisterApiService
with WorkspaceApiService
with WorkspaceV2ApiService
with NotificationsApiService
with MethodConfigurationApiService
with BillingApiService
with SubmissionApiService
with StatusApiService
with MethodsApiService
with Ga4ghApiService
with UserApiService
with ShareLogApiService
with ManagedGroupApiService
with CromIamApiService
with HealthApiService
with StaticNotebooksApiService
with PerimeterApiService {
with PerimeterApiService
with PassthroughApiService {

override lazy val log = LoggerFactory.getLogger(getClass)

Expand Down Expand Up @@ -184,7 +182,6 @@ trait FireCloudApiService
methodConfigurationRoutes ~
submissionServiceRoutes ~
nihRoutes ~
billingServiceRoutes ~
shareLogServiceRoutes ~
staticNotebooksRoutes ~
perimeterServiceRoutes
Expand Down Expand Up @@ -217,14 +214,14 @@ trait FireCloudApiService
managedGroupServiceRoutes ~
workspaceRoutes ~
workspaceV2Routes ~
notificationsRoutes ~
statusRoutes ~
ga4ghRoutes ~
pathPrefix("api") {
apiRoutes
} ~
// insecure cookie-authed routes
cookieAuthedRoutes
cookieAuthedRoutes ~
// wildcard passthrough routes. These must be last to allow other routes to override them.
passthroughRoutes
}

}
Expand Down

This file was deleted.

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
package org.broadinstitute.dsde.firecloud.webservice

import akka.http.scaladsl.server.{Directives, Route}
import org.broadinstitute.dsde.firecloud.FireCloudConfig
import org.broadinstitute.dsde.firecloud.utils.StreamingPassthrough

/**
 * Wildcard passthrough routes that stream requests directly to backing services
 * (Agora and Rawls). Callers must mount these routes last so that any explicitly
 * defined route takes precedence over the passthrough.
 */
trait PassthroughApiService extends Directives with StreamingPassthrough {

  // Base URLs of the downstream services, resolved lazily from configuration.
  private lazy val agoraBaseUrl = FireCloudConfig.Agora.baseUrl
  private lazy val rawlsBaseUrl = FireCloudConfig.Rawls.baseUrl

  val passthroughRoutes: Route =
    pathPrefix("ga4gh") {
      streamingPassthrough(s"$agoraBaseUrl/ga4gh")
    } ~
      pathPrefix("api" / "billing") {
        streamingPassthrough(s"$rawlsBaseUrl/api/billing")
      } ~
      pathPrefix("api" / "notifications") {
        streamingPassthrough(s"$rawlsBaseUrl/api/notifications")
      }

}
9 changes: 9 additions & 0 deletions src/test/resources/testfiles/tsv/PARTICIPANTS_NO_PREFIX.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
participant_id
participant_01
participant_02
participant_03
participant_04
participant_05
participant_06
participant_07
participant_08
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
participant
participant_01
participant_02
participant_03
participant_04
participant_05
participant_06
participant_07
participant_08
9 changes: 9 additions & 0 deletions src/test/resources/testfiles/tsv/PARTICIPANTS_NO_SUFFIX.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
entity:participant
participant_01
participant_02
participant_03
participant_04
participant_05
participant_06
participant_07
participant_08
6 changes: 5 additions & 1 deletion src/test/resources/testfiles/tsv/TEST_INVALID_COLUMNS.txt
Original file line number Diff line number Diff line change
@@ -1 +1,5 @@
TCGA-5M-AAT4-01A TCGA-5M-AAT4 primary_solid_tumorTCGA-5M-AAT4-10A TCGA-5M-AAT4 blood_derived_normalTCGA-NH-A8F8-01A TCGA-NH-A8F8 primary_solid_tumorTCGA-NH-A8F8-10A TCGA-NH-A8F8 blood-derived_normal
bad-prefix:participant_id col1 col2
TCGA-5M-AAT4-01A TCGA-5M-AAT4 primary_solid_tumor
TCGA-5M-AAT4-10A TCGA-5M-AAT4 blood_derived_normal
TCGA-NH-A8F8-01A TCGA-NH-A8F8 primary_solid_tumor
TCGA-NH-A8F8-10A TCGA-NH-A8F8 blood-derived_normal
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import akka.http.scaladsl.model.headers.OAuth2BearerToken
import akka.http.scaladsl.model.{HttpResponse, StatusCode, StatusCodes}
import com.google.cloud.storage.StorageException
import org.broadinstitute.dsde.firecloud.dataaccess.LegacyFileTypes.FILETYPE_RAWLS

import org.broadinstitute.dsde.firecloud.dataaccess.{MockCwdsDAO, MockRawlsDAO}
import org.broadinstitute.dsde.firecloud.mock.MockGoogleServicesDAO
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
Expand Down Expand Up @@ -58,15 +57,28 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
val tsvParticipants = FileUtils.readAllTextFromResource("testfiles/tsv/ADD_PARTICIPANTS.txt")
val tsvMembership = FileUtils.readAllTextFromResource("testfiles/tsv/MEMBERSHIP_SAMPLE_SET.tsv")
val tsvUpdate = FileUtils.readAllTextFromResource("testfiles/tsv/UPDATE_SAMPLES.txt")
val tsvParticipantsNoPrefix = FileUtils.readAllTextFromResource("testfiles/tsv/PARTICIPANTS_NO_PREFIX.txt")
val tsvParticipantsNoSuffix = FileUtils.readAllTextFromResource("testfiles/tsv/PARTICIPANTS_NO_SUFFIX.txt")
val tsvParticipantsNoPrefixOrSuffix =
FileUtils.readAllTextFromResource("testfiles/tsv/PARTICIPANTS_NO_PREFIX_OR_SUFFIX.txt")

val tsvInvalid = FileUtils.readAllTextFromResource("testfiles/tsv/TEST_INVALID_COLUMNS.txt")

val userToken: UserInfo = UserInfo("[email protected]", OAuth2BearerToken(""), 3600, "111")

// (tsvType, tsvData)
val asyncTSVs = List(("upsert", tsvParticipants), ("membership", tsvMembership), ("update", tsvUpdate))
val asyncTSVs =
List(
("upsert", tsvParticipants),
("upsert", tsvParticipantsNoPrefix),
("upsert", tsvParticipantsNoSuffix),
("upsert", tsvParticipantsNoPrefixOrSuffix),
("membership", tsvMembership),
("update", tsvUpdate)
)

asyncTSVs foreach { case (tsvType, tsvData) =>
s"should return Accepted with an import jobId for (async=true + $tsvType TSV)" in {
s"should return Accepted with an import jobId for (async=true + $tsvType TSV) [${tsvData.hashCode}]" in {
val testCwdsDao = new SuccessfulCwdsDAO
val entityService = getEntityService(cwdsDAO = testCwdsDao)
val response =
Expand All @@ -88,13 +100,17 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
}

// (tsvType, expectedEntityType, tsvData)
val goodTSVs = List(("upsert", "participant", tsvParticipants),
("membership", "sample_set", tsvMembership),
("update", "sample", tsvUpdate)
val goodTSVs = List(
("upsert", "participant", tsvParticipants),
("upsert", "participant", tsvParticipantsNoPrefix),
("upsert", "participant", tsvParticipantsNoSuffix),
("upsert", "participant", tsvParticipantsNoPrefixOrSuffix),
("membership", "sample_set", tsvMembership),
("update", "sample", tsvUpdate)
)

goodTSVs foreach { case (tsvType, expectedEntityType, tsvData) =>
s"should return OK with the entity type for (async=false + $tsvType TSV)" in {
s"should return OK with the entity type for (async=false + $tsvType TSV) [${tsvData.hashCode}]" in {
val entityService = getEntityService()
val response =
entityService
Expand All @@ -103,7 +119,7 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {
response shouldBe RequestComplete(StatusCodes.OK, expectedEntityType)
}

s"should call the appropriate upsert/update method for (async=false + $tsvType TSV)" in {
s"should call the appropriate upsert/update method for (async=false + $tsvType TSV) [${tsvData.hashCode}]" in {
val mockedRawlsDAO = mockito[MockRawlsDAO] // mocking the mock
when(
mockedRawlsDAO.batchUpdateEntities(any[String], any[String], any[String], any[Seq[EntityUpdateDefinition]])(
Expand Down Expand Up @@ -144,7 +160,7 @@ class EntityServiceSpec extends BaseServiceSpec with BeforeAndAfterEach {

}

s"should send $expectedEntityType tsv to cWDS with appropriate options" in {
s"should send $expectedEntityType tsv to cWDS with appropriate options [${tsvData.hashCode}]" in {
// set up mocks
val cwdsDAO = mockito[MockCwdsDAO]
val rawlsDAO = mockito[MockRawlsDAO]
Expand Down
Loading

0 comments on commit c96cbb8

Please sign in to comment.