diff --git a/.gitmodules b/.gitmodules index 65d9ff3257..043dab3514 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,6 +1,3 @@ -[submodule "prisma-query"] - path = server/prisma-rs/libs/prisma-query - url = https://github.com/prisma/prisma-query.git [submodule "server/.buildkite/build-cli"] path = server/.buildkite/build-cli url = https://github.com/prisma/build-cli.git diff --git a/cli/packages/prisma-cli-core/package.json b/cli/packages/prisma-cli-core/package.json index 68112c3616..b330346ed1 100644 --- a/cli/packages/prisma-cli-core/package.json +++ b/cli/packages/prisma-cli-core/package.json @@ -11,7 +11,7 @@ "devDependencies": { "@types/aws-lambda": "^0.0.17", "@types/fs-extra": "^5.0.0", - "@types/graphql": "14.0.3", + "@types/graphql": "14.2.0", "@types/node": "^8.0.22", "@types/semver": "^5.5.0", "faker": "^4.1.0", @@ -89,7 +89,7 @@ "figures": "^2.0.0", "fs-extra": "^7.0.0", "globby": "^8.0.1", - "graphql": "^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0", + "graphql": "^14.3.0", "graphql-config": "2.2.1", "graphql-config-extension-prisma": "0.2.5", "graphql-playground-middleware-express": "^1.6.2", diff --git a/cli/packages/prisma-cli-engine/package.json b/cli/packages/prisma-cli-engine/package.json index b38d06aa70..3ba3c9a50a 100644 --- a/cli/packages/prisma-cli-engine/package.json +++ b/cli/packages/prisma-cli-engine/package.json @@ -78,12 +78,11 @@ "chalk": "^2.3.0", "charm": "^1.0.2", "debug": "^3.1.0", - "graphql": "^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0", "directory-tree": "2.2.1", "figures": "^2.0.0", "find-up": "^3.0.0", "fs-extra": "^7.0.0", - "graphql": "^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0", + "graphql": "^14.3.0", "graphql-request": "^1.5.0", "inquirer": "^6.2.0", "isomorphic-fetch": "^2.2.1", diff --git a/cli/packages/prisma-cli-engine/src/StatusChecker.ts b/cli/packages/prisma-cli-engine/src/StatusChecker.ts index bfd299fa27..3f44266918 100644 --- a/cli/packages/prisma-cli-engine/src/StatusChecker.ts +++ 
b/cli/packages/prisma-cli-engine/src/StatusChecker.ts @@ -178,7 +178,7 @@ export function getFid() { return fidCache } const mac = getMac() - const fidSecret = 'yeiB6sooy6eedahgooj0shiez' + const fidSecret = 'AhTheeR7Pee0haebui1viemoe' const fid = mac ? crypto .createHmac('sha256', fidSecret) diff --git a/cli/packages/prisma-client-lib/package.json b/cli/packages/prisma-client-lib/package.json index a9b57c4015..8c8d0047c0 100644 --- a/cli/packages/prisma-client-lib/package.json +++ b/cli/packages/prisma-client-lib/package.json @@ -38,10 +38,10 @@ "zen-observable": "^0.8.10" }, "devDependencies": { - "@types/graphql": "14.0.3", + "@types/graphql": "14.2.0", "@types/prettier": "1.16.1", "ava": "^0.25.0", - "graphql": "^14.0.2", + "graphql": "^14.3.0", "semantic-release": "^15.10.4", "tslint": "5.11.0", "tslint-config-standard": "8.0.1", diff --git a/cli/packages/prisma-client-lib/src/utils/index.ts b/cli/packages/prisma-client-lib/src/utils/index.ts index 7726686a7e..87327f7f93 100644 --- a/cli/packages/prisma-client-lib/src/utils/index.ts +++ b/cli/packages/prisma-client-lib/src/utils/index.ts @@ -13,6 +13,8 @@ import { GraphQLResolveInfo, GraphQLOutputType, print, + Kind, + ASTNode, } from 'graphql' import { Operation } from '../types' @@ -77,13 +79,13 @@ export function printDocumentFromInfo(info: GraphQLResolveInfo) { const fragments = Object.keys(info.fragments).map( fragment => info.fragments[fragment], ) - const doc = { - kind: 'Document', + const doc: ASTNode = { + kind: Kind.DOCUMENT, definitions: [ { - kind: 'OperationDefinition', + kind: Kind.OPERATION_DEFINITION, operation: 'query', - selectionSet: info.fieldNodes[0].selectionSet, + selectionSet: info.fieldNodes[0].selectionSet!, }, ...fragments, ], diff --git a/cli/packages/prisma-datamodel/package.json b/cli/packages/prisma-datamodel/package.json index c0643f4d67..bcac1120e7 100644 --- a/cli/packages/prisma-datamodel/package.json +++ b/cli/packages/prisma-datamodel/package.json @@ -23,7 +23,7 @@ 
"typescript": "^3.2.2" }, "dependencies": { - "graphql": "^14.0.2", + "graphql": "^14.3.0", "pluralize": "^7.0.0", "popsicle": "10" }, diff --git a/cli/packages/prisma-generate-schema/package.json b/cli/packages/prisma-generate-schema/package.json index a12c97405b..a61a0bb943 100644 --- a/cli/packages/prisma-generate-schema/package.json +++ b/cli/packages/prisma-generate-schema/package.json @@ -24,7 +24,7 @@ "typescript": "^3.2.2" }, "dependencies": { - "graphql": "^14.0.2", + "graphql": "^14.3.0", "pluralize": "^7.0.0", "popsicle": "10", "prisma-datamodel": "1.23.0-alpha.1" diff --git a/cli/scripts/test.sh b/cli/scripts/test.sh index 924a0dbbf2..e26295e246 100755 --- a/cli/scripts/test.sh +++ b/cli/scripts/test.sh @@ -1,6 +1,6 @@ #!/bin/bash -set -e +set -ex cd cli/packages/prisma-datamodel yarn diff --git a/cli/scripts/test_ci.sh b/cli/scripts/test_ci.sh index 3e0f702228..1023e85230 100755 --- a/cli/scripts/test_ci.sh +++ b/cli/scripts/test_ci.sh @@ -1,6 +1,6 @@ #!/bin/bash -set -e +set -ex # # Detect change diff --git a/cli/yarn.lock b/cli/yarn.lock index 664deef110..f21c783090 100644 --- a/cli/yarn.lock +++ b/cli/yarn.lock @@ -323,10 +323,10 @@ "@types/minimatch" "*" "@types/node" "*" -"@types/graphql@14.0.3": - version "14.0.3" - resolved "https://registry.yarnpkg.com/@types/graphql/-/graphql-14.0.3.tgz#389e2e5b83ecdb376d9f98fae2094297bc112c1c" - integrity sha512-TcFkpEjcQK7w8OcrQcd7iIBPjU0rdyi3ldj6d0iJ4PPSzbWqPBvXj9KSwO14hTOX2dm9RoiH7VuxksJLNYdXUQ== +"@types/graphql@14.2.0": + version "14.2.0" + resolved "https://registry.yarnpkg.com/@types/graphql/-/graphql-14.2.0.tgz#74e1da5f2a4a744ac6eb3ed57b48242ea9367202" + integrity sha512-lELg5m6eBOmATWyCZl8qULEOvnPIUG6B443yXKj930glXIgwQirIBPp5rthP2amJW0YSzUg2s5sfgba4mRRCNw== "@types/jest@^20.0.8": version "20.0.8" @@ -2561,7 +2561,7 @@ debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: dependencies: ms "^2.1.1" -debuglog@*, debuglog@^1.0.1: +debuglog@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" integrity sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI= @@ -4056,10 +4056,10 @@ graphql-tools@^4.0.3: iterall "^1.1.3" uuid "^3.1.0" -"graphql@^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0", graphql@^14.0.2: - version "14.2.1" - resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.2.1.tgz#779529bf9a01e7207b977a54c20670b48ca6e95c" - integrity sha512-2PL1UbvKeSjy/lUeJqHk+eR9CvuErXoCNwJI4jm3oNFEeY+9ELqHNKO1ZuSxAkasPkpWbmT/iMRMFxd3cEL3tQ== +graphql@^14.3.0: + version "14.3.0" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.3.0.tgz#34dd36faa489ff642bcd25df6c3b4f988a1a2f3e" + integrity sha512-MdfI4v7kSNC3NhB7cF8KNijDsifuWO2XOtzpyququqaclO8wVuChYv+KogexDwgP5sp7nFI9Z6N4QHgoLkfjrg== dependencies: iterall "^1.2.2" @@ -4415,7 +4415,7 @@ import-local@^1.0.0: pkg-dir "^2.0.0" resolve-cwd "^2.0.0" -imurmurhash@*, imurmurhash@^0.1.4: +imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= @@ -5895,11 +5895,6 @@ lockfile@^1.0.4: dependencies: signal-exit "^3.0.2" -lodash._baseindexof@*: - version "3.1.0" - resolved "https://registry.yarnpkg.com/lodash._baseindexof/-/lodash._baseindexof-3.1.0.tgz#fe52b53a1c6761e42618d654e4a25789ed61822c" - integrity sha1-/lK1OhxnYeQmGNZU5KJXie1hgiw= - lodash._baseuniq@~4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/lodash._baseuniq/-/lodash._baseuniq-4.6.0.tgz#0ebb44e456814af7905c6212fa2c9b2d51b841e8" @@ -5908,33 +5903,11 @@ lodash._baseuniq@~4.6.0: lodash._createset "~4.0.0" lodash._root "~3.0.0" -lodash._bindcallback@*: - version "3.0.1" - resolved "https://registry.yarnpkg.com/lodash._bindcallback/-/lodash._bindcallback-3.0.1.tgz#e531c27644cf8b57a99e17ed95b35c748789392e" - integrity sha1-5THCdkTPi1epnhftlbNcdIeJOS4= - -lodash._cacheindexof@*: - version "3.0.2" - resolved 
"https://registry.yarnpkg.com/lodash._cacheindexof/-/lodash._cacheindexof-3.0.2.tgz#3dc69ac82498d2ee5e3ce56091bafd2adc7bde92" - integrity sha1-PcaayCSY0u5ePOVgkbr9Ktx73pI= - -lodash._createcache@*: - version "3.1.2" - resolved "https://registry.yarnpkg.com/lodash._createcache/-/lodash._createcache-3.1.2.tgz#56d6a064017625e79ebca6b8018e17440bdcf093" - integrity sha1-VtagZAF2JeeevKa4AY4XRAvc8JM= - dependencies: - lodash._getnative "^3.0.0" - lodash._createset@~4.0.0: version "4.0.3" resolved "https://registry.yarnpkg.com/lodash._createset/-/lodash._createset-4.0.3.tgz#0f4659fbb09d75194fa9e2b88a6644d363c9fe26" integrity sha1-D0ZZ+7CddRlPqeK4imZE02PJ/iY= -lodash._getnative@*, lodash._getnative@^3.0.0: - version "3.9.1" - resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5" - integrity sha1-VwvH3t5G1hzc3mh9ZdPuy6o6r/U= - lodash._root@~3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692" @@ -6085,11 +6058,6 @@ lodash.property@^4.4.2: resolved "https://registry.yarnpkg.com/lodash.property/-/lodash.property-4.4.2.tgz#da07124821c6409d025f30db8df851314515bffe" integrity sha1-2gcSSCHGQJ0CXzDbjfhRMUUVv/4= -lodash.restparam@*: - version "3.6.1" - resolved "https://registry.yarnpkg.com/lodash.restparam/-/lodash.restparam-3.6.1.tgz#936a4e309ef330a7645ed4145986c85ae5b20805" - integrity sha1-k2pOMJ7zMKdkXtQUWYbIWuWyCAU= - lodash.result@^4.5.2: version "4.5.2" resolved "https://registry.yarnpkg.com/lodash.result/-/lodash.result-4.5.2.tgz#cb45b27fb914eaa8d8ee6f0ce7b2870b87cb70aa" @@ -8463,7 +8431,7 @@ readable-stream@~1.1.10: isarray "0.0.1" string_decoder "~0.10.x" -readdir-scoped-modules@*, readdir-scoped-modules@^1.0.0: +readdir-scoped-modules@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/readdir-scoped-modules/-/readdir-scoped-modules-1.0.2.tgz#9fafa37d286be5d92cbaebdee030dc9b5f406747" integrity 
sha1-n6+jfShr5dksuuve4DDcm19AZ0c= diff --git a/server/.buildkite/build-cli b/server/.buildkite/build-cli index d1583f0278..1a8fc1cdb8 160000 --- a/server/.buildkite/build-cli +++ b/server/.buildkite/build-cli @@ -1 +1 @@ -Subproject commit d1583f027841f184cc26e99759d5abf98ea86dc0 +Subproject commit 1a8fc1cdb8cfcced4775884ad375b31a9d0ffe98 diff --git a/server/.envrc b/server/.envrc index 914898ace0..79bf53a409 100644 --- a/server/.envrc +++ b/server/.envrc @@ -18,6 +18,6 @@ export PRISMA_BINARY_CONFIG_PATH=$(pwd)/prisma-rs/prisma.yml ## Rust specific variables export SCHEMA_INFERRER_PATH=$(pwd)/images/schema-inferrer-bin/target/prisma-native-image/schema-inferrer-bin -export PRISMA_DATA_MODEL_PATH=$(pwd)/prisma-rs/schema.prisma +export PRISMA_SDL_PATH=$(pwd)/prisma-rs/schema.prisma export RUST_LOG=actix_web=debug,prisma=debug export RUST_BACKTRACE=1 \ No newline at end of file diff --git a/server/Makefile b/server/Makefile index 5890289281..e6559482d7 100644 --- a/server/Makefile +++ b/server/Makefile @@ -8,6 +8,10 @@ dev-postgres: docker-compose -f docker-compose/postgres/dev-postgres.yml up -d --remove-orphans cp ./docker-compose/postgres/prisma.yml ./prisma.yml +dev-postgres-native: + docker-compose -f docker-compose/postgres/dev-postgres.yml up -d --remove-orphans + cp ./docker-compose/postgres-native/prisma.yml ./prisma.yml + dev-mongo: docker-compose -f docker-compose/mongo/dev-mongo.yml up -d --remove-orphans cp ./docker-compose/mongo/prisma.yml ./prisma.yml diff --git a/server/build.sbt b/server/build.sbt index dc00a91ca9..9319e75c10 100644 --- a/server/build.sbt +++ b/server/build.sbt @@ -298,10 +298,11 @@ lazy val apiConnectorMongo = connectorProject("api-connector-mongo") oldOptions.filterNot(_ == "-Xfatal-warnings") }) -lazy val apiConnectorSQLiteNative = connectorProject("api-connector-sqlite-native") +lazy val apiConnectorNative = connectorProject("api-connector-native") .dependsOn(apiConnector) .dependsOn(prismaRsBinding) 
.dependsOn(apiConnectorSQLite) + .dependsOn(apiConnectorPostgres) // ################## @@ -519,7 +520,7 @@ lazy val apiConnectorProjects = List( apiConnectorPostgres, apiConnectorMongo, apiConnectorSQLite, - apiConnectorSQLiteNative + apiConnectorNative ) lazy val allConnectorProjects = deployConnectorProjects ++ apiConnectorProjects ++ Seq(connectorUtils, connectorShared) diff --git a/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/ApiConnectorNative.scala b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/ApiConnectorNative.scala new file mode 100644 index 0000000000..74df3bb41a --- /dev/null +++ b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/ApiConnectorNative.scala @@ -0,0 +1,42 @@ +package com.prisma.api.connector.native + +import java.sql.Driver + +import com.prisma.api.connector.postgres.PostgresApiConnector +import com.prisma.api.connector.sqlite.SQLiteApiConnector +import com.prisma.api.connector.{ApiConnector, DataResolver, DatabaseMutactionExecutor} +import com.prisma.config.DatabaseConfig +import com.prisma.shared.models.{ConnectorCapabilities, Project, ProjectIdEncoder} + +import scala.concurrent.{ExecutionContext, Future} + +trait Backup { + def driver: Driver +} + +case class SqliteBackup(driver: Driver) extends Backup +case class PostgresBackup(driver: Driver) extends Backup + +case class ApiConnectorNative(config: DatabaseConfig, backup: Backup)(implicit ec: ExecutionContext) extends ApiConnector { + override def initialize(): Future[Unit] = Future.unit + override def shutdown(): Future[Unit] = Future.unit + + override def databaseMutactionExecutor: DatabaseMutactionExecutor = { + backup match { + case SqliteBackup(driver) => { + val base = SQLiteApiConnector(config, driver) + NativeDatabaseMutactionExecutor(base.databaseMutactionExecutor.slickDatabase) + } + case PostgresBackup(driver) => { + val base = PostgresApiConnector(config, 
driver) + NativeDatabaseMutactionExecutor(base.databaseMutactionExecutor.slickDatabase) + } + } + } + + override def dataResolver(project: Project): DataResolver = NativeDataResolver(project) + override def masterDataResolver(project: Project): DataResolver = NativeDataResolver(project) + override def projectIdEncoder: ProjectIdEncoder = ProjectIdEncoder('_') + + override val capabilities = ConnectorCapabilities.sqliteNative +} diff --git a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteNativeDataResolver.scala b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeDataResolver.scala similarity index 96% rename from server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteNativeDataResolver.scala rename to server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeDataResolver.scala index fa489f5a60..40b9fe6373 100644 --- a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteNativeDataResolver.scala +++ b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeDataResolver.scala @@ -1,4 +1,4 @@ -package com.prisma.api.connector.sqlite.native +package com.prisma.api.connector.native import com.google.protobuf.ByteString import com.prisma.api.connector._ @@ -13,13 +13,11 @@ import prisma.protocol.ValueContainer.PrismaValue.GraphqlId import scala.concurrent.{ExecutionContext, Future} -case class SQLiteNativeDataResolver(delegate: DataResolver)(implicit ec: ExecutionContext) extends DataResolver { +case class NativeDataResolver(project: Project)(implicit ec: ExecutionContext) extends DataResolver { import NativeUtils._ import com.prisma.shared.models.ProjectJsonFormatter._ import com.prisma.api.helpers.LimitClauseHelper._ - override def project: Project = delegate.project - override def 
getNodeByWhere(where: NodeSelector, selectedFields: SelectedFields): Future[Option[PrismaNode]] = Future { val projectJson = Json.toJson(project) val input = prisma.protocol.GetNodeByWhereInput( diff --git a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteDatabaseMutactionExecutor.scala b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeDatabaseMutactionExecutor.scala similarity index 97% rename from server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteDatabaseMutactionExecutor.scala rename to server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeDatabaseMutactionExecutor.scala index 278955794a..a487d25ff8 100644 --- a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteDatabaseMutactionExecutor.scala +++ b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeDatabaseMutactionExecutor.scala @@ -1,4 +1,4 @@ -package com.prisma.api.connector.sqlite.native +package com.prisma.api.connector.native import com.google.protobuf.ByteString import com.prisma.api.connector.jdbc.{NestedDatabaseMutactionInterpreter, TopLevelDatabaseMutactionInterpreter} import com.prisma.api.connector.jdbc.impl._ @@ -17,7 +17,7 @@ import slick.jdbc.TransactionIsolation import scala.concurrent.{ExecutionContext, Future} -case class SQLiteDatabaseMutactionExecutor( +case class NativeDatabaseMutactionExecutor( slickDatabaseArg: SlickDatabase )(implicit ec: ExecutionContext) extends DatabaseMutactionExecutor { @@ -26,8 +26,13 @@ case class SQLiteDatabaseMutactionExecutor( import NativeUtils._ override def executeRaw(project: Project, query: String): Future[JsValue] = { - val action = JdbcActionsBuilder(project, slickDatabaseArg).executeRaw(query) - runAttached(project, action) + val envelope = 
prisma.protocol.ExecuteRawInput( + header = prisma.protocol.Header("ExecuteRawInput"), + dbName = project.dbName, + query = query + ) + + Future(NativeBinding.execute_raw(envelope)) } override def executeNonTransactionally(mutaction: TopLevelDatabaseMutaction) = execute(mutaction) diff --git a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/NativeUtils.scala b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeUtils.scala similarity index 93% rename from server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/NativeUtils.scala rename to server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeUtils.scala index e2653ad01c..3f998d2a42 100644 --- a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/NativeUtils.scala +++ b/server/connectors/api-connector-native/src/main/scala/com/prisma/api/connector/native/NativeUtils.scala @@ -1,4 +1,4 @@ -package com.prisma.api.connector.sqlite.native +package com.prisma.api.connector.native import com.prisma.api.connector._ import com.prisma.gc_values._ import com.prisma.rs.NodeResult @@ -70,7 +70,7 @@ object NativeUtils { case EnumGCValue(e) => PrismaValue.Enum(e) case FloatGCValue(f) => PrismaValue.Float(f) case StringIdGCValue(s) => PrismaValue.GraphqlId(prisma.protocol.GraphqlId(IdValue.String(s))) - case UuidGCValue(uuid) => PrismaValue.Uuid(uuid.toString) + case UuidGCValue(uuid) => PrismaValue.GraphqlId(prisma.protocol.GraphqlId(IdValue.Uuid(uuid.toString))) case IntGCValue(i) => PrismaValue.GraphqlId(prisma.protocol.GraphqlId(IdValue.Int(i))) case JsonGCValue(j) => PrismaValue.Json(j.toString()) case StringGCValue(s) => PrismaValue.String(s) @@ -84,7 +84,7 @@ object NativeUtils { def toPrismaId(value: IdGCValue): protocol.GraphqlId = value match { case StringIdGCValue(s) => 
protocol.GraphqlId(IdValue.String(s)) case IntGCValue(i) => protocol.GraphqlId(IdValue.Int(i)) - case UuidGCValue(u) => protocol.GraphqlId(IdValue.String(u.toString)) + case UuidGCValue(u) => protocol.GraphqlId(IdValue.Uuid(u.toString)) } def toPrismaSelectedFields(selectedFields: SelectedFields): prisma.protocol.SelectedFields = { @@ -92,7 +92,7 @@ object NativeUtils { selectedField match { case SelectedScalarField(f) => { val field = prisma.protocol.SelectedField( - prisma.protocol.SelectedField.Field.Scalar(f.dbName) + prisma.protocol.SelectedField.Field.Scalar(f.name) ) acc :+ field @@ -101,7 +101,7 @@ object NativeUtils { val field = prisma.protocol.SelectedField( prisma.protocol.SelectedField.Field.Relational( prisma.protocol.RelationalField( - f.dbName, + f.name, toPrismaSelectedFields(sf) )) ) @@ -191,22 +191,22 @@ object NativeUtils { case ScalarFilter(field, scalarCondition) => protocol.Filter( protocol.Filter.Type.Scalar( - protocol.ScalarFilter(field.dbName, toPrismaCondition(scalarCondition)) + protocol.ScalarFilter(field.name, toPrismaCondition(scalarCondition)) ) ) case ScalarListFilter(field, scalarListCondition) => protocol.Filter( - protocol.Filter.Type.ScalarList(protocol.ScalarListFilter(field.dbName, toPrismaListCondition(scalarListCondition))) + protocol.Filter.Type.ScalarList(protocol.ScalarListFilter(field.name, toPrismaListCondition(scalarListCondition))) ) case OneRelationIsNullFilter(field) => protocol.Filter( - protocol.Filter.Type.OneRelationIsNull(protocol.RelationalField(field.dbName, protocol.SelectedFields(Vector.empty))) + protocol.Filter.Type.OneRelationIsNull(protocol.RelationalField(field.name, protocol.SelectedFields(Vector.empty))) ) case RelationFilter(field, nestedFilter, condition) => protocol.Filter( protocol.Filter.Type.Relation( protocol.RelationFilter( - protocol.RelationalField(field.dbName, protocol.SelectedFields(Vector.empty)), + protocol.RelationalField(field.name, protocol.SelectedFields(Vector.empty)), 
toProtocolFilter(nestedFilter), toRelationFilterCondition(condition) ) @@ -228,7 +228,7 @@ object NativeUtils { } def toPrismaOrderBy(orderBy: OrderBy): protocol.OrderBy = { - protocol.OrderBy(orderBy.field.dbName, orderBy.sortOrder match { + protocol.OrderBy(orderBy.field.name, orderBy.sortOrder match { case SortOrder.Asc => protocol.OrderBy.SortOrder.ASC case SortOrder.Desc => protocol.OrderBy.SortOrder.DESC }) @@ -247,7 +247,7 @@ object NativeUtils { } def toRelationalField(field: RelationField): protocol.RelationalField = { - protocol.RelationalField(field.dbName, protocol.SelectedFields(Vector.empty)) + protocol.RelationalField(field.name, protocol.SelectedFields(Vector.empty)) } def toNodeSelector(where: NodeSelector): protocol.NodeSelector = { diff --git a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteApiConnectorNative.scala b/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteApiConnectorNative.scala deleted file mode 100644 index 76012c0569..0000000000 --- a/server/connectors/api-connector-sqlite-native/src/main/scala/com/prisma/api/connector/sqlite/native/SQLiteApiConnectorNative.scala +++ /dev/null @@ -1,25 +0,0 @@ -package com.prisma.api.connector.sqlite.native - -import com.prisma.api.connector.sqlite.SQLiteApiConnector -import com.prisma.api.connector.{ApiConnector, DataResolver, DatabaseMutactionExecutor} -import com.prisma.config.DatabaseConfig -import com.prisma.shared.models.{ConnectorCapabilities, Project, ProjectIdEncoder} - -import scala.concurrent.{ExecutionContext, Future} - -case class SQLiteApiConnectorNative(config: DatabaseConfig)(implicit ec: ExecutionContext) extends ApiConnector { - lazy val base = SQLiteApiConnector(config, new org.sqlite.JDBC) - - override def initialize() = Future.unit - override def shutdown() = Future.unit - - override def databaseMutactionExecutor: DatabaseMutactionExecutor = { - val exe = 
base.databaseMutactionExecutor - new SQLiteDatabaseMutactionExecutor(exe.slickDatabase) - } - override def dataResolver(project: Project): DataResolver = SQLiteNativeDataResolver(base.dataResolver(project)) - override def masterDataResolver(project: Project): DataResolver = SQLiteNativeDataResolver(base.dataResolver(project)) - override def projectIdEncoder: ProjectIdEncoder = ProjectIdEncoder('_') - - override val capabilities = ConnectorCapabilities.sqliteNative -} diff --git a/server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/persistence/TestPersistence.scala b/server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/persistence/TestPersistence.scala deleted file mode 100644 index 11bebe3878..0000000000 --- a/server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/persistence/TestPersistence.scala +++ /dev/null @@ -1,26 +0,0 @@ -package com.prisma.deploy.connector.persistence - -import com.prisma.shared.models.MigrationStatus.MigrationStatus -import com.prisma.shared.models.{Migration, MigrationId} -import org.joda.time.DateTime - -import scala.concurrent.Future - -trait TestPersistence { - def lock(): Future[Int] - - def byId(migrationId: MigrationId): Future[Option[Migration]] - def loadAll(projectId: String): Future[Seq[Migration]] - def create(migration: Migration): Future[Migration] - def getNextMigration(projectId: String): Future[Option[Migration]] - def getLastMigration(projectId: String): Future[Option[Migration]] - - def updateMigrationStatus(id: MigrationId, status: MigrationStatus): Future[Unit] - def updateMigrationErrors(id: MigrationId, errors: Vector[String]): Future[Unit] - def updateMigrationApplied(id: MigrationId, applied: Int): Future[Unit] - def updateMigrationRolledBack(id: MigrationId, rolledBack: Int): Future[Unit] - def updateStartedAt(id: MigrationId, startedAt: DateTime): Future[Unit] - def updateFinishedAt(id: MigrationId, finishedAt: DateTime): Future[Unit] - - 
def loadDistinctUnmigratedProjectIds(): Future[Seq[String]] -} diff --git a/server/connectors/utils/src/main/scala/com/prisma/connectors/utils/ConnectorLoader.scala b/server/connectors/utils/src/main/scala/com/prisma/connectors/utils/ConnectorLoader.scala index d07e976261..d62fd35971 100644 --- a/server/connectors/utils/src/main/scala/com/prisma/connectors/utils/ConnectorLoader.scala +++ b/server/connectors/utils/src/main/scala/com/prisma/connectors/utils/ConnectorLoader.scala @@ -2,9 +2,9 @@ package com.prisma.connectors.utils import com.prisma.api.connector.ApiConnector import com.prisma.api.connector.mongo.MongoApiConnector import com.prisma.api.connector.mysql.MySqlApiConnector +import com.prisma.api.connector.native.{ApiConnectorNative, PostgresBackup, SqliteBackup} import com.prisma.api.connector.postgres.PostgresApiConnector import com.prisma.api.connector.sqlite.SQLiteApiConnector -import com.prisma.api.connector.sqlite.native.SQLiteApiConnectorNative import com.prisma.config.PrismaConfig import com.prisma.deploy.connector.DeployConnector import com.prisma.deploy.connector.mongo.MongoDeployConnector @@ -20,8 +20,9 @@ object ConnectorLoader { databaseConfig.connector match { case "mysql" => MySqlApiConnector(databaseConfig, drivers(SupportedDrivers.MYSQL)) case "postgres" => PostgresApiConnector(databaseConfig, drivers(SupportedDrivers.POSTGRES)) - case "sqlite-native" => SQLiteApiConnectorNative(databaseConfig) - case "native-integration-tests" => SQLiteApiConnectorNative(databaseConfig) + case "sqlite-native" => ApiConnectorNative(databaseConfig, SqliteBackup(drivers(SupportedDrivers.SQLITE))) + case "postgres-native" => ApiConnectorNative(databaseConfig, PostgresBackup(drivers(SupportedDrivers.POSTGRES))) + case "native-integration-tests" => ApiConnectorNative(databaseConfig, SqliteBackup(drivers(SupportedDrivers.SQLITE))) case "sqlite" => SQLiteApiConnector(databaseConfig, drivers(SupportedDrivers.SQLITE)) case "mongo" => 
MongoApiConnector(databaseConfig) case conn => sys.error(s"Unknown connector $conn") @@ -33,6 +34,7 @@ object ConnectorLoader { databaseConfig.connector match { case "mysql" => MySqlDeployConnector(databaseConfig, drivers(SupportedDrivers.MYSQL)) case "postgres" => PostgresDeployConnector(databaseConfig, drivers(SupportedDrivers.POSTGRES)) + case "postgres-native" => PostgresDeployConnector(databaseConfig, drivers(SupportedDrivers.POSTGRES)) case "sqlite-native" => SQLiteDeployConnector(databaseConfig, drivers(SupportedDrivers.SQLITE)) case "native-integration-tests" => SQLiteDeployConnector(databaseConfig, drivers(SupportedDrivers.SQLITE)) case "sqlite" => SQLiteDeployConnector(databaseConfig, drivers(SupportedDrivers.SQLITE)) diff --git a/server/docker-compose/postgres-native/dev-postgres.yml b/server/docker-compose/postgres-native/dev-postgres.yml new file mode 100644 index 0000000000..c5d1fd3768 --- /dev/null +++ b/server/docker-compose/postgres-native/dev-postgres.yml @@ -0,0 +1,13 @@ +# Transient db - will lose it's data once restarted +version: "3" +services: + postgres: + image: timms/postgres-logging:10.3 + container_name: psql + restart: always + command: postgres -c 'max_connections=1000' + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: prisma + ports: + - "0.0.0.0:5432:5432" \ No newline at end of file diff --git a/server/docker-compose/postgres-native/prisma.yml b/server/docker-compose/postgres-native/prisma.yml new file mode 100644 index 0000000000..dbe50be7e0 --- /dev/null +++ b/server/docker-compose/postgres-native/prisma.yml @@ -0,0 +1,10 @@ +port: 4466 +databases: + default: + connector: postgres-native + host: 127.0.0.1 + port: 5432 + user: postgres + password: prisma + database: prisma + rawAccess: true diff --git a/server/prisma-rs/.gitignore b/server/prisma-rs/.gitignore index 072984c88e..ddef417a65 100644 --- a/server/prisma-rs/.gitignore +++ b/server/prisma-rs/.gitignore @@ -1,3 +1,4 @@ db/* !.placeholder build/ +prisma.yml \ No 
newline at end of file diff --git a/server/prisma-rs/Cargo.lock b/server/prisma-rs/Cargo.lock index b9802cdd21..371ea009d0 100644 --- a/server/prisma-rs/Cargo.lock +++ b/server/prisma-rs/Cargo.lock @@ -20,8 +20,8 @@ dependencies = [ "crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -45,7 +45,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "actix 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", @@ -74,11 +74,11 @@ dependencies = [ "brotli2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "cookie 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cookie 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)", "encoding 
0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "h2 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", @@ -96,7 +96,7 @@ dependencies = [ "percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", "sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -119,9 +119,9 @@ name = "actix_derive" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.33 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -137,6 +137,14 @@ dependencies = [ "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "ansi_term" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = 
[ + "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "antidote" version = "1.0.0" @@ -165,7 +173,7 @@ name = "atty" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "termion 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -183,7 +191,7 @@ dependencies = [ "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "backtrace-sys 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-demangle 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -193,10 +201,20 @@ name = "backtrace-sys" version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "barrel" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "barrel" +version = "0.5.5-alpha.0" +source = "git+https://github.com/spacekookie/barrel#538fae1f67c82d65768abf325113996d83cf9e47" + [[package]] name = "base64" version = "0.9.3" @@ -219,13 +237,37 @@ name = "bitflags" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
+[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "block-padding 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "byte-tools 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "generic-array 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "block-padding" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "byte-tools 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "boolinator" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "brotli-sys" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -234,7 +276,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "brotli-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -242,6 +284,11 @@ name = "build_const" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "byteorder" version = "1.3.1" @@ -258,7 +305,7 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.35" +version = "1.0.37" source 
= "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -273,10 +320,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "clap" +version = "2.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "cloudabi" version = "0.0.3" @@ -305,22 +366,18 @@ dependencies = [ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libsqlite3-sys 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "prisma-common 0.0.0", "prisma-models 0.0.0", - "prisma-query 0.1.0", - "r2d2 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", - "r2d2_sqlite 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 
1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "prisma-query 0.1.0 (git+https://github.com/prisma/prisma-query.git)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cookie" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -334,18 +391,36 @@ name = "core" version = "0.1.0" dependencies = [ "Inflector 0.11.4 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "connector 0.1.0", + "env_logger 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "graphql-parser 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "prisma-inflector 0.1.0", "prisma-models 0.0.0", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "core-foundation" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "core-foundation-sys" +version = "0.6.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "crc" version = "1.8.1" @@ -410,6 +485,15 @@ dependencies = [ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "crypto-mac" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "generic-array 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "subtle 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "cuid" version = "0.1.0" @@ -421,6 +505,58 @@ dependencies = [ "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "darling" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "darling_core 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)", + "darling_macro 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "darling_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "ident_case 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "darling_macro" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "darling_core 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "database-inspector" +version = "0.1.0" +dependencies = [ + "barrel 0.5.5-alpha.0 
(git+https://github.com/spacekookie/barrel)", + "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "datamodel" +version = "0.1.0" +dependencies = [ + "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pest 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "debug_stub_derive" version = "0.3.0" @@ -430,9 +566,30 @@ dependencies = [ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "derive_state_machine_future" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "darling 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)", + "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "digest" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "generic-array 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "dtoa" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -542,12 +699,27 @@ name = "failure_derive" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" 
dependencies = [ - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.33 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", "synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "fallible-iterator" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "fixedbitset" version = "0.1.9" @@ -559,7 +731,7 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "miniz-sys 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", "miniz_oxide_c_api 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -569,6 +741,19 @@ name = "fnv" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -590,7 +775,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -598,10 +783,18 @@ name = "futures-cpupool" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "generic-array" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "graphql-parser" version = "0.2.2" @@ -619,7 +812,7 @@ dependencies = [ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", "indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -636,12 +829,21 @@ dependencies = [ "unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "hmac" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crypto-mac 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "hostname" version = "0.1.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "winutil 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -652,7 +854,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -668,6 +870,11 @@ dependencies = [ "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "idna" version = "0.1.5" @@ -683,7 +890,7 @@ name = "indexmap" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -691,7 +898,7 @@ name = "iovec" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -701,7 +908,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "error-chain 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", - "socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "widestring 0.2.2 
(registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "winreg 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -725,9 +932,21 @@ dependencies = [ [[package]] name = "itoa" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "jsonrpc-core" +version = "10.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "kernel32-sys" version = "0.2.2" @@ -754,7 +973,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libc" -version = "0.2.53" +version = "0.2.54" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -762,7 +981,7 @@ name = "libsqlite3-sys" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -797,11 +1016,21 @@ dependencies = [ "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "maplit" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "matches" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "md5" +version 
= "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "memchr" version = "2.2.0" @@ -812,12 +1041,42 @@ name = "memoffset" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "migration-connector" +version = "0.1.0" +dependencies = [ + "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "datamodel 0.1.0", + "nullable 0.1.0", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "migration-core" +version = "0.1.0" +dependencies = [ + "boolinator 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "database-inspector 0.1.0", + "datamodel 0.1.0", + "jsonrpc-core 10.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "migration-connector 0.1.0", + "nullable 0.1.0", + "prisma-models 0.0.0", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", + "sql-migration-connector 0.1.0", +] + [[package]] name = "mime" version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "unicase 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -836,8 +1095,8 @@ name = "miniz-sys" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.37 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -853,9 +1112,9 @@ name = "miniz_oxide_c_api" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "crc 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "miniz_oxide 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -869,7 +1128,7 @@ dependencies = [ "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", @@ -883,7 +1142,7 @@ version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -915,24 +1174,41 @@ dependencies = [ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "prisma-common 0.0.0", "prisma-models 0.0.0", - "prisma-query 0.1.0", + "prisma-query 0.1.0 
(git+https://github.com/prisma/prisma-query.git)", "prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "prost-build 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "prost-derive 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "prost-types 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", "sql-connector 0.1.0", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "native-tls" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl 0.10.22 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.46 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "security-framework 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "security-framework-sys 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "tempfile 3.0.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "net2" version = "0.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 
(registry+https://github.com/rust-lang/crates.io-index)", ] @@ -950,6 +1226,43 @@ dependencies = [ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "nullable" +version = "0.1.0" +dependencies = [ + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "num-complex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-iter 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "num-rational 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-bigint" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-complex" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "num-integer" version = "0.1.39" @@ -958,6 +1271,25 @@ dependencies = [ "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "num-iter" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-rational" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "num-traits" version = "0.2.6" @@ -968,7 +1300,7 @@ name = "num_cpus" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -984,6 +1316,46 @@ dependencies = [ "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "opaque-debug" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "openssl" +version = "0.10.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.46 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "openssl-probe" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "openssl-sys" +version = "0.9.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + 
"pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", + "vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ordermap" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "owning_ref" version = "0.4.0" @@ -1006,7 +1378,7 @@ name = "parking_lot_core" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1018,12 +1390,52 @@ name = "percent-encoding" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "pest" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "pest_derive" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "pest 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "pest_generator" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "pest 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pest_meta 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", 
+] + +[[package]] +name = "pest_meta" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pest 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "sha-1 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "petgraph" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1066,6 +1478,38 @@ name = "pkg-config" version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "postgres" +version = "0.16.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-postgres 0.4.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "postgres-protocol" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "fallible-iterator 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "generic-array 0.12.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "hmac 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "stringprep 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "prisma" version = "0.1.0" @@ -1085,9 +1529,10 @@ dependencies = [ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "prisma-common 0.0.0", + "prisma-inflector 0.1.0", "prisma-models 0.0.0", - "rust-embed 4.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "rust-embed 4.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", "sql-connector 0.1.0", ] @@ -1099,14 +1544,23 @@ dependencies = [ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_yaml 0.8.8 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.2 
(registry+https://github.com/rust-lang/crates.io-index)", "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "prisma-inflector" +version = "0.1.0" +dependencies = [ + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "prisma-models" version = "0.0.0" @@ -1119,11 +1573,10 @@ dependencies = [ "graphql-parser 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "prisma-query 0.1.0", + "prisma-query 0.1.0 (git+https://github.com/prisma/prisma-query.git)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1131,15 +1584,19 @@ dependencies = [ [[package]] name = "prisma-query" version = "0.1.0" +source = "git+https://github.com/prisma/prisma-query.git#97b550a5162703099a79adf21c3ad3dd0b66edc0" dependencies = [ + "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "postgres 0.16.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", + 
"rust_decimal 1.0.1 (git+https://github.com/pimeys/rust-decimal.git)", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", - "sqlite 0.24.0 (registry+https://github.com/rust-lang/crates.io-index)", + "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "proc-macro2" -version = "0.4.29" +version = "0.4.30" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1179,7 +1636,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1209,7 +1666,7 @@ name = "quote" version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1222,6 +1679,16 @@ dependencies = [ "scheduled-thread-pool 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "r2d2_postgres" +version = "0.15.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "postgres 0.16.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)", + "r2d2 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-postgres 0.4.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "r2d2_sqlite" version = "0.8.0" @@ -1237,7 +1704,7 @@ version 
= "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1250,7 +1717,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1261,12 +1728,12 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_jitter 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "rand_pcg 0.1.2 
(registry+https://github.com/rust-lang/crates.io-index)", "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1313,10 +1780,10 @@ dependencies = [ [[package]] name = "rand_jitter" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1328,7 +1795,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1400,6 +1867,11 @@ dependencies = [ "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rent_to_own" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "resolv-conf" version = "0.6.2" @@ -1414,9 +1886,9 @@ name = "ring" version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 
(registry+https://github.com/rust-lang/crates.io-index)", "untrusted 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1434,16 +1906,16 @@ dependencies = [ [[package]] name = "rust-embed" -version = "4.3.0" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rust-embed-impl 4.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rust-embed-impl 4.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rust-embed-impl" -version = "4.3.0" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1451,6 +1923,18 @@ dependencies = [ "walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rust_decimal" +version = "1.0.1" +source = "git+https://github.com/pimeys/rust-decimal.git#fbb60744d6d22d33fe70c42afef8bb3f0e8971e8" +dependencies = [ + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "postgres 0.16.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rustc-demangle" version = "0.1.14" @@ -1466,7 +1950,7 @@ dependencies = [ [[package]] name = "ryu" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1482,6 +1966,15 @@ dependencies = [ "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "schannel" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 1.3.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "scheduled-thread-pool" version = "0.2.0" @@ -1495,6 +1988,25 @@ name = "scopeguard" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "security-framework" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", + "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "security-framework-sys 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "security-framework-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "semver" version = "0.9.0" @@ -1510,20 +2022,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.90" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde_derive 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_derive" -version = "1.0.90" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.33 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1532,9 +2044,9 @@ version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1542,44 +2054,66 @@ name = "serde_urlencoded" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "dtoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_yaml" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "dtoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = 
"sha-1" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "block-buffer 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "opaque-debug 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "sha1" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "sha2" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "block-buffer 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", + "digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "opaque-debug 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "signal-hook" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", - "signal-hook-registry 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "signal-hook-registry 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "signal-hook-registry" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "arc-swap 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1599,11 +2133,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "socket2" -version = "0.3.8" +version = "0.3.9" source 
= "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1619,53 +2153,78 @@ dependencies = [ "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "libsqlite3-sys 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)", + "native-tls 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "postgres 0.16.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)", + "prisma-common 0.0.0", "prisma-models 0.0.0", - "prisma-query 0.1.0", + "prisma-query 0.1.0 (git+https://github.com/prisma/prisma-query.git)", "r2d2 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", + "r2d2_postgres 0.15.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)", "r2d2_sqlite 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "rust_decimal 1.0.1 (git+https://github.com/pimeys/rust-decimal.git)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-postgres 0.4.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-postgres-native-tls 0.1.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "sqlite" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" +name = "sql-migration-connector" +version = "0.1.0" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", - "sqlite3-sys 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "barrel 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "database-inspector 0.1.0", + "datamodel 0.1.0", + "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "migration-connector 0.1.0", + "prisma-query 0.1.0 (git+https://github.com/prisma/prisma-query.git)", + "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "sqlite3-src" -version = "0.2.9" +name = "stable_deref_trait" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "state_machine_future" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", + "derive_state_machine_future 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "rent_to_own 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "sqlite3-sys" -version = "0.12.0" +name = "string" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "stringprep" +version = "0.1.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", - "sqlite3-src 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "stable_deref_trait" -version = "1.1.1" +name = "strsim" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] -name = "string" -version = "0.1.3" +name = "subtle" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1683,17 +2242,17 @@ name = "syn" version = "0.14.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "syn" -version = "0.15.33" +version = "0.15.34" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1711,9 +2270,9 @@ name = "synstructure" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 
(registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.33 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1726,6 +2285,19 @@ dependencies = [ "remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "tempfile" +version = "3.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", + "remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "termcolor" version = "1.0.4" @@ -1739,12 +2311,20 @@ name = "termion" version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "numtoa 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "thread_local" version = "0.3.6" @@ -1758,7 +2338,7 @@ name = "time" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.53 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1769,7 +2349,7 @@ version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1793,7 +2373,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1802,7 +2382,7 @@ name = "tokio-current-thread" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-executor 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1812,7 +2392,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1820,7 +2400,7 @@ name = "tokio-fs" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-threadpool 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1831,17 +2411,56 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "tokio-postgres" +version = "0.4.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "fallible-iterator 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "phf 0.7.24 (registry+https://github.com/rust-lang/crates.io-index)", + "postgres-protocol 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 
(registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", + "state_machine_future 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-tcp 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-timer 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-uds 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", + "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tokio-postgres-native-tls" +version = "0.1.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "native-tls 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-postgres 0.4.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-tls 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "tokio-reactor" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1858,8 +2477,8 @@ name = "tokio-signal" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.26 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)", "signal-hook 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1875,7 +2494,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1884,7 +2503,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1899,7 +2518,7 @@ dependencies = [ "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", 
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1913,11 +2532,21 @@ version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-executor 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "tokio-tls" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", + "native-tls 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "tokio-trace-core" version = "0.1.0" @@ -1932,7 +2561,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1946,9 +2575,9 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 
0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1962,7 +2591,7 @@ name = "tower-service" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1972,13 +2601,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)", - "socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-executor 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-reactor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1995,13 +2624,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", 
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)", - "socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-executor 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-reactor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2018,7 +2647,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", "ipconfig 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2029,6 +2658,16 @@ dependencies = [ "trust-dns-proto 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "typenum" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "ucd-trie" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "ucd-util" version = "0.1.3" 
@@ -2044,7 +2683,7 @@ dependencies = [ [[package]] name = "unicase" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2071,6 +2710,11 @@ name = "unicode-segmentation" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "unicode-width" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "unicode-xid" version = "0.0.4" @@ -2110,7 +2754,7 @@ name = "url_serde" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2125,7 +2769,7 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2143,9 +2787,9 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.33 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2162,6 +2806,11 @@ name = "vcpkg" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
+[[package]] +name = "vec_map" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "version_check" version = "0.1.5" @@ -2188,7 +2837,7 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2283,6 +2932,7 @@ dependencies = [ "checksum actix_derive 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4300e9431455322ae393d43a2ba1ef96b8080573c0fc23b196219efedfb6ba69" "checksum adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c" "checksum aho-corasick 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e6f484ae0c99fec2e858eb6134949117399f222608d84cadb3f58c1f97c2364c" +"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" "checksum antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "34fde25430d87a9388dadbe6e34d7f72a462c8b43ac8d309b42b0a8505d7e2a5" "checksum arc-swap 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "bc4662175ead9cd84451d5c35070517777949a2ed84551764129cedb88384841" "checksum arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "92c7fb76bc8826a8b33b4ee5bb07a247a81e76764ab4d55e8f73e3a4d8808c71" @@ -2291,20 +2941,29 @@ dependencies = [ "checksum autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6d640bee2da49f60a4068a7fae53acde8982514ab7bae8b8cea9e88cbcfd799" "checksum backtrace 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "f106c02a3604afcdc0df5d36cc47b44b55917dbaf3d808f71c163a0ddba64637" "checksum 
backtrace-sys 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "797c830ac25ccc92a7f8a7b9862bde440715531514594a6154e3d4a54dd769b6" +"checksum barrel 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5f3b2d22ff68d492d073dfc8d29fda59f208e56fbf477ccc97fbdb12158059f1" +"checksum barrel 0.5.5-alpha.0 (git+https://github.com/spacekookie/barrel)" = "" "checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" "checksum base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643" "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" +"checksum block-buffer 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +"checksum block-padding 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6d4dc3af3ee2e12f3e5d224e5e1e3d73668abbeb69e566d361f7d5563a4fdf09" +"checksum boolinator 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cfa8873f51c92e232f9bac4065cddef41b714152812bfc5f7672ba16d6ef8cd9" "checksum brotli-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4445dea95f4c2b41cde57cc9fee236ae4dbae88d8fcbdb4750fc1bb5d86aaecd" "checksum brotli2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0cb036c3eade309815c15ddbacec5b22c4d1f3983a774ab2eac2e3e9ea85568e" "checksum build_const 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39092a32794787acd8525ee150305ff051b0aa6cc2abaf193924f5ab05425f39" +"checksum byte-tools 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" "checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" -"checksum cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)" = "5e5f3fee5eeb60324c2781f1e41286bdee933850fff9b3c672587fed5ec58c83" +"checksum cc 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)" = "39f75544d7bbaf57560d2168f28fd649ff9c76153874db88bdbdfd839b1a7e7d" "checksum cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11d43355396e872eefb45ce6342e4374ed7bc2b3a502d1b28e36d6e23c05d1f4" "checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878" +"checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum combine 3.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "da3da6baa321ec19e1cc41d31bf599f00c783d0517095cdaf0332e3fe8d20680" -"checksum cookie 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1465f8134efa296b4c19db34d909637cb2bf0f7aaf21299e23e18fa29ac557cf" +"checksum cookie 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "99be24cfcf40d56ed37fd11c2123be833959bbc5bddecb46e1c2e442e15fa3e0" +"checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" +"checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" "checksum crc 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"d663548de7f5cca343f1e0a48d14dcfb0e9eb4e079ec58883b7251539fa10aeb" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" "checksum crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b" @@ -2312,9 +2971,15 @@ dependencies = [ "checksum crossbeam-epoch 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "04c9e3102cc2d69cd681412141b390abd55a362afc1540965dad0ad4d34280b4" "checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b" "checksum crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f8306fcef4a7b563b76b7dd949ca48f52bc1141aa067d2ea09565f3e2652aa5c" +"checksum crypto-mac 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4434400df11d95d556bac068ddfedd482915eb18fe8bea89bc80b6e4b1c179e5" "checksum cuid 0.1.0 (git+https://github.com/prisma/cuid-rust)" = "" +"checksum darling 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9158d690bc62a3a57c3e45b85e4d50de2008b39345592c64efd79345c7e24be0" +"checksum darling_core 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)" = "d2a368589465391e127e10c9e3a08efc8df66fd49b87dc8524c764bbe7f2ef82" +"checksum darling_macro 0.8.6 (registry+https://github.com/rust-lang/crates.io-index)" = "244e8987bd4e174385240cde20a3657f607fb0797563c28255c353b5819a07b1" "checksum debug_stub_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "496b7f8a2f853313c3ca370641d7ff3e42c32974fdccda8f0684599ed0a3ff6b" -"checksum dtoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6d301140eb411af13d3115f9a562c85cc6b541ade9dfa314132244aaee7489dd" +"checksum derive_state_machine_future 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"1220ad071cb8996454c20adf547a34ba3ac793759dab793d9dc04996a373ac83" +"checksum digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05f47366984d3ad862010e22c7ce81a7dbcaebbdfb37241a620f8b6596ee135c" +"checksum dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e" "checksum either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5527cfe0d098f36e3f8839852688e63c8fff1c90b2b405aef730615f9a7bcf7b" "checksum encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec" "checksum encoding-index-japanese 1.20141219.5 (registry+https://github.com/rust-lang/crates.io-index)" = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91" @@ -2328,39 +2993,50 @@ dependencies = [ "checksum error-chain 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6930e04918388a9a2e41d518c25cf679ccafe26733fb4127dbf21993f2575d46" "checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2" "checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1" +"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" +"checksum fallible-iterator 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eb7217124812dc5672b7476d0c2d20cfe9f7c0f1ba0904b674a9762a0212f72e" +"checksum fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" "checksum flate2 1.0.7 
(registry+https://github.com/rust-lang/crates.io-index)" = "f87e68aa82b2de08a6e037f1385455759df6e445a8df5e005b4297191dbf18aa" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" +"checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +"checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" -"checksum futures 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "62941eff9507c8177d448bd83a44d9b9760856e184081d8cd79ba9f03dd24981" +"checksum futures 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)" = "a2037ec1c6c1c4f79557762eab1f7eae1f64f6cb418ace90fae88f0942b60139" "checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" +"checksum generic-array 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c0f28c2f5bfb5960175af447a2da7c18900693738343dc896ffbcabd9839592" "checksum graphql-parser 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "070cef3f91429889a1ed86e5f5824d6e8b3ebcb9870d7c7050f9bfcc9e4ae235" "checksum h2 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "85ab6286db06040ddefb71641b50017c06874614001a134b423783e2db2920bd" "checksum 
heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" +"checksum hmac 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f127a908633569f208325f86f71255d3363c79721d7f9fe31cd5569908819771" "checksum hostname 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "21ceb46a83a85e824ef93669c8b390009623863b5c195d1ba747292c0c72f94e" "checksum http 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "eed324f0f0daf6ec10c474f150505af2c143f251722bf9dbd1261bd1f2ee2c1a" "checksum httparse 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e8734b0cfd3bc3e101ec59100e101c2eecd19282202e87808b3037b442777a83" "checksum humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca7e5f2e110db35f93b837c81797f3714500b81d517bf20c431b16d3ca4f114" +"checksum ident_case 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" "checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" "checksum indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7e81a7c05f79578dbc15793d8b619db9ba32b4577003ef3af1a91c416798c58d" "checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" "checksum ipconfig 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "08f7eadeaf4b52700de180d147c4805f199854600b36faa963d91114827b2ffc" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" -"checksum itoa 0.4.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b" +"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" +"checksum jsonrpc-core 10.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dc15eef5f8b6bef5ac5f7440a957ff95d036e2f98706947741bfc93d1976db4c" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" "checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" -"checksum libc 0.2.53 (registry+https://github.com/rust-lang/crates.io-index)" = "ec350a9417dfd244dc9a6c4a71e13895a4db6b92f0b106f07ebbc3f3bc580cee" +"checksum libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "c6785aa7dd976f5fbf3b71cfd9cd49d7f783c1ff565a858d71031c6c313aa5c6" "checksum libsqlite3-sys 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3567bc1a0c84e2c0d71eeb4a1f08451babf7843babd733158777d9c686dad9f3" "checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83" "checksum lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "62ebf1391f6acad60e5c8b43706dde4582df75c06698ab44511d15016bc2442c" "checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6" "checksum lru-cache 0.1.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" +"checksum maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "08cbb6b4fef96b6d77bfc40ec491b1690c779e77b05cd9f07f787ed376fd4c43" "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" +"checksum md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e6bcd6433cff03a4bfc3d9834d504467db1f1cf6d0ea765d37d330249ed629d" "checksum memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39" "checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3" "checksum mime 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)" = "3e27ca21f40a310bd06d9031785f4801710d566c184a6e15bad4f1d9b65f9425" @@ -2372,25 +3048,43 @@ dependencies = [ "checksum mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "966257a94e196b11bb43aca423754d87429960a768de9414f3691d6957abf125" "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" "checksum multimap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb04b9f127583ed176e163fb9ec6f3e793b87e21deedd5734a69386a18a0151" +"checksum native-tls 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4b2df1a4c22fd44a62147fd8f13dd0f95c9d8ca7b2610299b2a2f9cf8964274e" "checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88" "checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" "checksum nom 4.2.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" +"checksum num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cf4825417e1e1406b3782a8ce92f4d53f26ec055e3622e1881ca8e9f5f9e08db" +"checksum num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "57450397855d951f1a41305e54851b1a7b8f5d2e349543a02a2effe25459f718" +"checksum num-complex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "107b9be86cd2481930688277b675b0114578227f034674726605b8a482d8baf8" "checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" +"checksum num-iter 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "af3fdbbc3291a5464dc57b03860ec37ca6bf915ed6ee385e7c6c052c422b2124" +"checksum num-rational 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4e96f040177bb3da242b5b1ecf3f54b5d5af3efbbfb18608977a5d2767b22f10" "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" "checksum num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1a23f0ed30a54abaa0c7e83b1d2d87ada7c3c23078d1d87815af3e3b6385fbba" "checksum numtoa 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8f8bdf33df195859076e54ab11ee78a1b208382d3a26ec40d142ffc1ecc49ef" "checksum once_cell 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "532c29a261168a45ce28948f9537ddd7a5dd272cc513b3017b1e82a88f962c37" +"checksum opaque-debug 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "93f5bb2e8e8dec81642920ccff6b61f1eb94fa3020c5a325c9851ff604152409" +"checksum openssl 0.10.22 (registry+https://github.com/rust-lang/crates.io-index)" = "a51f452b82d622fc8dd973d7266e9055ac64af25b957d9ced3989142dc61cb6b" +"checksum openssl-probe 0.1.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" +"checksum openssl-sys 0.9.46 (registry+https://github.com/rust-lang/crates.io-index)" = "05636e06b4f8762d4b81d24a351f3966f38bd25ccbcfd235606c91fdb82cc60f" +"checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" "checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13" "checksum parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ab41b4aed082705d1056416ae4468b6ea99d52599ecf3169b00088d43113e337" "checksum parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94c8c7923936b28d546dfd14d4472eaf34c99b14e1c973a32b3e6d4eb04298c9" "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" +"checksum pest 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "933085deae3f32071f135d799d75667b63c8dc1f4537159756e3d4ceab41868c" +"checksum pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" +"checksum pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646" +"checksum pest_meta 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f249ea6de7c7b7aba92b4ff4376a994c6dbd98fd2166c89d5c4947397ecb574d" "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" "checksum phf 0.7.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b3da44b85f8e8dfaec21adae67f95d93244b2ecf6ad2a692320598dcc8e6dd18" "checksum phf_codegen 
0.7.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b03e85129e324ad4166b06b2c7491ae27fe3ec353af72e72cd1654c7225d517e" "checksum phf_generator 0.7.24 (registry+https://github.com/rust-lang/crates.io-index)" = "09364cc93c159b8b06b1f4dd8a4398984503483891b0c26b867cf431fb132662" "checksum phf_shared 0.7.24 (registry+https://github.com/rust-lang/crates.io-index)" = "234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0" "checksum pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "676e8eb2b1b4c9043511a9b7bea0915320d7e502b0a079fb03f9635a5252b18c" -"checksum proc-macro2 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)" = "64c827cea7a7ab30ce4593e5e04d7a11617ad6ece2fa230605a78b00ff965316" +"checksum postgres 0.16.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ecd478ee3885e956071eeb6462e477c93c2438ad8a7052388644f8fe7db9d276" +"checksum postgres-protocol 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7f8a9ca2034ea1677ffc0ba134234e4beb383a0c6b5d2eda51b7f6951af30058" +"checksum prisma-query 0.1.0 (git+https://github.com/prisma/prisma-query.git)" = "" +"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" "checksum prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b9f36c478cd43382388dfc3a3679af175c03d19ed8039e79a3e4447e944cd3f3" "checksum prost-build 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6325275b85605f58f576456a47af44417edf5956a6f670bb59fbe12aff69597" "checksum prost-derive 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9787d1977ea72e8066d58e46ae66100324a2815e677897fe78dfe54958f48252" @@ -2399,6 +3093,7 @@ dependencies = [ "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" "checksum quote 0.6.12 
(registry+https://github.com/rust-lang/crates.io-index)" = "faf4799c5d274f3868a4aae320a0a182cbd2baee377b378f080e16a23e9d80db" "checksum r2d2 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9dd8a293251281a4d02848925fcdbbc9f466ddb4965981bb06680359b3d12091" +"checksum r2d2_postgres 0.15.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e1a148f605e71dcbc51754d58356358887ac0418f6c1f30840e00f1ae2cf0130" "checksum r2d2_sqlite 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a7cba990b29ae565b1a765ef45f6b84a89a77736b91582e0243c12f613653857" "checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" "checksum rand 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c618c47cd3ebd209790115ab837de41425723956ad3ce2e6a7f09890947cacb9" @@ -2408,7 +3103,7 @@ dependencies = [ "checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0" "checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" "checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -"checksum rand_jitter 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b9ea758282efe12823e0d952ddb269d2e1897227e464919a554f2a03ef1b832" +"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" "checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" "checksum 
rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" @@ -2418,45 +3113,55 @@ dependencies = [ "checksum regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "8f0a0bcab2fd7d1d7c54fa9eae6f43eddeb9ce2e7352f8518a814a4f65d60c58" "checksum regex-syntax 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "dcfd8681eebe297b81d98498869d4aae052137651ad7b96822f09ceb690d0a96" "checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5" +"checksum rent_to_own 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05a51ad2b1c5c710fa89e6b1631068dab84ed687bc6a5fe061ad65da3d0c25b2" "checksum resolv-conf 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b263b4aa1b5de9ffc0054a2386f96992058bb6870aab516f8cdeb8a667d56dcb" "checksum ring 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2c4db68a2e35f3497146b7e4563df7d4773a2433230c5e4b448328e31740458a" "checksum rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6381ddfe91dbb659b4b132168da15985bc84162378cf4fcdc4eb99c857d063e2" -"checksum rust-embed 4.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "da54e434a7cca32d7973157fbbb12480c059e8b2d52b574bd45a6d9986dc8f16" -"checksum rust-embed-impl 4.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0cf8dd1140e66963d63e996a88e70f80d240962b3d689f2d18727477820c1876" +"checksum rust-embed 4.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "73b42ffc98958788a47b1abde479901c0dfc12d6185965b3f289d05d5655c305" +"checksum rust-embed-impl 4.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bd0e740ca1e1969c2d3310bd2bda2024ed44ce473e527a2585aeaec1de9d81c3" +"checksum rust_decimal 1.0.1 (git+https://github.com/pimeys/rust-decimal.git)" = "" "checksum rustc-demangle 0.1.14 
(registry+https://github.com/rust-lang/crates.io-index)" = "ccc78bfd5acd7bf3e89cffcf899e5cb1a52d6fafa8dec2739ad70c9577a57288" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -"checksum ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "eb9e9b8cde282a9fe6a42dd4681319bfb63f121b8a8ee9439c6f4107e58a46f7" +"checksum ryu 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "b96a9549dc8d48f2c283938303c4b5a77aa29bfbc5b54b084fb1630408899a8f" "checksum safemem 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8dca453248a96cb0749e36ccdfe2b0b4e54a61bfef89fb97ec621eb8e0a93dd9" "checksum same-file 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8f20c4be53a8a1ff4c1f1b2bd14570d2f634628709752f0702ecdd2b3f9a5267" +"checksum schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f6abf258d99c3c1c5c2131d99d064e94b7b3dd5f416483057f308fea253339" "checksum scheduled-thread-pool 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1a2ff3fc5223829be817806c6441279c676e454cc7da608faf03b0ccc09d3889" "checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" +"checksum security-framework 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eee63d0f4a9ec776eeb30e220f0bc1e092c3ad744b2a379e3993070364d3adc2" +"checksum security-framework-sys 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9636f8989cbf61385ae4824b98c1aaa54c994d7d8b41f11c601ed799f0549a56" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" -"checksum serde 
1.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "aa5f7c20820475babd2c077c3ab5f8c77a31c15e16ea38687b4c02d3e48680f4" -"checksum serde_derive 1.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "58fc82bec244f168b23d1963b45c8bf5726e9a15a9d146a067f9081aeed2de79" +"checksum serde 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)" = "a72e9b96fa45ce22a4bc23da3858dfccfd60acd28a25bcd328a98fdd6bea43fd" +"checksum serde_derive 1.0.91 (registry+https://github.com/rust-lang/crates.io-index)" = "101b495b109a3e3ca8c4cbe44cf62391527cdfb6ba15821c5ce80bcd5ea23f9f" "checksum serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)" = "5a23aa71d4a4d43fdbfaac00eff68ba8a06a51759a89ac3304323e800c4dd40d" "checksum serde_urlencoded 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "642dd69105886af2efd227f75a520ec9b44a820d65bc133a9131f7d229fd165a" -"checksum serde_yaml 0.8.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0887a8e097a69559b56aa2526bf7aff7c3048cf627dff781f0b56a6001534593" +"checksum serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "38b08a9a90e5260fe01c6480ec7c811606df6d3a660415808c3c3fa8ed95b582" +"checksum sha-1 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "23962131a91661d643c98940b20fcaffe62d776a823247be80a48fcb8b6fce68" "checksum sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" +"checksum sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4d8bfd0e469f417657573d8451fb33d16cfe0989359b93baf3a1ffc639543d" "checksum signal-hook 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "72ab58f1fda436857e6337dcb6a5aaa34f16c5ddc87b3a8b6ef7a212f90b9c5a" -"checksum signal-hook-registry 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "591fe2ee5a2412968f63a008a190d99918c2cda3f616411026f0975715e1cf62" +"checksum 
signal-hook-registry 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cded4ffa32146722ec54ab1f16320568465aa922aa9ab4708129599740da85d7" "checksum siphasher 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" "checksum smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c4488ae950c49d403731982257768f48fada354a5203fe81f9bb6f43ca9002be" -"checksum socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c4d11a52082057d87cb5caa31ad812f4504b97ab44732cd8359df2e9ff9f48e7" -"checksum sqlite 0.24.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c3bdcd8ae03982ddb21cf0217ae34d4555fc2fd0db465cb666fea82644b77f3b" -"checksum sqlite3-src 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "46e0bc115b563b1ee6c665ef895b56bf488522f57d1c6571887547c57c8f5a88" -"checksum sqlite3-sys 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71fec807a1534bd13eeaaec396175d67c79bdc68df55e18a452726ec62a8fb08" +"checksum socket2 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "4e626972d3593207547f14bf5fc9efa4d0e7283deb73fef1dff313dae9ab8878" "checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8" +"checksum state_machine_future 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "530e1d624baae485bce12e6647acb76aafa253346ee8a16751974eed5a24b13d" "checksum string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b639411d0b9c738748b5397d5ceba08e648f4f1992231aa859af1a017f31f60b" +"checksum stringprep 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" 
+"checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" +"checksum subtle 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2d67a5a62ba6e01cb2192ff309324cb4875d0c451d55fe2319433abe7a05a8ee" "checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" "checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741" -"checksum syn 0.15.33 (registry+https://github.com/rust-lang/crates.io-index)" = "ec52cd796e5f01d0067225a5392e70084acc4c0013fa71d55166d38a8b307836" +"checksum syn 0.15.34 (registry+https://github.com/rust-lang/crates.io-index)" = "a1393e4a97a19c01e900df2aec855a29f71cf02c402e2f443b8d2747c25c5dbe" "checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" "checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015" "checksum tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +"checksum tempfile 3.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b86c784c88d98c801132806dadd3819ed29d8600836c4088e855cdf3e178ed8a" "checksum termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4096add70612622289f2fdcdbd5086dc81c1e2675e6ae58d6c4f62a16c6d7f2f" "checksum termion 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dde0593aeb8d47accea5392b39350015b5eccb12c0d98044d856983d89548dea" +"checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" "checksum thread_local 0.3.6 
(registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" "checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" "checksum tokio 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)" = "cec6c34409089be085de9403ba2010b80e36938c9ca992c4f67f407bb13db0b1" @@ -2465,12 +3170,15 @@ dependencies = [ "checksum tokio-executor 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "83ea44c6c0773cc034771693711c35c677b4b5a4b21b9e7071704c54de7d555e" "checksum tokio-fs 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "3fe6dc22b08d6993916647d108a1a7d15b9cd29c4f4496c62b92c45b5041b7af" "checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926" +"checksum tokio-postgres 0.4.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8e256cd39279dcf12a3154f234641ce71127ec9e710f3a0a3587581990f55a2a" +"checksum tokio-postgres-native-tls 0.1.0-rc.1 (registry+https://github.com/rust-lang/crates.io-index)" = "45e1566988692f6b1738e52fe799dd3c6ff2590a68b2ded49aff85a7b38c950b" "checksum tokio-reactor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "6af16bfac7e112bea8b0442542161bfc41cbfa4466b580bdda7d18cb88b911ce" "checksum tokio-signal 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "dd6dc5276ea05ce379a16de90083ec80836440d5ef8a6a39545a3207373b8296" "checksum tokio-sync 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "5b2f843ffdf8d6e1f90bddd48da43f99ab071660cd92b7ec560ef3cdfd7a409a" "checksum tokio-tcp 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1d14b10654be682ac43efee27401d792507e30fd8d26389e1da3b185de2e4119" "checksum tokio-threadpool 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = 
"72558af20be886ea124595ea0f806dd5703b8958e4705429dd58b3d8231f72f2" "checksum tokio-timer 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "2910970404ba6fa78c5539126a9ae2045d62e3713041e447f695f41405a120c6" +"checksum tokio-tls 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "354b8cd83825b3c20217a9dc174d6a0c67441a2fae5c41bcb1ea6679f6ae0f7c" "checksum tokio-trace-core 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "350c9edade9830dc185ae48ba45667a445ab59f6167ef6d0254ec9d2430d9dd3" "checksum tokio-udp 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "66268575b80f4a4a710ef83d087fdfeeabdce9b74c797535fbac18a2cb906e92" "checksum tokio-uds 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "037ffc3ba0e12a0ab4aca92e5234e0dedeb48fddf6ccd260f1f150a36a9f2445" @@ -2478,12 +3186,15 @@ dependencies = [ "checksum trust-dns-proto 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0838272e89f1c693b4df38dc353412e389cf548ceed6f9fd1af5a8d6e0e7cf74" "checksum trust-dns-proto 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "09144f0992b0870fa8d2972cc069cbf1e3c0fda64d1f3d45c4d68d0e0b52ad4e" "checksum trust-dns-resolver 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8a9f877f7a1ad821ab350505e1f1b146a4960402991787191d6d8cab2ce2de2c" +"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" +"checksum ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "71a9c5b1fe77426cf144cc30e49e955270f5086e31a6441dfa8b32efc09b9d77" "checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86" "checksum unicase 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" -"checksum unicase 2.3.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "41d17211f887da8e4a70a45b9536f26fc5de166b81e2d5d80de4a17fd22553bd" +"checksum unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a84e5511b2a947f3ae965dcb29b13b7b1691b6e7332cf5dbc1744138d5acb7f6" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426" "checksum unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1" +"checksum unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526" "checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" @@ -2496,6 +3207,7 @@ dependencies = [ "checksum v_escape_derive 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "306896ff4b75998501263a1dc000456de442e21d68fe8c8bdf75c66a33a58e23" "checksum v_htmlescape 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7fbbe0fa88dd36f9c8cf61a218d4b953ba669de4d0785832f33cc72bd081e1be" "checksum vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "def296d3eb3b12371b2c7d0e83bfe1403e4db2d7a0bba324a12b21c4ee13143d" +"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d9d7ed3431229a144296213105a390676cc49c9b6a72bd19f3176c98e129fa1" diff --git a/server/prisma-rs/Cargo.toml b/server/prisma-rs/Cargo.toml index a6463ec082..96b2705fb0 100644 --- a/server/prisma-rs/Cargo.toml +++ b/server/prisma-rs/Cargo.toml @@ -1,9 +1,15 @@ [workspace] members = [ "prisma-models", + "migration-engine/connectors/migration-connector", + "migration-engine/connectors/sql-migration-connector", + "migration-engine/core", "query-engine/connectors/connector", "query-engine/connectors/sql-connector", "query-engine/prisma", "query-engine/native-bridge", "query-engine/core", + "libs/datamodel", + "libs/prisma-inflector", + "libs/database-inspector" ] diff --git a/server/prisma-rs/Makefile b/server/prisma-rs/Makefile index 950d854775..a3e5ef23a0 100644 --- a/server/prisma-rs/Makefile +++ b/server/prisma-rs/Makefile @@ -6,4 +6,12 @@ pedantic: RUSTFLAGS="-D warnings" cargo build release: - cargo build --release \ No newline at end of file + cargo build --release + + +dev-sqlite: + cp dev-configs/sqlite.yml prisma.yml + +dev-postgres: + docker-compose -f ../docker-compose/postgres/dev-postgres.yml up -d --remove-orphans + cp dev-configs/postgres.yml prisma.yml diff --git a/server/prisma-rs/config/prisma.h b/server/prisma-rs/config/prisma.h deleted file mode 100644 index f4f2800899..0000000000 --- a/server/prisma-rs/config/prisma.h +++ /dev/null @@ -1,5 +0,0 @@ -#include -#include - -// ProtoBuf *get_node_by_where(const char *data, uintptr_t len); -// ProtoBuf *get_nodes(const char *data, uintptr_t len); diff --git 
a/server/prisma-rs/dev-configs/postgres.yml b/server/prisma-rs/dev-configs/postgres.yml new file mode 100644 index 0000000000..69bee61de4 --- /dev/null +++ b/server/prisma-rs/dev-configs/postgres.yml @@ -0,0 +1,11 @@ +port: 4466 +databases: + default: + connector: postgres-native + host: 127.0.0.1 + port: 5432 + user: postgres + password: prisma + database: prisma + schema: CreateMutationSpec_S + rawAccess: true diff --git a/server/prisma-rs/prisma.yml b/server/prisma-rs/dev-configs/sqlite.yml similarity index 60% rename from server/prisma-rs/prisma.yml rename to server/prisma-rs/dev-configs/sqlite.yml index 2bc6fb9685..4410665ff5 100644 --- a/server/prisma-rs/prisma.yml +++ b/server/prisma-rs/dev-configs/sqlite.yml @@ -3,7 +3,7 @@ prototype: true databases: default: connector: sqlite-native - databaseFile: ${SERVER_ROOT}/db/default_default.db + databaseFile: ${SERVER_ROOT}/db/Chinook.db migrations: true active: true - rawAccess: true + rawAccess: true \ No newline at end of file diff --git a/server/prisma-rs/libs/database-inspector/Cargo.toml b/server/prisma-rs/libs/database-inspector/Cargo.toml index ef7376ba7b..a7a74d4a12 100644 --- a/server/prisma-rs/libs/database-inspector/Cargo.toml +++ b/server/prisma-rs/libs/database-inspector/Cargo.toml @@ -5,8 +5,6 @@ authors = ["Marcus Böhm "] edition = "2018" [dependencies] -prisma-query = { path = "../prisma-query" } barrel = { git = "https://github.com/spacekookie/barrel", features = ["sqlite3"] } rusqlite = { version = "0.16" } -prisma-models = { path = "../../prisma-models" } time = "*" diff --git a/server/prisma-rs/libs/database-inspector/src/database_inspector_impl.rs b/server/prisma-rs/libs/database-inspector/src/database_inspector_impl.rs new file mode 100644 index 0000000000..8a31a0b837 --- /dev/null +++ b/server/prisma-rs/libs/database-inspector/src/database_inspector_impl.rs @@ -0,0 +1,173 @@ +use crate::*; + +use rusqlite::{Connection, Result, NO_PARAMS}; + +pub struct DatabaseInspectorImpl { + connection: 
Connection, +} + +impl DatabaseInspector for DatabaseInspectorImpl { + fn introspect(&self, schema: &String) -> DatabaseSchema { + DatabaseSchema { + tables: self + .get_table_names(schema) + .into_iter() + .map(|t| self.get_table(schema, &t)) + .collect(), + } + } +} + +impl DatabaseInspectorImpl { + pub fn new(connection: Connection) -> DatabaseInspectorImpl { + DatabaseInspectorImpl { connection } + } + + fn get_table_names(&self, schema: &String) -> Vec { + let sql = format!( + " + SELECT + name + FROM + {}.sqlite_master + WHERE + type='table' + ", + schema + ); + + let mut stmt = self.connection.prepare_cached(&sql).unwrap(); + let mut rows = stmt.query(NO_PARAMS).unwrap(); + let mut result = Vec::new(); + + while let Some(row) = rows.next() { + let name: String = row.unwrap().get("name"); + if name != "sqlite_sequence" { + result.push(name); + } + } + + result + } + + fn get_table(&self, schema: &String, table: &String) -> Table { + let introspected_columns = self.get_columns(&schema, &table); + let introspected_foreign_keys = self.get_foreign_constraints(&schema, &table); + // let _index = get_index(&schema, &table); + // let _seq = get_sequence(&schema, &table); + + Table { + name: table.to_string(), + columns: convert_introspected_columns(introspected_columns, introspected_foreign_keys), + indexes: Vec::new(), + } + } + + fn get_columns(&self, schema: &String, table: &String) -> Vec { + let sql = format!(r#"Pragma "{}".table_info ("{}")"#, schema, table); + let mut stmt = self.connection.prepare_cached(&sql).unwrap(); + let mut rows = stmt.query(NO_PARAMS).unwrap(); + let mut result = Vec::new(); + + while let Some(row_result) = rows.next() { + let row = row_result.unwrap(); + result.push(IntrospectedColumn { + name: row.get("name"), + table: table.to_string(), + tpe: row.get("type"), + is_required: row.get("notnull"), + default: row.get("dflt_value"), + }); + } + + result + } + + fn get_foreign_constraints(&self, schema: &String, table: &String) -> Vec { 
+ let sql = format!(r#"Pragma "{}".foreign_key_list("{}");"#, schema, table); + let mut stmt = self.connection.prepare_cached(&sql).unwrap(); + let mut rows = stmt.query(NO_PARAMS).unwrap(); + let mut result = Vec::new(); + + while let Some(row_result) = rows.next() { + let row = row_result.unwrap(); + result.push(IntrospectedForeignKey { + name: "".to_string(), + table: table.to_string(), + column: row.get("from"), + referenced_table: row.get("table"), + referenced_column: row.get("to"), + }); + } + + result + } + + fn get_sequence(&self, _schema: &String, _table: &String) -> Sequence { + unimplemented!() + } + + fn get_index(&self, _schema: &String, _table: &String) -> Index { + unimplemented!() + } + + // fn query(&self, schema: &String, parse: F) -> +} + +fn convert_introspected_columns( + columns: Vec, + foreign_keys: Vec, +) -> Vec { + columns + .iter() + .map(|c| { + let foreign_key = foreign_keys + .iter() + .find(|fk| fk.column == c.name && fk.table == c.table) + .map(|fk| ForeignKey { + table: fk.referenced_table.clone(), + column: fk.referenced_column.clone(), + }); + Column { + name: c.name.clone(), + tpe: column_type(c), + is_required: c.is_required, + foreign_key: foreign_key, + sequence: None, + } + }) + .collect() +} + +fn column_type(column: &IntrospectedColumn) -> ColumnType { + match column.tpe.as_ref() { + "INTEGER" => ColumnType::Int, + "REAL" => ColumnType::Float, + "BOOLEAN" => ColumnType::Boolean, + "TEXT" => ColumnType::String, + s if s.starts_with("VARCHAR") => ColumnType::String, + "DATE" => ColumnType::DateTime, + x => panic!(format!( + "type {} is not supported here yet. 
Column was: {}", + x, column.name + )), + } +} + +#[derive(Debug)] +struct IntrospectedColumn { + name: String, + table: String, + tpe: String, + default: Option, + is_required: bool, +} + +#[derive(Debug)] +struct IntrospectedForeignKey { + name: String, + table: String, + column: String, + referenced_table: String, + referenced_column: String, +} diff --git a/server/prisma-rs/libs/database-inspector/src/empty_impl.rs b/server/prisma-rs/libs/database-inspector/src/empty_impl.rs new file mode 100644 index 0000000000..15b982e66a --- /dev/null +++ b/server/prisma-rs/libs/database-inspector/src/empty_impl.rs @@ -0,0 +1,8 @@ +// use crate::*; + +// pub struct EmptyDatabaseInspectorImpl; +// impl DatabaseInspector for EmptyDatabaseInspectorImpl { +// fn introspect(&self, schema: String) -> DatabaseSchema { +// DatabaseSchema { tables: Vec::new() } +// } +// } diff --git a/server/prisma-rs/libs/database-inspector/src/lib.rs b/server/prisma-rs/libs/database-inspector/src/lib.rs index 664be681b5..1ed65ab0a6 100644 --- a/server/prisma-rs/libs/database-inspector/src/lib.rs +++ b/server/prisma-rs/libs/database-inspector/src/lib.rs @@ -1,60 +1,14 @@ -pub trait DatabaseInspector { - fn inspect(schema: String) -> DatabaseSchema; -} - -pub struct EmptyDatabaseInspectorImpl; - -impl DatabaseInspector for EmptyDatabaseInspectorImpl { - fn inspect(schema: String) -> DatabaseSchema { - DatabaseSchema { - tables: get_table_names(&schema) - .into_iter() - .map(|t| get_table(&schema, &t)) - .collect(), - } - } -} - -fn get_table_names(_schema: &String) -> Vec { - let _sql: &'static str = " -SELECT - table_name -FROM - information_schema.tables -WHERE - table_schema = $schema AND - -- Views are not supported yet - table_type = 'BASE TABLE' - "; - - vec![] -} - -fn get_table(schema: &String, table: &String) -> Table { - let _cols = get_column(&schema, &table); - let _foreign = get_foreign_constraint(&schema, &table); - let _index = get_index(&schema, &table); - let _seq = 
get_sequence(&schema, &table); +mod database_inspector_impl; +mod empty_impl; - unimplemented!() -} - -fn get_column(_schema: &String, _table: &String) -> Column { - unimplemented!() -} - -fn get_foreign_constraint(_schema: &String, _table: &String) -> ForeignKey { - unimplemented!() -} - -fn get_sequence(_schema: &String, _table: &String) -> Sequence { - unimplemented!() -} +pub use database_inspector_impl::*; +pub use empty_impl::*; -fn get_index(_schema: &String, _table: &String) -> Index { - unimplemented!() +pub trait DatabaseInspector { + fn introspect(&self, schema: &String) -> DatabaseSchema; } +#[derive(Debug, PartialEq, Eq, Clone)] pub struct DatabaseSchema { pub tables: Vec, } @@ -63,32 +17,82 @@ impl DatabaseSchema { pub fn table(&self, name: &str) -> Option<&Table> { self.tables.iter().find(|t| t.name == name) } + + pub fn has_table(&self, name: &str) -> bool { + self.table(name).is_some() + } } +#[derive(Debug, PartialEq, Eq, Clone)] pub struct Table { pub name: String, pub columns: Vec, pub indexes: Vec, } +impl Table { + pub fn column(&self, name: &str) -> Option<&Column> { + self.columns.iter().find(|c| c.name == name) + } + + pub fn has_column(&self, name: &str) -> bool { + self.column(name).is_some() + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] pub struct Column { pub name: String, - pub tpe: String, - pub nullable: bool, + pub tpe: ColumnType, + pub is_required: bool, pub foreign_key: Option, pub sequence: Option, } +impl Column { + pub fn new(name: String, tpe: ColumnType, is_required: bool) -> Column { + Column { + name, + tpe, + is_required, + foreign_key: None, + sequence: None, + } + } + + pub fn with_foreign_key(name: String, tpe: ColumnType, is_required: bool, foreign_key: ForeignKey) -> Column { + Column { + name, + tpe, + is_required, + foreign_key: Some(foreign_key), + sequence: None, + } + } +} + +#[derive(Debug, Copy, PartialEq, Eq, Clone)] +pub enum ColumnType { + Int, + Float, + Boolean, + String, + DateTime, +} + 
+#[derive(Debug, PartialEq, Eq, Clone)] pub struct ForeignKey { pub table: String, pub column: String, } +#[derive(Debug, PartialEq, Eq, Clone)] pub struct Sequence { pub name: String, pub current: u32, } +#[derive(Debug, PartialEq, Eq, Clone)] pub struct Index { pub name: String, pub columns: Vec, diff --git a/server/prisma-rs/libs/database-inspector/src/main.rs b/server/prisma-rs/libs/database-inspector/src/main.rs index 2a0236f6e4..1186488779 100644 --- a/server/prisma-rs/libs/database-inspector/src/main.rs +++ b/server/prisma-rs/libs/database-inspector/src/main.rs @@ -56,8 +56,8 @@ fn query_tables(c: &mut Connection) -> Vec { Ok(()) })() - .map_err(|e| panic!(e)) - .unwrap(); + .map_err(|e| panic!(e)) + .unwrap(); vec![] } diff --git a/server/prisma-rs/libs/database-inspector/tests/tests.rs b/server/prisma-rs/libs/database-inspector/tests/tests.rs new file mode 100644 index 0000000000..7274906931 --- /dev/null +++ b/server/prisma-rs/libs/database-inspector/tests/tests.rs @@ -0,0 +1,163 @@ +#![allow(non_snake_case)] +#![allow(unused)] + +use barrel::{backend::Sqlite as Squirrel, types, Migration}; +use database_inspector::*; +use rusqlite::{Connection, Result, NO_PARAMS}; +use std::{thread, time}; + +const SCHEMA: &str = "database_inspector_test"; + +#[test] +fn all_columns_types_must_work() { + let inspector = setup(|mut migration| { + migration.create_table("User", |t| { + t.add_column("int", types::integer()); + t.add_column("float", types::float()); + t.add_column("boolean", types::boolean()); + t.add_column("string1", types::text()); + t.add_column("string2", types::varchar(1)); + t.add_column("date_time", types::date()); + }); + }); + + let result = inspector.introspect(&SCHEMA.to_string()); + + let table = result.table("User").unwrap(); + let expected_columns = vec![ + Column { + name: "int".to_string(), + tpe: ColumnType::Int, + is_required: true, + foreign_key: None, + sequence: None, + }, + Column { + name: "float".to_string(), + tpe: 
ColumnType::Float, + is_required: true, + foreign_key: None, + sequence: None, + }, + Column { + name: "boolean".to_string(), + tpe: ColumnType::Boolean, + is_required: true, + foreign_key: None, + sequence: None, + }, + Column { + name: "string1".to_string(), + tpe: ColumnType::String, + is_required: true, + foreign_key: None, + sequence: None, + }, + Column { + name: "string2".to_string(), + tpe: ColumnType::String, + is_required: true, + foreign_key: None, + sequence: None, + }, + Column { + name: "date_time".to_string(), + tpe: ColumnType::DateTime, + is_required: true, + foreign_key: None, + sequence: None, + }, + ]; + + assert_eq!(table.columns, expected_columns); +} + +#[test] +fn is_required_must_work() { + let inspector = setup(|mut migration| { + migration.create_table("User", |t| { + t.add_column("column1", types::integer().nullable(false)); + t.add_column("column2", types::integer().nullable(true)); + }); + }); + + let result = inspector.introspect(&SCHEMA.to_string()); + + let user_table = result.table("User").unwrap(); + let expected_columns = vec![ + Column { + name: "column1".to_string(), + tpe: ColumnType::Int, + is_required: true, + foreign_key: None, + sequence: None, + }, + Column { + name: "column2".to_string(), + tpe: ColumnType::Int, + is_required: false, + foreign_key: None, + sequence: None, + }, + ]; + assert_eq!(user_table.columns, expected_columns); +} + +#[test] +fn foreign_keys_must_work() { + let inspector = setup(|mut migration| { + migration.create_table("City", |t| { + t.add_column("id", types::primary()); + }); + migration.create_table("User", |t| { + t.add_column("city", types::foreign("City(id)")); + }); + }); + + let result = inspector.introspect(&SCHEMA.to_string()); + + let user_table = result.table("User").unwrap(); + let expected_columns = vec![Column { + name: "city".to_string(), + tpe: ColumnType::Int, + is_required: true, + foreign_key: Some(ForeignKey { + table: "City".to_string(), + column: "id".to_string(), + }), + 
sequence: None, + }]; + assert_eq!(user_table.columns, expected_columns); +} + +fn setup(mut migrationFn: F) -> Box +where + F: FnMut(&mut Migration) -> (), +{ + let connection = Connection::open_in_memory() + .and_then(|c| { + let server_root = std::env::var("SERVER_ROOT").expect("Env var SERVER_ROOT required but not found."); + let path = format!("{}/db", server_root); + let database_file_path = dbg!(format!("{}/{}.db", path, SCHEMA)); + std::fs::remove_file(database_file_path.clone()); // ignore potential errors + thread::sleep(time::Duration::from_millis(100)); + + c.execute("ATTACH DATABASE ? AS ?", &[database_file_path.as_ref(), SCHEMA]) + .map(|_| c) + }) + .and_then(|c| { + let mut migration = Migration::new().schema(SCHEMA); + migrationFn(&mut migration); + let full_sql = migration.make::(); + for sql in full_sql.split(";") { + dbg!(sql); + if (sql != "") { + c.execute(&sql, NO_PARAMS).unwrap(); + } + } + Ok(c) + }) + .unwrap(); + + Box::new(DatabaseInspectorImpl::new(connection)) +} diff --git a/server/prisma-rs/libs/datamodel/Cargo.toml b/server/prisma-rs/libs/datamodel/Cargo.toml new file mode 100644 index 0000000000..e700ba357c --- /dev/null +++ b/server/prisma-rs/libs/datamodel/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "datamodel" +version = "0.1.0" +authors = ["Emanuel Joebstl "] +edition = "2018" + +[dependencies] +clap = "2.33.0" +pest = "2.0" +pest_derive = "2.0" +chrono = { version = "0.4.6", features = ["serde"] } +serde = { version = "1.0.90", features = ["derive"] } +serde_json = "1.0" diff --git a/server/prisma-rs/libs/datamodel/README.md b/server/prisma-rs/libs/datamodel/README.md new file mode 100644 index 0000000000..cd9e4cf16e --- /dev/null +++ b/server/prisma-rs/libs/datamodel/README.md @@ -0,0 +1,26 @@ +# Parser Prototype for Prisma Datamodel v2 + +Language: Rust + +Build System: Cargo + +**Please consider this a WIP prototype. 
API's might change.** + +### Design goals + +* Strict parsing: A duplicate directive, unknown directive, unknown argument or extra argument is an error. +* Accumulate errors to present them at the end instead of throwing (TODO) + +### Usage + +``` +let file = fs::read_to_string(&args[1]).expect(&format!("Unable to open file {}", args[1])); + +let ast = parser::parse(&file); +let validator = Validator::new(); +let dml = validator.validate(&ast); +``` + +### Error Handling + +Currently, we panic on the first error. This will change in the future, and `Validator::validate` will return a proper `Result` object. \ No newline at end of file diff --git a/server/prisma-rs/libs/datamodel/src/ast/mod.rs b/server/prisma-rs/libs/datamodel/src/ast/mod.rs new file mode 100644 index 0000000000..0d383832b3 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/ast/mod.rs @@ -0,0 +1,147 @@ +pub mod parser; + +#[derive(Debug, Clone, Copy)] +pub struct Span { + pub start: usize, + pub end: usize, +} + +impl Span { + pub fn new(start: usize, end: usize) -> Span { + Span { start, end } + } + pub fn empty() -> Span { + Span { start: 0, end: 0 } + } + pub fn from_pest(s: &pest::Span) -> Span { + Span { + start: s.start(), + end: s.end(), + } + } +} + +impl std::fmt::Display for Span { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "[{} - {}]", self.start, self.end) + } +} + +#[derive(Debug)] +pub enum FieldArity { + Required, + Optional, + List, +} + +#[derive(Debug)] +pub struct Comment { + pub text: String, + pub is_error: bool, +} + +#[derive(Debug)] +pub struct DirectiveArgument { + pub name: String, + pub value: Value, + pub span: Span, +} + +#[derive(Debug, Clone)] +pub enum Value { + NumericValue(String, Span), + BooleanValue(String, Span), + StringValue(String, Span), + ConstantValue(String, Span), +} + +#[derive(Debug)] +pub struct Directive { + pub name: String, + pub arguments: Vec, + pub span: Span, +} + +pub trait WithDirectives { + fn 
directives(&self) -> &Vec; +} + +pub trait WithComments { + fn comments(&self) -> &Vec; +} + +#[derive(Debug)] +pub struct Field { + pub field_type: String, + pub field_link: Option, + pub name: String, + pub arity: FieldArity, + pub default_value: Option, + pub directives: Vec, + pub comments: Vec, + pub span: Span, +} + +impl WithDirectives for Field { + fn directives(&self) -> &Vec { + &self.directives + } +} + +impl WithComments for Field { + fn comments(&self) -> &Vec { + &self.comments + } +} + +#[derive(Debug)] +pub struct Enum { + pub name: String, + pub values: Vec, + pub directives: Vec, + pub comments: Vec, +} + +impl WithDirectives for Enum { + fn directives(&self) -> &Vec { + &self.directives + } +} + +impl WithComments for Enum { + fn comments(&self) -> &Vec { + &self.comments + } +} + +#[derive(Debug)] +pub struct Model { + pub name: String, + pub fields: Vec, + pub directives: Vec, + pub comments: Vec, +} + +impl WithDirectives for Model { + fn directives(&self) -> &Vec { + &self.directives + } +} + +impl WithComments for Model { + fn comments(&self) -> &Vec { + &self.comments + } +} + +#[derive(Debug)] +pub enum ModelOrEnum { + Enum(Enum), + Model(Model), +} + +#[derive(Debug)] +pub struct Schema { + pub models: Vec, + pub comments: Vec, +} diff --git a/server/prisma-rs/libs/datamodel/src/ast/parser/datamodel.pest b/server/prisma-rs/libs/datamodel/src/ast/parser/datamodel.pest new file mode 100644 index 0000000000..aecbe7f946 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/ast/parser/datamodel.pest @@ -0,0 +1,62 @@ +// Global (builtin) expressions + +// Treat every whitespace the same +WHITESPACE = _{ SPACE_SEPARATOR | LINE_SEPARATOR | PARAGRAPH_SEPARATOR | NEWLINE } +// Comment ignores everything until end of line. +// COMMENT = @{ "#" ~ (!NEWLINE ~ ANY)* ~ NEWLINE } + +// Prisma Datamodel expressions + +// Base building blocks +// TODO: Ask for proper format. 
+identifier = @{ ASCII_ALPHA ~ ASCII_ALPHANUMERIC* } + +// Literals +numeric_literal = @{ ("-")? ~ ASCII_DIGIT+ ~("." ~ ASCII_DIGIT+)? } + +string_escaped_predefined = { "n" | "r" | "t" | "\\" | "0" | "\"" | "'" } +string_escape = { "\\" ~ string_escaped_predefined } +string_raw = { (!("\\" | "\"" | NEWLINE) ~ ANY)+ } +string_content = @{ (string_raw | string_escape)* } +string_literal = { "\"" ~ string_content ~ "\"" } + +boolean_true = { "true" } +boolean_false = { "false" } +boolean_literal = @{ boolean_true | boolean_false } + +constant_Literal = @{ ASCII_ALPHA_UPPER+ } // TABLE, EMBED etc. + +any_literal = { numeric_literal | string_literal | boolean_literal | constant_Literal } + +// Directives +directive_argument_name = { (!":" ~ identifier)+ } +directive_argument_value = { any_literal } +directive_argument = { (directive_argument_name ~ ":" ~ directive_argument_value) } +directive_arguments = { "(" ~ ((directive_argument ~ ("," ~ directive_argument)*)?) ~ ")" } +directive_single_argument = { "(" ~ directive_argument_value ~ ")" } +// A directive either has one unnamed argument or any number of named arguments or no argument. +directive = { "@" ~ identifier ~ (directive_arguments | directive_single_argument ) ? } + +// Model declarations - flattend for easy parsing +optional_type = { identifier ~ ("?") } +base_type = { identifier } // Called base type to not conflict with type rust keyword +list_type = { identifier ~ "[]" } + +// Pest is greedy, order is very important here. +field_type = { list_type | optional_type | base_type } +// This is a field link identifier which comes in braces after the type. +field_link = @{ identifier } + +// Field +default_value = { "=" ~ any_literal } +field_declaration = { identifier ~ (":")? ~ field_type ~ ("(" ~ field_link ~ ")")? ~ default_value? 
~ directive* } + +// Model +model_declaration = { "model" ~ identifier ~ "{" ~ field_declaration+ ~ "}" ~ directive* } + +// Enum +enum_field_declaration = @{ ASCII_ALPHA_UPPER+ } +enum_declaration = { "enum" ~ identifier ~ "{" ~ enum_field_declaration+ ~ "}" ~ directive* } + +// Datamodel +datamodel = { SOI ~ (model_declaration | enum_declaration)+ ~ EOI } \ No newline at end of file diff --git a/server/prisma-rs/libs/datamodel/src/ast/parser/mod.rs b/server/prisma-rs/libs/datamodel/src/ast/parser/mod.rs new file mode 100644 index 0000000000..6151d0327d --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/ast/parser/mod.rs @@ -0,0 +1,305 @@ +use pest::Parser; + +// This is how PEG grammars work: +// https://pest.rs/book/grammars/peg.html + +// This is the basic syntax of Pest grammar files: +// https://pest.rs/book/grammars/syntax.html#cheat-sheet + +#[derive(Parser)] +#[grammar = "ast/parser/datamodel.pest"] +pub struct PrismaDatamodelParser; + +use crate::ast::*; + +#[derive(Debug)] +pub struct ParserError { + pub message: String, + pub span: Span, +} + +impl ParserError { + pub fn new(message: &str, span: &Span) -> ParserError { + ParserError { + message: String::from(message), + span: span.clone(), + } + } +} + +impl std::fmt::Display for ParserError { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}, {}", self.message, self.span) + } +} + +impl std::error::Error for ParserError { + fn description(&self) -> &str { + self.message.as_str() + } + + fn cause(&self) -> Option<&std::error::Error> { + None + } +} + +// Macro to match all children in a parse tree +macro_rules! match_children ( + ($token:ident, $current:ident, $($pattern:pat => $result:expr),*) => ( + // Explicit clone, as into_inner consumes the pair. + // We only need a reference to the pair later for logging. 
+ for $current in $token.clone().into_inner() { + match $current.as_rule() { + $( + $pattern => $result + ),* + } + } + ); +); + +// Macro to match the first child in a parse tree +macro_rules! match_first ( + ($token:ident, $current:ident, $($pattern:pat => $result:expr),*) => ( { + // Explicit clone, as into_inner consumes the pair. + // We only need a reference to the pair later for logging. + let $current = $token.clone().into_inner().next().unwrap(); + match $current.as_rule() { + $( + $pattern => $result + ),* + } + } + ); +); + +fn parse_string_literal(token: &pest::iterators::Pair<'_, Rule>) -> String { + return match_first! { token, current, + Rule::string_content => current.as_str().to_string(), + _ => unreachable!("Encountered impossible string content during parsing: {:?}", current.tokens()) + }; +} + +// Literals +fn parse_literal(token: &pest::iterators::Pair<'_, Rule>) -> Value { + return match_first! { token, current, + Rule::numeric_literal => Value::NumericValue(current.as_str().to_string(), Span::from_pest(¤t.as_span())), + Rule::string_literal => Value::StringValue(parse_string_literal(¤t), Span::from_pest(¤t.as_span())), + Rule::boolean_literal => Value::BooleanValue(current.as_str().to_string(), Span::from_pest(¤t.as_span())), + Rule::constant_Literal => Value::ConstantValue(current.as_str().to_string(), Span::from_pest(¤t.as_span())), + _ => unreachable!("Encounterd impossible literal during parsing: {:?}", current.tokens()) + }; +} + +// Directive parsing +fn parse_directive_arg_value(token: &pest::iterators::Pair<'_, Rule>) -> Value { + return match_first! { token, current, + Rule::any_literal => parse_literal(¤t), + _ => unreachable!("Encounterd impossible value during parsing: {:?}", current.tokens()) + }; +} + +fn parse_directive_default_arg(token: &pest::iterators::Pair<'_, Rule>, arguments: &mut Vec) { + match_children! 
{ token, current, + Rule::directive_argument_value => arguments.push(DirectiveArgument { + name: String::from(""), + value: parse_directive_arg_value(¤t), + span: Span::from_pest(¤t.as_span()) + }), + _ => unreachable!("Encounterd impossible directive default argument during parsing: {:?}", current.tokens()) + }; +} + +fn parse_directive_arg(token: &pest::iterators::Pair<'_, Rule>) -> DirectiveArgument { + let mut name: Option = None; + let mut argument: Option = None; + + match_children! { token, current, + Rule::directive_argument_name => name = Some(current.as_str().to_string()), + Rule::directive_argument_value => argument = Some(parse_directive_arg_value(¤t)), + _ => unreachable!("Encounterd impossible directive argument during parsing: {:?}", current.tokens()) + }; + + return match (name, argument) { + (Some(name), Some(value)) => DirectiveArgument { + name: name, + value: value, + span: Span::from_pest(&token.as_span()), + }, + _ => panic!( + "Encounterd impossible directive arg during parsing: {:?}", + token.as_str() + ), + }; +} + +fn parse_directive_args(token: &pest::iterators::Pair<'_, Rule>, arguments: &mut Vec) { + match_children! { token, current, + Rule::directive_argument => arguments.push(parse_directive_arg(¤t)), + _ => unreachable!("Encounterd impossible directive argument during parsing: {:?}", current.tokens()) + } +} + +fn parse_directive(token: &pest::iterators::Pair<'_, Rule>) -> Directive { + let mut name: Option = None; + let mut arguments: Vec = vec![]; + + match_children! 
{ token, current, + Rule::identifier => name = Some(current.as_str().to_string()), + Rule::directive_arguments => parse_directive_args(¤t, &mut arguments), + Rule::directive_single_argument => parse_directive_default_arg(¤t, &mut arguments), + _ => unreachable!("Encounterd impossible directive during parsing: {:?}", current.tokens()) + }; + + return match name { + Some(name) => Directive { + name, + arguments, + span: Span::from_pest(&token.as_span()), + }, + _ => panic!("Encounterd impossible type during parsing: {:?}", token.as_str()), + }; +} + +// Base type parsing +fn parse_base_type(token: &pest::iterators::Pair<'_, Rule>) -> String { + return match_first! { token, current, + Rule::identifier => current.as_str().to_string(), + _ => unreachable!("Encounterd impossible type during parsing: {:?}", current.tokens()) + }; +} + +fn parse_field_type(token: &pest::iterators::Pair<'_, Rule>) -> (FieldArity, String) { + return match_first! { token, current, + Rule::optional_type => (FieldArity::Optional, parse_base_type(¤t)), + Rule::base_type => (FieldArity::Required, parse_base_type(¤t)), + Rule::list_type => (FieldArity::List, parse_base_type(¤t)), + _ => unreachable!("Encounterd impossible field during parsing: {:?}", current.tokens()) + }; +} + +// Field parsing +fn parse_default_value(token: &pest::iterators::Pair<'_, Rule>) -> Value { + return match_first! { token, current, + Rule::any_literal => parse_literal(¤t), + _ => unreachable!("Encounterd impossible value during parsing: {:?}", current.tokens()) + }; +} + +fn parse_field(token: &pest::iterators::Pair<'_, Rule>) -> Field { + let mut name: Option = None; + let mut directives: Vec = vec![]; + let mut default_value: Option = None; + let mut field_type: Option<(FieldArity, String)> = None; + let mut field_link: Option = None; + + match_children! 
{ token, current, + Rule::identifier => name = Some(current.as_str().to_string()), + Rule::field_type => field_type = Some(parse_field_type(¤t)), + Rule::field_link => field_link = Some(current.as_str().to_string()), + Rule::default_value => default_value = Some(parse_default_value(¤t)), + Rule::directive => directives.push(parse_directive(¤t)), + _ => unreachable!("Encounterd impossible field declaration during parsing: {:?}", current.tokens()) + } + + return match (name, field_type) { + (Some(name), Some((arity, field_type))) => Field { + field_type: field_type, + field_link: field_link, + name, + arity, + default_value, + directives, + comments: vec![], + span: Span::from_pest(&token.as_span()) + }, + _ => panic!( + "Encounterd impossible field declaration during parsing: {:?}", + token.as_str() + ), + }; +} + +// Model parsing +fn parse_model(token: &pest::iterators::Pair<'_, Rule>) -> Model { + let mut name: Option = None; + let mut directives: Vec = vec![]; + let mut fields: Vec = vec![]; + + match_children! { token, current, + Rule::identifier => name = Some(current.as_str().to_string()), + Rule::directive => directives.push(parse_directive(¤t)), + Rule::field_declaration => fields.push(parse_field(¤t)), + _ => unreachable!("Encounterd impossible model declaration during parsing: {:?}", current.tokens()) + } + + return match name { + Some(name) => Model { + name, + fields, + directives, + comments: vec![], + }, + _ => panic!( + "Encounterd impossible model declaration during parsing: {:?}", + token.as_str() + ), + }; +} + +// Enum parsing +fn parse_enum(token: &pest::iterators::Pair<'_, Rule>) -> Enum { + let mut name: Option = None; + let mut directives: Vec = vec![]; + let mut values: Vec = vec![]; + + match_children! 
{ token, current, + Rule::identifier => name = Some(current.as_str().to_string()), + Rule::directive => directives.push(parse_directive(¤t)), + Rule::enum_field_declaration => values.push(current.as_str().to_string()), + _ => unreachable!("Encounterd impossible enum declaration during parsing: {:?}", current.tokens()) + } + + return match name { + Some(name) => Enum { + name, + values, + directives, + comments: vec![], + }, + _ => panic!( + "Encounterd impossible enum declaration during parsing: {:?}", + token.as_str() + ), + }; +} + +// Whole datamodel parsing +pub fn parse(datamodel_string: &str) -> Result { + let mut datamodel_result = PrismaDatamodelParser::parse(Rule::datamodel, datamodel_string); + + match datamodel_result { + Ok(mut datamodel_wrapped) => { + let datamodel = datamodel_wrapped.next().unwrap(); + let mut models: Vec = vec![]; + + match_children! { datamodel, current, + Rule::model_declaration => models.push(ModelOrEnum::Model(parse_model(¤t))), + Rule::enum_declaration => models.push(ModelOrEnum::Enum(parse_enum(¤t))), + Rule::EOI => {}, + _ => panic!("Encounterd impossible datamodel declaration during parsing: {:?}", current.tokens()) + } + + Ok(Schema { + models, + comments: vec![], + }) + } + Err(err) => match err.location { + pest::error::InputLocation::Pos(pos) => Err(ParserError::new("Error during parsing", &Span::new(pos, pos))), + pest::error::InputLocation::Span((from, to)) => { + Err(ParserError::new("Error during parsing", &Span::new(from, to))) + } + }, + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/comment.rs b/server/prisma-rs/libs/datamodel/src/dml/comment.rs new file mode 100644 index 0000000000..8e6db1e34c --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/comment.rs @@ -0,0 +1,7 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct Comment { + pub text: String, + pub is_error: bool, +} diff --git 
a/server/prisma-rs/libs/datamodel/src/dml/enummodel.rs b/server/prisma-rs/libs/datamodel/src/dml/enummodel.rs new file mode 100644 index 0000000000..c2858d0dd3 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/enummodel.rs @@ -0,0 +1,29 @@ +use super::comment::*; +use super::traits::*; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct Enum { + pub name: String, + pub values: Vec, + pub comments: Vec, +} + +impl Enum { + pub fn new(name: &str, values: Vec) -> Enum { + Enum { + name: String::from(name), + values: values, + comments: vec![], + } + } +} + +impl WithName for Enum { + fn name(&self) -> &String { + &self.name + } + fn set_name(&mut self, name: &str) { + self.name = String::from(name) + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/field.rs b/server/prisma-rs/libs/datamodel/src/dml/field.rs new file mode 100644 index 0000000000..f39edd6ca2 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/field.rs @@ -0,0 +1,81 @@ +use super::comment::*; +use super::id::*; +use super::relation::*; +use super::scalar::*; +use super::traits::*; +use serde::{Deserialize, Serialize}; + +// This is duplicate for now, but explicitely required +// since we want to seperate ast and dml. +#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum FieldArity { + Required, + Optional, + List, +} + +// TODO: Maybe we include a seperate struct for relations which can be generic? 
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub enum FieldType { + Enum(String), + Relation(RelationInfo), + ConnectorSpecific { + base_type: ScalarType, + connector_type: Option, + }, + Base(ScalarType), +} + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct IdInfo { + pub strategy: IdStrategy, + pub sequence: Option, +} + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct Field { + pub name: String, + pub arity: FieldArity, + pub field_type: FieldType, + pub database_name: Option, + pub default_value: Option, + pub is_unique: bool, + pub id_info: Option, + pub scalar_list_strategy: Option, + pub comments: Vec, +} + +impl WithName for Field { + fn name(&self) -> &String { + &self.name + } + fn set_name(&mut self, name: &str) { + self.name = String::from(name) + } +} + +impl WithDatabaseName for Field { + fn database_name(&self) -> &Option { + &self.database_name + } + fn set_database_name(&mut self, database_name: &Option) { + self.database_name = database_name.clone() + } +} + +impl Field { + pub fn new(name: &str, field_type: FieldType) -> Field { + Field { + name: String::from(name), + arity: FieldArity::Required, + field_type: field_type, + database_name: None, + default_value: None, + is_unique: false, + id_info: None, + scalar_list_strategy: None, + comments: vec![], + } + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/id.rs b/server/prisma-rs/libs/datamodel/src/dml/id.rs new file mode 100644 index 0000000000..1fc4c79653 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/id.rs @@ -0,0 +1,66 @@ +use super::traits::*; +use super::validator::value::ValueParserError; +use crate::ast; +use serde::{Deserialize, Serialize}; + +use std::str::FromStr; + +#[derive(Debug, Copy, PartialEq, Clone, Serialize, Deserialize)] +pub enum IdStrategy { + Auto, + None, +} + +impl FromStr for IdStrategy { + type Err = ValueParserError; + + fn from_str(s: &str) -> Result { + match s { + "AUTO" => 
Ok(IdStrategy::Auto), + "NONE" => Ok(IdStrategy::None), + _ => Err(ValueParserError::new( + &format!("Invalid id strategy {}.", s), + s, + &ast::Span::empty(), + )), + } + } +} + +#[derive(Debug, Copy, PartialEq, Clone, Serialize, Deserialize)] +pub enum ScalarListStrategy { + Embedded, + Relation, +} + +impl FromStr for ScalarListStrategy { + type Err = ValueParserError; + + fn from_str(s: &str) -> Result { + match s { + "EMBEDDED" => Ok(ScalarListStrategy::Embedded), + "RELATION" => Ok(ScalarListStrategy::Relation), + _ => Err(ValueParserError::new( + &format!("Invalid scalar list strategy {}.", s), + s, + &ast::Span::empty(), + )), + } + } +} + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct Sequence { + pub name: String, + pub initial_value: i32, + pub allocation_size: i32, +} + +impl WithName for Sequence { + fn name(&self) -> &String { + &self.name + } + fn set_name(&mut self, name: &str) { + self.name = String::from(name) + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/mod.rs b/server/prisma-rs/libs/datamodel/src/dml/mod.rs new file mode 100644 index 0000000000..17c7886d90 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/mod.rs @@ -0,0 +1,25 @@ +// TODOs to answer together with rust teams: +// * Should this structure be mutatble or immutable? +// * Should this structure contain circular references? (Would make renaming models/fields MUCH easier) +// * How do we handle ocnnector specific settings, like indeces? Maybe inheritance, traits and having a Connector? 
+mod comment; +mod enummodel; +mod field; +mod id; +mod model; +mod relation; +mod scalar; +mod schema; +mod traits; + +pub use comment::*; +pub use enummodel::*; +pub use field::*; +pub use id::*; +pub use model::*; +pub use relation::*; +pub use scalar::*; +pub use schema::*; +pub use traits::*; + +pub mod validator; diff --git a/server/prisma-rs/libs/datamodel/src/dml/model.rs b/server/prisma-rs/libs/datamodel/src/dml/model.rs new file mode 100644 index 0000000000..36317c1fe5 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/model.rs @@ -0,0 +1,59 @@ +use super::comment::*; +use super::field::*; +use super::traits::*; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct Model { + pub name: String, + fields: Vec, + pub comments: Vec, + pub database_name: Option, + pub is_embedded: bool, +} + +impl Model { + pub fn new(name: &str) -> Model { + Model { + name: String::from(name), + fields: vec![], + comments: vec![], + database_name: None, + is_embedded: false, + } + } + + pub fn add_field(&mut self, field: Field) { + self.fields.push(field) + } + + pub fn fields(&self) -> std::slice::Iter { + self.fields.iter() + } + + pub fn fields_mut(&mut self) -> std::slice::IterMut { + self.fields.iter_mut() + } + + pub fn find_field(&self, name: &str) -> Option<&Field> { + self.fields().find(|f| f.name == *name) + } +} + +impl WithName for Model { + fn name(&self) -> &String { + &self.name + } + fn set_name(&mut self, name: &str) { + self.name = String::from(name) + } +} + +impl WithDatabaseName for Model { + fn database_name(&self) -> &Option { + &self.database_name + } + fn set_database_name(&mut self, database_name: &Option) { + self.database_name = database_name.clone() + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/relation.rs b/server/prisma-rs/libs/datamodel/src/dml/relation.rs new file mode 100644 index 0000000000..0e3b892310 --- /dev/null +++ 
b/server/prisma-rs/libs/datamodel/src/dml/relation.rs @@ -0,0 +1,42 @@ +use super::validator::value::ValueParserError; +use serde::{Deserialize, Serialize}; + +use std::str::FromStr; +use crate::ast; + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct RelationInfo { + pub to: String, + pub to_field: String, + pub name: Option, + pub on_delete: OnDeleteStrategy, +} + +impl RelationInfo { + pub fn new(to: &str, to_field: &str) -> RelationInfo { + RelationInfo { + to: String::from(to), + to_field: String::from(to_field), + name: None, + on_delete: OnDeleteStrategy::None, + } + } +} + +#[derive(Debug, Copy, PartialEq, Clone, Serialize, Deserialize)] +pub enum OnDeleteStrategy { + Cascade, + None, +} + +impl FromStr for OnDeleteStrategy { + type Err = ValueParserError; + + fn from_str(s: &str) -> Result { + match s { + "CASCADE" => Ok(OnDeleteStrategy::Cascade), + "NONE" => Ok(OnDeleteStrategy::None), + _ => Err(ValueParserError::new(&format!("Invalid onDelete strategy {}.", s), s, &ast::Span::empty())), + } + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/scalar.rs b/server/prisma-rs/libs/datamodel/src/dml/scalar.rs new file mode 100644 index 0000000000..35fb9e040f --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/scalar.rs @@ -0,0 +1,25 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Copy, PartialEq, Clone, Serialize, Deserialize)] +pub enum ScalarType { + Int, + Float, + Decimal, + Boolean, + String, + DateTime, + Enum, +} + +// TODO, Check if data types are correct +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub enum Value { + Int(i32), + Float(f32), + Decimal(f32), + Boolean(bool), + String(String), + DateTime(DateTime), + ConstantLiteral(String), +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/schema.rs b/server/prisma-rs/libs/datamodel/src/dml/schema.rs new file mode 100644 index 0000000000..82c26833c2 --- /dev/null +++ 
b/server/prisma-rs/libs/datamodel/src/dml/schema.rs @@ -0,0 +1,65 @@ +use super::comment::*; +use super::enummodel::*; +use super::model::*; +use serde::{Deserialize, Serialize}; + +// TODO: Is schema the right name here? +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct Schema { + enums: Vec, + models: Vec, + pub comments: Vec, +} + +impl Schema { + pub fn new() -> Schema { + Schema { + models: vec![], + enums: vec![], + comments: vec![], + } + } + + pub fn empty() -> Schema { + Self::new() + } + + pub fn has_model(&self, name: &str) -> bool { + match self.find_model(name) { + Some(_) => true, + None => false, + } + } + + pub fn add_enum(&mut self, en: Enum) { + self.enums.push(en); + } + + pub fn add_model(&mut self, model: Model) { + self.models.push(model); + } + + pub fn models(&self) -> std::slice::Iter { + self.models.iter() + } + + pub fn enums(&self) -> std::slice::Iter { + self.enums.iter() + } + + pub fn models_mut(&mut self) -> std::slice::IterMut { + self.models.iter_mut() + } + + pub fn enums_mut(&mut self) -> std::slice::IterMut { + self.enums.iter_mut() + } + + pub fn find_model(&self, name: &str) -> Option<&Model> { + self.models().find(|m| m.name == *name) + } + + pub fn find_enum(&self, name: &str) -> Option<&Enum> { + self.enums().find(|m| m.name == *name) + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/traits.rs b/server/prisma-rs/libs/datamodel/src/dml/traits.rs new file mode 100644 index 0000000000..2b3b121f24 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/traits.rs @@ -0,0 +1,11 @@ +// Setters are a bit untypical for rust, +// but we want to have "composeable" struct creation. 
+pub trait WithName { + fn name(&self) -> &String; + fn set_name(&mut self, name: &str); +} + +pub trait WithDatabaseName { + fn database_name(&self) -> &Option; + fn set_database_name(&mut self, database_name: &Option); +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/argument/mod.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/argument/mod.rs new file mode 100644 index 0000000000..eba262d574 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/argument/mod.rs @@ -0,0 +1,43 @@ +use crate::ast; +use crate::dml::validator::value; + +pub struct DirectiveArguments<'a> { + arguments: &'a Vec, + span: ast::Span +} + +impl<'a> DirectiveArguments<'a> { + pub fn new(arguments: &'a Vec, span: ast::Span) -> DirectiveArguments { + DirectiveArguments { arguments: arguments, span: span.clone() } + } + + pub fn span(&self) -> &ast::Span { + &self.span + } + + pub fn arg(&self, name: &str) -> Box { + for arg in self.arguments { + if arg.name == name { + return Box::new(value::WrappedValue { + value: arg.value.clone() + }); + } + } + return Box::new(value::WrappedErrorValue { + message: format!("Argument '{:?}' not found", name), + raw: String::from(""), + span: self.span + }); + } + + pub fn default_arg(&self, name: &str) -> Box { + let arg = self.arg(name); + + if arg.is_valid() { + return arg; + } else { + // Fallback to default arg without name. 
+ return self.arg(""); + } + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/db.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/db.rs new file mode 100644 index 0000000000..8886015243 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/db.rs @@ -0,0 +1,19 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct DbDirectiveValidator {} + +impl DirectiveValidator for DbDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"db" + } + fn validate_and_apply(&self, args: &Args, obj: &mut T) -> Option { + match args.default_arg("name").as_str() { + Ok(value) => obj.set_database_name(&Some(value)), + // self.parser_error would be better here, but we cannot call it due to rust limitations. + Err(err) => return Some(Error::new(&err.message, "db", &err.span)), + }; + + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/default.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/default.rs new file mode 100644 index 0000000000..d5b6366a0b --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/default.rs @@ -0,0 +1,24 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct DefaultDirectiveValidator {} + +impl DirectiveValidator for DefaultDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"default" + } + fn validate_and_apply(&self, args: &Args, field: &mut dml::Field) -> Option { + // TODO: This is most likely duplicate code. + if let dml::FieldType::Base(scalar_type) = field.field_type { + match args.default_arg("value").as_type(&scalar_type) { + // TODO: Here, a default value directive can override the default value syntax sugar. 
+ Ok(value) => field.default_value = Some(value), + Err(err) => return self.parser_error(&err), + } + } else { + return self.error("Cannot set a default value on a non-scalar field.", &args.span()); + } + + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/embedded.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/embedded.rs new file mode 100644 index 0000000000..bbf002dc3d --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/embedded.rs @@ -0,0 +1,14 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct EmbeddedDirectiveValidator {} + +impl DirectiveValidator for EmbeddedDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"embedded" + } + fn validate_and_apply(&self, args: &Args, obj: &mut dml::Model) -> Option { + obj.is_embedded = true; + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/mod.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/mod.rs new file mode 100644 index 0000000000..5d5260b745 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/mod.rs @@ -0,0 +1,94 @@ +use crate::ast; +use crate::dml; +use crate::dml::validator::argument::DirectiveArguments; +use crate::dml::validator::directive::{DirectiveValidator, DirectiveValidationError}; + +use std::collections::HashMap; + +mod db; +mod default; +mod embedded; +mod ondelete; +mod primary; +mod relation; +mod scalarlist; +mod sequence; +mod unique; + +// TODO: This should not be in the builtin mod. 
+pub struct DirectiveListValidator { + known_directives: HashMap<&'static str, Box>>, +} + +impl DirectiveListValidator { + pub fn new() -> Self { + DirectiveListValidator { + known_directives: HashMap::new(), + } + } + + pub fn add(&mut self, validator: Box>) { + let name = validator.directive_name(); + + if self.known_directives.contains_key(name) { + panic!("Duplicate directive definition: {:?}", name); + } + + self.known_directives.insert(name, validator); + } + + pub fn validate_and_apply(&self, ast: &ast::WithDirectives, t: &mut T) -> Vec { + let mut errors = Vec::::new(); + + for directive in ast.directives() { + match self.known_directives.get(directive.name.as_str()) { + Some(validator) => { + if let Some(err) = validator.validate_and_apply(&DirectiveArguments::new(&directive.arguments, directive.span), t) { + errors.push(err); + } + } + None => errors.push(DirectiveValidationError::new("Encountered unknown directive", &directive.name, &directive.span)) + }; + } + + errors + } +} + +pub fn new_builtin_field_directives() -> DirectiveListValidator { + let mut validator = DirectiveListValidator:: { + known_directives: HashMap::new(), + }; + + validator.add(Box::new(db::DbDirectiveValidator {})); + validator.add(Box::new(primary::PrimaryDirectiveValidator {})); + validator.add(Box::new(scalarlist::ScalarListDirectiveValidator {})); + validator.add(Box::new(sequence::SequenceDirectiveValidator {})); + validator.add(Box::new(unique::UniqueDirectiveValidator {})); + validator.add(Box::new(default::DefaultDirectiveValidator {})); + validator.add(Box::new(relation::RelationDirectiveValidator {})); + validator.add(Box::new(ondelete::OnDeleteDirectiveValidator {})); + + return validator; +} + +pub fn new_builtin_model_directives() -> DirectiveListValidator { + let mut validator = DirectiveListValidator:: { + known_directives: HashMap::new(), + }; + + validator.add(Box::new(db::DbDirectiveValidator {})); + validator.add(Box::new(embedded::EmbeddedDirectiveValidator 
{})); + + return validator; +} + +pub fn new_builtin_enum_directives() -> DirectiveListValidator { + let mut validator = DirectiveListValidator:: { + known_directives: HashMap::new(), + }; + + // Adds are missing + + return validator; +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/ondelete.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/ondelete.rs new file mode 100644 index 0000000000..8166f76d75 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/ondelete.rs @@ -0,0 +1,21 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct OnDeleteDirectiveValidator {} + +impl DirectiveValidator for OnDeleteDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"onDelete" + } + fn validate_and_apply(&self, args: &Args, field: &mut dml::Field) -> Option { + if let Ok(strategy) = args.arg("strategy").as_constant_literal() { + match (strategy.parse::(), &mut field.field_type) { + (Ok(strategy), dml::FieldType::Relation(relation_info)) => relation_info.on_delete = strategy, + (Err(err), _) => return self.parser_error(&err), + _ => return self.error("Invalid field type, not a relation.", &args.span()), + } + } + + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/primary.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/primary.rs new file mode 100644 index 0000000000..1887123ffb --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/primary.rs @@ -0,0 +1,27 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct PrimaryDirectiveValidator {} + +impl DirectiveValidator for PrimaryDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"primary" + } + fn validate_and_apply(&self, args: &Args, obj: &mut dml::Field) -> Option { + let mut id_info = 
dml::IdInfo { + strategy: dml::IdStrategy::Auto, + sequence: None, + }; + + if let Ok(strategy) = args.arg("name").as_constant_literal() { + match strategy.parse::() { + Ok(strategy) => id_info.strategy = strategy, + Err(err) => return self.parser_error(&err), + } + } + + obj.id_info = Some(id_info); + + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/relation.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/relation.rs new file mode 100644 index 0000000000..db84dfaf89 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/relation.rs @@ -0,0 +1,21 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct RelationDirectiveValidator {} + +impl DirectiveValidator for RelationDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"relation" + } + fn validate_and_apply(&self, args: &Args, field: &mut dml::Field) -> Option { + if let Ok(name) = args.arg("name").as_str() { + match &mut field.field_type { + // TODO: Check if name is already set. 
+ dml::FieldType::Relation(relation_info) => relation_info.name = Some(name), + _ => return self.error("Invalid field type, not a relation.", &args.span()), + } + } + + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/scalarlist.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/scalarlist.rs new file mode 100644 index 0000000000..b1159faf85 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/scalarlist.rs @@ -0,0 +1,23 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct ScalarListDirectiveValidator {} + +impl DirectiveValidator for ScalarListDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"scalarList" + } + fn validate_and_apply(&self, args: &Args, obj: &mut dml::Field) -> Option { + // TODO: Throw when field is not of type scalar and arity is list. + // TODO: We can probably lift this pattern to a macro. 
+ + if let Ok(strategy) = args.arg("strategy").as_constant_literal() { + match strategy.parse::() { + Ok(strategy) => obj.scalar_list_strategy = Some(strategy), + Err(err) => return self.parser_error(&err), + } + } + + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/sequence.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/sequence.rs new file mode 100644 index 0000000000..eb70546782 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/sequence.rs @@ -0,0 +1,37 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct SequenceDirectiveValidator {} + +impl DirectiveValidator for SequenceDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"sequence" + } + fn validate_and_apply(&self, args: &Args, obj: &mut dml::Field) -> Option { + // TODO: Handle fields according to tests: + // https://github.com/prisma/prisma/blob/master/server/servers/deploy/src/test/scala/com/prisma/deploy/migration/validation/SequenceDirectiveSpec.scala + + let mut seq = dml::Sequence { + name: "".to_string(), + allocation_size: 0, + initial_value: 0, + }; + + match args.arg("name").as_str() { + Ok(name) => seq.name = name, + Err(err) => return self.parser_error(&err), + } + + match args.arg("allocationSize").as_int() { + Ok(allocation_size) => seq.allocation_size = allocation_size, + Err(err) => return self.parser_error(&err), + } + + match args.arg("initialValue").as_int() { + Ok(initial_value) => seq.initial_value = initial_value, + Err(err) => return self.parser_error(&err), + } + + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/unique.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/unique.rs new file mode 100644 index 0000000000..7d18c904d3 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/unique.rs 
@@ -0,0 +1,14 @@ +use crate::dml; +use crate::dml::validator::directive::{Args, DirectiveValidator, Error}; + +pub struct UniqueDirectiveValidator {} + +impl DirectiveValidator for UniqueDirectiveValidator { + fn directive_name(&self) -> &'static str { + &"unique" + } + fn validate_and_apply(&self, args: &Args, obj: &mut dml::Field) -> Option { + obj.is_unique = true; + return None; + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/directive/mod.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/mod.rs new file mode 100644 index 0000000000..94f64363b6 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/directive/mod.rs @@ -0,0 +1,60 @@ +use crate::dml; +use std::fmt; +use crate::ast; + +pub mod builtin; + +#[derive(Debug)] +pub struct DirectiveValidationError { + pub message: String, + pub directive_name: String, + pub span: ast::Span +} + +impl DirectiveValidationError { + pub fn new(message: &str, directive_name: &str, span: &ast::Span) -> DirectiveValidationError { + DirectiveValidationError { + message: String::from(message), + directive_name: String::from(directive_name), + span: span.clone() + } + } +} + +impl fmt::Display for DirectiveValidationError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.message) + } +} + +impl std::error::Error for DirectiveValidationError { + fn description(&self) -> &str { + self.message.as_str() + } + + fn cause(&self) -> Option<&std::error::Error> { + None + } +} + +pub type Error = DirectiveValidationError; +pub type Args<'a> = dml::validator::argument::DirectiveArguments<'a>; + +// TODO Narrow to type, enum, field, if possible +pub trait DirectiveValidator { + fn directive_name(&self) -> &'static str; + // TODO: Proper error type + fn validate_and_apply(&self, args: &Args, obj: &mut T) -> Option; + + fn error(&self, msg: &str, span: &ast::Span) -> Option { + Some(Error::new(msg, self.directive_name(), span)) + } + + fn parser_error(&self, 
err: &dml::validator::value::ValueParserError) -> Option { + Some(Error::new(&err.message, self.directive_name(), &err.span)) + } +} + +pub trait ModelDirectiveValidator: DirectiveValidator {} +pub trait EnumDirectiveValidator: DirectiveValidator {} +pub trait FieldDirectiveValidator: DirectiveValidator {} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/mod.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/mod.rs new file mode 100644 index 0000000000..0d95237752 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/mod.rs @@ -0,0 +1,139 @@ +use crate::{ast, dml}; + +pub mod argument; +pub mod directive; +pub mod value; + +use directive::DirectiveValidationError; +use directive::builtin::{new_builtin_enum_directives, new_builtin_field_directives, new_builtin_model_directives, DirectiveListValidator}; +use value::{ValueValidator, WrappedValue}; + +pub trait DirectiveSource { + fn get_directives(validator: &mut DirectiveListValidator); +} + +// TODO: Naming +pub struct Validator { + field_directives: DirectiveListValidator, + model_directives: DirectiveListValidator, + enum_directives: DirectiveListValidator, +} + +impl Validator { + pub fn new() -> Self { + Validator { + field_directives: new_builtin_field_directives(), + model_directives: new_builtin_model_directives(), + enum_directives: new_builtin_enum_directives() + } + } + + pub fn validate(&self, ast_schema: &ast::Schema) -> Result> { + let mut schema = dml::Schema::new(); + + for ast_obj in &ast_schema.models { + match ast_obj { + ast::ModelOrEnum::Enum(en) => schema.add_enum(self.validate_enum(&en)?), + ast::ModelOrEnum::Model(ty) => schema.add_model(self.validate_model(&ty, ast_schema)?), + } + } + + return Ok(schema); + } + + fn validate_model(&self, ast_model: &ast::Model, ast_schema: &ast::Schema) -> Result> { + let mut model = dml::Model::new(&ast_model.name); + + for ast_field in &ast_model.fields { + model.add_field(self.validate_field(ast_field, ast_schema)?); + } + 
+ let errs = self.model_directives.validate_and_apply(ast_model, &mut model); + + if(errs.len() > 0) { + return Err(errs); + } + + return Ok(model); + } + + fn validate_enum(&self, ast_enum: &ast::Enum) -> Result> { + let mut en = dml::Enum::new(&ast_enum.name, ast_enum.values.clone()); + + let errs = self.enum_directives.validate_and_apply(ast_enum, &mut en); + + if(errs.len() > 0) { + return Err(errs); + } + + return Ok(en); + } + + fn validate_field(&self, ast_field: &ast::Field, ast_schema: &ast::Schema) -> Result> { + let field_type = self.validate_field_type(&ast_field.field_type, &ast_field.span, ast_schema)?; + + let mut field = dml::Field::new(&ast_field.name, field_type.clone()); + + field.arity = self.validate_field_arity(&ast_field.arity); + + if let Some(value) = &ast_field.default_value { + if let dml::FieldType::Base(base_type) = &field_type { + // TODO: Proper error handling. + // TODO: WrappedValue is not the tool of choice here, + // there should be a static func for converting stuff. 
+ field.default_value = Some( + (WrappedValue { value: value.clone() }) + .as_type(base_type) + .expect("Unable to parse."), + ); + } else { + unimplemented!("Found a default value for a non-scalar type.") + } + } + + let errs = self.field_directives.validate_and_apply(ast_field, &mut field); + + if(errs.len() > 0) { + return Err(errs); + } + + return Ok(field); + } + + fn validate_field_arity(&self, ast_field: &ast::FieldArity) -> dml::FieldArity { + match ast_field { + ast::FieldArity::Required => dml::FieldArity::Required, + ast::FieldArity::Optional => dml::FieldArity::Optional, + ast::FieldArity::List => dml::FieldArity::List, + } + } + + fn validate_field_type(&self, type_name: &str, span: &ast::Span, ast_schema: &ast::Schema) -> Result> { + match type_name { + "ID" => Ok(dml::FieldType::Base(dml::ScalarType::Int)), + "Int" => Ok(dml::FieldType::Base(dml::ScalarType::Int)), + "Float" => Ok(dml::FieldType::Base(dml::ScalarType::Float)), + "Decimal" => Ok(dml::FieldType::Base(dml::ScalarType::Decimal)), + "Boolean" => Ok(dml::FieldType::Base(dml::ScalarType::Boolean)), + "String" => Ok(dml::FieldType::Base(dml::ScalarType::String)), + "DateTime" => Ok(dml::FieldType::Base(dml::ScalarType::DateTime)), + // Distinguish between relation and enum. + _ => { + for model in &ast_schema.models { + match &model { + // TODO: Get primary key field and hook up String::from. 
+ ast::ModelOrEnum::Model(model) if model.name == *type_name => { + return Ok(dml::FieldType::Relation(dml::RelationInfo::new(&type_name, ""))) + } + ast::ModelOrEnum::Enum(en) if en.name == *type_name => { + return Ok(dml::FieldType::Enum(String::from(type_name))) + } + _ => {} + } + } + + Err(vec![DirectiveValidationError::new("Unknown type encountered.", "", span)]) + } + } + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dml/validator/value/mod.rs b/server/prisma-rs/libs/datamodel/src/dml/validator/value/mod.rs new file mode 100644 index 0000000000..6315da8b18 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dml/validator/value/mod.rs @@ -0,0 +1,242 @@ +use crate::ast; +use crate::dml; + +use chrono::{DateTime, Utc}; +use std::error; +use std::error::Error; +use std::fmt; + + +// TODO: This class is terrible and should most likely return an instance of Result<> + +#[derive(Debug)] +pub struct ValueParserError { + pub message: String, + pub raw: String, + pub span: ast::Span, +} + +impl ValueParserError { + pub fn wrap( + result: Result, + raw_value: &str, + span: &ast::Span, + ) -> Result { + match result { + Ok(val) => Ok(val), + Err(err) => Err(ValueParserError::new(err.description(), raw_value, span)), + } + } + + pub fn new(message: &str, raw: &str, span: &ast::Span) -> ValueParserError { + ValueParserError { + message: String::from(message), + raw: String::from(raw), + span: span.clone(), + } + } +} + +impl fmt::Display for ValueParserError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}, Value: {}, Location: {}", self.message, self.raw, self.span) + } +} + +impl error::Error for ValueParserError { + fn description(&self) -> &str { + self.message.as_str() + } + + fn cause(&self) -> Option<&error::Error> { + None + } +} + +macro_rules! 
wrap_value ( + ($value:expr, $wrapper:expr, $raw:expr, $span:expr) => ({ + match $value { + Ok(val) => Ok($wrapper(val)), + Err(err) => Err(ValueParserError::new(err.description(), $raw, $span)) + } + }) +); + +pub trait ValueValidator { + fn is_valid(&self) -> bool; + + fn raw(&self) -> &String; + fn span(&self) -> &ast::Span; + fn as_str(&self) -> Result; + fn as_int(&self) -> Result; + fn as_float(&self) -> Result; + fn as_decimal(&self) -> Result; + fn as_bool(&self) -> Result; + fn as_date_time(&self) -> Result, ValueParserError>; + fn as_constant_literal(&self) -> Result; + + fn as_type(&self, scalar_type: &dml::ScalarType) -> Result { + match scalar_type { + dml::ScalarType::Int => wrap_value!(self.as_int(), dml::Value::Int, self.raw(), self.span()), + dml::ScalarType::Float => wrap_value!(self.as_float(), dml::Value::Float, self.raw(), self.span()), + dml::ScalarType::Decimal => wrap_value!(self.as_decimal(), dml::Value::Decimal, self.raw(), self.span()), + dml::ScalarType::Boolean => wrap_value!(self.as_bool(), dml::Value::Boolean, self.raw(), self.span()), + dml::ScalarType::DateTime => { + wrap_value!(self.as_date_time(), dml::Value::DateTime, self.raw(), self.span()) + } + dml::ScalarType::Enum => wrap_value!(self.as_str(), dml::Value::ConstantLiteral, self.raw(), self.span()), + dml::ScalarType::String => wrap_value!(self.as_str(), dml::Value::String, self.raw(), self.span()), + } + } +} + +// TODO: Inject error accumulation. +// TODO: Inject location (line etc.) information into error type. 
+pub struct WrappedValue { + pub value: ast::Value, +} + +impl ValueValidator for WrappedValue { + fn is_valid(&self) -> bool { + true + } + + fn raw(&self) -> &String { + match &self.value { + ast::Value::StringValue(x, _) => x, + ast::Value::NumericValue(x, _) => x, + ast::Value::BooleanValue(x, _) => x, + ast::Value::ConstantValue(x, _) => x, + } + } + + fn span(&self) -> &ast::Span { + match &self.value { + ast::Value::StringValue(_, s) => s, + ast::Value::NumericValue(_, s) => s, + ast::Value::BooleanValue(_, s) => s, + ast::Value::ConstantValue(_, s) => s, + } + } + + fn as_str(&self) -> Result { + match &self.value { + ast::Value::StringValue(value, _) => Ok(value.to_string()), + _ => Err(ValueParserError::new( + &format!("Expected String Value, received {:?}", self.value), + self.raw(), + self.span(), + )), + } + } + + fn as_int(&self) -> Result { + match &self.value { + ast::Value::NumericValue(value, span) => ValueParserError::wrap(value.parse::(), value, span), + _ => Err(ValueParserError::new( + &format!("Expected Numeric Value, received {:?}", self.value), + self.raw(), + self.span(), + )), + } + } + + fn as_float(&self) -> Result { + match &self.value { + ast::Value::NumericValue(value, span) => ValueParserError::wrap(value.parse::(), value, span), + _ => Err(ValueParserError::new( + &format!("Expected Numeric Value, received {:?}", self.value), + self.raw(), + self.span(), + )), + } + } + + // TODO: Ask which decimal type to take. 
+ fn as_decimal(&self) -> Result { + match &self.value { + ast::Value::NumericValue(value, span) => ValueParserError::wrap(value.parse::(), value, span), + _ => Err(ValueParserError::new( + &format!("Expected Numeric Value, received {:?}", self.value), + self.raw(), + self.span(), + )), + } + } + + fn as_bool(&self) -> Result { + match &self.value { + ast::Value::BooleanValue(value, span) => ValueParserError::wrap(value.parse::(), value, span), + _ => Err(ValueParserError::new( + &format!("Expected Boolean Value, received {:?}", self.value), + self.raw(), + self.span(), + )), + } + } + + // TODO: Ask which datetime type to use. + fn as_date_time(&self) -> Result, ValueParserError> { + match &self.value { + ast::Value::StringValue(value, span) => ValueParserError::wrap(value.parse::>(), value, span), + _ => Err(ValueParserError::new( + &format!("Expected String Value, received {:?}", self.value), + self.raw(), + self.span(), + )), + } + } + + fn as_constant_literal(&self) -> Result { + match &self.value { + ast::Value::ConstantValue(value, _) => Ok(value.to_string()), + _ => Err(ValueParserError::new( + &format!("Expected Constant Value, received {:?}", self.value), + self.raw(), + self.span(), + )), + } + } +} + +pub struct WrappedErrorValue { + pub message: String, + pub raw: String, + pub span: ast::Span, +} + +impl ValueValidator for WrappedErrorValue { + fn is_valid(&self) -> bool { + false + } + + fn raw(&self) -> &String { + &self.raw + } + + fn span(&self) -> &ast::Span { + &self.span + } + + fn as_str(&self) -> Result { + Err(ValueParserError::new(&self.message, &self.raw, &self.span)) + } + fn as_int(&self) -> Result { + Err(ValueParserError::new(&self.message, &self.raw, &self.span)) + } + fn as_float(&self) -> Result { + Err(ValueParserError::new(&self.message, &self.raw, &self.span)) + } + fn as_decimal(&self) -> Result { + Err(ValueParserError::new(&self.message, &self.raw, &self.span)) + } + fn as_bool(&self) -> Result { + 
Err(ValueParserError::new(&self.message, &self.raw, &self.span)) + } + fn as_date_time(&self) -> Result, ValueParserError> { + Err(ValueParserError::new(&self.message, &self.raw, &self.span)) + } + fn as_constant_literal(&self) -> Result { + Err(ValueParserError::new(&self.message, &self.raw, &self.span)) + } +} diff --git a/server/prisma-rs/libs/datamodel/src/dmmf/mod.rs b/server/prisma-rs/libs/datamodel/src/dmmf/mod.rs new file mode 100644 index 0000000000..f243880b6e --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/dmmf/mod.rs @@ -0,0 +1,132 @@ +use crate::dml; +use serde; +use serde_json; + +// This is a simple JSON serialization using Serde. +// The JSON format follows the DMMF spec, but is incomplete. + +#[derive(Debug, serde::Serialize)] +pub struct Field { + pub name: String, + pub kind: String, + pub dbName: Option, + pub arity: String, + pub isUnique: bool, + #[serde(rename = "type")] + pub field_type: String, +} + +#[derive(Debug, serde::Serialize)] +pub struct Model { + pub isEnum: bool, + pub name: String, + pub isEmbedded: bool, + pub dbName: Option, + pub fields: Vec, +} + +#[derive(Debug, serde::Serialize)] +pub struct Enum { + pub isEnum: bool, + pub name: String, + pub values: Vec, +} + +#[derive(Debug, serde::Serialize)] +pub struct Datamodel { + pub models: Vec, +} + +fn get_field_kind(field: &dml::Field) -> String { + match field.field_type { + dml::FieldType::Relation(_) => String::from("relation"), + dml::FieldType::Enum(_) => String::from("enum"), + dml::FieldType::Base(_) => String::from("scalar"), + _ => unimplemented!("DMMF does not support field type {:?}", field.field_type), + } +} + +fn type_to_string(scalar: &dml::ScalarType) -> String { + match scalar { + dml::ScalarType::Int => String::from("Int"), + dml::ScalarType::Decimal => String::from("Decimal"), + dml::ScalarType::Float => String::from("Float"), + dml::ScalarType::Boolean => String::from("Boolean"), + dml::ScalarType::String => String::from("String"), + 
dml::ScalarType::DateTime => String::from("DateTime"), + dml::ScalarType::Enum => panic!("Enum is an internally used type and should never be rendered."), + } +} + +fn get_field_type(field: &dml::Field) -> String { + match &field.field_type { + dml::FieldType::Relation(relation_info) => relation_info.to.clone(), + dml::FieldType::Enum(t) => t.clone(), + dml::FieldType::Base(t) => type_to_string(t), + dml::FieldType::ConnectorSpecific { + base_type: t, + connector_type: _, + } => type_to_string(t), + } +} + +fn get_field_arity(field: &dml::Field) -> String { + match field.arity { + dml::FieldArity::Required => String::from("required"), + dml::FieldArity::Optional => String::from("optional"), + dml::FieldArity::List => String::from("list"), + } +} + +pub fn enum_to_dmmf(en: &dml::Enum) -> Enum { + Enum { + name: en.name.clone(), + values: en.values.clone(), + isEnum: true, + } +} + +pub fn field_to_dmmf(field: &dml::Field) -> Field { + Field { + name: field.name.clone(), + kind: get_field_kind(field), + dbName: field.database_name.clone(), + arity: get_field_arity(field), + isUnique: field.is_unique, + field_type: get_field_type(field), + } +} + +pub fn model_to_dmmf(model: &dml::Model) -> Model { + Model { + name: model.name.clone(), + dbName: model.database_name.clone(), + isEmbedded: model.is_embedded, + fields: model.fields().map(&field_to_dmmf).collect(), + isEnum: false, + } +} + +pub fn schema_to_dmmf(schema: &dml::Schema) -> Datamodel { + let mut datamodel = Datamodel { models: vec![] }; + + for model in schema.models() { + datamodel + .models + .push(serde_json::to_value(&model_to_dmmf(&model)).expect("Failed to render enum")) + } + + for enum_model in schema.enums() { + datamodel + .models + .push(serde_json::to_value(&enum_to_dmmf(&enum_model)).expect("Failed to render enum")) + } + + return datamodel; +} + +pub fn render_to_dmmf(schema: &dml::Schema) -> String { + let dmmf = schema_to_dmmf(schema); + + return 
serde_json::to_string_pretty(&dmmf).expect("Failed to render JSON"); +} diff --git a/server/prisma-rs/libs/datamodel/src/lib.rs b/server/prisma-rs/libs/datamodel/src/lib.rs new file mode 100644 index 0000000000..e2642e343c --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/lib.rs @@ -0,0 +1,10 @@ +pub mod ast; +pub use ast::parser; +pub mod dml; +pub use dml::validator::Validator; +pub use dml::*; + +// Pest grammar generation on compile time. +extern crate pest; +#[macro_use] +extern crate pest_derive; diff --git a/server/prisma-rs/libs/datamodel/src/main.rs b/server/prisma-rs/libs/datamodel/src/main.rs new file mode 100644 index 0000000000..c7026952ee --- /dev/null +++ b/server/prisma-rs/libs/datamodel/src/main.rs @@ -0,0 +1,72 @@ +use std::fs; + +pub mod ast; +pub mod dmmf; +use ast::parser; +pub mod dml; +use dml::validator::Validator; + +// Pest grammar generation on compile time. +extern crate pest; +#[macro_use] +extern crate pest_derive; + +extern crate clap; +use clap::{App, Arg, SubCommand}; + +fn main() { + let formats = ["sorenbs", "matthewmueller"]; + + let matches = App::new("Prisma Datamodel Playground") + .version("0.1") + .author("Emanuel Jöbstl ") + .about("Alpha implementation of different datamodel definition grammars.") + .arg( + Arg::with_name("INPUT") + .help("Sets the input datamodel file to use") + .required(true) + .index(1), + ) + .get_matches(); + + let file_name = matches.value_of("INPUT").unwrap(); + let file = fs::read_to_string(&file_name).expect(&format!("Unable to open file {}", file_name)); + + match parser::parse(&file) { + Ok(ast) => { + let validator = Validator::new(); + + match validator.validate(&ast) { + Ok(dml) => { + let json = dmmf::render_to_dmmf(&dml); + println!("{}", json); + } + Err(errors) => { + for error in errors { + println!(""); + println!("Error: {}", error.message); + println!("File: {}:", file_name); + println!(""); + let line = &file[..error.span.end].matches("\n").count(); + let text = 
&file[error.span.start..error.span.end]; + println!("{} | {}", line, text); + println!(""); + } + } + } + } + Err(error) => { + println!(""); + println!("Error while parsing, unexpected token"); + println!("File: {}:", file_name); + println!(""); + let line = &file[..error.span.end].matches("\n").count(); + let text = file.split("\n").collect::>()[*line]; + println!("{} | {}", line, text); + println!(""); + } + } + + + +} diff --git a/server/prisma-rs/libs/datamodel/tests/base_types.rs b/server/prisma-rs/libs/datamodel/tests/base_types.rs new file mode 100644 index 0000000000..0bee018180 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/tests/base_types.rs @@ -0,0 +1,96 @@ +mod common; +use common::*; +use datamodel::dml; + +#[test] +fn parse_scalar_types() { + let dml = r#" + model User { + firstName: String + age: Int + isPro: Boolean + balance: Decimal + averageGrade: Float + } + "#; + + let schema = parse_and_validate(dml); + let user_model = schema.assert_has_model("User"); + user_model + .assert_has_field("firstName") + .assert_base_type(&dml::ScalarType::String); + user_model + .assert_has_field("age") + .assert_base_type(&dml::ScalarType::Int); + user_model + .assert_has_field("isPro") + .assert_base_type(&dml::ScalarType::Boolean); + user_model + .assert_has_field("balance") + .assert_base_type(&dml::ScalarType::Decimal); + user_model + .assert_has_field("averageGrade") + .assert_base_type(&dml::ScalarType::Float); +} + +#[test] +fn parse_field_arity() { + let dml = r#" + model Post { + text: String + photo: String? 
+ comments: String[] + } + "#; + + let schema = parse_and_validate(dml); + let post_model = schema.assert_has_model("Post"); + post_model + .assert_has_field("text") + .assert_base_type(&dml::ScalarType::String) + .assert_arity(&dml::FieldArity::Required); + post_model + .assert_has_field("photo") + .assert_base_type(&dml::ScalarType::String) + .assert_arity(&dml::FieldArity::Optional); + post_model + .assert_has_field("comments") + .assert_base_type(&dml::ScalarType::String) + .assert_arity(&dml::FieldArity::List); +} + +#[test] +fn parse_defaults() { + let dml = r#" + model User { + firstName: String = "Hello" + age: Int = 21 + isPro: Boolean = false + balance: Decimal = 1.2 + averageGrade: Float = 3.4 + } + "#; + + let schema = parse_and_validate(dml); + let user_model = schema.assert_has_model("User"); + user_model + .assert_has_field("firstName") + .assert_base_type(&dml::ScalarType::String) + .assert_default_value(dml::Value::String(String::from("Hello"))); + user_model + .assert_has_field("age") + .assert_base_type(&dml::ScalarType::Int) + .assert_default_value(dml::Value::Int(21)); + user_model + .assert_has_field("isPro") + .assert_base_type(&dml::ScalarType::Boolean) + .assert_default_value(dml::Value::Boolean(false)); + user_model + .assert_has_field("balance") + .assert_base_type(&dml::ScalarType::Decimal) + .assert_default_value(dml::Value::Decimal(1.2)); + user_model + .assert_has_field("averageGrade") + .assert_base_type(&dml::ScalarType::Float) + .assert_default_value(dml::Value::Float(3.4)); +} diff --git a/server/prisma-rs/libs/datamodel/tests/basic.rs b/server/prisma-rs/libs/datamodel/tests/basic.rs new file mode 100644 index 0000000000..8e0bdc4855 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/tests/basic.rs @@ -0,0 +1,38 @@ +mod common; +use common::*; +use datamodel::dml; + +#[test] +fn parse_basic_model() { + let dml = r#" + model User { + firstName: String + lastName: String + } + "#; + + let schema = parse_and_validate(dml); + let 
user_model = schema.assert_has_model("User"); + user_model.assert_is_embedded(false); + user_model + .assert_has_field("firstName") + .assert_base_type(&dml::ScalarType::String); + user_model + .assert_has_field("lastName") + .assert_base_type(&dml::ScalarType::String); +} + +#[test] +fn parse_basic_enum() { + let dml = r#" + enum Roles { + ADMIN + USER + } + "#; + + let schema = parse_and_validate(dml); + let role_enum = schema.assert_has_enum("Roles"); + role_enum.assert_has_value("ADMIN"); + role_enum.assert_has_value("USER"); +} diff --git a/server/prisma-rs/libs/datamodel/tests/builtin_directives.rs b/server/prisma-rs/libs/datamodel/tests/builtin_directives.rs new file mode 100644 index 0000000000..9b5fd8d3ba --- /dev/null +++ b/server/prisma-rs/libs/datamodel/tests/builtin_directives.rs @@ -0,0 +1,27 @@ +mod common; +use common::*; +use datamodel::dml; + +#[test] +fn db_directive() { + let dml = r#" + model User { + firstName: String @db("first_name") + } + @db("user") + + model Post { + text: String @db(name: "post_text") + } + @db(name: "posti") + "#; + + let schema = parse_and_validate(dml); + let user_model = schema.assert_has_model("User").assert_with_db_name("user"); + user_model + .assert_has_field("firstName") + .assert_with_db_name("first_name"); + + let post_model = schema.assert_has_model("Post").assert_with_db_name("posti"); + post_model.assert_has_field("text").assert_with_db_name("post_text"); +} diff --git a/server/prisma-rs/libs/datamodel/tests/common.rs b/server/prisma-rs/libs/datamodel/tests/common.rs new file mode 100644 index 0000000000..46dae130bf --- /dev/null +++ b/server/prisma-rs/libs/datamodel/tests/common.rs @@ -0,0 +1,135 @@ +extern crate datamodel; + +use datamodel::dml; +use datamodel::Validator; + +pub trait FieldAsserts { + fn assert_base_type(&self, t: &dml::ScalarType) -> &Self; + fn assert_enum_type(&self, en: &str) -> &Self; + fn assert_relation_to(&self, t: &str) -> &Self; + fn assert_relation_to_field(&self, t: &str) -> 
&Self; + fn assert_arity(&self, arity: &dml::FieldArity) -> &Self; + fn assert_with_db_name(&self, t: &str) -> &Self; + fn assert_default_value(&self, t: dml::Value) -> &Self; +} + +pub trait ModelAsserts { + fn assert_has_field(&self, t: &str) -> &dml::Field; + fn assert_is_embedded(&self, t: bool) -> &Self; + fn assert_with_db_name(&self, t: &str) -> &Self; +} + +pub trait EnumAsserts { + fn assert_has_value(&self, t: &str) -> &Self; +} + +pub trait SchemaAsserts { + fn assert_has_model(&self, t: &str) -> &dml::Model; + fn assert_has_enum(&self, t: &str) -> &dml::Enum; +} + +impl FieldAsserts for dml::Field { + fn assert_base_type(&self, t: &dml::ScalarType) -> &Self { + if let dml::FieldType::Base(base_type) = &self.field_type { + assert_eq!(base_type, t); + } else { + panic!("Scalar expected, but found {:?}", self.field_type); + } + + return self; + } + + fn assert_enum_type(&self, en: &str) -> &Self { + if let dml::FieldType::Enum(enum_type) = &self.field_type { + assert_eq!(enum_type, en); + } else { + panic!("Enum expected, but found {:?}", self.field_type); + } + + return self; + } + + fn assert_relation_to(&self, t: &str) -> &Self { + if let dml::FieldType::Relation(info) = &self.field_type { + assert_eq!(info.to, t); + } else { + panic!("Relation expected, but found {:?}", self.field_type); + } + + return self; + } + + fn assert_relation_to_field(&self, t: &str) -> &Self { + if let dml::FieldType::Relation(info) = &self.field_type { + assert_eq!(info.to_field, t); + } else { + panic!("Relation expected, but found {:?}", self.field_type); + } + + return self; + } + + fn assert_arity(&self, arity: &dml::FieldArity) -> &Self { + assert_eq!(self.arity, *arity); + + return self; + } + + fn assert_with_db_name(&self, t: &str) -> &Self { + assert_eq!(self.database_name, Some(String::from(t))); + + return self; + } + + fn assert_default_value(&self, t: dml::Value) -> &Self { + assert_eq!(self.default_value, Some(t)); + + return self; + } +} + +impl SchemaAsserts 
for dml::Schema { + fn assert_has_model(&self, t: &str) -> &dml::Model { + self.find_model(&String::from(t)) + .expect(format!("Model {} not found", t).as_str()) + } + fn assert_has_enum(&self, t: &str) -> &dml::Enum { + self.find_enum(&String::from(t)) + .expect(format!("Enum {} not found", t).as_str()) + } +} + +impl ModelAsserts for dml::Model { + fn assert_has_field(&self, t: &str) -> &dml::Field { + self.find_field(&String::from(t)) + .expect(format!("Field {} not found", t).as_str()) + } + fn assert_is_embedded(&self, t: bool) -> &Self { + assert_eq!(self.is_embedded, t); + + return self; + } + fn assert_with_db_name(&self, t: &str) -> &Self { + assert_eq!(self.database_name, Some(String::from(t))); + + return self; + } +} + +impl EnumAsserts for dml::Enum { + fn assert_has_value(&self, t: &str) -> &Self { + let pred = String::from(t); + self.values + .iter() + .find(|x| **x == pred) + .expect(format!("Field {} not found", t).as_str()); + + return self; + } +} + +pub fn parse_and_validate(input: &str) -> dml::Schema { + let ast = datamodel::parser::parse(&String::from(input)).expect("Unable to parse datamodel."); + let validator = datamodel::validator::Validator::new(); + validator.validate(&ast).expect("Validation error") +} diff --git a/server/prisma-rs/libs/datamodel/tests/parser.rs b/server/prisma-rs/libs/datamodel/tests/parser.rs new file mode 100644 index 0000000000..ab352e4fd9 --- /dev/null +++ b/server/prisma-rs/libs/datamodel/tests/parser.rs @@ -0,0 +1,57 @@ +extern crate datamodel; + +#[test] +fn test_parser_should_not_crash() { + let dml = r#" +model User { + id: ID @primary + createdAt: DateTime + email: String @unique + name: String? + role: Role + posts: Post[] @onDelete(CASCADE) + profile: Profile? 
+} +@db(name: "user") + +model Profile { + id: ID @primary + user: User + bio: String +} +@db("profile") + +model Post { + id: ID @primary + createdAt: DateTime + updatedAt: DateTime + title: String @default("Default-Title") + wasLiked: boolean @default(false) + author: User @relation(name: "author") + published: Boolean = false + categories: Category[] +} +@db(name: "post") + +model Category { + id ID @primary + name String + posts Post[] + cat CategoryEnum +} +@db(name: "category") + +model PostToCategory { + post: Post(id) + category: Category +} +@db(name: "post_to_category") + +enum CategoryEnum { + A + B + C +}"#; + + datamodel::parser::parse(&String::from(dml)); +} diff --git a/server/prisma-rs/libs/datamodel/tests/relations.rs b/server/prisma-rs/libs/datamodel/tests/relations.rs new file mode 100644 index 0000000000..2af8264cec --- /dev/null +++ b/server/prisma-rs/libs/datamodel/tests/relations.rs @@ -0,0 +1,62 @@ +mod common; +use common::*; +use datamodel::dml; + +#[test] +fn resolve_relation() { + let dml = r#" + model User { + firstName: String + posts: Post[] + } + + model Post { + text: String + user: User + } + "#; + + let schema = parse_and_validate(dml); + let user_model = schema.assert_has_model("User"); + user_model + .assert_has_field("firstName") + .assert_base_type(&dml::ScalarType::String); + user_model + .assert_has_field("posts") + .assert_relation_to("Post") + .assert_arity(&dml::FieldArity::List); + + let post_model = schema.assert_has_model("Post"); + post_model + .assert_has_field("text") + .assert_base_type(&dml::ScalarType::String); + post_model.assert_has_field("user").assert_relation_to("User"); +} + +#[test] +fn resolve_enum_field() { + let dml = r#" + model User { + email: String + role: Role + } + + enum Role { + ADMIN + USER + PRO + } + "#; + + let schema = parse_and_validate(dml); + let user_model = schema.assert_has_model("User"); + user_model + .assert_has_field("email") + .assert_base_type(&dml::ScalarType::String); + 
user_model.assert_has_field("role").assert_enum_type("Role"); + + let role_enum = schema.assert_has_enum("Role"); + role_enum.assert_has_value("ADMIN"); + role_enum.assert_has_value("PRO"); + role_enum.assert_has_value("USER"); +} diff --git a/server/prisma-rs/libs/nullable/src/lib.rs b/server/prisma-rs/libs/nullable/src/lib.rs index 5859281550..00d8b91d25 100644 --- a/server/prisma-rs/libs/nullable/src/lib.rs +++ b/server/prisma-rs/libs/nullable/src/lib.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer}; -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, Eq, PartialEq, Hash, Clone)] pub enum Nullable { /// Explicit null value provided. Null, diff --git a/server/prisma-rs/libs/prisma-common/src/config/connection_string.rs b/server/prisma-rs/libs/prisma-common/src/config/connection_string.rs index da5cedc6d5..aab42c65b5 100644 --- a/server/prisma-rs/libs/prisma-common/src/config/connection_string.rs +++ b/server/prisma-rs/libs/prisma-common/src/config/connection_string.rs @@ -4,6 +4,8 @@ use url::Url; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct ConnectionStringConfig { + pub connector: String, + #[serde(with = "url_serde")] pub uri: Url, diff --git a/server/prisma-rs/libs/prisma-common/src/config/mod.rs b/server/prisma-rs/libs/prisma-common/src/config/mod.rs index 33953b2d73..5b8342af93 100644 --- a/server/prisma-rs/libs/prisma-common/src/config/mod.rs +++ b/server/prisma-rs/libs/prisma-common/src/config/mod.rs @@ -37,6 +37,14 @@ pub enum PrismaDatabase { } impl PrismaDatabase { + pub fn connector(&self) -> &str { + match self { + PrismaDatabase::Explicit(config) => &config.connector, + PrismaDatabase::ConnectionString(config) => &config.connector, + PrismaDatabase::File(config) => &config.connector, + } + } + pub fn db_name(&self) -> Option { match self { PrismaDatabase::Explicit(config) => config.database.clone(), @@ -44,6 +52,14 @@ impl PrismaDatabase { PrismaDatabase::File(config) => 
Some(config.db_name()), } } + + pub fn schema(&self) -> Option { + match self { + PrismaDatabase::Explicit(config) => config.schema.clone(), + PrismaDatabase::ConnectionString(config) => config.schema.clone(), + PrismaDatabase::File(config) => config.schema.clone(), + } + } } #[derive(Deserialize, Debug)] @@ -73,7 +89,7 @@ pub fn load() -> Result { }; let config = substitute_env_vars(config)?; - Ok(serde_yaml::from_str(&config).expect("Unable to parse YML config.")) + Ok(serde_yaml::from_str(&config.replace("\\n", "\n")).expect("Unable to parse YML config.")) } /// Attempts to find a valid Prisma config either via env var or file discovery. diff --git a/server/prisma-rs/libs/prisma-inflector/Cargo.toml b/server/prisma-rs/libs/prisma-inflector/Cargo.toml new file mode 100644 index 0000000000..7740d1633f --- /dev/null +++ b/server/prisma-rs/libs/prisma-inflector/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "prisma-inflector" +version = "0.1.0" +authors = ["Dominic Petrick "] +edition = "2018" + +[dependencies] +lazy_static = "1.3" +regex = "1.1" +unicode-segmentation = "1.2" \ No newline at end of file diff --git a/server/prisma-rs/libs/prisma-inflector/LICENSE b/server/prisma-rs/libs/prisma-inflector/LICENSE new file mode 100644 index 0000000000..9006b97b32 --- /dev/null +++ b/server/prisma-rs/libs/prisma-inflector/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/server/prisma-rs/libs/prisma-inflector/src/categories.rs b/server/prisma-rs/libs/prisma-inflector/src/categories.rs new file mode 100644 index 0000000000..20549878ba --- /dev/null +++ b/server/prisma-rs/libs/prisma-inflector/src/categories.rs @@ -0,0 +1,89 @@ +lazy_static! { + pub static ref CATEGORY_EX_ICES: Vec<&'static str> = vec![ + "codex", "murex", "silex", + ]; + + pub static ref CATEGORY_IX_ICES: Vec<&'static str> = vec![ + "radix", "helix", + ]; + + pub static ref CATEGORY_UM_A: Vec<&'static str> = vec![ + "bacterium", "agendum", "desideratum", "erratum", "stratum", "datum", "ovum", + "extremum", "candelabrum", + ]; + + // Always us -> i + pub static ref CATEGORY_US_I: Vec<&'static str> = vec![ + "alumnus", "alveolus", "bacillus", "bronchus", "locus", "nucleus", "stimulus", + "meniscus", "thesaurus", + ]; + + pub static ref CATEGORY_ON_A: Vec<&'static str> = vec![ + "criterion", "perihelion", "aphelion", "phenomenon", "prolegomenon", "noumenon", + "organon", "asyndeton", "hyperbaton", + ]; + + pub static ref CATEGORY_A_AE: Vec<&'static str> = vec!["alumna", "alga", "vertebra", "persona"]; + + // Always o -> os + pub static ref CATEGORY_O_OS: Vec<&'static str> = vec![ + "albino", "archipelago", "armadillo", "commando", "crescendo", "fiasco", + "ditto", "dynamo", "embryo", "ghetto", "guano", "inferno", "jumbo", "lumbago", + "magneto", "manifesto", "medico", "octavo", "photo", 
"pro", "quarto", "canto", + "lingo", "generalissimo", "stylo", "rhino", "casino", "auto", "macro", "zero", + ]; + + // Classical o -> i (normally -> os) + pub static ref CATEGORY_O_I: Vec<&'static str> = vec![ + "solo", "soprano", "basso", "alto", "contralto", "tempo", "piano", "virtuoso", + ]; + + pub static ref CATEGORY_EN_INA: Vec<&'static str> = vec![ + "stamen", "foramen", "lumen", + ]; + + // -a to -as (anglicized) or -ata (classical) + pub static ref CATEGORY_A_ATA: Vec<&'static str> = vec![ + "anathema", "enema", "oedema", "bema", "enigma", "sarcoma", "carcinoma", "gumma", + "schema", "charisma", "lemma", "soma", "diploma", "lymphoma", "stigma", "dogma", + "magma", "stoma", "drama", "melisma", "trauma", "edema", "miasma", + ]; + + pub static ref CATEGORY_IS_IDES: Vec<&'static str> = vec![ + "iris", "clitoris" + ]; + + // -us to -uses (anglicized) or -us (classical) + pub static ref CATEGORY_US_US: Vec<&'static str> = vec![ + "apparatus", "impetus", "prospectus", "cantus", "nexus", "sinus", "coitus", "plexus", + "status", "hiatus", + ]; + + pub static ref CATEGORY_NONE_I: Vec<&'static str> = vec![ + "afreet", "afrit", "efreet", + ]; + + pub static ref CATEGORY_NONE_IM: Vec<&'static str> = vec![ + "cherub", "goy", "seraph", + ]; + + pub static ref CATEGORY_EX_EXES: Vec<&'static str> = vec![ + "apex", "latex", "vertex", "cortex", "pontifex", "vortex", "index", "simplex", + ]; + + pub static ref CATEGORY_IX_IXES: Vec<&'static str> = vec![ + "appendix", + ]; + + pub static ref CATEGORY_S_ES: Vec<&'static str> = vec![ + "acropolis", "chaos", "lens", "aegis", "cosmos", "mantis", "alias", "dais", "marquis", + "asbestos", "digitalis", "metropolis", "atlas", "epidermis", "pathos", "bathos", "ethos", + "pelvis", "bias", "gas", "polis", "caddis", "glottis", "rhinoceros", "cannabis", "glottis", + "sassafras", "canvas", "ibis", "trellis", + ]; + + pub static ref CATEGORY_MAN_MANS: Vec<&'static str> = vec![ + "human", "Alabaman", "Bahaman", "Burman", "German", 
"Hiroshiman", "Liman", "Nakayaman", + "Oklahoman", "Panaman", "Selman", "Sonaman", "Tacoman", "Yakiman", "Yokohaman", "Yuman", + ]; +} diff --git a/server/prisma-rs/libs/prisma-inflector/src/exceptions.rs b/server/prisma-rs/libs/prisma-inflector/src/exceptions.rs new file mode 100644 index 0000000000..d63c43fd16 --- /dev/null +++ b/server/prisma-rs/libs/prisma-inflector/src/exceptions.rs @@ -0,0 +1,82 @@ +lazy_static! { + pub static ref UNCOUNTABLE: Vec<&'static str> = vec![ + // endings + "fish", "ois", "sheep", "deer", "pox", "itis", + + // words + "bison", "flounder", "pliers", "bream", + "gallows", "proceedings", "breeches", "graffiti", "rabies", + "britches", "headquarters", "salmon", "carp", "herpes", + "scissors", "chassis", "high-jinks", "sea-bass", "clippers", + "homework", "series", "cod", "innings", "shears", + "contretemps", "jackanapes", "species", "corps", "mackerel", + "swine", "debris", "measles", "trout", "diabetes", "mews", + "tuna", "djinn", "mumps", "whiting", "eland", "news", + "wildebeest", "elk", "pincers", "sugar", + ]; + + pub static ref STANDARD_IRREGULAR: Vec<(&'static str, &'static str)> = vec![ + ("child", "children"), // classical + ("ephemeris", "ephemerides"), // classical + ("mongoose", "mongoose"), // anglicized + ("mythos", "mythoi"), // classical + ("soliloquy", "soliloquies"), // anglicized + ("trilby", "trilbys"), // anglicized + ("genus", "genera"), // classical + ("quiz", "quizzes"), + ]; + + pub static ref IRREGULAR_ANGLICIZED: Vec<(&'static str, &'static str)> = vec![ + ("beef", "beefs"), + ("brother", "brothers"), + ("cow", "cows"), + ("genie", "genies"), + ("money", "moneys"), + ("octopus", "octopuses"), + ("opus", "opuses"), + ]; + + pub static ref IRREGULAR_CLASSICAL: Vec<(&'static str, &'static str)> = vec![ + ("beef", "beeves"), + ("brother", "brethren"), + ("cos", "kine"), + ("genie", "genii"), + ("money", "monies"), + ("octopus", "octopodes"), + ("opus", "opera"), + ]; + + pub static ref 
IRREGULAR_SUFFIX_INFLECTIONS: Vec<(&'static str, &'static str)> = vec![ + ("man$", "men"), + ("([lm])ouse$", "${1}ice"), + ("tooth$", "teeth"), + ("goose$", "geese"), + ("foot$", "feet"), + ("zoon$", "zoa"), + ("([csx])is$", "${1}es"), + ]; + + pub static ref MODERN_CLASSICAL_INFLECTIONS: Vec<(&'static str, &'static str)> = vec![ + ("trix$", "trices"), + ("eau$", "eaux"), + ("ieu$", "ieux"), + ("(..[iay])nx$", "${1}nges"), + ]; + + pub static ref ADDITIONAL_SUFFIX_INFLECTIONS: Vec<(&'static str, &'static str)> = vec![ + // The suffixes -ch, -sh, and -ss all take -es in the plural (churches, classes, etc)... + (r"([cs])h$", "${1}hes"), + ("ss$", "sses"), + + // Certain words ending in -f or -fe take -ves in the plural (lives, wolves, etc)... + ("([aeo]l)f$", "${1}ves"), + ("([^d]ea)f$", "${1}ves"), + ("(ar)f$", "${1}ves"), + ("([nlw]i)fe$", "${1}ves"), + + // Words ending in -y take -ys + ("([aeiou])y$", "${1}ys"), + ("y$", "ies"), + ]; + +} diff --git a/server/prisma-rs/libs/prisma-inflector/src/inflector.rs b/server/prisma-rs/libs/prisma-inflector/src/inflector.rs new file mode 100644 index 0000000000..9c66d46e32 --- /dev/null +++ b/server/prisma-rs/libs/prisma-inflector/src/inflector.rs @@ -0,0 +1,220 @@ +use super::{ + categories, exceptions, + rules::{Pluralize, Rule}, +}; +use regex::Regex; + +#[derive(Debug, PartialEq)] +pub enum Mode { + Anglicized, + Classical, +} + +#[derive(Debug)] +pub struct Inflector { + pub mode: Mode, + rules: Vec, + _inhibit: (), +} + +impl Inflector { + pub fn pluralize(&self, s: &str) -> String { + for rule in &self.rules { + if let Some(s) = rule.pluralize(s) { + return s; + } + } + + panic!("Invariant violation: Inflector should always fall back to catch-all case -s.") + } + + pub fn new(mode: Mode) -> Inflector { + let mut rules = vec![]; + + // Rules for words that do not inflect in the plural (such as fish, travois, chassis, nationality endings + rules.push(Self::category_rule("", "", &exceptions::UNCOUNTABLE)); + + // Handle 
standard irregular plurals (mongooses, oxen, etc.) + exceptions::STANDARD_IRREGULAR.iter().for_each(|irr| { + Self::irregular(irr.0, irr.1).into_iter().for_each(|r| rules.push(r)); + }); + + // Handle additional standard irregular plurals + // I don't know why Rust is throwing a type error here without .to_vec (lazy static issues?) + let additional_irregulars = match mode { + Mode::Anglicized => exceptions::IRREGULAR_ANGLICIZED.to_vec(), + Mode::Classical => exceptions::IRREGULAR_CLASSICAL.to_vec(), + }; + + additional_irregulars.iter().for_each(|irr| { + Self::irregular(irr.0, irr.1).into_iter().for_each(|r| rules.push(r)); + }); + + rules.push(Self::category_rule("", "s", &categories::CATEGORY_MAN_MANS)); + + // Handle irregular inflections for common suffixes + exceptions::IRREGULAR_SUFFIX_INFLECTIONS + .iter() + .for_each(|(singular, plural)| { + rules.push(Self::regex_rule(singular, plural)); + }); + + // Handle fully assimilated classical inflections + rules.push(Self::category_rule("ex", "ices", &categories::CATEGORY_EX_ICES)); + rules.push(Self::category_rule("ix", "ices", &categories::CATEGORY_IX_ICES)); + rules.push(Self::category_rule("um", "a", &categories::CATEGORY_UM_A)); + rules.push(Self::category_rule("on", "a", &categories::CATEGORY_ON_A)); + rules.push(Self::category_rule("a", "ae", &categories::CATEGORY_A_AE)); + + // Handle classical variants of modern inflections + if mode == Mode::Classical { + exceptions::MODERN_CLASSICAL_INFLECTIONS + .iter() + .for_each(|(singular, plural)| { + rules.push(Self::regex_rule(singular, plural)); + }); + + rules.push(Self::category_rule("en", "ina", &categories::CATEGORY_EN_INA)); + rules.push(Self::category_rule("a", "ata", &categories::CATEGORY_A_ATA)); + rules.push(Self::category_rule("is", "ides", &categories::CATEGORY_IS_IDES)); + rules.push(Self::category_rule("", "", &categories::CATEGORY_US_US)); + rules.push(Self::category_rule("o", "i", &categories::CATEGORY_O_I)); + rules.push(Self::category_rule("", 
"i", &categories::CATEGORY_NONE_I)); + rules.push(Self::category_rule("", "im", &categories::CATEGORY_NONE_IM)); + rules.push(Self::category_rule("ex", "ices", &categories::CATEGORY_EX_EXES)); + rules.push(Self::category_rule("ix", "ices", &categories::CATEGORY_IX_IXES)); + }; + + rules.push(Self::category_rule("us", "i", &categories::CATEGORY_US_I)); + rules.push(Self::regex_rule("([zx])$", "${1}es")); + rules.push(Self::category_rule("", "es", &categories::CATEGORY_S_ES)); + rules.push(Self::category_rule("", "es", &categories::CATEGORY_IS_IDES)); + rules.push(Self::category_rule("", "es", &categories::CATEGORY_US_US)); + rules.push(Self::regex_rule("(us)$", "${1}es")); + rules.push(Self::category_rule("", "s", &categories::CATEGORY_A_ATA)); + + exceptions::ADDITIONAL_SUFFIX_INFLECTIONS + .iter() + .for_each(|(singular, plural)| { + rules.push(Self::regex_rule(singular, plural)); + }); + + // Some words ending in -o take -os (including does preceded by a vowel) + rules.push(Self::category_rule("o", "os", &categories::CATEGORY_O_I)); + rules.push(Self::category_rule("o", "os", &categories::CATEGORY_O_OS)); + rules.push(Self::regex_rule("([aeiou])o$", "${1}os")); + + // The rest take -oes + rules.push(Self::regex_rule("o$", "oes")); + rules.push(Self::regex_rule("ulum", "ula")); + rules.push(Self::category_rule("", "es", &categories::CATEGORY_A_ATA)); + rules.push(Self::regex_rule("s$", "ses")); + + // Global fallback, just assume that the plural adds -s + rules.push(Self::regex_rule("$", "s")); + + Inflector { + mode, + rules, + _inhibit: (), + } + } + + fn irregular(singular: &'static str, plural: &'static str) -> Vec { + let first_singular = singular.chars().next().unwrap(); + let first_plural = plural.chars().next().unwrap(); + + // Rules are all 1-byte characters, so we can use slices. 
+ if first_singular == first_plural { + vec![Rule::regex( + Regex::new(&format!( + "(?i)({}){}$", + first_singular.to_owned(), + singular[1..].to_owned() + )) + .unwrap(), + format!("${{1}}{}", plural[1..].to_owned()), + )] + } else { + vec![ + Rule::regex( + Regex::new(&format!( + "{}(?i){}$", + first_singular.to_uppercase(), + singular[1..].to_owned() + )) + .unwrap(), + format!("{}{}", first_plural.to_uppercase(), plural[1..].to_owned()), + ), + Rule::regex( + Regex::new(&format!( + "{}(?i){}$", + first_singular.to_lowercase(), + singular[1..].to_owned() + )) + .unwrap(), + format!("{}{}", first_plural.to_lowercase(), plural[1..].to_owned()), + ), + ] + } + } + + fn regex_rule(singular: &'static str, plural: &'static str) -> Rule { + Rule::regex(Regex::new(&format!("(?i){}", singular)).unwrap(), plural.into()) + } + + fn category_rule(singular: &'static str, plural: &'static str, words: &'static [&'static str]) -> Rule { + Rule::category(singular.into(), plural.into(), words) + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_example_word_list() { + let examples = vec![ + ("alga", "algae"), + ("nova", "novas"), + ("dogma", "dogmas"), + ("Woman", "Women"), + ("church", "churches"), + ("quick_chateau", "quick_chateaus"), + ("codex", "codices"), + ("index", "indexes"), + ("NightWolf", "NightWolves"), + ("Milieu", "Milieus"), + ("basis", "bases"), + ("iris", "irises"), + ("phalanx", "phalanxes"), + ("tempo", "tempos"), + ("foot", "feet"), + ("series", "series"), + ("WorldAtlas", "WorldAtlases"), + ("wish", "wishes"), + ("Bacterium", "Bacteria"), + ("medium", "mediums"), + ("Genus", "Genera"), + ("stimulus", "stimuli"), + ("opus", "opuses"), + ("status", "statuses"), + ("Box", "Boxes"), + ("ferry", "ferries"), + ("protozoon", "protozoa"), + ("cherub", "cherubs"), + ("human", "humans"), + ("sugar", "sugar"), + ("virus", "viruses"), + ("gastrostomy", "gastrostomies"), + ("baculum", "bacula"), + ("pancreas", "pancreases"), + ]; + + let inflector = 
Inflector::new(Mode::Anglicized); + + examples.into_iter().for_each(|(singular, expected_plural)| { + assert_eq!(inflector.pluralize(singular), expected_plural); + }); + } +} diff --git a/server/prisma-rs/libs/prisma-inflector/src/lib.rs b/server/prisma-rs/libs/prisma-inflector/src/lib.rs new file mode 100644 index 0000000000..e3b3f5c1bf --- /dev/null +++ b/server/prisma-rs/libs/prisma-inflector/src/lib.rs @@ -0,0 +1,24 @@ +#[macro_use] +extern crate lazy_static; + +mod categories; +mod exceptions; +mod inflector; +mod rules; + +use inflector::{Inflector, Mode}; + +lazy_static! { + static ref DEFAULT: Inflector = Inflector::new(Mode::Anglicized); + static ref CLASSICAL: Inflector = Inflector::new(Mode::Classical); +} + +/// Default inflector, anglecized mode. +pub fn default() -> &'static Inflector { + &DEFAULT +} + +/// Inflector, classical mode. +pub fn classical() -> &'static Inflector { + &CLASSICAL +} diff --git a/server/prisma-rs/libs/prisma-inflector/src/rules.rs b/server/prisma-rs/libs/prisma-inflector/src/rules.rs new file mode 100644 index 0000000000..a5fc706b94 --- /dev/null +++ b/server/prisma-rs/libs/prisma-inflector/src/rules.rs @@ -0,0 +1,81 @@ +use regex::Regex; +use unicode_segmentation::UnicodeSegmentation; + +pub trait Pluralize { + fn pluralize(&self, s: &str) -> Option; +} + +#[derive(Debug)] +pub enum Rule { + Category(CategoryRule), + Regex(RegexRule), +} + +impl Rule { + pub fn category(singular: String, plural: String, words: &'static [&'static str]) -> Rule { + Rule::Category(CategoryRule { + singular, + plural, + words, + }) + } + + pub fn regex(singular: Regex, plural: String) -> Rule { + Rule::Regex(RegexRule { singular, plural }) + } +} + +impl Pluralize for Rule { + fn pluralize(&self, s: &str) -> Option { + match self { + Rule::Category(c) => c.pluralize(s), + Rule::Regex(r) => r.pluralize(s), + } + } +} + +#[derive(Debug)] +pub struct CategoryRule { + singular: String, + plural: String, + words: &'static [&'static str], +} + +impl 
Pluralize for CategoryRule { + fn pluralize(&self, s: &str) -> Option { + let normalized = s.to_lowercase().to_owned(); + + for suffix in self.words { + if normalized.ends_with(suffix) { + if !normalized.ends_with(&self.singular) { + panic!("Invariant violation: Invalid inflection rule match: {}.", self.singular); + } + + let chars = s.graphemes(true).collect::>(); + let end_index = chars.len() - self.singular.len(); + let result = format!("{}{}", chars[0..end_index].join(""), self.plural); + + return Some(result); + } + } + + None + } +} + +#[derive(Debug)] +pub struct RegexRule { + singular: Regex, + plural: String, +} + +impl Pluralize for RegexRule { + fn pluralize(&self, s: &str) -> Option { + let candidate = self.singular.replace(s, &self.plural as &str); + if candidate == s { + None + } else { + Some(candidate.to_string()) + } + } +} diff --git a/server/prisma-rs/libs/prisma-query b/server/prisma-rs/libs/prisma-query deleted file mode 160000 index c813301714..0000000000 --- a/server/prisma-rs/libs/prisma-query +++ /dev/null @@ -1 +0,0 @@ -Subproject commit c813301714f5a12b127ce60e890116c2dcb07059 diff --git a/server/prisma-rs/migration-engine/Cargo.toml b/server/prisma-rs/migration-engine/Cargo.toml deleted file mode 100644 index 992b478056..0000000000 --- a/server/prisma-rs/migration-engine/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -[package] -name = "migration-engine" -version = "0.1.0" -authors = ["Marcus Böhm "] -edition = "2018" - -[dependencies] -nullable = { path = "../libs/nullable" } -database-inspector = { path = "../libs/database-inspector" } -prisma-models = { path = "../prisma-models" } -chrono = { version = "0.4", features = ["serde"] } -jsonrpc-core = "10.1.0" -serde = "1.0" -serde_json = "1.0" -serde_derive = "1.0" -boolinator = "2.4.0" - -[[bin]] -name = "migration-engine-rpc" -path = "src/bin/rpc_api_bin.rs" - -[[bin]] -name = "suggest-migrations" -path = "src/bin/suggest_migrations.rs" diff --git 
a/server/prisma-rs/migration-engine/connectors/migration-connector/Cargo.toml b/server/prisma-rs/migration-engine/connectors/migration-connector/Cargo.toml new file mode 100644 index 0000000000..aae0fbd474 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/migration-connector/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "migration-connector" +version = "0.1.0" +authors = ["Marcus Böhm "] +edition = "2018" + +[dependencies] +datamodel = { path = "../../../libs/datamodel" } +nullable = { path = "../../../libs/nullable" } +chrono = { version = "0.4" } +serde = "1.0" +serde_json = "1.0" +serde_derive = "1.0" diff --git a/server/prisma-rs/migration-engine/connectors/migration-connector/src/lib.rs b/server/prisma-rs/migration-engine/connectors/migration-connector/src/lib.rs new file mode 100644 index 0000000000..b46d6e48b0 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/migration-connector/src/lib.rs @@ -0,0 +1,183 @@ +mod migration_applier; +pub mod steps; + +use chrono::{DateTime, Utc}; +use datamodel::Schema; +pub use migration_applier::*; +use serde::Serialize; +use std::fmt::Debug; +use std::sync::Arc; +pub use steps::MigrationStep; + +#[macro_use] +extern crate serde_derive; + +pub trait MigrationConnector { + type DatabaseMigrationStep: DatabaseMigrationStepExt + 'static; + + fn initialize(&self); + + fn reset(&self); + + fn migration_persistence(&self) -> Arc; + + fn database_steps_inferrer(&self) -> Arc>; + fn database_step_applier(&self) -> Arc>; + fn destructive_changes_checker(&self) -> Arc>; + + fn migration_applier(&self) -> Box> { + let applier = MigrationApplierImpl { + migration_persistence: self.migration_persistence(), + step_applier: self.database_step_applier(), + }; + Box::new(applier) + } +} + +pub trait DatabaseMigrationStepExt: Debug + Serialize {} + +pub trait DatabaseMigrationStepsInferrer { + fn infer(&self, previous: &Schema, next: &Schema, steps: Vec) -> Vec; +} + +pub trait DatabaseMigrationStepApplier { + 
fn apply(&self, step: T); +} + +pub trait DestructiveChangesChecker { + fn check(&self, steps: Vec) -> Vec; +} + +pub enum MigrationResult { + Error(MigrationWarning), + Warning(MigrationError), +} + +#[derive(Debug, Serialize)] +pub struct MigrationWarning { + pub tpe: String, + pub description: String, + pub field: Option, +} + +#[derive(Debug, Serialize)] +pub struct MigrationError { + pub tpe: String, + pub description: String, + pub field: Option, +} + +pub trait MigrationPersistence { + // returns the last successful Migration + fn last(&self) -> Option; + + // this power the listMigrations command + fn load_all(&self) -> Vec; + + // writes the migration to the Migration table + fn create(&self, migration: Migration) -> Migration; + + // used by the MigrationApplier to write the progress of a Migration into the database + fn update(&self, params: &MigrationUpdateParams); +} + +#[derive(Debug, PartialEq, Clone)] +pub struct Migration { + pub name: String, + pub revision: usize, + pub status: MigrationStatus, + pub applied: usize, + pub rolled_back: usize, + pub datamodel: Schema, + pub datamodel_steps: Vec, + pub database_steps: String, + pub errors: Vec, + pub started_at: DateTime, + pub finished_at: Option>, +} + +#[derive(Debug, Clone)] +pub struct MigrationUpdateParams { + pub name: String, + pub revision: usize, + pub status: MigrationStatus, + pub applied: usize, + pub rolled_back: usize, + pub errors: Vec, + pub finished_at: Option>, +} + +impl Migration { + pub fn new(name: String) -> Migration { + Migration { + name: name, + revision: 0, + status: MigrationStatus::Pending, + applied: 0, + rolled_back: 0, + datamodel: Schema::empty(), + datamodel_steps: Vec::new(), + database_steps: "[]".to_string(), + errors: Vec::new(), + started_at: Self::timestamp_without_nanos(), + finished_at: None, + } + } + + pub fn update_params(&self) -> MigrationUpdateParams { + MigrationUpdateParams { + name: self.name.clone(), + revision: self.revision.clone(), + status: 
self.status.clone(), + applied: self.applied, + rolled_back: self.rolled_back, + errors: self.errors.clone(), + finished_at: self.finished_at.clone(), + } + } + + // SQLite does not store nano precision. Therefore we cut it so we can assert equality in our tests. + pub fn timestamp_without_nanos() -> DateTime { + let timestamp = Utc::now().timestamp_millis(); + let nsecs = ((timestamp % 1000) * 1_000_000) as u32; + let secs = (timestamp / 1000) as i64; + let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); + let datetime: DateTime = DateTime::from_utc(naive, Utc); + datetime + } +} + +#[derive(Debug, Serialize, PartialEq, Clone)] +pub enum MigrationStatus { + Pending, + InProgress, + Success, + RollingBack, + RollbackSuccess, + RollbackFailure, +} + +impl MigrationStatus { + pub fn code(&self) -> &str { + match self { + MigrationStatus::Pending => "Pending", + MigrationStatus::InProgress => "InProgress", + MigrationStatus::Success => "Success", + MigrationStatus::RollingBack => "RollingBack", + MigrationStatus::RollbackSuccess => "RollbackSuccess", + MigrationStatus::RollbackFailure => "RollbackFailure", + } + } + + pub fn from_str(s: String) -> MigrationStatus { + match s.as_ref() { + "Pending" => MigrationStatus::Pending, + "InProgress" => MigrationStatus::InProgress, + "Success" => MigrationStatus::Success, + "RollingBack" => MigrationStatus::RollingBack, + "RollbackSuccess" => MigrationStatus::RollbackSuccess, + "RollbackFailure" => MigrationStatus::RollbackFailure, + _ => panic!("MigrationStatus {:?} is not known", s), + } + } +} diff --git a/server/prisma-rs/migration-engine/connectors/migration-connector/src/migration_applier.rs b/server/prisma-rs/migration-engine/connectors/migration-connector/src/migration_applier.rs new file mode 100644 index 0000000000..aa00deed41 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/migration-connector/src/migration_applier.rs @@ -0,0 +1,34 @@ +use crate::*; +use std::sync::Arc; + +pub trait 
MigrationApplier { + fn apply_steps(&self, migration: Migration, steps: Vec); +} + +#[allow(unused, dead_code)] +pub struct MigrationApplierImpl { + pub migration_persistence: Arc, + pub step_applier: Arc>, +} + +#[allow(unused, dead_code)] +impl MigrationApplier for MigrationApplierImpl { + fn apply_steps(&self, migration: Migration, steps: Vec) { + // todo: refactor those procedural updates into proper domain methods on the Migration struct + assert_eq!(migration.status, MigrationStatus::Pending); // what other states are valid here? + + let mut migration_updates = migration.update_params(); + migration_updates.status = MigrationStatus::InProgress; + self.migration_persistence.update(&migration_updates); + + for step in steps { + self.step_applier.apply(step); + migration_updates.applied = migration_updates.applied + 1; + self.migration_persistence.update(&migration_updates); + } + + migration_updates.status = MigrationStatus::Success; + migration_updates.finished_at = Some(Migration::timestamp_without_nanos()); + self.migration_persistence.update(&migration_updates); + } +} diff --git a/server/prisma-rs/migration-engine/connectors/migration-connector/src/steps.rs b/server/prisma-rs/migration-engine/connectors/migration-connector/src/steps.rs new file mode 100644 index 0000000000..06ebe5cca6 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/migration-connector/src/steps.rs @@ -0,0 +1,247 @@ +use datamodel::*; +use nullable::Nullable; + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(tag = "stepType")] +pub enum MigrationStep { + CreateModel(CreateModel), + UpdateModel(UpdateModel), + DeleteModel(DeleteModel), + CreateField(CreateField), + DeleteField(DeleteField), + UpdateField(UpdateField), + CreateEnum(CreateEnum), + UpdateEnum(UpdateEnum), + DeleteEnum(DeleteEnum), + // CreateRelation(CreateRelation), + // DeleteRelation(DeleteRelation), +} + +pub trait WithDbName { + fn db_name(&self) -> String; +} + +#[derive(Debug, 
Deserialize, Serialize, PartialEq, Eq, Hash, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct CreateModel { + pub name: String, + + #[serde(skip_serializing_if = "Option::is_none")] + pub db_name: Option, + + pub embedded: bool, +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, Hash, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct UpdateModel { + pub name: String, + + #[serde(skip_serializing_if = "Option::is_none")] + pub new_name: Option, + + #[serde( + default, + skip_serializing_if = "Option::is_none", + deserialize_with = "nullable::optional_nullable_deserialize" + )] + pub db_name: Option>, + + #[serde(skip_serializing_if = "Option::is_none")] + pub embedded: Option, +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct DeleteModel { + pub name: String, +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct CreateField { + pub model: String, + + pub name: String, + + #[serde(rename = "type")] + pub tpe: FieldType, + + pub arity: FieldArity, + + #[serde(skip_serializing_if = "Option::is_none")] + pub db_name: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub is_created_at: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub is_updated_at: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option, // fixme: change to behaviour + + #[serde(skip_serializing_if = "Option::is_none")] + pub default: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub scalar_list: Option, +} + +impl WithDbName for CreateField { + fn db_name(&self) -> String { + match self.db_name { + Some(ref db_name) => db_name.clone(), + None => self.name.clone(), + } + } +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct 
UpdateField { + pub model: String, + + pub name: String, + + #[serde(skip_serializing_if = "Option::is_none")] + pub new_name: Option, + + #[serde(rename = "type", skip_serializing_if = "Option::is_none")] + pub tpe: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub arity: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub db_name: Option>, + + #[serde(skip_serializing_if = "Option::is_none")] + pub is_created_at: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub is_updated_at: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option>, // fixme: change to behaviour + + #[serde(skip_serializing_if = "Option::is_none")] + pub default: Option>, + + #[serde(skip_serializing_if = "Option::is_none")] + pub scalar_list: Option>, +} + +impl UpdateField { + pub fn is_any_option_set(&self) -> bool { + self.new_name.is_some() + || self.arity.is_some() + || self.db_name.is_some() + || self.is_created_at.is_some() + || self.is_updated_at.is_some() + || self.id.is_some() + || self.default.is_some() + || self.scalar_list.is_some() + } +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct DeleteField { + pub model: String, + pub name: String, +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct CreateEnum { + pub name: String, + pub values: Vec, +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct UpdateEnum { + pub name: String, + + #[serde(skip_serializing_if = "Option::is_none")] + pub new_name: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub values: Option>, +} + +#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct DeleteEnum { + pub name: String, +} + +// #[derive(Debug, 
Deserialize, Serialize, PartialEq)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct CreateRelation { +// pub name: String, +// pub model_a: RelationFieldSpec, +// pub model_b: RelationFieldSpec, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub table: Option, +// } + +// #[derive(Debug, Deserialize, Serialize, PartialEq)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct UpdateRelation { +// pub name: String, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub new_name: Option, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub model_a: Option, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub model_b: Option, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub table: Option, +// } + +// #[derive(Debug, Deserialize, Serialize, PartialEq)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct DeleteRelation { +// pub name: String, +// } + +// // fixme: this data structure is used in create and update. It does not allow to set field to null though in update. +// // fixme: the field inline_link does not allow to customize the underlying db name right now. +// #[derive(Debug, Deserialize, Serialize, PartialEq)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct RelationFieldSpec { +// pub name: String, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub field: Option, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub is_list: Option, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub is_optional: Option, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub on_delete: Option, // fixme: change to proper enum + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub inline_link: Option, +// } + +// // fixme: this strucut does not allow to customize the db name of the link table. 
+// #[derive(Debug, Deserialize, Serialize, PartialEq)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct LinkTableSpec { +// #[serde(skip_serializing_if = "Option::is_none")] +// pub model_a_column: Option, + +// #[serde(skip_serializing_if = "Option::is_none")] +// pub model_b_column: Option, +// } diff --git a/server/prisma-rs/migration-engine/connectors/migration-connector/tests/steps_tests.rs b/server/prisma-rs/migration-engine/connectors/migration-connector/tests/steps_tests.rs new file mode 100644 index 0000000000..fe1f372d9b --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/migration-connector/tests/steps_tests.rs @@ -0,0 +1,323 @@ +#![allow(non_snake_case)] + +use datamodel::*; +use migration_connector::steps::*; +use nullable::Nullable::*; + +#[test] +fn minimal_CreateModel_must_work() { + let json = r#"{"stepType":"CreateModel","name":"Blog","embedded":false}"#; + let expected_struct = MigrationStep::CreateModel(CreateModel { + name: "Blog".to_string(), + db_name: None, + embedded: false, + }); + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn full_CreateModel_must_work() { + let json = r#"{"stepType":"CreateModel","name":"Blog","dbName":"blog","embedded":true}"#; + let expected_struct = MigrationStep::CreateModel(CreateModel { + name: "Blog".to_string(), + db_name: Some("blog".to_string()), + embedded: true, + }); + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn minimal_UpdateModel_must_work() { + let json = r#"{"stepType":"UpdateModel","name":"Blog"}"#; + let expected_struct = MigrationStep::UpdateModel(UpdateModel { + name: "Blog".to_string(), + new_name: None, + db_name: None, + embedded: None, + }); + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn full_UpdateModel_must_work() { + let json = r#"{"stepType":"UpdateModel","name":"Blog","newName":"MyBlog","dbName":"blog","embedded":true}"#; + let expected_struct = MigrationStep::UpdateModel(UpdateModel { + name: 
"Blog".to_string(), + new_name: Some("MyBlog".to_string()), + db_name: Some(NotNull("blog".to_string())), + embedded: Some(true), + }); + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn DeleteModel_must_work() { + let json = r#"{"stepType":"DeleteModel","name":"Blog"}"#; + let expected_struct = MigrationStep::DeleteModel(DeleteModel { + name: "Blog".to_string(), + }); + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn minimal_CreateField_must_work() { + let json = + r#"{"stepType":"CreateField","model":"Blog","name":"title","type":{"Base":"String"},"arity":"required"}"#; + let expected_struct = MigrationStep::CreateField(CreateField { + model: "Blog".to_string(), + name: "title".to_string(), + tpe: FieldType::Base(ScalarType::String), + arity: FieldArity::Required, + db_name: None, + is_created_at: None, + is_updated_at: None, + id: None, + default: None, + scalar_list: None, + }); + assert_symmetric_serde(json, expected_struct); +} + +// TODO: bring back once we have decided on field behavious +#[test] +fn full_CreateField_must_work() { + let json = r#"{ + "stepType":"CreateField", + "model":"Blog", + "name":"title", + "type":{"Base":"String"}, + "arity":"optional", + "dbName":"blog", + "isCreatedAt":true, + "isUpdatedAt":true, + "default":{"String":"default"}, + "scalarList": "Embedded" + }"#; + let expected_struct = MigrationStep::CreateField(CreateField { + model: "Blog".to_string(), + name: "title".to_string(), + tpe: FieldType::Base(ScalarType::String), + arity: FieldArity::Optional, + db_name: Some("blog".to_string()), + is_created_at: Some(true), + is_updated_at: Some(true), + id: None, // TODO: adapt once added to CreateField + default: Some(Value::String("default".to_string())), + scalar_list: Some(ScalarListStrategy::Embedded), + }); + + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn minimal_UpdateField_must_work() { + let json = r#"{"stepType":"UpdateField","model":"Blog","name":"title"}"#; + let 
expected_struct = MigrationStep::UpdateField(UpdateField { + model: "Blog".to_string(), + name: "title".to_string(), + new_name: None, + tpe: None, + arity: None, + db_name: None, + is_created_at: None, + is_updated_at: None, + id: None, + default: None, + scalar_list: None, + }); + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn full_UpdateField_must_work() { + let json = r#"{"stepType":"UpdateField","model":"Blog","name":"title","newName":"MyBlog","type":{"Base":"String"},"arity":"optional","dbName":"blog","isCreatedAt":true,"isUpdatedAt":true,"default":{"String":"default"},"scalarList":"Embedded"}"#; + let expected_struct = MigrationStep::UpdateField(UpdateField { + model: "Blog".to_string(), + name: "title".to_string(), + new_name: Some("MyBlog".to_string()), + tpe: Some(FieldType::Base(ScalarType::String)), + arity: Some(FieldArity::Optional), + db_name: Some(NotNull("blog".to_string())), + is_created_at: Some(true), + is_updated_at: Some(true), + id: None, + default: Some(NotNull(Value::String("default".to_string()))), + scalar_list: Some(NotNull(ScalarListStrategy::Embedded)), + }); + assert_symmetric_serde(json, expected_struct); +} + +#[test] +fn DeleteField_must_work() { + let json = r#"{"stepType":"DeleteField","model":"Blog","name":"title"}"#; + let expected_struct = MigrationStep::DeleteField(DeleteField { + model: "Blog".to_string(), + name: "title".to_string(), + }); + assert_symmetric_serde(json, expected_struct); +} + +// #[test] +// fn CreateEnum_must_work() { +// let json = r#"{"stepType":"CreateEnum","name":"BlogCategory","values":["Politics","Tech"]}"#; +// let expected_struct = MigrationStep::CreateEnum(CreateEnum { +// name: "BlogCategory".to_string(), +// values: vec!["Politics".to_string(), "Tech".to_string()], +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +// #[test] +// fn minimal_UpdateEnum_must_work() { +// let json = r#"{"stepType":"UpdateEnum","name":"BlogCategory"}"#; +// let expected_struct = 
MigrationStep::UpdateEnum(UpdateEnum { +// name: "BlogCategory".to_string(), +// new_name: None, +// values: None, +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +// #[test] +// fn full_Update_Enum_must_work() { +// let json = r#"{"stepType":"UpdateEnum","name":"BlogCategory","newName":"MyBlogCategory","values":["Tech"]}"#; +// let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { +// name: "BlogCategory".to_string(), +// new_name: Some("MyBlogCategory".to_string()), +// values: Some(vec!["Tech".to_string()]), +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +// #[test] +// fn DeleteEnum_must_work() { +// let json = r#"{"stepType":"DeleteEnum","name":"BlogCategory"}"#; +// let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { +// name: "BlogCategory".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +// #[test] +// fn minimal_CreateRelation_must_work() { +// let json = r#"{ +// "stepType":"CreateRelation", +// "name":"BlogToPosts", +// "modelA": { "name":"Blog" }, +// "modelB": { "name":"Post" } +// }"#; +// let expected_struct = MigrationStep::CreateRelation(CreateRelation { +// name: "BlogToPosts".to_string(), +// model_a: RelationFieldSpec { +// name: "Blog".to_string(), +// field: None, +// is_list: false, +// is_optional: false, +// on_delete: None, +// inline_link: None, +// }, +// model_b: RelationFieldSpec { +// name: "Post".to_string(), +// field: None, +// is_list: false, +// is_optional: false, +// on_delete: None, +// inline_link: None, +// }, +// table: None, +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +// #[test] +// fn full_CreateRelation_with_link_table_must_work() { +// let json = r#"{ +// "stepType":"CreateRelation", +// "name":"BlogToPosts", +// "modelA": { "name":"Blog","field":"posts","isList":true,"onDelete":"SET_NULL","inlineLink":true}, +// "modelB": { "name":"Post","field":"blog","isOptional":true,"onDelete":"CASCADE"}, +// "table": { 
"modelAColumn":"blog", "modelBColumn":"post" } +// }"#; +// let expected_struct = MigrationStep::CreateRelation(CreateRelation { +// name: "BlogToPosts".to_string(), +// model_a: RelationFieldSpec { +// name: "Blog".to_string(), +// field: Some("posts".to_string()), +// is_list: Some(true), +// is_optional: false, +// on_delete: Some("SET_NULL".to_string()), +// inline_link: Some(true), +// }, +// model_b: RelationFieldSpec { +// name: "Post".to_string(), +// field: Some("blog".to_string()), +// is_list: false, +// is_optional: Some(true), +// on_delete: Some("CASCADE".to_string()), +// inline_link: None, +// }, +// table: Some(LinkTableSpec { +// model_a_column: Some("blog".to_string()), +// model_b_column: Some("post".to_string()), +// }), +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +// #[test] +// fn CreateRelation_forcing_the_link_table_must_work() { +// let json = r#"{ +// "stepType":"CreateRelation", +// "name":"BlogToPosts", +// "modelA": { "name":"Blog" }, +// "modelB": { "name":"Post" }, +// "table": { } +// }"#; +// let expected_struct = MigrationStep::CreateRelation(CreateRelation { +// name: "BlogToPosts".to_string(), +// model_a: RelationFieldSpec { +// name: "Blog".to_string(), +// field: None, +// is_list: false, +// is_optional: false, +// on_delete: None, +// inline_link: None, +// }, +// model_b: RelationFieldSpec { +// name: "Post".to_string(), +// field: None, +// is_list: false, +// is_optional: false, +// on_delete: None, +// inline_link: None, +// }, +// table: Some(LinkTableSpec { +// model_a_column: None, +// model_b_column: None, +// }), +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +// #[test] +// fn DeletRelation_must_work() { +// let json = r#"{"stepType":"DeleteRelation","name":"BlogToPost"}"#; +// let expected_struct = MigrationStep::DeleteRelation(DeleteRelation { +// name: "BlogToPost".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } + +fn 
assert_symmetric_serde(json: &str, expected: MigrationStep) { + let serde_value: serde_json::Value = serde_json::from_str(&json).expect("The provided input was invalid json."); + let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); + let serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); + assert_eq!( + deserialized, expected, + "The provided json could not be serialized into the expected struct." + ); + assert_eq!( + serialized_again, serde_value, + "Reserializing did not produce the original json input." + ); +} diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/Cargo.toml b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/Cargo.toml new file mode 100644 index 0000000000..afb5c551f9 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "sql-migration-connector" +version = "0.1.0" +authors = ["Marcus Böhm "] +edition = "2018" + +[dependencies] +migration-connector = { path = "../migration-connector" } +datamodel = { path = "../../../libs/datamodel" } +chrono = { version = "0.4" } +prisma-query = { git = "https://github.com/prisma/prisma-query.git" } +database-inspector = { path = "../../../libs/database-inspector" } +serde_json = "1.0" +serde = "1.0" +rusqlite = { version = "0.16", features = ["chrono", "bundled"] } +barrel = { version = "0.5.4", features = ["sqlite3"] } +itertools = "0.8" diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_calculator.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_calculator.rs new file mode 100644 index 0000000000..efedc41c1d --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_calculator.rs @@ -0,0 +1,253 @@ +use database_inspector::*; +use datamodel::*; +use 
std::collections::HashSet; + +pub struct DatabaseSchemaCalculator<'a> { + data_model: &'a Schema, +} + +impl<'a> DatabaseSchemaCalculator<'a> { + pub fn calculate(data_model: &Schema) -> DatabaseSchema { + let calculator = DatabaseSchemaCalculator { data_model }; + calculator.calculate_internal() + } + + fn calculate_internal(&self) -> DatabaseSchema { + let mut tables = Vec::new(); + let mut model_tables = self.calculate_model_tables(); + let mut scalar_list_tables = self.calculate_scalar_list_tables(); + let mut relation_tables = self.calculate_relation_tables(); + + tables.append(&mut model_tables); + tables.append(&mut scalar_list_tables); + tables.append(&mut relation_tables); + + DatabaseSchema { tables } + } + + fn calculate_model_tables(&self) -> Vec
{ + self.data_model + .models() + .map(|model| { + let columns = model + .fields() + .flat_map(|f| match (&f.field_type, &f.arity) { + (FieldType::Base(scalar), arity) if arity != &FieldArity::List => Some(Column { + name: f.name.clone(), + tpe: column_type(scalar), + is_required: arity == &FieldArity::Required, + foreign_key: None, + sequence: None, + }), + _ => None, + }) + .collect(); + Table { + name: model.name.clone(), + columns: columns, + indexes: Vec::new(), + } + }) + .collect() + } + + fn calculate_scalar_list_tables(&self) -> Vec
{ + let mut result = Vec::new(); + + for model in self.data_model.models() { + let list_fields: Vec<&Field> = model + .fields() + .filter(|f| f.arity == FieldArity::List && is_scalar(f)) + .collect(); + for field in list_fields { + let id_field = id_field(&model); // todo: find actual id field + let table = Table { + name: format!("{}_{}", model.name.clone(), field.name.clone()), + columns: vec![ + Column::new("nodeId".to_string(), column_type(&scalar_type(&id_field)), true), + Column::new("position".to_string(), ColumnType::Int, true), + Column::new("value".to_string(), column_type(&scalar_type(&field)), true), + ], + indexes: Vec::new(), + }; + result.push(table); + } + } + + result + } + + fn calculate_relation_tables(&self) -> Vec
{ + let mut result = Vec::new(); + for relation in self.calculate_relations().iter() { + match &relation.manifestation { + RelationManifestation::Table { + model_a_column, + model_b_column, + } if relation.is_many_to_many() => { + let table = Table { + name: relation.table_name(), + columns: vec![ + Column::with_foreign_key( + model_a_column.to_string(), + column_type(&scalar_type(id_field(&relation.model_a))), + true, + ForeignKey { + table: relation.model_a.name.to_string(), + column: id_field(&relation.model_a).name.to_string(), + }, + ), + Column::with_foreign_key( + model_b_column.to_string(), + column_type(&scalar_type(id_field(&relation.model_b))), + true, + ForeignKey { + table: relation.model_b.name.to_string(), + column: id_field(&relation.model_b).name.to_string(), + }, + ), + ], + indexes: Vec::new(), + }; + result.push(table); + } + _ => {} + } + } + result + } + + #[allow(unused)] + fn calculate_relations(&self) -> Vec { + let mut result = Vec::new(); + for model in self.data_model.models() { + for field in model.fields() { + match &field.field_type { + FieldType::Relation(relation_info) => { + let RelationInfo { + to, + to_field, + name, + on_delete, + } = relation_info; + let related_model = self.data_model.find_model(&to).unwrap(); + // TODO: handle case of implicit back relation field + let related_field = related_model + .fields() + .find(|f| related_type(f) == Some(model.name.to_string())) + .unwrap() + .clone(); + let manifestation = RelationManifestation::Table { + model_a_column: "A".to_string(), + model_b_column: "B".to_string(), + }; + let (model_a, model_b, field_a, field_b) = match () { + _ if &model.name < &related_model.name => { + (model.clone(), related_model.clone(), field.clone(), related_field) + } + _ if &related_model.name < &model.name => { + (related_model.clone(), model.clone(), related_field, field.clone()) + } + _ => (model.clone(), related_model.clone(), field.clone(), related_field), + }; + + result.push(Relation { + 
model_a: model_a, + model_b: model_b, + field_a: field_a, + field_b: field_b, + manifestation, + }) + } + _ => {} + } + } + } + result.dedup_by(|rel1, rel2| rel1 == rel2); + result + } +} + +#[derive(PartialEq)] +struct Relation { + model_a: Model, + model_b: Model, + field_a: Field, + field_b: Field, + manifestation: RelationManifestation, +} + +impl Relation { + fn name(&self) -> String { + // TODO: must replicate behaviour of `generateRelationName` from `SchemaInferrer` + format!("{}To{}", &self.model_a.name, &self.model_b.name) + } + + fn table_name(&self) -> String { + format!("_{}", self.name()) + } + + fn is_many_to_many(&self) -> bool { + self.field_a.arity == FieldArity::List && self.field_b.arity == FieldArity::List + } +} + +#[derive(PartialEq)] +enum RelationManifestation { + Inline { + in_table_of_model: String, + column: String, + }, + Table { + model_a_column: String, + model_b_column: String, + }, +} + +fn id_field(model: &Model) -> &Field { + model.fields().next().clone().unwrap() +} + +fn related_type(field: &Field) -> Option { + match &field.field_type { + FieldType::Relation(relation_info) => { + let RelationInfo { + to, + to_field, + name, + on_delete, + } = relation_info; + Some(to.to_string()) + } + _ => None, + } +} + +fn is_scalar(field: &Field) -> bool { + match field.field_type { + FieldType::Base(_) => true, + _ => false, + } +} + +fn scalar_type(field: &Field) -> &ScalarType { + match &field.field_type { + FieldType::Base(ref scalar) => scalar, + x => panic!(format!( + "only scalar types are suported here. 
Type is {:?} on field {}", + x, field.name + )), + } +} + +fn column_type(scalar_type: &ScalarType) -> ColumnType { + match scalar_type { + ScalarType::Int => ColumnType::Int, + ScalarType::Float => ColumnType::Float, + ScalarType::Boolean => ColumnType::Boolean, + ScalarType::Enum => ColumnType::String, + ScalarType::String => ColumnType::String, + ScalarType::DateTime => ColumnType::DateTime, + ScalarType::Decimal => unimplemented!(), + } +} diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_differ.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_differ.rs new file mode 100644 index 0000000000..0e6e2fd367 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_differ.rs @@ -0,0 +1,149 @@ +use crate::sql_database_migration_steps_inferrer::wrap_as_step; +use crate::sql_migration_step::*; +use database_inspector::{Column, DatabaseSchema, Table}; + +pub struct DatabaseSchemaDiffer { + previous: DatabaseSchema, + next: DatabaseSchema, +} + +impl DatabaseSchemaDiffer { + pub fn diff(previous: DatabaseSchema, next: DatabaseSchema) -> Vec { + let differ = DatabaseSchemaDiffer { previous, next }; + differ.diff_internal() + } + + fn diff_internal(&self) -> Vec { + let mut result = Vec::new(); + result.append(&mut wrap_as_step(self.create_tables(), |x| { + SqlMigrationStep::CreateTable(x) + })); + result.append(&mut wrap_as_step(self.drop_tables(), |x| { + SqlMigrationStep::DropTable(x) + })); + result.append(&mut wrap_as_step(self.alter_tables(), |x| { + SqlMigrationStep::AlterTable(x) + })); + result + } + + fn create_tables(&self) -> Vec { + let mut result = Vec::new(); + for next_table in &self.next.tables { + if !self.previous.has_table(&next_table.name) { + let primary_columns = next_table + .indexes + .iter() + .find(|i| i.unique) + .map(|i| i.columns.clone()) + .unwrap_or(Vec::new()); + + let create = CreateTable { + 
name: next_table.name.clone(), + columns: Self::column_descriptions(&next_table.columns), + primary_columns: primary_columns, + }; + result.push(create); + } + } + result + } + + fn drop_tables(&self) -> Vec { + let mut result = Vec::new(); + for previous_table in &self.previous.tables { + if !self.next.has_table(&previous_table.name) && previous_table.name != "_Migration" { + let drop = DropTable { + name: previous_table.name.clone(), + }; + result.push(drop); + } + } + result + } + + fn alter_tables(&self) -> Vec { + let mut result = Vec::new(); + for previous_table in &self.previous.tables { + if let Some(next_table) = self.next.table(&previous_table.name) { + let mut changes = Vec::new(); + changes.append(&mut Self::drop_columns(&previous_table, &next_table)); + changes.append(&mut Self::add_columns(&previous_table, &next_table)); + changes.append(&mut Self::alter_columns(&previous_table, &next_table)); + + if !changes.is_empty() { + let update = AlterTable { + table: previous_table.name.clone(), + changes: changes, + }; + result.push(update); + } + } + } + result + } + + fn drop_columns(previous: &Table, next: &Table) -> Vec { + let mut result = Vec::new(); + for previous_column in &previous.columns { + if !next.has_column(&previous_column.name) { + let change = DropColumn { + name: previous_column.name.clone(), + }; + result.push(TableChange::DropColumn(change)); + } + } + result + } + + fn add_columns(previous: &Table, next: &Table) -> Vec { + let mut result = Vec::new(); + for next_column in &next.columns { + if !previous.has_column(&next_column.name) { + let change = AddColumn { + column: Self::column_description(next_column), + }; + result.push(TableChange::AddColumn(change)); + } + } + result + } + + fn alter_columns(previous: &Table, next: &Table) -> Vec { + let mut result = Vec::new(); + for next_column in &next.columns { + if let Some(previous_column) = previous.column(&next_column.name) { + if previous_column != next_column { + let change = 
AlterColumn { + name: previous_column.name.clone(), + column: Self::column_description(next_column), + }; + result.push(TableChange::AlterColumn(change)); + } + } + } + result + } + + fn column_descriptions(columns: &Vec) -> Vec { + columns.iter().map(Self::column_description).collect() + } + + fn column_description(column: &Column) -> ColumnDescription { + ColumnDescription { + name: column.name.clone(), + tpe: Self::convert_column_type(column.tpe), + required: column.is_required, + } + } + + fn convert_column_type(inspector_type: database_inspector::ColumnType) -> ColumnType { + match inspector_type { + database_inspector::ColumnType::Boolean => ColumnType::Boolean, + database_inspector::ColumnType::Int => ColumnType::Int, + database_inspector::ColumnType::Float => ColumnType::Float, + database_inspector::ColumnType::String => ColumnType::String, + database_inspector::ColumnType::DateTime => ColumnType::DateTime, + } + } +} diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/lib.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/lib.rs new file mode 100644 index 0000000000..3e60e0175f --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/lib.rs @@ -0,0 +1,111 @@ +mod database_schema_calculator; +mod database_schema_differ; +mod sql_database_migration_steps_inferrer; +mod sql_database_step_applier; +mod sql_destructive_changes_checker; +mod sql_migration_persistence; +mod sql_migration_step; + +use barrel; +use barrel::backend::Sqlite; +use barrel::types; +use database_inspector::DatabaseInspectorImpl; +use migration_connector::*; +use rusqlite::{Connection, NO_PARAMS}; +use sql_database_migration_steps_inferrer::*; +use sql_database_step_applier::*; +use sql_destructive_changes_checker::*; +use sql_migration_persistence::*; +pub use sql_migration_step::*; +use std::sync::Arc; + +#[allow(unused, dead_code)] +pub struct SqlMigrationConnector { + schema_name: String, 
+ migration_persistence: Arc, + sql_database_migration_steps_inferrer: Arc>, + database_step_applier: Arc>, + destructive_changes_checker: Arc>, +} + +impl SqlMigrationConnector { + // FIXME: this must take the config as a param at some point + pub fn new(schema_name: String) -> SqlMigrationConnector { + let migration_persistence = Arc::new(SqlMigrationPersistence::new(Self::new_conn(&schema_name))); + let sql_database_migration_steps_inferrer = Arc::new(SqlDatabaseMigrationStepsInferrer { + inspector: Box::new(DatabaseInspectorImpl::new(Self::new_conn(&schema_name))), + schema_name: schema_name.to_string(), + }); + let database_step_applier = Arc::new(SqlDatabaseStepApplier::new( + Self::new_conn(&schema_name), + schema_name.clone(), + )); + let destructive_changes_checker = Arc::new(SqlDestructiveChangesChecker {}); + SqlMigrationConnector { + schema_name, + migration_persistence, + sql_database_migration_steps_inferrer, + database_step_applier, + destructive_changes_checker, + } + } + + fn new_conn(name: &str) -> Connection { + let conn = Connection::open_in_memory().unwrap(); + let server_root = std::env::var("SERVER_ROOT").expect("Env var SERVER_ROOT required but not found."); + let path = format!("{}/db", server_root); + let database_file_path = format!("{}/{}.db", path, name); + conn.execute("ATTACH DATABASE ? 
AS ?", &[database_file_path.as_ref(), name]) + .unwrap(); + conn + } +} + +impl MigrationConnector for SqlMigrationConnector { + type DatabaseMigrationStep = SqlMigrationStep; + + fn initialize(&self) { + let conn = Self::new_conn(&self.schema_name); + let mut m = barrel::Migration::new().schema(self.schema_name.clone()); + m.create_table_if_not_exists("_Migration", |t| { + t.add_column("revision", types::primary()); + t.add_column("name", types::text()); + t.add_column("datamodel", types::text()); + t.add_column("status", types::text()); + t.add_column("applied", types::integer()); + t.add_column("rolled_back", types::integer()); + t.add_column("datamodel_steps", types::text()); + t.add_column("database_steps", types::text()); + t.add_column("errors", types::text()); + t.add_column("started_at", types::date()); + t.add_column("finished_at", types::date().nullable(true)); + }); + + let sql_str = dbg!(m.make::()); + + dbg!(conn.execute(&sql_str, NO_PARAMS).unwrap()); + } + + fn reset(&self) { + let conn = Self::new_conn(&self.schema_name); + let sql_str = format!(r#"DELETE FROM "{}"."_Migration";"#, self.schema_name); + + dbg!(conn.execute(&sql_str, NO_PARAMS).unwrap()); + } + + fn migration_persistence(&self) -> Arc { + Arc::clone(&self.migration_persistence) + } + + fn database_steps_inferrer(&self) -> Arc> { + Arc::clone(&self.sql_database_migration_steps_inferrer) + } + + fn database_step_applier(&self) -> Arc> { + Arc::clone(&self.database_step_applier) + } + + fn destructive_changes_checker(&self) -> Arc> { + Arc::clone(&self.destructive_changes_checker) + } +} diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_database_migration_steps_inferrer.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_database_migration_steps_inferrer.rs new file mode 100644 index 0000000000..fb6f797dd5 --- /dev/null +++ 
b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_database_migration_steps_inferrer.rs @@ -0,0 +1,99 @@ +use crate::database_schema_calculator::DatabaseSchemaCalculator; +use crate::database_schema_differ::DatabaseSchemaDiffer; +use crate::sql_migration_step::*; +use database_inspector::DatabaseInspector; +use datamodel::*; +use itertools::{Either, Itertools}; +use migration_connector::steps::*; +use migration_connector::*; +use std::collections::HashMap; + +pub struct SqlDatabaseMigrationStepsInferrer { + pub inspector: Box, + pub schema_name: String, +} + +#[allow(unused, dead_code)] +impl DatabaseMigrationStepsInferrer for SqlDatabaseMigrationStepsInferrer { + fn infer(&self, previous: &Schema, next: &Schema, steps: Vec) -> Vec { + let current_database_schema = self.inspector.introspect(&self.schema_name); + let expected_database_schema = DatabaseSchemaCalculator::calculate(next); + let steps = DatabaseSchemaDiffer::diff(current_database_schema, expected_database_schema); + steps + // let creates: Vec = steps + // .into_iter() + // .flat_map(|step| match step { + // MigrationStep::CreateModel(x) => Some(CreateModelOrField::Model(x)), + // MigrationStep::CreateField(x) => Some(CreateModelOrField::Field(x)), + // _ => None, + // }) + // .collect(); + // let (create_models, create_fields): (Vec, Vec) = + // creates.into_iter().partition_map(|step| match step { + // CreateModelOrField::Model(x) => Either::Left(x), + // CreateModelOrField::Field(x) => Either::Right(x), + // }); + // let mut create_fields_map: HashMap> = HashMap::new(); + // for (model_name, create_fieldses) in &create_fields.into_iter().group_by(|cf| cf.model.clone()) { + // create_fields_map.insert(model_name, create_fieldses.into_iter().collect()); + // } + + // let mut grouped_steps: HashMap> = HashMap::new(); + + // for cm in create_models { + // let cfs = create_fields_map.remove(&cm.name).unwrap_or(Vec::new()); + // grouped_steps.insert(cm, cfs); + // } + + // let 
mut create_tables: Vec = Vec::new(); + // for (create_model, create_fields) in grouped_steps { + // let id_column = create_fields.iter().find(|f| f.id.is_some()).map(|f| f.db_name()); + // let columns = create_fields + // .into_iter() + // .map(|cf| ColumnDescription { + // name: cf.name, + // tpe: column_type(cf.tpe), + // required: cf.arity == FieldArity::Required, + // }) + // .collect(); + // let primary_columns = id_column.map(|c| vec![c]).unwrap_or(Vec::new()); + + // let create_table = CreateTable { + // name: create_model.name, + // columns: columns, + // primary_columns: primary_columns, + // }; + // create_tables.push(create_table); + // } + + // let mut sql_steps = Vec::new(); + // sql_steps.append(&mut wrap_as_step(create_tables, |x| SqlMigrationStep::CreateTable(x))); + // sql_steps + } +} + +fn column_type(ft: FieldType) -> ColumnType { + match ft { + FieldType::Base(scalar) => match scalar { + ScalarType::Boolean => ColumnType::Boolean, + ScalarType::String => ColumnType::String, + ScalarType::Int => ColumnType::Int, + ScalarType::Float => ColumnType::Float, + ScalarType::DateTime => ColumnType::DateTime, + _ => unimplemented!(), + }, + _ => panic!("Only scalar types are supported here"), + } +} + +pub fn wrap_as_step(steps: Vec, mut wrap_fn: F) -> Vec +where + F: FnMut(T) -> SqlMigrationStep, +{ + steps.into_iter().map(|x| wrap_fn(x)).collect() +} + +enum CreateModelOrField { + Model(CreateModel), + Field(CreateField), +} diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_database_step_applier.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_database_step_applier.rs new file mode 100644 index 0000000000..ca54c1cff1 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_database_step_applier.rs @@ -0,0 +1,70 @@ +use crate::*; +use barrel::Migration as BarrelMigration; +use datamodel::ScalarType; +use migration_connector::*; +use 
rusqlite::{Connection, NO_PARAMS}; + +pub struct SqlDatabaseStepApplier { + connection: Connection, + schema_name: String, +} + +impl SqlDatabaseStepApplier { + pub fn new(connection: Connection, schema_name: String) -> Self { + SqlDatabaseStepApplier { + connection, + schema_name, + } + } +} + +#[allow(unused, dead_code)] +impl DatabaseMigrationStepApplier for SqlDatabaseStepApplier { + fn apply(&self, step: SqlMigrationStep) { + let mut migration = BarrelMigration::new().schema(self.schema_name.clone()); + + match dbg!(step) { + SqlMigrationStep::CreateTable(CreateTable { + name, + columns, + primary_columns, + }) => { + migration.create_table(name, move |t| { + for column in columns.clone() { + let tpe = column_description_to_barrel_type(&column); + t.add_column(column.name, tpe); + } + if primary_columns.len() > 0 { + let column_names: Vec = primary_columns + .clone() + .into_iter() + .map(|col| format!("\"{}\"", col)) + .collect(); + t.inject_custom(format!("PRIMARY KEY ({})", column_names.join(","))); + } + }); + } + x => panic!(format!("{:?} not implemented yet here", x)), + }; + let sql_string = dbg!(self.make_sql_string(migration)); + dbg!(self.connection.execute(&sql_string, NO_PARAMS)).unwrap(); + } +} + +impl SqlDatabaseStepApplier { + fn make_sql_string(&self, migration: BarrelMigration) -> String { + // TODO: this should pattern match on the connector type once we have this information available + migration.make::() + } +} + +fn column_description_to_barrel_type(column_description: &ColumnDescription) -> barrel::types::Type { + let tpe = match column_description.tpe { + ColumnType::Boolean => barrel::types::boolean(), + ColumnType::DateTime => barrel::types::date(), + ColumnType::Float => barrel::types::float(), + ColumnType::Int => barrel::types::integer(), + ColumnType::String => barrel::types::text(), + }; + tpe.nullable(!column_description.required) +} diff --git 
a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_destructive_changes_checker.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_destructive_changes_checker.rs new file mode 100644 index 0000000000..75f27ec2f5 --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_destructive_changes_checker.rs @@ -0,0 +1,11 @@ +use crate::SqlMigrationStep; +use migration_connector::*; + +pub struct SqlDestructiveChangesChecker {} + +#[allow(unused, dead_code)] +impl DestructiveChangesChecker for SqlDestructiveChangesChecker { + fn check(&self, steps: Vec) -> Vec { + vec![] + } +} diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs new file mode 100644 index 0000000000..40dbbea72b --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs @@ -0,0 +1,148 @@ +#[allow(unused, dead_code)] +use chrono::*; +use datamodel::Schema; +use migration_connector::*; +use prisma_query::{ast::*, visitor::*}; +use rusqlite::{Connection, Row}; +use serde_json; + +pub struct SqlMigrationPersistence { + connection: Connection, +} + +impl SqlMigrationPersistence { + pub fn new(conn: Connection) -> SqlMigrationPersistence { + SqlMigrationPersistence { connection: conn } + } +} + +#[allow(unused, dead_code)] +impl MigrationPersistence for SqlMigrationPersistence { + fn last(&self) -> Option { + let conditions = STATUS_COLUMN.equals("Success"); + let query = Select::from_table(TABLE_NAME) + .so_that(conditions) + .order_by(REVISION_COLUMN.descend()); + let (sql_str, params) = Sqlite::build(query); + + let result = self.connection.query_row(&sql_str, params, parse_row); + result.ok() + } + + fn load_all(&self) -> Vec { + let query = Select::from_table(TABLE_NAME); + let (sql_str, 
params) = dbg!(Sqlite::build(query)); + + let mut stmt = self.connection.prepare_cached(&sql_str).unwrap(); + let mut rows = stmt.query(params).unwrap(); + let mut result = Vec::new(); + + while let Some(row) = rows.next() { + result.push(parse_row(&row.unwrap())); + } + + result + } + + fn create(&self, migration: Migration) -> Migration { + let finished_at_value = match migration.finished_at { + Some(x) => x.timestamp_millis().into(), + None => ParameterizedValue::Null, + }; + let mut cloned = migration.clone(); + // let status_value = serde_json::to_string(&migration.status).unwrap(); + let model_steps_json = serde_json::to_string(&migration.datamodel_steps).unwrap(); + let database_steps_json = migration.database_steps; + let errors_json = serde_json::to_string(&migration.errors).unwrap(); + + let query = Insert::single_into(TABLE_NAME) + .value(NAME_COLUMN, migration.name) + .value(DATAMODEL_COLUMN, "".to_string()) // todo: serialize datamodel + .value(STATUS_COLUMN, migration.status.code()) + .value(APPLIED_COLUMN, migration.applied) + .value(ROLLED_BACK_COLUMN, migration.rolled_back) + .value(DATAMODEL_STEPS_COLUMN, model_steps_json) + .value(DATABASE_STEPS_COLUMN, database_steps_json) + .value(ERRORS_COLUMN, errors_json) + .value( + STARTED_AT_COLUMN, + ParameterizedValue::Integer(migration.started_at.timestamp_millis()), + ) + .value(FINISHED_AT_COLUMN, finished_at_value); + + let (sql_str, params) = dbg!(Sqlite::build(query)); + + let result = dbg!(self.connection.execute(&sql_str, params)); + + cloned.revision = self.connection.last_insert_rowid() as usize; + cloned + } + + fn update(&self, params: &MigrationUpdateParams) { + let finished_at_value = match params.finished_at { + Some(x) => x.timestamp_millis().into(), + None => ParameterizedValue::Null, + }; + let errors_json = serde_json::to_string(¶ms.errors).unwrap(); + let query = Update::table(TABLE_NAME) + .set(STATUS_COLUMN, params.status.code()) + .set(APPLIED_COLUMN, params.applied) + 
.set(ROLLED_BACK_COLUMN, params.rolled_back) + .set(ERRORS_COLUMN, errors_json) + .set(FINISHED_AT_COLUMN, finished_at_value) + .so_that( + NAME_COLUMN + .equals(params.name.clone()) + .and(REVISION_COLUMN.equals(params.revision)), + ); + + let (sql_str, params) = dbg!(Sqlite::build(query)); + + let result = dbg!(self.connection.execute(&sql_str, params)); + } +} + +fn timestamp_to_datetime(timestamp: i64) -> DateTime { + let nsecs = ((timestamp % 1000) * 1_000_000) as u32; + let secs = (timestamp / 1000) as i64; + let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); + let datetime: DateTime = DateTime::from_utc(naive, Utc); + + datetime +} + +fn parse_row(row: &Row) -> Migration { + let revision: u32 = row.get(REVISION_COLUMN); + let applied: u32 = row.get(APPLIED_COLUMN); + let rolled_back: u32 = row.get(ROLLED_BACK_COLUMN); + let errors_json: String = row.get(ERRORS_COLUMN); + let errors: Vec = serde_json::from_str(&errors_json).unwrap(); + let finished_at: Option = row.get(FINISHED_AT_COLUMN); + let database_steps_json: String = row.get(DATABASE_STEPS_COLUMN); + Migration { + name: row.get(NAME_COLUMN), + revision: revision as usize, + datamodel: Schema::empty(), + status: MigrationStatus::from_str(row.get(STATUS_COLUMN)), + applied: applied as usize, + rolled_back: rolled_back as usize, + datamodel_steps: Vec::new(), + database_steps: database_steps_json, + errors: errors, + started_at: timestamp_to_datetime(row.get(STARTED_AT_COLUMN)), + finished_at: finished_at.map(timestamp_to_datetime), + } +} + +static TABLE_NAME: &str = "_Migration"; +static NAME_COLUMN: &str = "name"; +static REVISION_COLUMN: &str = "revision"; +static DATAMODEL_COLUMN: &str = "datamodel"; +static STATUS_COLUMN: &str = "status"; +static APPLIED_COLUMN: &str = "applied"; +static ROLLED_BACK_COLUMN: &str = "rolled_back"; +static DATAMODEL_STEPS_COLUMN: &str = "datamodel_steps"; +static DATABASE_STEPS_COLUMN: &str = "database_steps"; +static ERRORS_COLUMN: &str = "errors"; 
+static STARTED_AT_COLUMN: &str = "started_at"; +static FINISHED_AT_COLUMN: &str = "finished_at"; diff --git a/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_migration_step.rs b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_migration_step.rs new file mode 100644 index 0000000000..5e0f6dd05a --- /dev/null +++ b/server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/sql_migration_step.rs @@ -0,0 +1,69 @@ +use datamodel::ScalarType; +use migration_connector::DatabaseMigrationStepExt; +use serde::Serialize; + +impl DatabaseMigrationStepExt for SqlMigrationStep {} + +#[derive(Debug, Serialize)] +pub enum SqlMigrationStep { + CreateTable(CreateTable), + AlterTable(AlterTable), + DropTable(DropTable), +} + +#[derive(Debug, Serialize)] +pub struct CreateTable { + pub name: String, + pub columns: Vec, + pub primary_columns: Vec, +} + +#[derive(Debug, Serialize)] +pub struct DropTable { + pub name: String, +} + +#[derive(Debug, Serialize)] +pub struct AlterTable { + pub table: String, + pub changes: Vec, +} + +#[derive(Debug, Serialize)] +pub enum TableChange { + AddColumn(AddColumn), + AlterColumn(AlterColumn), + DropColumn(DropColumn), +} + +#[derive(Debug, Serialize)] +pub struct AddColumn { + pub column: ColumnDescription, +} + +#[derive(Debug, Serialize)] +pub struct DropColumn { + pub name: String, +} + +#[derive(Debug, Serialize)] +pub struct AlterColumn { + pub name: String, + pub column: ColumnDescription, +} + +#[derive(Debug, Serialize, Clone)] +pub struct ColumnDescription { + pub name: String, + pub tpe: ColumnType, + pub required: bool, +} + +#[derive(Debug, Copy, PartialEq, Eq, Clone, Serialize)] +pub enum ColumnType { + Int, + Float, + Boolean, + String, + DateTime, +} diff --git a/server/prisma-rs/migration-engine/Cargo.lock b/server/prisma-rs/migration-engine/core/Cargo.lock similarity index 100% rename from server/prisma-rs/migration-engine/Cargo.lock rename to 
server/prisma-rs/migration-engine/core/Cargo.lock diff --git a/server/prisma-rs/migration-engine/core/Cargo.toml b/server/prisma-rs/migration-engine/core/Cargo.toml new file mode 100644 index 0000000000..86f42d6bf7 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "migration-core" +version = "0.1.0" +authors = ["Marcus Böhm "] +edition = "2018" + +[dependencies] +migration-connector = { path = "../connectors/migration-connector" } +nullable = { path = "../../libs/nullable" } +database-inspector = { path = "../../libs/database-inspector" } +datamodel = { path = "../../libs/datamodel" } +prisma-models = { path = "../../prisma-models" } +chrono = { version = "0.4", features = ["serde"] } +jsonrpc-core = "10.1.0" +serde = "1.0" +serde_json = "1.0" +serde_derive = "1.0" +boolinator = "2.4.0" +sql-migration-connector = { path = "../connectors/sql-migration-connector" } + +[[bin]] +name = "migration-engine-rpc" +path = "src/bin/rpc_api_bin.rs" + +[[bin]] +name = "infer-migrations-steps" +path = "src/bin/infer_migration_steps_bin.rs" + +[[bin]] +name = "apply-migration" +path = "src/bin/apply_migration_bin.rs" diff --git a/server/prisma-rs/migration-engine/core/datamodel.prisma b/server/prisma-rs/migration-engine/core/datamodel.prisma new file mode 100644 index 0000000000..aff9f07d3f --- /dev/null +++ b/server/prisma-rs/migration-engine/core/datamodel.prisma @@ -0,0 +1,20 @@ +model Blog { + id: String @primary + name: String + viewCount: Int + posts: Post[] + authors: Author[] +} + +model Author { + id: String @primary + name: String? 
+ authors: Blog[] +} + +model Post { + id: Int @primary + title: String + tags: String[] + blog: Blog +} \ No newline at end of file diff --git a/server/prisma-rs/migration-engine/rpc_examples/applyNextMigrationStep.json b/server/prisma-rs/migration-engine/core/rpc_examples/applyNextMigrationStep.json similarity index 100% rename from server/prisma-rs/migration-engine/rpc_examples/applyNextMigrationStep.json rename to server/prisma-rs/migration-engine/core/rpc_examples/applyNextMigrationStep.json diff --git a/server/prisma-rs/migration-engine/rpc_examples/startMigration.json b/server/prisma-rs/migration-engine/core/rpc_examples/startMigration.json similarity index 100% rename from server/prisma-rs/migration-engine/rpc_examples/startMigration.json rename to server/prisma-rs/migration-engine/core/rpc_examples/startMigration.json diff --git a/server/prisma-rs/migration-engine/rpc_examples/suggestMigrationSteps.json b/server/prisma-rs/migration-engine/core/rpc_examples/suggestMigrationSteps.json similarity index 100% rename from server/prisma-rs/migration-engine/rpc_examples/suggestMigrationSteps.json rename to server/prisma-rs/migration-engine/core/rpc_examples/suggestMigrationSteps.json diff --git a/server/prisma-rs/migration-engine/core/src/bin/apply_migration_bin.rs b/server/prisma-rs/migration-engine/core/src/bin/apply_migration_bin.rs new file mode 100644 index 0000000000..a2b3112915 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/bin/apply_migration_bin.rs @@ -0,0 +1,25 @@ +use migration_connector::steps::*; +use migration_core::commands::apply_migration::*; +use migration_core::commands::command::*; +use migration_core::migration_engine::MigrationEngine; +use std::io::{self, Read}; + +fn main() { + let mut buffer = String::new(); + io::stdin().read_to_string(&mut buffer).unwrap(); + + let steps: Vec = serde_json::from_str(&buffer).expect("deserializing the migration steps failed"); + + let input = ApplyMigrationInput { + project_info: 
"the-project-info".to_string(), + migration_id: "the-migration-id".to_string(), + steps: steps, + force: false, + }; + let cmd = ApplyMigrationCommand::new(input); + let engine = MigrationEngine::new(); + let output = cmd.execute(engine); + + let json = serde_json::to_string_pretty(&output).unwrap(); + println!("{}", json) +} diff --git a/server/prisma-rs/migration-engine/core/src/bin/infer_migration_steps_bin.rs b/server/prisma-rs/migration-engine/core/src/bin/infer_migration_steps_bin.rs new file mode 100644 index 0000000000..4d1c8dc1df --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/bin/infer_migration_steps_bin.rs @@ -0,0 +1,19 @@ +use migration_core::commands::command::*; +use migration_core::commands::infer_migration_steps::*; +use migration_core::migration_engine::MigrationEngine; +use std::fs; + +fn main() { + let data_model = fs::read_to_string("datamodel.prisma").unwrap(); + let input = InferMigrationStepsInput { + project_info: "the-project-info".to_string(), + migration_id: "the-migration-id".to_string(), + data_model: data_model, + }; + let cmd = InferMigrationStepsCommand::new(input); + let engine = MigrationEngine::new(); + let output = cmd.execute(engine); + + let json = serde_json::to_string_pretty(&output).unwrap(); + println!("{}", json) +} diff --git a/server/prisma-rs/migration-engine/src/bin/rpc_api_bin.rs b/server/prisma-rs/migration-engine/core/src/bin/rpc_api_bin.rs similarity index 94% rename from server/prisma-rs/migration-engine/src/bin/rpc_api_bin.rs rename to server/prisma-rs/migration-engine/core/src/bin/rpc_api_bin.rs index 4b6ea36383..6ba92a94ec 100644 --- a/server/prisma-rs/migration-engine/src/bin/rpc_api_bin.rs +++ b/server/prisma-rs/migration-engine/core/src/bin/rpc_api_bin.rs @@ -1,4 +1,4 @@ -use migration_engine::rpc_api::RpcApi; +use migration_core::rpc_api::RpcApi; //use serde::de::DeserializeOwned; //use serde::Serialize; diff --git a/server/prisma-rs/migration-engine/core/src/commands/apply_migration.rs 
b/server/prisma-rs/migration-engine/core/src/commands/apply_migration.rs new file mode 100644 index 0000000000..97740c3bd4 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/apply_migration.rs @@ -0,0 +1,74 @@ +use crate::commands::command::MigrationCommand; +use crate::migration_engine::MigrationEngine; +use datamodel::dml::Schema; +use migration_connector::*; + +pub struct ApplyMigrationCommand { + input: ApplyMigrationInput, +} + +impl MigrationCommand for ApplyMigrationCommand { + type Input = ApplyMigrationInput; + type Output = ApplyMigrationOutput; + + fn new(input: Self::Input) -> Box { + Box::new(ApplyMigrationCommand { input }) + } + + fn execute(&self, engine: Box) -> Self::Output { + println!("{:?}", self.input); + let connector = engine.connector(); + let current_data_model = connector + .migration_persistence() + .last() + .map(|m| m.datamodel) + .unwrap_or(Schema::empty()); + + let next_data_model = engine + .datamodel_calculator() + .infer(¤t_data_model, self.input.steps.clone()); + + let database_migration_steps = + connector + .database_steps_inferrer() + .infer(¤t_data_model, &next_data_model, self.input.steps.clone()); + + let database_steps_json = serde_json::to_value(&database_migration_steps).unwrap(); + + let mut migration = Migration::new(self.input.migration_id.clone()); + migration.datamodel_steps = self.input.steps.clone(); + migration.database_steps = database_steps_json.to_string(); + let saved_migration = connector.migration_persistence().create(migration); + + connector + .migration_applier() + .apply_steps(saved_migration, database_migration_steps); + + ApplyMigrationOutput { + datamodel_steps: self.input.steps.clone(), + database_steps: database_steps_json, + errors: Vec::new(), + warnings: Vec::new(), + general_errors: Vec::new(), + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct ApplyMigrationInput { + pub project_info: String, + pub migration_id: 
String, + pub steps: Vec, + pub force: bool, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ApplyMigrationOutput { + pub datamodel_steps: Vec, + pub database_steps: serde_json::Value, + pub warnings: Vec, + pub errors: Vec, + pub general_errors: Vec, +} diff --git a/server/prisma-rs/migration-engine/core/src/commands/apply_next_migration_step.rs b/server/prisma-rs/migration-engine/core/src/commands/apply_next_migration_step.rs new file mode 100644 index 0000000000..23386f227d --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/apply_next_migration_step.rs @@ -0,0 +1,58 @@ +// use crate::commands::command::MigrationCommand; +// use chrono::prelude::*; + +// pub struct ApplyNextMigrationStepCommand { +// input: ApplyNextMigrationStepInput, +// } + +// impl MigrationCommand for ApplyNextMigrationStepCommand { +// type Input = ApplyNextMigrationStepInput; +// type Output = ApplyNextMigrationStepOutput; + +// fn new(input: Self::Input) -> Box { +// Box::new(ApplyNextMigrationStepCommand { input }) +// } + +// fn execute(&self) -> Self::Output { +// println!("{:?}", self.input); +// let response = ApplyNextMigrationStepOutput { +// status: MigrationStatus::InProgress, +// steps: 3, +// applied: 2, +// rolled_back: 0, +// errors: vec![], +// started_at: Utc::now(), +// updated_at: Utc::now(), +// }; +// response +// } +// } + +// #[derive(Debug, Deserialize)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct ApplyNextMigrationStepInput { +// pub project: String, +// } + +// #[derive(Debug, Serialize)] +// #[serde(rename_all = "camelCase")] +// pub struct ApplyNextMigrationStepOutput { +// pub status: MigrationStatus, +// pub steps: i32, +// pub applied: i32, +// pub rolled_back: i32, +// pub errors: Vec, +// pub started_at: DateTime, +// pub updated_at: DateTime, +// } + +// // TODO: use the one defined in the connector interface instead +// #[derive(Debug, Serialize)] +// pub enum 
MigrationStatus { +// Pending, +// InProgress, +// Success, +// RollingBack, +// RollbackSuccess, +// RollbackFailure, +// } diff --git a/server/prisma-rs/migration-engine/src/commands/command.rs b/server/prisma-rs/migration-engine/core/src/commands/command.rs similarity index 62% rename from server/prisma-rs/migration-engine/src/commands/command.rs rename to server/prisma-rs/migration-engine/core/src/commands/command.rs index 7a34d18de5..208fa47dbd 100644 --- a/server/prisma-rs/migration-engine/src/commands/command.rs +++ b/server/prisma-rs/migration-engine/core/src/commands/command.rs @@ -1,3 +1,4 @@ +use crate::migration_engine::MigrationEngine; use serde::de::DeserializeOwned; use serde::Serialize; @@ -6,5 +7,6 @@ pub trait MigrationCommand { type Output: Serialize; fn new(input: Self::Input) -> Box; - fn execute(&self) -> Self::Output; + + fn execute(&self, engine: Box) -> Self::Output; } diff --git a/server/prisma-rs/migration-engine/core/src/commands/infer_migration_steps.rs b/server/prisma-rs/migration-engine/core/src/commands/infer_migration_steps.rs new file mode 100644 index 0000000000..ef3671c5d4 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/infer_migration_steps.rs @@ -0,0 +1,67 @@ +use crate::commands::command::MigrationCommand; +use crate::migration_engine::MigrationEngine; +use datamodel::dml::*; +use migration_connector::steps::*; +use migration_connector::*; + +pub struct InferMigrationStepsCommand { + input: InferMigrationStepsInput, +} + +impl MigrationCommand for InferMigrationStepsCommand { + type Input = InferMigrationStepsInput; + type Output = InferMigrationStepsOutput; + + fn new(input: Self::Input) -> Box { + Box::new(InferMigrationStepsCommand { input }) + } + + fn execute(&self, engine: Box) -> Self::Output { + let connector = engine.connector(); + let current_data_model = connector + .migration_persistence() + .last() + .map(|m| m.datamodel) + .unwrap_or(Schema::empty()); + + let next_data_model = 
engine.parse_datamodel(&self.input.data_model); + + let model_migration_steps = engine + .datamodel_migration_steps_inferrer() + .infer(current_data_model.clone(), next_data_model.clone()); + + let database_migration_steps = connector.database_steps_inferrer().infer( + &current_data_model, + &next_data_model, + model_migration_steps.clone(), + ); + + let database_steps_json = serde_json::to_value(&database_migration_steps).unwrap(); + + InferMigrationStepsOutput { + datamodel_steps: model_migration_steps, + database_steps: database_steps_json, + errors: vec![], + warnings: vec![], + general_errors: vec![], + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct InferMigrationStepsInput { + pub project_info: String, + pub migration_id: String, + pub data_model: String, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct InferMigrationStepsOutput { + pub datamodel_steps: Vec, + pub database_steps: serde_json::Value, + pub warnings: Vec, + pub errors: Vec, + pub general_errors: Vec, +} diff --git a/server/prisma-rs/migration-engine/core/src/commands/list_migrations.rs b/server/prisma-rs/migration-engine/core/src/commands/list_migrations.rs new file mode 100644 index 0000000000..bff3381b99 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/list_migrations.rs @@ -0,0 +1,36 @@ +use crate::commands::command::MigrationCommand; +use crate::migration_engine::MigrationEngine; +use migration_connector::steps::*; +use migration_connector::*; + +pub struct ListMigrationStepsCommand { + input: ListMigrationStepsInput, +} + +impl MigrationCommand for ListMigrationStepsCommand { + type Input = ListMigrationStepsInput; + type Output = Vec; + + fn new(input: Self::Input) -> Box { + Box::new(ListMigrationStepsCommand { input }) + } + + fn execute(&self, engine: Box) -> Self::Output { + println!("{:?}", self.input); + vec![] + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all =
"camelCase", deny_unknown_fields)] +pub struct ListMigrationStepsInput { + pub project_info: String, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ListMigrationStepsOutput { + pub id: String, + pub steps: Vec, + pub status: MigrationStatus, +} diff --git a/server/prisma-rs/migration-engine/core/src/commands/migration_progress.rs b/server/prisma-rs/migration-engine/core/src/commands/migration_progress.rs new file mode 100644 index 0000000000..65760294c6 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/migration_progress.rs @@ -0,0 +1,49 @@ +use crate::commands::command::MigrationCommand; +use crate::migration_engine::MigrationEngine; +use chrono::*; +use migration_connector::*; + +pub struct MigrationProgressCommand { + input: MigrationProgressInput, +} + +impl MigrationCommand for MigrationProgressCommand { + type Input = MigrationProgressInput; + type Output = MigrationProgressOutput; + + fn new(input: Self::Input) -> Box { + Box::new(MigrationProgressCommand { input }) + } + + fn execute(&self, engine: Box) -> Self::Output { + println!("{:?}", self.input); + MigrationProgressOutput { + state: MigrationStatus::Pending, + steps: 1, + applied: 0, + rolled_back: 0, + errors: vec![], + started_at: Utc::now(), + finished_at: Utc::now(), + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct MigrationProgressInput { + pub project_info: String, + pub migration_id: String, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct MigrationProgressOutput { + state: MigrationStatus, + steps: u32, + applied: u32, + rolled_back: u32, + errors: Vec, + started_at: DateTime, + finished_at: DateTime, +} diff --git a/server/prisma-rs/migration-engine/src/commands/mod.rs b/server/prisma-rs/migration-engine/core/src/commands/mod.rs similarity index 70% rename from server/prisma-rs/migration-engine/src/commands/mod.rs rename to 
server/prisma-rs/migration-engine/core/src/commands/mod.rs index a8e1756b40..b107c54b2f 100644 --- a/server/prisma-rs/migration-engine/src/commands/mod.rs +++ b/server/prisma-rs/migration-engine/core/src/commands/mod.rs @@ -1,7 +1,12 @@ +pub mod apply_migration; pub mod apply_next_migration_step; pub mod command; +pub mod infer_migration_steps; +pub mod list_migrations; +pub mod migration_progress; pub mod start_migration; pub mod suggest_migration_step; +pub mod unapply_migration; #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] diff --git a/server/prisma-rs/migration-engine/core/src/commands/start_migration.rs b/server/prisma-rs/migration-engine/core/src/commands/start_migration.rs new file mode 100644 index 0000000000..728dcc91c0 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/start_migration.rs @@ -0,0 +1,45 @@ +// use super::DataModelWarningOrError; +// use crate::commands::command::MigrationCommand; +// use migration_connector::steps::*; + +// pub struct StartMigrationCommand { +// input: StartMigrationInput, +// } + +// impl MigrationCommand for StartMigrationCommand { +// type Input = StartMigrationInput; +// type Output = StartMigrationOutput; + +// fn new( +// input: StartMigrationInput, +// connector: Box>, +// ) -> Box { +// Box::new(StartMigrationCommand { input }) +// } + +// fn execute(&self) -> StartMigrationOutput { +// println!("{:?}", self.input); +// let response = StartMigrationOutput { +// data_model_errors: vec![], +// data_model_warnings: vec![], +// general_errors: vec![], +// }; +// response +// } +// } + +// #[derive(Debug, Deserialize)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct StartMigrationInput { +// pub project: String, +// pub steps: Vec, +// pub data_model: String, +// } + +// #[derive(Debug, Serialize)] +// #[serde(rename_all = "camelCase")] +// pub struct StartMigrationOutput { +// pub data_model_errors: Vec, +// pub data_model_warnings: Vec, +// pub 
general_errors: Vec, +// } diff --git a/server/prisma-rs/migration-engine/core/src/commands/suggest_migration_step.rs b/server/prisma-rs/migration-engine/core/src/commands/suggest_migration_step.rs new file mode 100644 index 0000000000..027e6e4b10 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/suggest_migration_step.rs @@ -0,0 +1,71 @@ +// use super::DataModelWarningOrError; +// use crate::commands::command::MigrationCommand; +// use crate::migration::migration_steps_inferrer::{MigrationStepsInferrer, MigrationStepsInferrerImpl}; +// use crate::migration::schema_inferer::*; +// use database_inspector::{DatabaseInspector, EmptyDatabaseInspectorImpl}; +// use migration_connector::steps::*; + +// pub struct SuggestMigrationStepsCommand { +// input: SuggestMigrationStepsInput, +// } + +// impl MigrationCommand for SuggestMigrationStepsCommand { +// type Input = SuggestMigrationStepsInput; +// type Output = SuggestMigrationStepsOutput; + +// fn new( +// input: Self::Input, +// connector: Box>, +// ) -> Box { +// Box::new(SuggestMigrationStepsCommand { input }) +// } + +// fn execute(&self) -> Self::Output { +// let database_schema = EmptyDatabaseInspectorImpl::inspect("foo".to_string()); +// let schema = LegacySchemaInferer::infer(self.input.data_model.clone()); +// let steps = MigrationStepsInferrerImpl::infer(&schema, &database_schema); +// // inferrer.infer("sjka0"); +// // let warning = DataModelWarningOrError { +// // tpe: "Blog".to_owned(), +// // field: Some("title".to_owned()), +// // message: "This is danger".to_owned(), +// // }; +// // let steps = vec![ +// // MigrationStep::CreateModel(CreateModel { +// // name: "Blog".to_owned(), +// // db_name: None, +// // embedded: None, +// // }), +// // MigrationStep::UpdateModel(UpdateModel { +// // name: "Blog".to_owned(), +// // new_name: None, +// // db_name: Some(Null), +// // embedded: Some(true), +// // }), +// // MigrationStep::DeleteModel(DeleteModel { +// // name: "Post".to_owned(), 
+// // }), +// // ]; + +// SuggestMigrationStepsOutput { +// steps: steps, +// errors: vec![], +// warnings: vec![], +// } +// } +// } + +// #[derive(Debug, Deserialize)] +// #[serde(rename_all = "camelCase", deny_unknown_fields)] +// pub struct SuggestMigrationStepsInput { +// pub project: String, +// pub data_model: String, +// } + +// #[derive(Debug, Serialize)] +// #[serde(rename_all = "camelCase")] +// pub struct SuggestMigrationStepsOutput { +// pub steps: Vec, +// pub errors: Vec, +// pub warnings: Vec, +// } diff --git a/server/prisma-rs/migration-engine/core/src/commands/unapply_migration.rs b/server/prisma-rs/migration-engine/core/src/commands/unapply_migration.rs new file mode 100644 index 0000000000..52fe9db73a --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/commands/unapply_migration.rs @@ -0,0 +1,46 @@ +use super::list_migrations::ListMigrationStepsOutput; +use crate::commands::command::MigrationCommand; +use crate::migration_engine::MigrationEngine; +use migration_connector::*; + +pub struct UnapplyMigrationCommand { + input: UnapplyMigrationInput, +} + +impl MigrationCommand for UnapplyMigrationCommand { + type Input = UnapplyMigrationInput; + type Output = UnapplyMigrationOutput; + + fn new(input: Self::Input) -> Box { + Box::new(UnapplyMigrationCommand { input }) + } + + fn execute(&self, engine: Box) -> Self::Output { + println!("{:?}", self.input); + UnapplyMigrationOutput { + rolled_back: ListMigrationStepsOutput { + id: "foo".to_string(), + steps: Vec::new(), + status: MigrationStatus::Pending, + }, + active: ListMigrationStepsOutput { + id: "bar".to_string(), + steps: Vec::new(), + status: MigrationStatus::Pending, + }, + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct UnapplyMigrationInput { + pub project_info: String, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UnapplyMigrationOutput { + pub rolled_back: 
ListMigrationStepsOutput, + pub active: ListMigrationStepsOutput, +} diff --git a/server/prisma-rs/migration-engine/src/lib.rs b/server/prisma-rs/migration-engine/core/src/lib.rs similarity index 78% rename from server/prisma-rs/migration-engine/src/lib.rs rename to server/prisma-rs/migration-engine/core/src/lib.rs index 1c224ddfd6..cb5f878942 100644 --- a/server/prisma-rs/migration-engine/src/lib.rs +++ b/server/prisma-rs/migration-engine/core/src/lib.rs @@ -1,7 +1,7 @@ pub mod commands; pub mod migration; +pub mod migration_engine; pub mod rpc_api; -pub mod steps; #[macro_use] extern crate serde_derive; diff --git a/server/prisma-rs/migration-engine/core/src/migration/datamodel_calculator.rs b/server/prisma-rs/migration-engine/core/src/migration/datamodel_calculator.rs new file mode 100644 index 0000000000..616b6d56cd --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/migration/datamodel_calculator.rs @@ -0,0 +1,22 @@ +use datamodel::*; +use migration_connector::steps::*; + +pub trait DataModelCalculator { + fn infer(&self, current: &Schema, steps: Vec) -> Schema { + DataModelCalculatorImpl { current, steps }.infer() + } +} + +pub struct DataModelCalculatorSingleton {} +impl DataModelCalculator for DataModelCalculatorSingleton {} + +struct DataModelCalculatorImpl<'a> { + current: &'a Schema, + steps: Vec, +} + +impl<'a> DataModelCalculatorImpl<'a> { + fn infer(&self) -> Schema { + Schema::empty() + } +} diff --git a/server/prisma-rs/migration-engine/core/src/migration/datamodel_migration_steps_inferrer.rs b/server/prisma-rs/migration-engine/core/src/migration/datamodel_migration_steps_inferrer.rs new file mode 100644 index 0000000000..8ed6d41224 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/migration/datamodel_migration_steps_inferrer.rs @@ -0,0 +1,183 @@ +use datamodel::*; +use migration_connector::steps::*; +use nullable::Nullable; + +pub trait DataModelMigrationStepsInferrer { + fn infer(&self, previous: Schema, next: Schema) -> 
Vec; +} + +pub struct DataModelMigrationStepsInferrerImplWrapper {} + +impl DataModelMigrationStepsInferrer for DataModelMigrationStepsInferrerImplWrapper { + fn infer(&self, previous: Schema, next: Schema) -> Vec { + let inferrer = DataModelMigrationStepsInferrerImpl { previous, next }; + inferrer.infer_internal() + } +} + +#[allow(dead_code)] +pub struct DataModelMigrationStepsInferrerImpl { + previous: Schema, + next: Schema, +} + +// TODO: this does not deal with renames yet +impl DataModelMigrationStepsInferrerImpl { + fn infer_internal(&self) -> Vec { + let mut result: Vec = Vec::new(); + let models_to_create = self.models_to_create(); + let models_to_delete = self.models_to_delete(); + let fields_to_create = self.fields_to_create(); + let fields_to_delete = self.fields_to_delete(&models_to_delete); + let fields_to_update = self.fields_to_update(); + + result.append(&mut Self::wrap_as_step(models_to_create, MigrationStep::CreateModel)); + result.append(&mut Self::wrap_as_step(models_to_delete, MigrationStep::DeleteModel)); + result.append(&mut Self::wrap_as_step(fields_to_create, MigrationStep::CreateField)); + result.append(&mut Self::wrap_as_step(fields_to_delete, MigrationStep::DeleteField)); + result.append(&mut Self::wrap_as_step(fields_to_update, MigrationStep::UpdateField)); + result + } + + fn models_to_create(&self) -> Vec { + let mut result = Vec::new(); + for next_model in self.next.models() { + if !self.previous.has_model(&next_model.name()) { + let step = CreateModel { + name: next_model.name().to_string(), + db_name: next_model.database_name.as_ref().cloned(), + embedded: next_model.is_embedded, + }; + result.push(step); + } + } + + result + } + + fn models_to_delete(&self) -> Vec { + let mut result = Vec::new(); + for previous_model in self.previous.models() { + if !self.next.has_model(&previous_model.name) { + let step = DeleteModel { + name: previous_model.name().to_string(), + }; + result.push(step); + } + } + + result + } + + fn 
fields_to_create(&self) -> Vec { + let mut result = Vec::new(); + for next_model in self.next.models() { + for next_field in next_model.fields() { + let must_create_field = match self.previous.find_model(&next_model.name) { + None => true, + Some(previous_model) => previous_model.find_field(&next_field.name).is_none(), + }; + if must_create_field { + let step = CreateField { + model: next_model.name.clone(), + name: next_field.name.clone(), + tpe: next_field.field_type.clone(), + arity: next_field.arity, + db_name: next_field.database_name.clone(), + default: next_field.default_value.clone(), + id: None, //field.id_behaviour_clone(), + is_created_at: None, + is_updated_at: None, + scalar_list: next_field.scalar_list_strategy, + }; + result.push(step); + } + } + } + result + } + + fn fields_to_delete(&self, models_to_delete: &Vec) -> Vec { + let mut result = Vec::new(); + for previous_model in self.previous.models() { + let model_is_deleted = models_to_delete + .iter() + .find(|dm| dm.name == previous_model.name) + .is_none(); + if model_is_deleted { + for previous_field in previous_model.fields() { + let must_delete_field = match self.next.find_model(&previous_model.name) { + None => true, + Some(next_model) => next_model.find_field(&previous_field.name).is_none(), + }; + if must_delete_field { + let step = DeleteField { + model: previous_model.name.clone(), + name: previous_field.name.clone(), + }; + result.push(step); + } + } + } + } + result + } + + fn fields_to_update(&self) -> Vec { + let mut result = Vec::new(); + for previous_model in self.previous.models() { + for previous_field in previous_model.fields() { + if let Some(next_field) = self + .next + .find_model(&previous_model.name) + .and_then(|m| m.find_field(&previous_field.name)) + { + let (p, n) = (previous_field, next_field); + let step = UpdateField { + model: previous_model.name.clone(), + name: p.name.clone(), + new_name: None, + tpe: Self::diff(&p.field_type, &n.field_type), + arity: 
Self::diff(&p.arity, &n.arity), + db_name: Self::diff_nullable(&p.database_name, &n.database_name), + is_created_at: None, + is_updated_at: None, + id: None, + default: Self::diff_nullable(&p.default_value, &n.default_value), + scalar_list: Self::diff_nullable(&p.scalar_list_strategy, &n.scalar_list_strategy), + }; + if step.is_any_option_set() { + result.push(step); + } + } + } + } + result + } + + fn diff(current: &T, updated: &T) -> Option { + if current == updated { + None + } else { + Some(updated.clone()) + } + } + + fn diff_nullable(current: &Option, updated: &Option) -> Option> { + if current == updated { + None + } else { + match updated { + None => Some(Nullable::Null), + Some(x) => Some(Nullable::NotNull(x.clone())), + } + } + } + + fn wrap_as_step(steps: Vec, mut wrap_fn: F) -> Vec + where + F: FnMut(T) -> MigrationStep, + { + steps.into_iter().map(|x| wrap_fn(x)).collect() + } +} diff --git a/server/prisma-rs/migration-engine/core/src/migration/migration_steps_inferrer.rs b/server/prisma-rs/migration-engine/core/src/migration/migration_steps_inferrer.rs new file mode 100644 index 0000000000..6e636f840c --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/migration/migration_steps_inferrer.rs @@ -0,0 +1,142 @@ +use database_inspector::DatabaseSchema; +use migration_connector::steps::*; +use prisma_models::*; + +pub trait MigrationStepsInferrer { + fn infer(next: &InternalDataModel, database_schema: &DatabaseSchema) -> Vec; +} + +#[allow(unused)] +pub struct MigrationStepsInferrerImpl<'a> { + schema: &'a InternalDataModel, + database_schema: &'a DatabaseSchema, +} + +impl<'a> MigrationStepsInferrer for MigrationStepsInferrerImpl<'a> { + fn infer(next: &InternalDataModel, database_schema: &DatabaseSchema) -> Vec { + let inferer = MigrationStepsInferrerImpl { + schema: next, + database_schema: database_schema, + }; + inferer.infer() + } +} + +impl<'a> MigrationStepsInferrerImpl<'a> { + fn infer(&self) -> Vec { + // let mut result: Vec = vec![]; + 
// let next_models = self.schema.models(); + // let mut create_model_steps: Vec = next_models + // .iter() + // .filter(|model| self.database_schema.table(model.db_name()).is_none()) + // .map(|model| { + // let step = CreateModel { + // name: model.name.clone(), + // db_name: model.db_name_opt().map(|x| x.to_string()), + // embedded: if model.is_embedded { + // Some(model.is_embedded) + // } else { + // None + // }, + // }; + // MigrationStep::CreateModel(step) + // }) + // .collect(); + + // let mut create_field_steps: Vec = vec![]; + // for model in next_models { + // // TODO: also create steps for relation fields + // for field in model.fields().scalar() { + // let step = CreateField { + // model: model.name.clone(), + // name: field.name.clone(), + // tpe: field.type_identifier.user_friendly_type_name(), + // db_name: field.db_name_opt().map(|f| f.to_string()), + // default: None, + // id: None, //field.id_behaviour_clone(), + // is_created_at: field.is_created_at().as_some_if_true(), + // is_updated_at: field.is_updated_at().as_some_if_true(), + // is_list: field.is_list.as_some_if_true(), + // is_optional: field.is_required.as_some_if_true(), + // scalar_list: None, //field.scalar_list_behaviour_clone(), + // }; + // create_field_steps.push(MigrationStep::CreateField(step)) + // } + // } + + // let mut create_enum_steps = vec![]; + // for prisma_enum in &self.schema.enums { + // let step = CreateEnum { + // name: prisma_enum.name.clone(), + // values: prisma_enum.values.clone(), + // }; + // create_enum_steps.push(MigrationStep::CreateEnum(step)); + // } + + // let mut create_relations = vec![]; + // let relations = self.schema.relations(); + // for relation in relations { + // let model_a = relation.model_a(); + // let model_b = relation.model_b(); + // let field_a = relation.field_a(); + // let field_b = relation.field_b(); + + // let step = CreateRelation { + // name: relation.name.clone(), + // model_a: RelationFieldSpec { + // name: 
model_a.name.clone(), + // field: Some(field_a.name.clone()), + // is_list: field_a.is_list.as_some_if_true(), + // is_optional: field_a.is_optional().as_some_if_true(), + // on_delete: None, //Some(relation.model_a_on_delete), + // inline_link: self.is_inlined_in_model(relation, &model_a).as_some_if_true(), + // }, + // model_b: RelationFieldSpec { + // name: model_b.name.clone(), + // field: Some(field_b.name.clone()), + // is_list: field_b.is_list.as_some_if_true(), + // is_optional: field_b.is_optional().as_some_if_true(), + // on_delete: None, //Some(relation.model_a_on_delete), + // inline_link: self.is_inlined_in_model(relation, &model_b).as_some_if_true(), + // }, + // table: match relation.manifestation { + // Some(RelationLinkManifestation::RelationTable(ref mani)) => Some(LinkTableSpec { + // model_a_column: Some(mani.model_a_column.clone()), + // model_b_column: Some(mani.model_b_column.clone()), + // }), + // _ => None, + // }, + // }; + // create_relations.push(MigrationStep::CreateRelation(step)); + // } + + // result.append(&mut create_model_steps); + // result.append(&mut create_field_steps); + // result.append(&mut create_enum_steps); + // result.append(&mut create_relations); + // result + vec![] + } + + #[allow(unused)] + fn is_inlined_in_model(&self, relation: &RelationRef, model: &ModelRef) -> bool { + match relation.manifestation { + Some(RelationLinkManifestation::Inline(ref mani)) => mani.in_table_of_model_name == model.name, + _ => false, + } + } +} + +trait ToOption { + fn as_some_if_true(self) -> Option; +} + +impl ToOption for bool { + fn as_some_if_true(self) -> Option { + if self { + Some(true) + } else { + None + } + } +} diff --git a/server/prisma-rs/migration-engine/core/src/migration/mod.rs b/server/prisma-rs/migration-engine/core/src/migration/mod.rs new file mode 100644 index 0000000000..6755f1b4bc --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/migration/mod.rs @@ -0,0 +1,4 @@ +pub mod datamodel_calculator; +pub 
mod datamodel_migration_steps_inferrer; +pub mod migration_steps_inferrer; +pub mod schema_inferer; diff --git a/server/prisma-rs/migration-engine/src/migration/schema_inferer.rs b/server/prisma-rs/migration-engine/core/src/migration/schema_inferer.rs similarity index 75% rename from server/prisma-rs/migration-engine/src/migration/schema_inferer.rs rename to server/prisma-rs/migration-engine/core/src/migration/schema_inferer.rs index b899edf61f..18ed3c414e 100644 --- a/server/prisma-rs/migration-engine/src/migration/schema_inferer.rs +++ b/server/prisma-rs/migration-engine/core/src/migration/schema_inferer.rs @@ -7,13 +7,13 @@ use std::process::Stdio; use std::sync::Arc; pub trait SchemaInferer { - fn infer(data_model: String) -> Arc; + fn infer(data_model: String) -> Arc; } pub struct LegacySchemaInferer; impl SchemaInferer for LegacySchemaInferer { - fn infer(data_model: String) -> Arc { + fn infer(data_model: String) -> Arc { let bin_path = "/Users/marcusboehm/R/github.com/prisma/prisma/server/images/schema-inferrer-bin/target/prisma-native-image/schema-inferrer-bin"; let cmd = Command::new(bin_path) .stdin(Stdio::null()) @@ -23,7 +23,7 @@ impl SchemaInferer for LegacySchemaInferer { .unwrap(); let input = SchemaInfererBinInput { data_model: data_model, - previous_schema: SchemaTemplate::default(), + previous_schema: InternalDataModelTemplate::default(), }; write!(cmd.stdin.unwrap(), "{}", serde_json::to_string(&input).unwrap()).unwrap(); let mut buffer = String::new(); @@ -32,7 +32,8 @@ impl SchemaInferer for LegacySchemaInferer { println!("received from the schema-inferrer-bin: {}", &buffer); - let schema: SchemaTemplate = serde_json::from_str(buffer.as_str()).expect("Deserializing the schema failed."); + let schema: InternalDataModelTemplate = + serde_json::from_str(buffer.as_str()).expect("Deserializing the schema failed."); schema.build("".to_string()) } } @@ -41,5 +42,5 @@ impl SchemaInferer for LegacySchemaInferer { #[serde(rename_all = "camelCase")] 
struct SchemaInfererBinInput { data_model: String, - previous_schema: SchemaTemplate, + previous_schema: InternalDataModelTemplate, } diff --git a/server/prisma-rs/migration-engine/core/src/migration_engine.rs b/server/prisma-rs/migration-engine/core/src/migration_engine.rs new file mode 100644 index 0000000000..445cde25d4 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/src/migration_engine.rs @@ -0,0 +1,48 @@ +use crate::migration::datamodel_calculator::*; +use crate::migration::datamodel_migration_steps_inferrer::*; +use datamodel::dml::*; +use datamodel::validator::Validator; +use migration_connector::*; +use sql_migration_connector::SqlMigrationConnector; +use std::path::Path; +use std::sync::Arc; + +// todo: add MigrationConnector. does not work because of GAT shinenigans + +pub struct MigrationEngine { + datamodel_migration_steps_inferrer: Arc, + datamodel_calculator: Arc, +} + +impl MigrationEngine { + pub fn new() -> Box { + let engine = MigrationEngine { + datamodel_migration_steps_inferrer: Arc::new(DataModelMigrationStepsInferrerImplWrapper {}), + datamodel_calculator: Arc::new(DataModelCalculatorSingleton {}), + }; + engine.connector().initialize(); + Box::new(engine) + } + + pub fn datamodel_migration_steps_inferrer(&self) -> Arc { + Arc::clone(&self.datamodel_migration_steps_inferrer) + } + + pub fn datamodel_calculator(&self) -> Arc { + Arc::clone(&self.datamodel_calculator) + } + + pub fn connector(&self) -> Arc> { + let file_path = file!(); // todo: the sqlite file name must be taken from the config + let file_name = Path::new(file_path).file_stem().unwrap().to_str().unwrap(); + Arc::new(SqlMigrationConnector::new(file_name.to_string())) + } + + pub fn parse_datamodel(&self, datamodel_string: &String) -> Schema { + let ast = datamodel::parser::parse(datamodel_string).unwrap(); + // TODO: this would need capabilities + // TODO: Special directives are injected via EmptyAttachmentValidator. 
+ let validator = Validator::new(); + validator.validate(&ast).unwrap() + } +} diff --git a/server/prisma-rs/migration-engine/src/rpc_api.rs b/server/prisma-rs/migration-engine/core/src/rpc_api.rs similarity index 71% rename from server/prisma-rs/migration-engine/src/rpc_api.rs rename to server/prisma-rs/migration-engine/core/src/rpc_api.rs index 59452d4927..70753b1cc6 100644 --- a/server/prisma-rs/migration-engine/src/rpc_api.rs +++ b/server/prisma-rs/migration-engine/core/src/rpc_api.rs @@ -1,11 +1,13 @@ -use crate::commands::apply_next_migration_step::ApplyNextMigrationStepCommand; use crate::commands::command::MigrationCommand; -use crate::commands::start_migration::StartMigrationCommand; -use crate::commands::suggest_migration_step::SuggestMigrationStepsCommand; +use crate::commands::infer_migration_steps::InferMigrationStepsCommand; +use crate::migration_engine::*; use jsonrpc_core; use jsonrpc_core::IoHandler; use jsonrpc_core::*; +use migration_connector::*; +use sql_migration_connector::*; use std::io; +use std::path::Path; pub struct RpcApi { io_handler: jsonrpc_core::IoHandler<()>, @@ -16,9 +18,7 @@ impl RpcApi { let mut rpc_api = RpcApi { io_handler: IoHandler::new(), }; - rpc_api.add_command_handler::("suggestMigrationSteps"); - rpc_api.add_command_handler::("startMigration"); - rpc_api.add_command_handler::("applyNextMigrationStep"); + rpc_api.add_command_handler::("inferMigrationSteps"); rpc_api } @@ -26,7 +26,8 @@ impl RpcApi { self.io_handler.add_method(name, |params: Params| { let input: T::Input = params.parse()?; let cmd = T::new(input); - let response_json = serde_json::to_value(&cmd.execute()).unwrap(); + let engine = MigrationEngine::new(); + let response_json = serde_json::to_value(&cmd.execute(engine)).unwrap(); Ok(response_json) }); } diff --git a/server/prisma-rs/migration-engine/core/tests/datamodel_steps_inferrer_tests.rs b/server/prisma-rs/migration-engine/core/tests/datamodel_steps_inferrer_tests.rs new file mode 100644 index 
0000000000..95e2fa13fe --- /dev/null +++ b/server/prisma-rs/migration-engine/core/tests/datamodel_steps_inferrer_tests.rs @@ -0,0 +1,284 @@ +#![allow(non_snake_case)] + +use datamodel::dml::*; +use datamodel::validator::Validator; +use migration_connector::steps::*; +use migration_core::migration::datamodel_migration_steps_inferrer::*; +use nullable::*; + +#[test] +fn infer_CreateModel_if_it_does_not_exit_yet() { + let dm1 = Schema::empty(); + let dm2 = parse( + r#" + model Test { + id: String @primary + } + "#, + ); + + let steps = infer(dm1, dm2); + let expected = vec![ + MigrationStep::CreateModel(CreateModel { + name: "Test".to_string(), + db_name: None, + embedded: false, + }), + MigrationStep::CreateField(CreateField { + model: "Test".to_string(), + name: "id".to_string(), + tpe: FieldType::Base(ScalarType::String), + arity: FieldArity::Required, + db_name: None, + is_created_at: None, + is_updated_at: None, + id: None, + default: None, + scalar_list: None, + }), + ]; + assert_eq!(steps, expected); +} + +#[test] +fn infer_DeleteModel() { + let dm1 = parse( + r#" + model Test { + id: String + } + "#, + ); + let dm2 = Schema::empty(); + + let steps = infer(dm1, dm2); + let expected = vec![MigrationStep::DeleteModel(DeleteModel { + name: "Test".to_string(), + })]; + assert_eq!(steps, expected); +} + +#[test] +#[ignore] +fn infer_UpdateModel() { + // TODO: add tests for other properties as well + let dm1 = parse( + r#" + model Post { + id: String + } + "#, + ); + let dm2 = parse( + r#" + embed Post { + id: String + } + "#, + ); + + let steps = infer(dm1, dm2); + let expected = vec![MigrationStep::UpdateModel(UpdateModel { + name: "Test".to_string(), + new_name: None, + db_name: None, + embedded: Some(true), + })]; + assert_eq!(steps, expected); +} + +#[test] +fn infer_CreateField_if_it_does_not_exist_yet() { + let dm1 = parse( + r#" + model Test { + id: String + } + "#, + ); + let dm2 = parse( + r#" + model Test { + id: String + field: Int? 
+ } + "#, + ); + + let steps = infer(dm1, dm2); + let expected = vec![MigrationStep::CreateField(CreateField { + model: "Test".to_string(), + name: "field".to_string(), + tpe: FieldType::Base(ScalarType::Int), + arity: FieldArity::Optional, + db_name: None, + is_created_at: None, + is_updated_at: None, + id: None, + default: None, + scalar_list: None, + })]; + assert_eq!(steps, expected); +} + +#[test] +fn infer_CreateField_if_relation_field_does_not_exist_yet() { + let dm1 = parse( + r#" + model Blog { + id: String + } + model Post { + id: String + } + "#, + ); + let dm2 = parse( + r#" + model Blog { + id: String + posts: Post[] + } + model Post { + id: String + blog: Blog? + } + "#, + ); + + let steps = infer(dm1, dm2); + let expected = vec![ + MigrationStep::CreateField(CreateField { + model: "Blog".to_string(), + name: "posts".to_string(), + tpe: FieldType::Relation(RelationInfo { + to: "Post".to_string(), + to_field: "".to_string(), + name: None, + on_delete: OnDeleteStrategy::None, + }), + arity: FieldArity::List, + db_name: None, + is_created_at: None, + is_updated_at: None, + id: None, + default: None, + scalar_list: None, + }), + MigrationStep::CreateField(CreateField { + model: "Post".to_string(), + name: "blog".to_string(), + tpe: FieldType::Relation(RelationInfo { + to: "Blog".to_string(), + to_field: "".to_string(), + name: None, + on_delete: OnDeleteStrategy::None, + }), + arity: FieldArity::Optional, + db_name: None, + is_created_at: None, + is_updated_at: None, + id: None, + default: None, + scalar_list: None, + }), + ]; + assert_eq!(steps, expected); +} + +#[test] +fn infer_DeleteField() { + let dm1 = parse( + r#" + model Test { + id: String + field: Int? 
+ } + "#, + ); + let dm2 = parse( + r#" + model Test { + id: String + } + "#, + ); + + let steps = infer(dm1, dm2); + let expected = vec![MigrationStep::DeleteField(DeleteField { + model: "Test".to_string(), + name: "field".to_string(), + })]; + assert_eq!(steps, expected); +} + +#[test] +fn infer_UpdateField_simple() { + let dm1 = parse( + r#" + model Test { + id: String + field: Int? + } + "#, + ); + let dm2 = parse( + r#" + model Test { + id: String + field: Boolean @default(false) + } + "#, + ); + + let steps = infer(dm1, dm2); + let expected = vec![MigrationStep::UpdateField(UpdateField { + model: "Test".to_string(), + name: "field".to_string(), + new_name: None, + tpe: Some(FieldType::Base(ScalarType::Boolean)), + arity: Some(FieldArity::Required), + db_name: None, + is_created_at: None, + is_updated_at: None, + id: None, + default: Some(Nullable::NotNull(Value::Boolean(false))), + scalar_list: None, + })]; + assert_eq!(steps, expected); +} + +#[test] +#[ignore] +fn infer_CreateEnum() { + let dm1 = Schema::empty(); + let dm2 = parse( + r#" + enum Test { + A, + B + } + "#, + ); + + let steps = infer(dm1, dm2); + let expected = vec![MigrationStep::CreateEnum(CreateEnum { + name: "Test".to_string(), + values: vec!["A".to_string(), "B".to_string()], + })]; + assert_eq!(steps, expected); +} + +// TODO: we will need this in a lot of test files. Extract it. +fn parse(datamodel_string: &'static str) -> Schema { + let ast = datamodel::parser::parse(datamodel_string).unwrap(); + // TODO: this would need capabilities + // TODO: Special directives are injected via EmptyAttachmentValidator. 
+ let validator = Validator::new(); + validator.validate(&ast).unwrap() +} + +fn infer(dm1: Schema, dm2: Schema) -> Vec { + let inferrer = DataModelMigrationStepsInferrerImplWrapper {}; + inferrer.infer(dm1, dm2) +} diff --git a/server/prisma-rs/migration-engine/core/tests/migration_persistence_tests.rs b/server/prisma-rs/migration-engine/core/tests/migration_persistence_tests.rs new file mode 100644 index 0000000000..2fa3238190 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/tests/migration_persistence_tests.rs @@ -0,0 +1,95 @@ +#![allow(non_snake_case)] + +mod test_harness; + +use migration_connector::*; +use test_harness::*; + +#[test] +fn last_should_return_none_if_there_is_no_migration() { + run_test(|| { + let persistence = connector().migration_persistence(); + let result = persistence.last(); + assert_eq!(result.is_some(), false); + }); +} + +#[test] +fn last_must_return_none_if_there_is_no_successful_migration() { + run_test(|| { + let persistence = connector().migration_persistence(); + persistence.create(Migration::new("my_migration".to_string())); + let loaded = persistence.last(); + assert_eq!(loaded, None); + }); +} + +#[test] +fn load_all_should_return_empty_if_there_is_no_migration() { + run_test(|| { + let persistence = connector().migration_persistence(); + let result = persistence.load_all(); + assert_eq!(result.is_empty(), true); + }); +} + +#[test] +fn load_all_must_return_all_created_migrations() { + run_test(|| { + let persistence = connector().migration_persistence(); + let migration1 = persistence.create(Migration::new("migration_1".to_string())); + let migration2 = persistence.create(Migration::new("migration_2".to_string())); + let migration3 = persistence.create(Migration::new("migration_3".to_string())); + + let result = persistence.load_all(); + assert_eq!(result, vec![migration1, migration2, migration3]) + }); +} + +#[test] +fn create_should_allow_to_create_a_new_migration() { + run_test(|| { + let persistence = 
connector().migration_persistence(); + let mut migration = Migration::new("my_migration".to_string()); + migration.status = MigrationStatus::Success; + let result = persistence.create(migration.clone()); + migration.revision = result.revision; // copy over the revision so that the assertion can work.` + assert_eq!(result, migration); + let loaded = persistence.last().unwrap(); + assert_eq!(loaded, migration); + }); +} + +#[test] +fn create_should_increment_revisions() { + run_test(|| { + let persistence = connector().migration_persistence(); + let migration1 = persistence.create(Migration::new("migration_1".to_string())); + let migration2 = persistence.create(Migration::new("migration_2".to_string())); + assert_eq!(migration1.revision + 1, migration2.revision); + }); +} + +#[test] +fn update_must_work() { + run_test(|| { + let persistence = connector().migration_persistence(); + let migration = persistence.create(Migration::new("my_migration".to_string())); + + let mut params = migration.update_params(); + params.status = MigrationStatus::Success; + params.applied = 10; + params.rolled_back = 11; + params.errors = vec!["err1".to_string(), "err2".to_string()]; + params.finished_at = Some(Migration::timestamp_without_nanos()); + + persistence.update(¶ms); + + let loaded = persistence.last().unwrap(); + assert_eq!(loaded.status, params.status); + assert_eq!(loaded.applied, params.applied); + assert_eq!(loaded.rolled_back, params.rolled_back); + assert_eq!(loaded.errors, params.errors); + assert_eq!(loaded.finished_at, params.finished_at); + }); +} diff --git a/server/prisma-rs/migration-engine/core/tests/test_harness.rs b/server/prisma-rs/migration-engine/core/tests/test_harness.rs new file mode 100644 index 0000000000..ad26559bc4 --- /dev/null +++ b/server/prisma-rs/migration-engine/core/tests/test_harness.rs @@ -0,0 +1,25 @@ +use migration_connector::*; +use sql_migration_connector::SqlMigrationConnector; +use std::panic; +use std::path::Path; + +pub fn 
run_test(test: T) -> () +where + T: FnOnce() -> () + panic::UnwindSafe, +{ + // SETUP + let connector = connector(); + connector.initialize(); + connector.reset(); + + // TEST + let result = panic::catch_unwind(|| test()); + assert!(result.is_ok()) +} + +// TODO: swap this out with connector loader and do not hard code associated type +pub fn connector() -> Box> { + let file_path = dbg!(file!()); + let file_name = dbg!(Path::new(file_path).file_stem().unwrap().to_str().unwrap()); + Box::new(SqlMigrationConnector::new(file_name.to_string())) +} diff --git a/server/prisma-rs/migration-engine/datamodel.prisma b/server/prisma-rs/migration-engine/datamodel.prisma deleted file mode 100644 index eb25429839..0000000000 --- a/server/prisma-rs/migration-engine/datamodel.prisma +++ /dev/null @@ -1,10 +0,0 @@ -type Blog { - id: ID! @id - name: String! - posts: [Post] -} - -type Post { - id: ID! @id - title: String -} \ No newline at end of file diff --git a/server/prisma-rs/migration-engine/src/bin/suggest_migrations.rs b/server/prisma-rs/migration-engine/src/bin/suggest_migrations.rs deleted file mode 100644 index c7f0e042ee..0000000000 --- a/server/prisma-rs/migration-engine/src/bin/suggest_migrations.rs +++ /dev/null @@ -1,16 +0,0 @@ -use migration_engine::commands::command::*; -use migration_engine::commands::suggest_migration_step::*; -use std::fs; - -fn main() { - let data_model = fs::read_to_string("datamodel.prisma").unwrap(); - let input = SuggestMigrationStepsInput { - project: "the-project-id".to_string(), - data_model: data_model, - }; - let cmd = SuggestMigrationStepsCommand::new(input); - let output = cmd.execute(); - - let json = serde_json::to_string_pretty(&output).unwrap(); - println!("{}", json) -} diff --git a/server/prisma-rs/migration-engine/src/commands/apply_next_migration_step.rs b/server/prisma-rs/migration-engine/src/commands/apply_next_migration_step.rs deleted file mode 100644 index 3b6c9e1436..0000000000 --- 
a/server/prisma-rs/migration-engine/src/commands/apply_next_migration_step.rs +++ /dev/null @@ -1,56 +0,0 @@ -use crate::commands::command::MigrationCommand; -use chrono::prelude::*; - -pub struct ApplyNextMigrationStepCommand { - input: ApplyNextMigrationStepInput, -} - -impl MigrationCommand for ApplyNextMigrationStepCommand { - type Input = ApplyNextMigrationStepInput; - type Output = ApplyNextMigrationStepOutput; - - fn new(input: Self::Input) -> Box { - Box::new(ApplyNextMigrationStepCommand { input }) - } - - fn execute(&self) -> Self::Output { - let response = ApplyNextMigrationStepOutput { - status: MigrationStatus::InProgress, - steps: 3, - applied: 2, - rolled_back: 0, - errors: vec![], - started_at: Utc::now(), - updated_at: Utc::now(), - }; - response - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct ApplyNextMigrationStepInput { - pub project: String, -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ApplyNextMigrationStepOutput { - pub status: MigrationStatus, - pub steps: i32, - pub applied: i32, - pub rolled_back: i32, - pub errors: Vec, - pub started_at: DateTime, - pub updated_at: DateTime, -} - -#[derive(Debug, Serialize)] -pub enum MigrationStatus { - Pending, - InProgress, - Success, - RollingBack, - RollbackSuccess, - RollbackFailure, -} diff --git a/server/prisma-rs/migration-engine/src/commands/start_migration.rs b/server/prisma-rs/migration-engine/src/commands/start_migration.rs deleted file mode 100644 index 0e5270fd3e..0000000000 --- a/server/prisma-rs/migration-engine/src/commands/start_migration.rs +++ /dev/null @@ -1,41 +0,0 @@ -use super::DataModelWarningOrError; -use crate::commands::command::MigrationCommand; -use crate::steps::*; - -pub struct StartMigrationCommand { - input: StartMigrationInput, -} - -impl MigrationCommand for StartMigrationCommand { - type Input = StartMigrationInput; - type Output = StartMigrationOutput; - - fn new(input: 
StartMigrationInput) -> Box { - Box::new(StartMigrationCommand { input }) - } - - fn execute(&self) -> StartMigrationOutput { - let response = StartMigrationOutput { - data_model_errors: vec![], - data_model_warnings: vec![], - general_errors: vec![], - }; - response - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct StartMigrationInput { - pub project: String, - pub steps: Vec, - pub data_model: String, -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct StartMigrationOutput { - pub data_model_errors: Vec, - pub data_model_warnings: Vec, - pub general_errors: Vec, -} diff --git a/server/prisma-rs/migration-engine/src/commands/suggest_migration_step.rs b/server/prisma-rs/migration-engine/src/commands/suggest_migration_step.rs deleted file mode 100644 index cbeeb94066..0000000000 --- a/server/prisma-rs/migration-engine/src/commands/suggest_migration_step.rs +++ /dev/null @@ -1,68 +0,0 @@ -use super::DataModelWarningOrError; -use crate::commands::command::MigrationCommand; -use crate::migration::migration_steps_inferrer::{MigrationStepsInferrer, MigrationStepsInferrerImpl}; -use crate::migration::schema_inferer::*; -use crate::steps::*; -use database_inspector::{DatabaseInspector, EmptyDatabaseInspectorImpl}; - -pub struct SuggestMigrationStepsCommand { - input: SuggestMigrationStepsInput, -} - -impl MigrationCommand for SuggestMigrationStepsCommand { - type Input = SuggestMigrationStepsInput; - type Output = SuggestMigrationStepsOutput; - - fn new(input: Self::Input) -> Box { - Box::new(SuggestMigrationStepsCommand { input }) - } - - fn execute(&self) -> Self::Output { - let database_schema = EmptyDatabaseInspectorImpl::inspect("foo".to_string()); - let schema = LegacySchemaInferer::infer(self.input.data_model.clone()); - let steps = MigrationStepsInferrerImpl::infer(&schema, &database_schema); - // inferrer.infer("sjka0"); - // let warning = DataModelWarningOrError { - // tpe: 
"Blog".to_owned(), - // field: Some("title".to_owned()), - // message: "This is danger".to_owned(), - // }; - // let steps = vec![ - // MigrationStep::CreateModel(CreateModel { - // name: "Blog".to_owned(), - // db_name: None, - // embedded: None, - // }), - // MigrationStep::UpdateModel(UpdateModel { - // name: "Blog".to_owned(), - // new_name: None, - // db_name: Some(Null), - // embedded: Some(true), - // }), - // MigrationStep::DeleteModel(DeleteModel { - // name: "Post".to_owned(), - // }), - // ]; - - SuggestMigrationStepsOutput { - steps: steps, - errors: vec![], - warnings: vec![], - } - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct SuggestMigrationStepsInput { - pub project: String, - pub data_model: String, -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct SuggestMigrationStepsOutput { - pub steps: Vec, - pub errors: Vec, - pub warnings: Vec, -} diff --git a/server/prisma-rs/migration-engine/src/migration/migration_steps_inferrer.rs b/server/prisma-rs/migration-engine/src/migration/migration_steps_inferrer.rs deleted file mode 100644 index f21ea129ef..0000000000 --- a/server/prisma-rs/migration-engine/src/migration/migration_steps_inferrer.rs +++ /dev/null @@ -1,141 +0,0 @@ -use crate::steps::*; -use database_inspector::DatabaseSchema; - -use std::sync::Arc; - -pub trait MigrationStepsInferrer { - fn infer(next: &Schema, database_schema: &DatabaseSchema) -> Vec; -} - -pub struct MigrationStepsInferrerImpl<'a> { - schema: &'a Schema, - database_schema: &'a DatabaseSchema, -} - -impl<'a> MigrationStepsInferrer for MigrationStepsInferrerImpl<'a> { - fn infer(next: &Schema, database_schema: &DatabaseSchema) -> Vec { - let inferer = MigrationStepsInferrerImpl { - schema: next, - database_schema: database_schema, - }; - inferer.infer() - } -} - -impl<'a> MigrationStepsInferrerImpl<'a> { - fn infer(&self) -> Vec { - let mut result: Vec = vec![]; - let default = vec![]; - 
let next_models = self.schema.models.get().unwrap_or(&default); - let mut create_model_steps: Vec = next_models - .iter() - .filter(|model| self.database_schema.table(model.db_name()).is_none()) - .map(|model| { - let step = CreateModel { - name: model.name.clone(), - db_name: model.db_name_opt().map(|x| x.to_string()), - embedded: if model.is_embedded { - Some(model.is_embedded) - } else { - None - }, - }; - MigrationStep::CreateModel(step) - }) - .collect(); - - let mut create_field_steps: Vec = vec![]; - for model in next_models { - for field in model.fields().scalar() { - let step = CreateField { - model: model.name.clone(), - name: field.name.clone(), - tpe: field.type_identifier.user_friendly_type_name(), - db_name: field.db_name_opt().map(|f| f.to_string()), - default: None, - id: field.id_behaviour_clone(), - is_created_at: field.is_created_at().as_some_if_true(), - is_updated_at: field.is_updated_at().as_some_if_true(), - is_list: field.is_list.as_some_if_true(), - is_optional: field.is_required.as_some_if_true(), - scalar_list: field.scalar_list_behaviour_clone(), - }; - create_field_steps.push(MigrationStep::CreateField(step)) - } - } - - let mut create_enum_steps = vec![]; - for prisma_enum in &self.schema.enums { - let step = CreateEnum { - name: prisma_enum.name.clone(), - values: prisma_enum.values.clone(), - }; - create_enum_steps.push(MigrationStep::CreateEnum(step)); - } - - let mut create_relations = vec![]; - let empty_relations = vec![]; - let relations = self.schema.relations.get().unwrap_or(&empty_relations); - for relation in relations { - let model_a = relation.model_a(); - let model_b = relation.model_b(); - let field_a = relation.field_a(); - let field_b = relation.field_b(); - - let step = CreateRelation { - name: relation.name.clone(), - model_a: RelationFieldSpec { - name: model_a.name.clone(), - field: Some(field_a.name.clone()), - is_list: field_a.is_list.as_some_if_true(), - is_optional: field_a.is_optional().as_some_if_true(), - 
on_delete: Some(relation.model_a_on_delete), - inline_link: self.is_inlined_in_model(relation, &model_a).as_some_if_true(), - }, - model_b: RelationFieldSpec { - name: model_b.name.clone(), - field: Some(field_b.name.clone()), - is_list: field_b.is_list.as_some_if_true(), - is_optional: field_b.is_optional().as_some_if_true(), - on_delete: Some(relation.model_a_on_delete), - inline_link: self.is_inlined_in_model(relation, &model_b).as_some_if_true(), - }, - table: match relation.manifestation { - Some(RelationLinkManifestation::RelationTable(ref mani)) => Some(LinkTableSpec { - model_a_column: Some(mani.model_a_column.clone()), - model_b_column: Some(mani.model_b_column.clone()), - }), - _ => None, - }, - }; - create_relations.push(MigrationStep::CreateRelation(step)); - } - - result.append(&mut create_model_steps); - result.append(&mut create_field_steps); - result.append(&mut create_enum_steps); - result.append(&mut create_relations); - result - } - - fn is_inlined_in_model(&self, relation: &RelationRef, model: &ModelRef) -> bool { - match relation.manifestation { - Some(RelationLinkManifestation::Inline(ref mani)) => mani.in_table_of_model_name == model.name, - _ => false, - } - } -} - -trait ToOption { - fn as_some_if_true(self) -> Option; -} - -impl ToOption for bool { - fn as_some_if_true(self) -> Option { - if self { - Some(true) - } else { - None - } - } -} diff --git a/server/prisma-rs/migration-engine/src/migration/mod.rs b/server/prisma-rs/migration-engine/src/migration/mod.rs deleted file mode 100644 index 9cf2ffd7a7..0000000000 --- a/server/prisma-rs/migration-engine/src/migration/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod migration_steps_inferrer; -pub mod schema_inferer; diff --git a/server/prisma-rs/migration-engine/src/steps.rs b/server/prisma-rs/migration-engine/src/steps.rs deleted file mode 100644 index 51c2bb115c..0000000000 --- a/server/prisma-rs/migration-engine/src/steps.rs +++ /dev/null @@ -1,584 +0,0 @@ -use nullable::Nullable; -use 
prisma_models::prelude::*; - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(tag = "stepType")] -pub enum MigrationStep { - CreateModel(CreateModel), - UpdateModel(UpdateModel), - DeleteModel(DeleteModel), - CreateField(CreateField), - DeleteField(DeleteField), - UpdateField(UpdateField), - CreateEnum(CreateEnum), - UpdateEnum(UpdateEnum), - DeleteEnum(DeleteEnum), - CreateRelation(CreateRelation), - DeleteRelation(DeleteRelation), -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct CreateModel { - pub name: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub db_name: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub embedded: Option, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct UpdateModel { - pub name: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub new_name: Option, - - #[serde( - default, - skip_serializing_if = "Option::is_none", - deserialize_with = "nullable::optional_nullable_deserialize" - )] - pub db_name: Option>, - - #[serde(skip_serializing_if = "Option::is_none")] - pub embedded: Option, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct DeleteModel { - pub name: String, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct CreateField { - pub model: String, - - pub name: String, - - #[serde(rename = "type")] - pub tpe: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub db_name: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_optional: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_list: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_created_at: Option, - - 
#[serde(skip_serializing_if = "Option::is_none")] - pub is_updated_at: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub id: Option, // fixme: how could we scope this to IdBehaviour? - - #[serde(skip_serializing_if = "Option::is_none")] - pub default: Option, // fixme: change to PrismaValue - - #[serde(skip_serializing_if = "Option::is_none")] - pub scalar_list: Option, // fixme: change to behaviour -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct UpdateField { - pub model: String, - - pub name: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub new_name: Option, - - #[serde(rename = "type", skip_serializing_if = "Option::is_none")] - pub tpe: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub db_name: Option>, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_optional: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_list: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_created_at: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_updated_at: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub id: Option>, // fixme: change to behaviour - - #[serde(skip_serializing_if = "Option::is_none")] - pub default: Option>, // fixme: change to PrismaValue - - #[serde(skip_serializing_if = "Option::is_none")] - pub scalar_list: Option>, // fixme: change to behaviour -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct DeleteField { - pub model: String, - pub name: String, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct CreateEnum { - pub name: String, - pub values: Vec, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub 
struct UpdateEnum { - pub name: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub new_name: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub values: Option>, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct DeleteEnum { - pub name: String, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct CreateRelation { - pub name: String, - pub model_a: RelationFieldSpec, - pub model_b: RelationFieldSpec, - - #[serde(skip_serializing_if = "Option::is_none")] - pub table: Option, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct UpdateRelation { - pub name: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub new_name: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub model_a: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub model_b: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub table: Option, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct DeleteRelation { - pub name: String, -} - -// fixme: this data structure is used in create and update. It does not allow to set field to null though in update. -// fixme: the field inline_link does not allow to customize the underlying db name right now. 
-#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct RelationFieldSpec { - pub name: String, - - #[serde(skip_serializing_if = "Option::is_none")] - pub field: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_list: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub is_optional: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub on_delete: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub inline_link: Option, -} - -// fixme: this strucut does not allow to customize the db name of the link table. -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct LinkTableSpec { - #[serde(skip_serializing_if = "Option::is_none")] - pub model_a_column: Option, - - #[serde(skip_serializing_if = "Option::is_none")] - pub model_b_column: Option, -} - -#[cfg(test)] -mod tests { - use crate::steps::*; - use nullable::Nullable::*; - use prisma_models::prelude::IdStrategy; - use prisma_models::prelude::ScalarListStrategy; - use prisma_models::Field; - use prisma_models::FieldBehaviour; - use prisma_models::OnDelete; - use prisma_models::Sequence; - use serde_json::Value; - - #[test] - fn minimal_CreateModel_must_work() { - let json = r#"{"stepType":"CreateModel","name":"Blog"}"#; - let expected_struct = MigrationStep::CreateModel(CreateModel { - name: "Blog".to_string(), - db_name: None, - embedded: None, - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn full_CreateModel_must_work() { - let json = r#"{"stepType":"CreateModel","name":"Blog","dbName":"blog","embedded":true}"#; - let expected_struct = MigrationStep::CreateModel(CreateModel { - name: "Blog".to_string(), - db_name: Some("blog".to_string()), - embedded: Some(true), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn minimal_UpdateModel_must_work() { - let json = 
r#"{"stepType":"UpdateModel","name":"Blog"}"#; - let expected_struct = MigrationStep::UpdateModel(UpdateModel { - name: "Blog".to_string(), - new_name: None, - db_name: None, - embedded: None, - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn full_UpdateModel_must_work() { - let json = r#"{"stepType":"UpdateModel","name":"Blog","newName":"MyBlog","dbName":"blog","embedded":true}"#; - let expected_struct = MigrationStep::UpdateModel(UpdateModel { - name: "Blog".to_string(), - new_name: Some("MyBlog".to_string()), - db_name: Some(NotNull("blog".to_string())), - embedded: Some(true), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn DeleteModel_must_work() { - let json = r#"{"stepType":"DeleteModel","name":"Blog"}"#; - let expected_struct = MigrationStep::DeleteModel(DeleteModel { - name: "Blog".to_string(), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn minimal_CreateField_must_work() { - let json = r#"{"stepType":"CreateField","model":"Blog","name":"title","type":"String"}"#; - let expected_struct = MigrationStep::CreateField(CreateField { - model: "Blog".to_string(), - name: "title".to_string(), - tpe: "String".to_string(), - db_name: None, - is_optional: None, - is_list: None, - is_created_at: None, - is_updated_at: None, - id: None, - default: None, - scalar_list: None, - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn full_CreateField_must_work() { - let json = r#"{ - "stepType":"CreateField", - "model":"Blog", - "name":"title", - "type":"String", - "dbName":"blog", - "isOptional":true, - "isList":true, - "isCreatedAt":true, - "isUpdatedAt":true, - "id": { - "type": "id", - "strategy":"Sequence", - "sequence": { - "name": "My_Sequence", - "allocationSize": 5, - "initialValue": 100 - } - }, - "default":"default", - "scalarList": { - "type":"scalarList", - "strategy": "Embedded" - } - }"#; - let sequence = Sequence { - name: "My_Sequence".to_string(), - 
allocation_size: 5, - initial_value: 100, - }; - let expected_struct = MigrationStep::CreateField(CreateField { - model: "Blog".to_string(), - name: "title".to_string(), - tpe: "String".to_string(), - db_name: Some("blog".to_string()), - is_optional: Some(true), - is_list: Some(true), - is_created_at: Some(true), - is_updated_at: Some(true), - id: Some(FieldBehaviour::Id { - strategy: IdStrategy::Sequence, - sequence: Some(sequence), - }), - default: Some("default".to_string()), - scalar_list: Some(FieldBehaviour::ScalarList { - strategy: ScalarListStrategy::Embedded, - }), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn minimal_UpdateField_must_work() { - let json = r#"{"stepType":"UpdateField","model":"Blog","name":"title"}"#; - let expected_struct = MigrationStep::UpdateField(UpdateField { - model: "Blog".to_string(), - name: "title".to_string(), - new_name: None, - tpe: None, - db_name: None, - is_optional: None, - is_list: None, - is_created_at: None, - is_updated_at: None, - id: None, - default: None, - scalar_list: None, - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn full_UpdateField_must_work() { - let json = r#"{"stepType":"UpdateField","model":"Blog","name":"title","newName":"MyBlog","type":"String","dbName":"blog","isOptional":true,"isList":true,"isCreatedAt":true,"isUpdatedAt":true,"id":"id","default":"default","scalarList":"scalarList"}"#; - let expected_struct = MigrationStep::UpdateField(UpdateField { - model: "Blog".to_string(), - name: "title".to_string(), - new_name: Some("MyBlog".to_string()), - tpe: Some("String".to_string()), - db_name: Some(NotNull("blog".to_string())), - is_optional: Some(true), - is_list: Some(true), - is_created_at: Some(true), - is_updated_at: Some(true), - id: Some(NotNull("id".to_string())), - default: Some(NotNull("default".to_string())), - scalar_list: Some(NotNull("scalarList".to_string())), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] 
- fn DeleteField_must_work() { - let json = r#"{"stepType":"DeleteField","model":"Blog","name":"title"}"#; - let expected_struct = MigrationStep::DeleteField(DeleteField { - model: "Blog".to_string(), - name: "title".to_string(), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn CreateEnum_must_work() { - let json = r#"{"stepType":"CreateEnum","name":"BlogCategory","values":["Politics","Tech"]}"#; - let expected_struct = MigrationStep::CreateEnum(CreateEnum { - name: "BlogCategory".to_string(), - values: vec!["Politics".to_string(), "Tech".to_string()], - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn minimal_UpdateEnum_must_work() { - let json = r#"{"stepType":"UpdateEnum","name":"BlogCategory"}"#; - let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { - name: "BlogCategory".to_string(), - new_name: None, - values: None, - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn full_Update_Enum_must_work() { - let json = r#"{"stepType":"UpdateEnum","name":"BlogCategory","newName":"MyBlogCategory","values":["Tech"]}"#; - let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { - name: "BlogCategory".to_string(), - new_name: Some("MyBlogCategory".to_string()), - values: Some(vec!["Tech".to_string()]), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn DeleteEnum_must_work() { - let json = r#"{"stepType":"DeleteEnum","name":"BlogCategory"}"#; - let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { - name: "BlogCategory".to_string(), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn minimal_CreateRelation_must_work() { - let json = r#"{ - "stepType":"CreateRelation", - "name":"BlogToPosts", - "modelA": { "name":"Blog" }, - "modelB": { "name":"Post" } - }"#; - let expected_struct = MigrationStep::CreateRelation(CreateRelation { - name: "BlogToPosts".to_string(), - model_a: RelationFieldSpec { - name: "Blog".to_string(), - field: 
None, - is_list: None, - is_optional: None, - on_delete: None, - inline_link: None, - }, - model_b: RelationFieldSpec { - name: "Post".to_string(), - field: None, - is_list: None, - is_optional: None, - on_delete: None, - inline_link: None, - }, - table: None, - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn full_CreateRelation_with_link_table_must_work() { - let json = r#"{ - "stepType":"CreateRelation", - "name":"BlogToPosts", - "modelA": { "name":"Blog","field":"posts","isList":true,"onDelete":"SET_NULL","inlineLink":true}, - "modelB": { "name":"Post","field":"blog","isOptional":true,"onDelete":"CASCADE"}, - "table": { "modelAColumn":"blog", "modelBColumn":"post" } - }"#; - let expected_struct = MigrationStep::CreateRelation(CreateRelation { - name: "BlogToPosts".to_string(), - model_a: RelationFieldSpec { - name: "Blog".to_string(), - field: Some("posts".to_string()), - is_list: Some(true), - is_optional: None, - on_delete: Some(OnDelete::SetNull), - inline_link: Some(true), - }, - model_b: RelationFieldSpec { - name: "Post".to_string(), - field: Some("blog".to_string()), - is_list: None, - is_optional: Some(true), - on_delete: Some(OnDelete::Cascade), - inline_link: None, - }, - table: Some(LinkTableSpec { - model_a_column: Some("blog".to_string()), - model_b_column: Some("post".to_string()), - }), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn CreateRelation_forcing_the_link_table_must_work() { - let json = r#"{ - "stepType":"CreateRelation", - "name":"BlogToPosts", - "modelA": { "name":"Blog" }, - "modelB": { "name":"Post" }, - "table": { } - }"#; - let expected_struct = MigrationStep::CreateRelation(CreateRelation { - name: "BlogToPosts".to_string(), - model_a: RelationFieldSpec { - name: "Blog".to_string(), - field: None, - is_list: None, - is_optional: None, - on_delete: None, - inline_link: None, - }, - model_b: RelationFieldSpec { - name: "Post".to_string(), - field: None, - is_list: None, - 
is_optional: None, - on_delete: None, - inline_link: None, - }, - table: Some(LinkTableSpec { - model_a_column: None, - model_b_column: None, - }), - }); - assert_symmetric_serde(json, expected_struct); - } - - #[test] - fn DeletRelation_must_work() { - let json = r#"{"stepType":"DeleteRelation","name":"BlogToPost"}"#; - let expected_struct = MigrationStep::DeleteRelation(DeleteRelation { - name: "BlogToPost".to_string(), - }); - assert_symmetric_serde(json, expected_struct); - } - - fn assert_symmetric_serde(json: &str, expected: MigrationStep) { - let serde_value: Value = serde_json::from_str(&json).expect("The provided input was invalid json."); - let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); - let serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); - assert_eq!( - deserialized, expected, - "The provided json could not be serialized into the expected struct." - ); - assert_eq!( - serialized_again, serde_value, - "Reserializing did not produce the original json input." 
- ); - } -} diff --git a/server/prisma-rs/prisma-models/Cargo.lock b/server/prisma-rs/prisma-models/Cargo.lock deleted file mode 100644 index 36fbc23e5b..0000000000 --- a/server/prisma-rs/prisma-models/Cargo.lock +++ /dev/null @@ -1,642 +0,0 @@ -[[package]] -name = "antidote" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "autocfg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "bitflags" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "byteorder" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "bytes" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "cfg-if" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "chrono" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "cloudabi" -version = "0.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "debug_stub_derive" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "quote 0.3.15 
(registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "idna" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "iovec" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "itoa" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "libc" -version = "0.2.50" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "libsqlite3-sys" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "linked-hash-map" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "lock_api" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "log" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cfg-if 0.1.7 
(registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "lru-cache" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "linked-hash-map 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "matches" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "num-integer" -version = "0.1.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-traits" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "once_cell" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "parking_lot" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "parking_lot_core" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "percent-encoding" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "pkg-config" -version = "0.3.14" -source = 
"registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "prisma-common" -version = "0.0.0" -dependencies = [ - "prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "r2d2 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "prisma-models" -version = "0.0.0" -dependencies = [ - "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", - "debug_stub_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "once_cell 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "prisma-common 0.0.0", - "prisma-query 0.1.0 (git+https://github.com/prisma/prisma-query.git)", - "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "prisma-query" -version = "0.1.0" -source = "git+https://github.com/prisma/prisma-query.git#92bfb95d439d71df41d111033e5fb5cb54ac403c" -dependencies = [ - "rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "proc-macro2" -version = "0.4.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "prost" -version = "0.4.0" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "quote" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "quote" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "r2d2" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", - "scheduled-thread-pool 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_jitter 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_core" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_jitter" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_os" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_pcg" 
-version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "redox_syscall" -version = "0.1.51" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "rusqlite" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", - "libsqlite3-sys 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rustc_version" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "ryu" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "scheduled-thread-pool" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "scopeguard" -version = "0.3.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "semver" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "serde" -version = "1.0.89" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "serde_derive" -version = "1.0.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.27 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_json" -version = "1.0.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "smallvec" -version = "0.6.9" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "syn" -version = "0.11.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", - "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "syn" -version = "0.15.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.11 
(registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "synom" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "time" -version = "0.1.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.51 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "unicode-bidi" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "unicode-normalization" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "unicode-xid" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "url" -version = "1.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "url_serde" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)", - "url 
1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "vcpkg" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "34fde25430d87a9388dadbe6e34d7f72a462c8b43ac8d309b42b0a8505d7e2a5" -"checksum autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6d640bee2da49f60a4068a7fae53acde8982514ab7bae8b8cea9e88cbcfd799" -"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" -"checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb" -"checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" -"checksum cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11d43355396e872eefb45ce6342e4374ed7bc2b3a502d1b28e36d6e23c05d1f4" -"checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878" -"checksum cloudabi 
0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" -"checksum debug_stub_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "496b7f8a2f853313c3ca370641d7ff3e42c32974fdccda8f0684599ed0a3ff6b" -"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" -"checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" -"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" -"checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b" -"checksum libc 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "aab692d7759f5cd8c859e169db98ae5b52c924add2af5fbbca11d12fefb567c1" -"checksum libsqlite3-sys 0.11.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3567bc1a0c84e2c0d71eeb4a1f08451babf7843babd733158777d9c686dad9f3" -"checksum linked-hash-map 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7860ec297f7008ff7a1e3382d7f7e1dcd69efc94751a2284bafc3d013c2aa939" -"checksum lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "62ebf1391f6acad60e5c8b43706dde4582df75c06698ab44511d15016bc2442c" -"checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6" -"checksum lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4d06ff7ff06f729ce5f4e227876cb88d10bc59cd4ae1e09fbb2bde15c850dc21" -"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" -"checksum num-integer 0.1.39 
(registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" -"checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" -"checksum once_cell 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "532c29a261168a45ce28948f9537ddd7a5dd272cc513b3017b1e82a88f962c37" -"checksum parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ab41b4aed082705d1056416ae4468b6ea99d52599ecf3169b00088d43113e337" -"checksum parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94c8c7923936b28d546dfd14d4472eaf34c99b14e1c973a32b3e6d4eb04298c9" -"checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" -"checksum pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "676e8eb2b1b4c9043511a9b7bea0915320d7e502b0a079fb03f9635a5252b18c" -"checksum prisma-query 0.1.0 (git+https://github.com/prisma/prisma-query.git)" = "" -"checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915" -"checksum prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b9f36c478cd43382388dfc3a3679af175c03d19ed8039e79a3e4447e944cd3f3" -"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" -"checksum quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1" -"checksum r2d2 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "5d746fc8a0dab19ccea7ff73ad535854e90ddb3b4b8cdce953dd5cd0b2e7bd22" -"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" -"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -"checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0" -"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" -"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -"checksum rand_jitter 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b9ea758282efe12823e0d952ddb269d2e1897227e464919a554f2a03ef1b832" -"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" -"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -"checksum redox_syscall 0.1.51 (registry+https://github.com/rust-lang/crates.io-index)" = "423e376fffca3dfa06c9e9790a9ccd282fafb3cc6e6397d01dbf64f9bacc6b85" -"checksum rusqlite 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6381ddfe91dbb659b4b132168da15985bc84162378cf4fcdc4eb99c857d063e2" -"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -"checksum ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "eb9e9b8cde282a9fe6a42dd4681319bfb63f121b8a8ee9439c6f4107e58a46f7" -"checksum scheduled-thread-pool 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1a2ff3fc5223829be817806c6441279c676e454cc7da608faf03b0ccc09d3889" -"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" -"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" -"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" -"checksum serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)" = "92514fb95f900c9b5126e32d020f5c6d40564c27a5ea6d1d7d9f157a96623560" -"checksum serde_derive 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)" = "bb6eabf4b5914e88e24eea240bb7c9f9a2cbc1bbbe8d961d381975ec3c6b806c" -"checksum serde_json 1.0.39 (registry+https://github.com/rust-lang/crates.io-index)" = "5a23aa71d4a4d43fdbfaac00eff68ba8a06a51759a89ac3304323e800c4dd40d" -"checksum smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c4488ae950c49d403731982257768f48fada354a5203fe81f9bb6f43ca9002be" -"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" -"checksum syn 0.15.27 (registry+https://github.com/rust-lang/crates.io-index)" = "525bd55255f03c816e5d7f615587bd13030c7103354fadb104993dcee6a788ec" -"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" -"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = 
"db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" -"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" -"checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426" -"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" -"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" -"checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" -"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea" -"checksum vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "def296d3eb3b12371b2c7d0e83bfe1403e4db2d7a0bba324a12b21c4ee13143d" -"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" -"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0" -"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/server/prisma-rs/prisma-models/Cargo.toml b/server/prisma-rs/prisma-models/Cargo.toml index 670074be05..bdf274a246 100644 --- a/server/prisma-rs/prisma-models/Cargo.toml +++ 
b/server/prisma-rs/prisma-models/Cargo.toml @@ -5,8 +5,7 @@ authors = ["Dominic Petrick ", "Julius de Bruijn RelationRef { self.relation - .get_or_init(|| self.model().schema().find_relation(&self.relation_name).unwrap()) + .get_or_init(|| self.model().internal_data_model().find_relation(&self.relation_name).unwrap()) .upgrade() .unwrap() } @@ -141,9 +141,9 @@ impl RelationField { pub fn as_column(&self) -> Column { let model = self.model(); - let schema = model.schema(); + let internal_data_model = model.internal_data_model(); let db_name = self.db_name(); - let parts = ((schema.db_name.as_ref(), model.db_name()), db_name.as_ref()); + let parts = ((internal_data_model.db_name.as_ref(), model.db_name()), db_name.as_ref()); parts.into() } diff --git a/server/prisma-rs/prisma-models/src/field/scalar.rs b/server/prisma-rs/prisma-models/src/field/scalar.rs index 073c55b614..30aef340ad 100644 --- a/server/prisma-rs/prisma-models/src/field/scalar.rs +++ b/server/prisma-rs/prisma-models/src/field/scalar.rs @@ -83,11 +83,11 @@ impl ScalarField { .expect("Model does not exist anymore. Parent model got deleted without deleting the child.") } - pub fn schema(&self) -> SchemaRef { - self.model().schema() + pub fn internal_data_model(&self) -> InternalDataModelRef { + self.model().internal_data_model() } - /// A field is an ID field if the name is `id` or `_id` in legacy schemas, + /// A field is an ID field if the name is `id` or `_id` in legacy internal_data_models, /// or if the field has Id behaviour defined. 
pub fn is_id(&self) -> bool { if self.model().is_legacy() { @@ -135,7 +135,7 @@ impl ScalarField { } pub fn as_column(&self) -> Column { - ((self.schema().db_name.as_str(), self.model().db_name()), self.db_name()).into() + ((self.internal_data_model().db_name.as_str(), self.model().db_name()), self.db_name()).into() } pub fn id_behaviour_clone(&self) -> Option { diff --git a/server/prisma-rs/prisma-models/src/fields.rs b/server/prisma-rs/prisma-models/src/fields.rs index 57e6ef46d9..12faf0317f 100644 --- a/server/prisma-rs/prisma-models/src/fields.rs +++ b/server/prisma-rs/prisma-models/src/fields.rs @@ -62,7 +62,7 @@ impl Fields { }) } - fn scalar(&self) -> Vec> { + pub fn scalar(&self) -> Vec> { self.scalar_weak().iter().map(|f| f.upgrade().unwrap()).collect() } @@ -106,16 +106,13 @@ impl Fields { } pub fn find_many_from_all(&self, names: &BTreeSet) -> Vec<&Field> { - self.all - .iter() - .filter(|field| names.contains(field.db_name().as_ref())) - .collect() + self.all.iter().filter(|field| names.contains(field.name())).collect() } pub fn find_many_from_scalar(&self, names: &BTreeSet) -> Vec> { self.scalar_weak() .iter() - .filter(|field| names.contains(field.upgrade().unwrap().db_name())) + .filter(|field| names.contains(&field.upgrade().unwrap().name)) .map(|field| field.upgrade().unwrap()) .collect() } @@ -123,7 +120,7 @@ impl Fields { pub fn find_many_from_relation(&self, names: &BTreeSet) -> Vec> { self.relation_weak() .iter() - .filter(|field| names.contains(&field.upgrade().unwrap().db_name())) + .filter(|field| names.contains(&field.upgrade().unwrap().name)) .map(|field| field.upgrade().unwrap()) .collect() } @@ -131,7 +128,7 @@ impl Fields { pub fn find_from_all(&self, name: &str) -> DomainResult<&Field> { self.all .iter() - .find(|field| field.db_name() == name) + .find(|field| field.name() == name) .ok_or_else(|| DomainError::FieldNotFound { name: name.to_string(), model: self.model().name.clone(), @@ -142,7 +139,7 @@ impl Fields { self.scalar_weak() 
.iter() .map(|field| field.upgrade().unwrap()) - .find(|field| field.db_name() == name) + .find(|field| field.name == name) .ok_or_else(|| DomainError::ScalarFieldNotFound { name: name.to_string(), model: self.model().name.clone(), diff --git a/server/prisma-rs/prisma-models/src/schema.rs b/server/prisma-rs/prisma-models/src/internal_data_model.rs similarity index 77% rename from server/prisma-rs/prisma-models/src/schema.rs rename to server/prisma-rs/prisma-models/src/internal_data_model.rs index 48cf162b11..7457a10fa2 100644 --- a/server/prisma-rs/prisma-models/src/schema.rs +++ b/server/prisma-rs/prisma-models/src/internal_data_model.rs @@ -2,21 +2,21 @@ use crate::prelude::*; use once_cell::sync::OnceCell; use std::sync::{Arc, Weak}; -pub type SchemaRef = Arc; -pub type SchemaWeakRef = Weak; +pub type InternalDataModelRef = Arc; +pub type InternalDataModelWeakRef = Weak; #[derive(Debug, Deserialize, Serialize, Default)] #[serde(rename_all = "camelCase")] -pub struct SchemaTemplate { +pub struct InternalDataModelTemplate { pub models: Vec, pub relations: Vec, - pub enums: Vec, + pub enums: Vec, pub version: Option, } #[derive(DebugStub)] -pub struct Schema { - pub enums: Vec, +pub struct InternalDataModel { + pub enums: Vec, pub version: Option, pub db_name: String, @@ -27,14 +27,14 @@ pub struct Schema { #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -pub struct PrismaEnum { +pub struct InternalEnum { pub name: String, pub values: Vec, } -impl SchemaTemplate { - pub fn build(self, db_name: String) -> SchemaRef { - let schema = Arc::new(Schema { +impl InternalDataModelTemplate { + pub fn build(self, db_name: String) -> InternalDataModelRef { + let internal_data_model = Arc::new(InternalDataModel { models: OnceCell::new(), relations: OnceCell::new(), enums: self.enums, @@ -46,23 +46,23 @@ impl SchemaTemplate { let models = self .models .into_iter() - .map(|mt| mt.build(Arc::downgrade(&schema))) + .map(|mt| 
mt.build(Arc::downgrade(&internal_data_model))) .collect(); - schema.models.set(models).unwrap(); + internal_data_model.models.set(models).unwrap(); let relations = self .relations .into_iter() - .map(|rt| rt.build(Arc::downgrade(&schema))) + .map(|rt| rt.build(Arc::downgrade(&internal_data_model))) .collect(); - schema.relations.set(relations).unwrap(); - schema + internal_data_model.relations.set(relations).unwrap(); + internal_data_model } } -impl Schema { +impl InternalDataModel { pub fn models(&self) -> &[ModelRef] { self.models.get().unwrap() } diff --git a/server/prisma-rs/prisma-models/src/lib.rs b/server/prisma-rs/prisma-models/src/lib.rs index d0e7132461..451920d155 100644 --- a/server/prisma-rs/prisma-models/src/lib.rs +++ b/server/prisma-rs/prisma-models/src/lib.rs @@ -17,7 +17,7 @@ mod prisma_value; mod project; mod relation; mod scalar_list_table; -mod schema; +mod internal_data_model; mod selected_fields; pub mod prelude; @@ -34,7 +34,7 @@ pub use prisma_value::*; pub use project::*; pub use relation::*; pub use scalar_list_table::*; -pub use schema::*; +pub use internal_data_model::*; pub use selected_fields::*; pub type DomainResult = Result; diff --git a/server/prisma-rs/prisma-models/src/model.rs b/server/prisma-rs/prisma-models/src/model.rs index 5ec77cb3a0..7a3067ab68 100644 --- a/server/prisma-rs/prisma-models/src/model.rs +++ b/server/prisma-rs/prisma-models/src/model.rs @@ -26,8 +26,8 @@ pub struct Model { fields: OnceCell, - #[debug_stub = "#SchemaWeakRef#"] - pub schema: SchemaWeakRef, + #[debug_stub = "#InternalDataModelWeakRef#"] + pub internal_data_model: InternalDataModelWeakRef, } #[derive(Debug, Deserialize, Serialize)] @@ -37,14 +37,14 @@ pub struct ModelManifestation { } impl ModelTemplate { - pub fn build(self, schema: SchemaWeakRef) -> ModelRef { + pub fn build(self, internal_data_model: InternalDataModelWeakRef) -> ModelRef { let model = Arc::new(Model { name: self.name, stable_identifier: self.stable_identifier, is_embedded: 
self.is_embedded, fields: OnceCell::new(), manifestation: self.manifestation, - schema: schema, + internal_data_model: internal_data_model, }); let fields = Fields::new( @@ -86,7 +86,7 @@ impl Model { } pub fn table(&self) -> Table { - (self.schema().db_name.as_str(), self.db_name()).into() + (self.internal_data_model().db_name.as_str(), self.db_name()).into() } pub fn fields(&self) -> &Fields { @@ -97,7 +97,7 @@ impl Model { } pub fn is_legacy(&self) -> bool { - self.schema().is_legacy() + self.internal_data_model().is_legacy() } pub fn db_name(&self) -> &str { @@ -108,10 +108,10 @@ impl Model { self.manifestation.as_ref().map(|mf| mf.db_name.as_ref()) } - pub fn schema(&self) -> SchemaRef { - self.schema + pub fn internal_data_model(&self) -> InternalDataModelRef { + self.internal_data_model .upgrade() - .expect("Schema does not exist anymore. Parent schema is deleted without deleting the child schema.") + .expect("InternalDataModel does not exist anymore. Parent internal_data_model is deleted without deleting the child internal_data_model.") } pub fn id_column(&self) -> Column { diff --git a/server/prisma-rs/prisma-models/src/node.rs b/server/prisma-rs/prisma-models/src/node.rs index 3a1da61bc9..f5de0468fa 100644 --- a/server/prisma-rs/prisma-models/src/node.rs +++ b/server/prisma-rs/prisma-models/src/node.rs @@ -27,7 +27,7 @@ impl SingleNode { Self { node, field_names } } - pub fn get_id_value(&self, model: ModelRef) -> DomainResult<&GraphqlId> { + pub fn get_id_value(&self, model: ModelRef) -> DomainResult { self.node.get_id_value(&self.field_names, model) } @@ -71,18 +71,6 @@ impl ManyNodes { pub fn reverse(&mut self) { self.nodes.reverse(); } - - /// Drops x records on the end of the wrapped records in place. - pub fn drop_right(&mut self, x: u32) { - self.nodes.truncate(self.nodes.len() - x as usize); - } - - /// Drops x records on the start of the wrapped records in place. 
- pub fn drop_left(&mut self, x: u32) { - self.reverse(); - self.drop_right(x); - self.reverse(); - } } #[derive(Debug, Default, Clone)] @@ -99,7 +87,7 @@ impl Node { } } - pub fn get_id_value(&self, field_names: &Vec, model: ModelRef) -> DomainResult<&GraphqlId> { + pub fn get_id_value(&self, field_names: &Vec, model: ModelRef) -> DomainResult { let id_field = model.fields().id(); let index = field_names .iter() @@ -112,10 +100,7 @@ impl Node { }) })?; - Ok(match &self.values[index] { - PrismaValue::GraphqlId(ref id) => id, - _ => unimplemented!(), - }) + Ok(GraphqlId::try_from(&self.values[index])?) } pub fn get_field_value(&self, field_names: &Vec, field: &str) -> DomainResult<&PrismaValue> { diff --git a/server/prisma-rs/prisma-models/src/prisma_value.rs b/server/prisma-rs/prisma-models/src/prisma_value.rs index 49cd2d9180..24e6bb9788 100644 --- a/server/prisma-rs/prisma-models/src/prisma_value.rs +++ b/server/prisma-rs/prisma-models/src/prisma_value.rs @@ -1,7 +1,6 @@ use crate::{DomainError, DomainResult}; -use chrono::{DateTime, Utc}; -use graphql_parser::query::Value as GraphqlValue; -use rusqlite::types::{FromSql, FromSqlResult, ValueRef}; +use chrono::prelude::*; +use graphql_parser::query::{Number, Value as GraphqlValue}; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::{convert::TryFrom, fmt}; @@ -19,6 +18,16 @@ pub enum GraphqlId { UUID(Uuid), } +impl GraphqlId { + pub fn to_value(&self) -> GraphqlValue { + match self { + GraphqlId::String(s) => GraphqlValue::String(s.clone()), + GraphqlId::Int(i) => GraphqlValue::Int(Number::from((*i) as i32)), // This could cause issues! 
+ GraphqlId::UUID(u) => GraphqlValue::String(u.to_string()), + } + } +} + #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] pub enum PrismaValue { String(String), @@ -50,11 +59,27 @@ impl PrismaValue { GraphqlValue::Float(f) => PrismaValue::Float(f.clone()), GraphqlValue::Int(i) => PrismaValue::Int(i.as_i64().unwrap()), GraphqlValue::Null => PrismaValue::Null, - GraphqlValue::String(s) => PrismaValue::String(s.clone()), + GraphqlValue::String(s) => Self::str_as_json(s) + .or_else(|| Self::str_as_datetime(s)) + .unwrap_or(PrismaValue::String(s.clone())), GraphqlValue::List(l) => PrismaValue::List(Some(l.iter().map(|i| Self::from_value(i)).collect())), - _ => unimplemented!(), + GraphqlValue::Object(obj) if obj.contains_key("set") => Self::from_value(obj.get("set").unwrap()), + value => panic!(format!("Unable to make {:?} to PrismaValue", value)), } } + + fn str_as_json(s: &str) -> Option { + serde_json::from_str(s).ok().map(|j| PrismaValue::Json(j)) + } + + // If you look at this and think: "What's up with Z?" then you're asking the right question. + // Feel free to try and fix it for cases with AND without Z. 
+ fn str_as_datetime(s: &str) -> Option { + let fmt = "%Y-%m-%dT%H:%M:%S%.3f"; + Utc.datetime_from_str(s.trim_end_matches("Z"), fmt) + .ok() + .map(|dt| PrismaValue::DateTime(DateTime::::from_utc(dt.naive_utc(), Utc))) + } } impl fmt::Display for PrismaValue { @@ -167,6 +192,23 @@ impl TryFrom for GraphqlId { fn try_from(value: PrismaValue) -> DomainResult { match value { PrismaValue::GraphqlId(id) => Ok(id), + PrismaValue::Int(i) => Ok(GraphqlId::from(i)), + PrismaValue::String(s) => Ok(GraphqlId::from(s)), + PrismaValue::Uuid(u) => Ok(GraphqlId::from(u)), + _ => Err(DomainError::ConversionFailure("PrismaValue", "GraphqlId")), + } + } +} + +impl TryFrom<&PrismaValue> for GraphqlId { + type Error = DomainError; + + fn try_from(value: &PrismaValue) -> DomainResult { + match value { + PrismaValue::GraphqlId(id) => Ok(id.clone()), + PrismaValue::Int(i) => Ok(GraphqlId::from(*i)), + PrismaValue::String(s) => Ok(GraphqlId::from(s.clone())), + PrismaValue::Uuid(u) => Ok(GraphqlId::from(u.clone())), _ => Err(DomainError::ConversionFailure("PrismaValue", "GraphqlId")), } } @@ -189,7 +231,7 @@ impl From for DatabaseValue { match id { GraphqlId::String(s) => s.into(), GraphqlId::Int(i) => (i as i64).into(), - GraphqlId::UUID(u) => u.to_hyphenated_ref().to_string().into(), + GraphqlId::UUID(u) => u.into(), } } } @@ -208,28 +250,20 @@ impl From for DatabaseValue { PrismaValue::String(s) => s.into(), PrismaValue::Float(f) => (f as f64).into(), PrismaValue::Boolean(b) => b.into(), - PrismaValue::DateTime(d) => d.timestamp_millis().into(), + PrismaValue::DateTime(d) => d.into(), PrismaValue::Enum(e) => e.into(), - PrismaValue::Json(j) => j.into(), + PrismaValue::Json(j) => j.to_string().into(), PrismaValue::Int(i) => (i as i64).into(), PrismaValue::Relation(i) => (i as i64).into(), PrismaValue::Null => DatabaseValue::Parameterized(ParameterizedValue::Null), - PrismaValue::Uuid(u) => u.to_hyphenated_ref().to_string().into(), + PrismaValue::Uuid(u) => u.into(), 
PrismaValue::GraphqlId(id) => id.into(), + PrismaValue::List(Some(l)) => l.into(), PrismaValue::List(_) => panic!("List values are not supported here"), } } } -impl FromSql for GraphqlId { - fn column_result(value: ValueRef<'_>) -> FromSqlResult { - value - .as_str() - .map(|strval| GraphqlId::String(strval.to_string())) - .or_else(|_| value.as_i64().map(|intval| GraphqlId::Int(intval as usize))) - } -} - impl From<&str> for GraphqlId { fn from(s: &str) -> Self { GraphqlId::from(s.to_string()) @@ -248,6 +282,12 @@ impl From for GraphqlId { } } +impl From for GraphqlId { + fn from(id: i64) -> Self { + GraphqlId::Int(id as usize) + } +} + impl From for GraphqlId { fn from(uuid: Uuid) -> Self { GraphqlId::UUID(uuid) diff --git a/server/prisma-rs/prisma-models/src/project.rs b/server/prisma-rs/prisma-models/src/project.rs index 5003bbe214..bb39971641 100644 --- a/server/prisma-rs/prisma-models/src/project.rs +++ b/server/prisma-rs/prisma-models/src/project.rs @@ -9,7 +9,8 @@ pub type ProjectWeakRef = Weak; #[serde(rename_all = "camelCase")] pub struct ProjectTemplate { pub id: String, - pub schema: SchemaTemplate, + #[serde(rename = "schema")] + pub internal_data_model: InternalDataModelTemplate, #[serde(default)] pub manifestation: ProjectManifestation, @@ -22,7 +23,7 @@ pub struct ProjectTemplate { #[derive(Debug)] pub struct Project { pub id: String, - pub schema: OnceCell, + pub internal_data_model: OnceCell, pub revision: Revision, } @@ -31,11 +32,14 @@ impl Into for ProjectTemplate { let db_name = self.db_name(); let project = Arc::new(Project { id: self.id, - schema: OnceCell::new(), + internal_data_model: OnceCell::new(), revision: self.revision, }); - project.schema.set(self.schema.build(db_name)).unwrap(); + project + .internal_data_model + .set(self.internal_data_model.build(db_name)) + .unwrap(); project } @@ -58,8 +62,10 @@ impl ProjectTemplate { } impl Project { - pub fn schema(&self) -> &Schema { - self.schema.get().expect("Project has no schema set!") + 
pub fn internal_data_model(&self) -> &InternalDataModel { + self.internal_data_model + .get() + .expect("Project has no internal_data_model set!") } } @@ -118,7 +124,8 @@ mod tests { use std::fs::File; #[test] - fn test_relation_schema() { + #[ignore] + fn test_relation_internal_data_model() { let file = File::open("./relation_schema.json").unwrap(); let project_template: ProjectTemplate = serde_json::from_reader(file).unwrap(); let _project: ProjectRef = project_template.into(); diff --git a/server/prisma-rs/prisma-models/src/relation.rs b/server/prisma-rs/prisma-models/src/relation.rs index c1cebdb415..77d7cad57a 100644 --- a/server/prisma-rs/prisma-models/src/relation.rs +++ b/server/prisma-rs/prisma-models/src/relation.rs @@ -96,12 +96,12 @@ pub struct Relation { pub manifestation: Option, - #[debug_stub = "#SchemaWeakRef#"] - pub schema: SchemaWeakRef, + #[debug_stub = "#InternalDataModelWeakRef#"] + pub internal_data_model: InternalDataModelWeakRef, } impl RelationTemplate { - pub fn build(self, schema: SchemaWeakRef) -> RelationRef { + pub fn build(self, internal_data_model: InternalDataModelWeakRef) -> RelationRef { let relation = Relation { name: self.name, manifestation: self.manifestation, @@ -113,7 +113,7 @@ impl RelationTemplate { model_b: OnceCell::new(), field_a: OnceCell::new(), field_b: OnceCell::new(), - schema: schema, + internal_data_model: internal_data_model, }; Arc::new(relation) @@ -163,22 +163,22 @@ impl Relation { pub fn model_a(&self) -> ModelRef { self.model_a .get_or_init(|| { - let model = self.schema().find_model(&self.model_a_name).unwrap(); + let model = self.internal_data_model().find_model(&self.model_a_name).unwrap(); Arc::downgrade(&model) }) .upgrade() - .expect("Model A deleted without deleting the relations in schema.") + .expect("Model A deleted without deleting the relations in internal_data_model.") } /// A pointer to the second `Model` in the `Relation`. 
pub fn model_b(&self) -> ModelRef { self.model_b .get_or_init(|| { - let model = self.schema().find_model(&self.model_b_name).unwrap(); + let model = self.internal_data_model().find_model(&self.model_b_name).unwrap(); Arc::downgrade(&model) }) .upgrade() - .expect("Model B deleted without deleting the relations in schema.") + .expect("Model B deleted without deleting the relations in internal_data_model.") } /// A pointer to the `RelationField` in the first `Model` in the `Relation`. @@ -194,7 +194,7 @@ impl Relation { Arc::downgrade(&field) }) .upgrade() - .expect("Field A deleted without deleting the relations in schema.") + .expect("Field A deleted without deleting the relations in internal_data_model.") } /// A pointer to the `RelationField` in the second `Model` in the `Relation`. @@ -210,7 +210,7 @@ impl Relation { Arc::downgrade(&field) }) .upgrade() - .expect("Field B deleted without deleting the relations in schema.") + .expect("Field B deleted without deleting the relations in internal_data_model.") } pub fn model_a_column(&self) -> Column { @@ -272,9 +272,19 @@ impl Relation { use RelationLinkManifestation::*; match self.manifestation { - Some(RelationTable(ref m)) => m.table.clone().into(), - Some(Inline(ref m)) => self.schema().find_model(&m.in_table_of_model_name).unwrap().table(), - None => format!("_{}", self.name).into(), + Some(RelationTable(ref m)) => { + let db = self.model_a().internal_data_model().db_name.clone(); + (db, m.table.clone()).into() + } + Some(Inline(ref m)) => self + .internal_data_model() + .find_model(&m.in_table_of_model_name) + .unwrap() + .table(), + None => { + let db = self.model_a().internal_data_model().db_name.clone(); + (db, format!("_{}", self.name)).into() + } } } @@ -332,9 +342,9 @@ impl Relation { } } - fn schema(&self) -> SchemaRef { - self.schema + fn internal_data_model(&self) -> InternalDataModelRef { + self.internal_data_model .upgrade() - .expect("Schema does not exist anymore. 
Parent schema is deleted without deleting the child schema.") + .expect("InternalDataModel does not exist anymore. Parent internal_data_model is deleted without deleting the child internal_data_model.") } } diff --git a/server/prisma-rs/prisma-models/src/scalar_list_table.rs b/server/prisma-rs/prisma-models/src/scalar_list_table.rs index 1dcdb5e022..316ba90dc9 100644 --- a/server/prisma-rs/prisma-models/src/scalar_list_table.rs +++ b/server/prisma-rs/prisma-models/src/scalar_list_table.rs @@ -22,8 +22,8 @@ impl<'a> ScalarListTable<'a> { } pub fn table(&self) -> Table { - let schema = self.parent_field.schema(); - let database_name = schema.db_name.as_ref(); + let internal_data_model = self.parent_field.internal_data_model(); + let database_name = internal_data_model.db_name.as_ref(); Table::from((database_name, self.table_name.as_ref())) } diff --git a/server/prisma-rs/prisma-models/src/selected_fields.rs b/server/prisma-rs/prisma-models/src/selected_fields.rs index a54a4cd5dc..fe2d92b138 100644 --- a/server/prisma-rs/prisma-models/src/selected_fields.rs +++ b/server/prisma-rs/prisma-models/src/selected_fields.rs @@ -145,7 +145,18 @@ impl SelectedFields { } pub fn names(&self) -> Vec { - self.columns().iter().map(|c| c.name.clone()).collect() + let mut result: Vec = self.scalar_non_list().iter().map(|f| f.name.clone()).collect(); + + for rf in self.relation_inlined().iter() { + result.push(rf.name.clone()); + } + + if let Some(ref from_field) = self.from_field { + result.push(from_field.related_field().name.clone()); + result.push(from_field.name.clone()); + }; + + result } pub fn type_identifiers(&self) -> Vec { diff --git a/server/prisma-rs/query-engine/connectors/connector/Cargo.toml b/server/prisma-rs/query-engine/connectors/connector/Cargo.toml index 9043a4f319..3db237c9af 100644 --- a/server/prisma-rs/query-engine/connectors/connector/Cargo.toml +++ b/server/prisma-rs/query-engine/connectors/connector/Cargo.toml @@ -4,16 +4,11 @@ version = "0.1.0" authors = 
[] edition = "2018" -[features] -default = ["sql", "sqlite"] -sqlite = ["rusqlite", "libsqlite3-sys", "r2d2_sqlite"] -sql = ["r2d2"] - [dependencies] serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" once_cell = "0.1" -prisma-query = { path = "../../../libs/prisma-query" } +prisma-query = { git = "https://github.com/prisma/prisma-query.git" } prisma-models = { path = "../../../prisma-models" } prisma-common = { path = "../../../libs/prisma-common" } failure = "0.1" @@ -21,8 +16,3 @@ failure_derive = "0.1" uuid = "0.7" itertools = "0.8" chrono = { version = "0.4", features = ["serde"] } - -r2d2 = { version = "0.8", optional = true } -r2d2_sqlite = { version = "0.8", optional = true } -rusqlite = { version = "0.16", features = ["chrono", "bundled"], optional = true } -libsqlite3-sys = { version = "0.11", optional = true } diff --git a/server/prisma-rs/query-engine/connectors/connector/src/compare.rs b/server/prisma-rs/query-engine/connectors/connector/src/compare.rs index e3289ef038..f9c3ef1367 100644 --- a/server/prisma-rs/query-engine/connectors/connector/src/compare.rs +++ b/server/prisma-rs/query-engine/connectors/connector/src/compare.rs @@ -1,7 +1,7 @@ use crate::filter::Filter; use prisma_models::PrismaValue; -/// Comparing methods for scalars. +/// Comparing methods for scalar fields. pub trait ScalarCompare { fn is_in(&self, val: Option>) -> Filter where @@ -60,6 +60,7 @@ pub trait ScalarCompare { T: Into; } +/// Comparison methods for relational fields. pub trait RelationCompare { fn every_related(&self, filter: T) -> Filter where @@ -80,6 +81,7 @@ pub trait RelationCompare { fn one_relation_is_null(&self) -> Filter; } +/// Comparison methods for scalar list fields. 
pub trait ScalarListCompare { fn contains_element(&self, value: T) -> Filter where diff --git a/server/prisma-rs/query-engine/connectors/connector/src/connection.rs b/server/prisma-rs/query-engine/connectors/connector/src/connection.rs deleted file mode 100644 index 224a15a891..0000000000 --- a/server/prisma-rs/query-engine/connectors/connector/src/connection.rs +++ /dev/null @@ -1,76 +0,0 @@ -use crate::{ - error::ConnectorError, - row::{PrismaRow, ToPrismaRow}, - ConnectorResult, -}; -use prisma_models::TypeIdentifier; - -#[cfg(feature = "sqlite")] -use rusqlite::Connection as SqliteConnection; - -#[cfg(feature = "sqlite")] -use prisma_query::{ - ast::{Query, Select}, - visitor::{self, Visitor}, -}; - -/// handled per-database basis, `Transaction` providing a minimal interface over -/// different databases. -pub trait Transaction { - /// Write to the database, expecting no result data. On success, returns the - /// number of rows that were changed, inserted, or deleted. - fn write(&mut self, q: Query) -> ConnectorResult; - - /// Select multiple rows from the database. - fn read(&mut self, q: Select, idents: &[TypeIdentifier]) -> ConnectorResult>; - - /// Select one row from the database. - fn read_one(&mut self, q: Select, idents: &[TypeIdentifier]) -> ConnectorResult { - self.read(q.limit(1), idents)? - .into_iter() - .next() - .ok_or(ConnectorError::NodeDoesNotExist) - } - - /// Read the first column as an integer. 
- fn read_int(&mut self, q: Select) -> ConnectorResult; -} - -#[cfg(feature = "sqlite")] -impl<'a> Transaction for SqliteTransaction<'a> { - fn write(&mut self, q: Query) -> ConnectorResult { - let (sql, params) = visitor::Sqlite::build(q); - - let mut stmt = self.prepare_cached(&sql)?; - let changes = stmt.execute(params)?; - - Ok(changes) - } - - fn read(&mut self, q: Select, idents: &[TypeIdentifier]) -> ConnectorResult> { - let (sql, params) = visitor::Sqlite::build(q); - - let mut stmt = self.prepare_cached(&sql)?; - let mut rows = stmt.query(params)?; - let mut result = Vec::new(); - - while let Some(row) = rows.next() { - result.push(row?.to_prisma_row(idents)?); - } - - Ok(result) - } - - fn read_int(&mut self, q: Select) -> ConnectorResult { - let (sql, params) = visitor::Sqlite::build(q); - - let mut stmt = self.prepare_cached(&sql)?; - let mut rows = stmt.query(params)?; - - if let Some(row) = rows.next() { - Ok(row?.get_checked(1)?) - } else { - Ok(0) - } - } -} diff --git a/server/prisma-rs/query-engine/connectors/connector/src/data_resolver.rs b/server/prisma-rs/query-engine/connectors/connector/src/data_resolver.rs index 1802d69091..090ff4e981 100644 --- a/server/prisma-rs/query-engine/connectors/connector/src/data_resolver.rs +++ b/server/prisma-rs/query-engine/connectors/connector/src/data_resolver.rs @@ -2,13 +2,16 @@ use crate::{filter::NodeSelector, query_arguments::QueryArguments, ConnectorResu use prisma_models::prelude::*; use prisma_models::ScalarFieldRef; +/// Methods for fetching data. pub trait DataResolver { + /// Find one record. fn get_node_by_where( &self, node_selector: &NodeSelector, selected_fields: &SelectedFields, ) -> ConnectorResult>; + /// Filter many records. fn get_nodes( &self, model: ModelRef, @@ -16,6 +19,7 @@ pub trait DataResolver { selected_fields: &SelectedFields, ) -> ConnectorResult; + /// Filter records related to the parent. 
fn get_related_nodes( &self, from_field: RelationFieldRef, @@ -24,13 +28,17 @@ pub trait DataResolver { selected_fields: &SelectedFields, ) -> ConnectorResult; + /// Fetch scalar list values for the parent. fn get_scalar_list_values_by_node_ids( &self, list_field: ScalarFieldRef, node_ids: Vec, ) -> ConnectorResult>; + /// Count the items in the model with the given arguments. fn count_by_model(&self, model: ModelRef, query_arguments: QueryArguments) -> ConnectorResult; + + /// Count the items in the table. fn count_by_table(&self, database: &str, table: &str) -> ConnectorResult; } diff --git a/server/prisma-rs/query-engine/connectors/connector/src/database_mutaction_executor.rs b/server/prisma-rs/query-engine/connectors/connector/src/database_mutaction_executor.rs index edcb97c7d7..f3850f4c0a 100644 --- a/server/prisma-rs/query-engine/connectors/connector/src/database_mutaction_executor.rs +++ b/server/prisma-rs/query-engine/connectors/connector/src/database_mutaction_executor.rs @@ -4,9 +4,13 @@ use crate::{ }; use serde_json::Value; +/// Methods for writing data. pub trait DatabaseMutactionExecutor { - fn execute_raw(&self, _query: String) -> ConnectorResult; + /// Execute raw SQL string without any safety guarantees, returning the result as JSON. + fn execute_raw(&self, db_name: String, query: String) -> ConnectorResult; + /// Executes the mutaction and all nested mutactions, returning the result + /// of the topmost mutaction. 
fn execute( &self, db_name: String, diff --git a/server/prisma-rs/query-engine/connectors/connector/src/error.rs b/server/prisma-rs/query-engine/connectors/connector/src/error.rs index 941ed712be..98a66ab66a 100644 --- a/server/prisma-rs/query-engine/connectors/connector/src/error.rs +++ b/server/prisma-rs/query-engine/connectors/connector/src/error.rs @@ -3,12 +3,6 @@ use failure::{Error, Fail}; use prisma_models::prelude::{DomainError, GraphqlId, ModelRef, PrismaValue}; use std::fmt; -#[cfg(feature = "sqlite")] -use rusqlite; - -#[cfg(feature = "sqlite")] -use libsqlite3_sys as ffi; - #[derive(Debug)] pub struct NodeSelectorInfo { pub model: String, @@ -102,6 +96,9 @@ pub enum ConnectorError { #[fail(display = "Conversion error: {}", _0)] ConversionError(Error), + + #[fail(display = "Database creation error: {}", _0)] + DatabaseCreationError(&'static str), } impl From for ConnectorError { @@ -109,61 +106,3 @@ impl From for ConnectorError { ConnectorError::DomainError(e) } } - -impl From for ConnectorError { - fn from(e: serde_json::error::Error) -> ConnectorError { - ConnectorError::ConversionError(e.into()) - } -} - -#[cfg(feature = "sql")] -impl From for ConnectorError { - fn from(e: r2d2::Error) -> ConnectorError { - ConnectorError::ConnectionError(e.into()) - } -} - -#[cfg(feature = "sqlite")] -impl From for ConnectorError { - fn from(e: rusqlite::Error) -> ConnectorError { - match e { - rusqlite::Error::QueryReturnedNoRows => ConnectorError::NodeDoesNotExist, - - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 2067, - }, - Some(description), - ) => { - let splitted: Vec<&str> = description.split(": ").collect(); - - ConnectorError::UniqueConstraintViolation { - field_name: splitted[1].into(), - } - } - - rusqlite::Error::SqliteFailure( - ffi::Error { - code: ffi::ErrorCode::ConstraintViolation, - extended_code: 1555, - }, - Some(description), - ) => { - let splitted: Vec<&str> = 
description.split(": ").collect(); - - ConnectorError::UniqueConstraintViolation { - field_name: splitted[1].into(), - } - } - - e => ConnectorError::QueryError(e.into()), - } - } -} - -impl From for ConnectorError { - fn from(e: uuid::parser::ParseError) -> ConnectorError { - ConnectorError::ColumnReadFailure(e.into()) - } -} diff --git a/server/prisma-rs/query-engine/connectors/connector/src/filter/relation.rs b/server/prisma-rs/query-engine/connectors/connector/src/filter/relation.rs index 3c681067a7..f94d4525a6 100644 --- a/server/prisma-rs/query-engine/connectors/connector/src/filter/relation.rs +++ b/server/prisma-rs/query-engine/connectors/connector/src/filter/relation.rs @@ -41,7 +41,7 @@ impl RelationCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let user = schema.find_model("User").unwrap(); /// # let site = schema.find_model("Site").unwrap(); @@ -92,7 +92,7 @@ impl RelationCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let user = schema.find_model("User").unwrap(); /// # let site = schema.find_model("Site").unwrap(); @@ -143,7 +143,7 @@ impl RelationCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = 
serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let user = schema.find_model("User").unwrap(); /// # let site = schema.find_model("Site").unwrap(); @@ -194,7 +194,7 @@ impl RelationCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let user = schema.find_model("User").unwrap(); /// # let site = schema.find_model("Site").unwrap(); @@ -245,7 +245,7 @@ impl RelationCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let user = schema.find_model("User").unwrap(); /// # diff --git a/server/prisma-rs/query-engine/connectors/connector/src/filter/scalar.rs b/server/prisma-rs/query-engine/connectors/connector/src/filter/scalar.rs index 8fa65fb31d..9a6daa4542 100644 --- a/server/prisma-rs/query-engine/connectors/connector/src/filter/scalar.rs +++ b/server/prisma-rs/query-engine/connectors/connector/src/filter/scalar.rs @@ -58,7 +58,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = 
serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -91,7 +91,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -124,7 +124,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -157,7 +157,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -190,7 +190,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = 
serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -223,7 +223,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -256,7 +256,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -289,7 +289,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -322,7 +322,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = 
serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -355,7 +355,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -388,7 +388,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # @@ -421,7 +421,7 @@ impl ScalarCompare for Arc { /// # use serde_json; /// # use std::{fs::File, sync::Arc}; /// # - /// # let tmp: SchemaTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); + /// # let tmp: InternalDataModelTemplate = serde_json::from_reader(File::open("../sql-connector/test_schema.json").unwrap()).unwrap(); /// # let schema = tmp.build(String::from("test")); /// # let model = schema.find_model("User").unwrap(); /// # diff --git a/server/prisma-rs/query-engine/connectors/connector/src/mutaction/mod.rs b/server/prisma-rs/query-engine/connectors/connector/src/mutaction/mod.rs index 71f130d444..fd23ee0e23 
100644 --- a/server/prisma-rs/query-engine/connectors/connector/src/mutaction/mod.rs +++ b/server/prisma-rs/query-engine/connectors/connector/src/mutaction/mod.rs @@ -3,7 +3,6 @@ mod create_node; mod delete_node; mod node_address; mod path; -mod relay_id; mod result; mod update_node; mod upsert_node; @@ -12,7 +11,6 @@ pub use create_node::*; pub use delete_node::*; pub use node_address::*; pub use path::*; -pub use relay_id::*; pub use result::*; pub use update_node::*; pub use upsert_node::*; diff --git a/server/prisma-rs/query-engine/connectors/connector/src/mutaction/relay_id.rs b/server/prisma-rs/query-engine/connectors/connector/src/mutaction/relay_id.rs deleted file mode 100644 index ea33592285..0000000000 --- a/server/prisma-rs/query-engine/connectors/connector/src/mutaction/relay_id.rs +++ /dev/null @@ -1,33 +0,0 @@ -use prisma_query::ast::{Column, Insert, Table}; - -pub struct RelayId<'a> { - database_name: &'a str, -} - -impl<'a> RelayId<'a> { - const TABLE_NAME: &'static str = "_RealayId"; - const ID: &'static str = "id"; - const STABLE_MODEL_IDENTIFIER: &'static str = "stableModelIdentifier"; - - pub fn new(database_name: &'a str) -> Self { - Self { database_name } - } - - pub fn create(&self, stable_identifier: &str) -> Insert { - Insert::single_into(self.table()) - .value(self.stable_identifier_column(), stable_identifier) - .into() - } - - pub fn id_column(&self) -> Column { - Column::from(Self::ID).table(self.table()) - } - - fn table(&self) -> Table { - Table::from((self.database_name, Self::TABLE_NAME)) - } - - fn stable_identifier_column(&self) -> Column { - Column::from(Self::STABLE_MODEL_IDENTIFIER).table(self.table()) - } -} diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/Cargo.toml b/server/prisma-rs/query-engine/connectors/sql-connector/Cargo.toml index 63b55c52ef..218a568f5e 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/Cargo.toml +++ 
b/server/prisma-rs/query-engine/connectors/sql-connector/Cargo.toml @@ -5,15 +5,17 @@ authors = [] edition = "2018" [features] -default = ["sqlite"] +default = ["sqlite", "postgresql"] sqlite = ["rusqlite", "libsqlite3-sys", "r2d2_sqlite"] +postgresql = ["postgres", "r2d2_postgres", "tokio-postgres-native-tls", "tokio-postgres", "native-tls", "rust_decimal"] [dependencies] connector = { path = "../connector" } prisma-models = { path = "../../../prisma-models" } itertools = "0.8" chrono = { version = "0.4", features = ["serde"] } -prisma-query = { path = "../../../libs/prisma-query" } +prisma-query = { git = "https://github.com/prisma/prisma-query.git" } +prisma-common = { path = "../../../libs/prisma-common" } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" parking_lot = "0.7" @@ -23,7 +25,14 @@ failure_derive = "0.1" r2d2 = "0.8" cuid = { git = "https://github.com/prisma/cuid-rust" } rand = "0.6" +rust_decimal = { git = "https://github.com/pimeys/rust-decimal.git", optional = true, features = ["postgres"] } r2d2_sqlite = { version = "0.8", optional = true } libsqlite3-sys = { version = "0.11", optional = true } rusqlite = { version = "0.16", features = ["chrono", "bundled"], optional = true } + +postgres = { version = "0.16.0-rc.1", features = ["with-serde_json-1", "with-chrono-0_4", "with-uuid-0_7"], optional = true} +r2d2_postgres = { version = "0.15.0-rc.1", optional = true } +tokio-postgres-native-tls = { version = "0.1.0-rc.1", optional = true } +tokio-postgres = { version = "0.4.0-rc.2", optional = true } +native-tls = { version = "0.2", optional = true } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/database/mod.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/database/mod.rs index 35a7dea896..72b82e3f38 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/database/mod.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/database/mod.rs @@ -1,9 +1,12 @@ +mod 
postgresql; mod sqlite; use crate::Transactional; +pub use postgresql::*; pub use sqlite::*; -/// A common interface for relational SQL databases. +/// A wrapper for relational databases due to trait restrictions. Implements the +/// needed traits. pub struct SqlDatabase where T: Transactional, diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/database/postgresql.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/database/postgresql.rs new file mode 100644 index 0000000000..b24f4a9c48 --- /dev/null +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/database/postgresql.rs @@ -0,0 +1,471 @@ +use crate::{ + error::SqlError, MutationBuilder, RawQuery, SqlId, SqlResult, SqlRow, ToSqlRow, Transaction, Transactional, +}; +use chrono::{DateTime, NaiveDateTime, Utc}; +use connector::{error::*, ConnectorResult}; +use native_tls::TlsConnector; +use postgres::{ + types::{FromSql, ToSql, Type as PostgresType}, + Client, Config, Row as PostgresRow, Transaction as PostgresTransaction, +}; +use prisma_common::config::{ConnectionLimit, ConnectionStringConfig, ExplicitConfig, PrismaDatabase}; +use prisma_models::{GraphqlId, PrismaValue, ProjectRef, TypeIdentifier}; +use prisma_query::{ + ast::{Query, Select}, + visitor::{self, Visitor}, +}; +use r2d2_postgres::PostgresConnectionManager; +use rust_decimal::Decimal; +use serde_json::{Map, Number, Value}; +use std::{convert::TryFrom, str::FromStr}; +use tokio_postgres::config::SslMode; +use tokio_postgres_native_tls::MakeTlsConnector; +use uuid::Uuid; + +type Pool = r2d2::Pool>; + +/// The World's Most Advanced Open Source Relational Database +pub struct PostgreSql { + pool: Pool, +} + +impl TryFrom<&PrismaDatabase> for PostgreSql { + type Error = ConnectorError; + + fn try_from(db: &PrismaDatabase) -> ConnectorResult { + match db { + PrismaDatabase::ConnectionString(ref config) => Ok(PostgreSql::try_from(config)?), + PrismaDatabase::Explicit(ref config) => 
Ok(PostgreSql::try_from(config)?), + _ => Err(ConnectorError::DatabaseCreationError( + "Could not understand the configuration format.", + )), + } + } +} + +impl TryFrom<&ExplicitConfig> for PostgreSql { + type Error = SqlError; + + fn try_from(e: &ExplicitConfig) -> SqlResult { + let db_name = e.database.as_ref().map(|x| x.as_str()).unwrap_or("postgres"); + let mut config = Config::new(); + + config.host(&e.host); + config.port(e.port); + config.user(&e.user); + config.ssl_mode(SslMode::Prefer); + config.dbname(db_name); + + if let Some(ref pw) = e.password { + config.password(pw); + } + + Ok(Self::new(config, e.limit())?) + } +} + +impl TryFrom<&ConnectionStringConfig> for PostgreSql { + type Error = SqlError; + + fn try_from(s: &ConnectionStringConfig) -> SqlResult { + let db_name = s.database.as_ref().map(|x| x.as_str()).unwrap_or("postgres"); + let mut config = Config::from_str(s.uri.as_str())?; + + config.ssl_mode(SslMode::Prefer); + config.dbname(db_name); + + Ok(Self::new(config, s.limit())?) + } +} + +impl Transactional for PostgreSql { + fn with_transaction(&self, _: &str, f: F) -> SqlResult + where + F: FnOnce(&mut Transaction) -> SqlResult, + { + self.with_client(|client| { + let mut tx = client.transaction()?; + let result = f(&mut tx); + + if result.is_ok() { + tx.commit()?; + } + + result + }) + } +} + +impl<'a> FromSql<'a> for SqlId { + fn from_sql(ty: &PostgresType, raw: &'a [u8]) -> Result> { + let res = match *ty { + PostgresType::INT2 => SqlId::Int(i16::from_sql(ty, raw)? as usize), + PostgresType::INT4 => SqlId::Int(i32::from_sql(ty, raw)? as usize), + PostgresType::INT8 => SqlId::Int(i64::from_sql(ty, raw)? 
as usize), + PostgresType::UUID => SqlId::UUID(Uuid::from_sql(ty, raw)?), + _ => SqlId::String(String::from_sql(ty, raw)?), + }; + + Ok(res) + } + + fn accepts(ty: &PostgresType) -> bool { + <&str as FromSql>::accepts(ty) + || ::accepts(ty) + || ::accepts(ty) + || ::accepts(ty) + || ::accepts(ty) + } +} + +impl<'a> Transaction for PostgresTransaction<'a> { + fn write(&mut self, q: Query) -> SqlResult> { + let id = match q { + insert @ Query::Insert(_) => { + let (sql, params) = dbg!(visitor::Postgres::build(insert)); + + let params: Vec<&ToSql> = params.iter().map(|pv| pv as &ToSql).collect(); + let stmt = self.prepare(&sql)?; + let rows = self.query(&stmt, params.as_slice())?; + + rows.into_iter().rev().next().map(|row| { + let id: SqlId = row.get(0); + GraphqlId::from(id) + }) + } + query => { + let (sql, params) = dbg!(visitor::Postgres::build(query)); + let params: Vec<&ToSql> = params.iter().map(|pv| pv as &ToSql).collect(); + + let stmt = self.prepare(&sql)?; + self.execute(&stmt, params.as_slice())?; + + None + } + }; + + Ok(id) + } + + fn filter(&mut self, q: Select, idents: &[TypeIdentifier]) -> SqlResult> { + let (sql, params) = dbg!(visitor::Postgres::build(q)); + let params: Vec<&ToSql> = params.iter().map(|pv| pv as &ToSql).collect(); + + let stmt = self.prepare(&sql)?; + let rows = self.query(&stmt, params.as_slice())?; + let mut result = Vec::new(); + + for row in rows { + result.push(row.to_prisma_row(idents)?); + } + + Ok(result) + } + + fn truncate(&mut self, project: ProjectRef) -> SqlResult<()> { + self.write(Query::from("SET CONSTRAINTS ALL DEFERRED"))?; + + for delete in MutationBuilder::truncate_tables(project) { + self.delete(delete)?; + } + + Ok(()) + } + + fn raw(&mut self, q: RawQuery) -> SqlResult { + let stmt = self.prepare(dbg!(&q.0))?; + + if q.is_select() { + let rows = self.query(&stmt, &[])?; + let mut result = Vec::new(); + + for row in rows { + let mut object = Map::new(); + for (i, column) in 
row.columns().into_iter().enumerate() { + let value = match *column.type_() { + PostgresType::BOOL => match row.try_get(i)? { + Some(val) => Value::Bool(val), + None => Value::Null, + }, + PostgresType::INT2 => match row.try_get(i)? { + Some(val) => { + let val: i16 = val; + Value::Number(Number::from(val)) + } + None => Value::Null, + }, + PostgresType::INT4 => match row.try_get(i)? { + Some(val) => { + let val: i32 = val; + Value::Number(Number::from(val)) + } + None => Value::Null, + }, + PostgresType::INT8 => match row.try_get(i)? { + Some(val) => { + let val: i64 = val; + Value::Number(Number::from(val)) + } + None => Value::Null, + }, + PostgresType::NUMERIC => match row.try_get(i)? { + Some(val) => { + let val: Decimal = val; + let val: f64 = val.to_string().parse().unwrap(); + Value::Number(Number::from_f64(val).unwrap()) + } + None => Value::Null, + }, + PostgresType::FLOAT4 => match row.try_get(i)? { + Some(val) => { + let val: f32 = val; + Value::Number(Number::from_f64(val as f64).unwrap()) + } + None => Value::Null, + }, + PostgresType::FLOAT8 => match row.try_get(i)? { + Some(val) => { + let val: f64 = val; + Value::Number(Number::from_f64(val).unwrap()) + } + None => Value::Null, + }, + PostgresType::TIMESTAMP => match row.try_get(i)? { + Some(val) => { + let ts: NaiveDateTime = val; + let dt = DateTime::::from_utc(ts, Utc); + Value::String(dt.to_rfc3339()) + } + None => Value::Null, + }, + PostgresType::UUID => match row.try_get(i)? { + Some(val) => { + let val: Uuid = val; + Value::String(val.to_hyphenated().to_string()) + } + None => Value::Null, + }, + PostgresType::INT2_ARRAY => match row.try_get(i)? { + Some(val) => { + let val: Vec = val; + Value::Array(val.into_iter().map(Value::from).collect()) + } + None => Value::Null, + }, + PostgresType::INT4_ARRAY => match row.try_get(i)? 
{ + Some(val) => { + let val: Vec = val; + Value::Array(val.into_iter().map(Value::from).collect()) + } + None => Value::Null, + }, + PostgresType::INT8_ARRAY => match row.try_get(i)? { + Some(val) => { + let val: Vec = val; + Value::Array(val.into_iter().map(Value::from).collect()) + } + None => Value::Null, + }, + PostgresType::FLOAT4_ARRAY => match row.try_get(i)? { + Some(val) => { + let val: Vec = val; + Value::Array( + val.into_iter() + .map(|f| Number::from_f64(f as f64).unwrap()) + .map(Value::Number) + .collect(), + ) + } + None => Value::Null, + }, + PostgresType::FLOAT8_ARRAY => match row.try_get(i)? { + Some(val) => { + let val: Vec = val; + Value::Array( + val.into_iter() + .map(|f| Value::Number(Number::from_f64(f).unwrap())) + .collect(), + ) + } + None => Value::Null, + }, + PostgresType::BOOL_ARRAY => match row.try_get(i)? { + Some(val) => { + let val: Vec = val; + Value::Array(val.into_iter().map(Value::from).collect()) + } + None => Value::Null, + }, + PostgresType::TIMESTAMP_ARRAY => match row.try_get(i)? { + Some(val) => { + let val: Vec = val; + + let val: Vec = val + .into_iter() + .map(|ts| DateTime::::from_utc(ts, Utc)) + .map(|dt| dt.to_rfc3339()) + .map(Value::from) + .collect(); + + Value::Array(val) + } + None => Value::Null, + }, + PostgresType::NUMERIC_ARRAY => match row.try_get(i)? { + Some(val) => { + let val: Vec = val; + + let val: Vec = val + .into_iter() + .map(|d| d.to_string()) + .map(|s| s.parse::().unwrap()) + .map(|f| Number::from_f64(f).unwrap()) + .map(Value::Number) + .collect(); + + Value::Array(val) + } + None => Value::Null, + }, + PostgresType::TEXT_ARRAY | PostgresType::NAME_ARRAY | PostgresType::VARCHAR_ARRAY => { + match row.try_get(i)? { + Some(val) => { + let val: Vec<&str> = val; + Value::Array(val.into_iter().map(Value::from).collect()) + } + None => Value::Null, + } + } + _ => match row.try_get(i)? 
{ + Some(val) => Value::String(val), + None => Value::Null, + }, + }; + + object.insert(String::from(column.name()), value); + } + + result.push(Value::Object(object)); + } + + Ok(Value::Array(result)) + } else { + let changes = self.execute(&stmt, &[])?; + + Ok(Value::Number(Number::from(changes))) + } + } +} + +impl ToSqlRow for PostgresRow { + fn to_prisma_row<'b, T>(&'b self, idents: T) -> SqlResult + where + T: IntoIterator, + { + fn convert(row: &PostgresRow, i: usize, typid: &TypeIdentifier) -> SqlResult { + let result = match typid { + TypeIdentifier::String => match row.try_get(i)? { + Some(val) => PrismaValue::String(val), + None => PrismaValue::Null, + }, + TypeIdentifier::GraphQLID | TypeIdentifier::Relation => match row.try_get(i)? { + Some(val) => { + let id: SqlId = val; + PrismaValue::GraphqlId(GraphqlId::from(id)) + } + None => PrismaValue::Null, + }, + TypeIdentifier::Float => match *row.columns()[i].type_() { + PostgresType::NUMERIC => match row.try_get(i)? { + Some(val) => { + let dec: Decimal = val; + let dec_s = dec.to_string(); + PrismaValue::Float(dec_s.parse().unwrap()) + } + None => PrismaValue::Null, + }, + _ => match row.try_get(i)? { + Some(val) => PrismaValue::Float(val), + None => PrismaValue::Null, + }, + }, + TypeIdentifier::Int => match *row.columns()[i].type_() { + PostgresType::INT2 => match row.try_get(i)? { + Some(val) => { + let val: i16 = val; + PrismaValue::Int(val as i64) + } + None => PrismaValue::Null, + }, + PostgresType::INT4 => match row.try_get(i)? { + Some(val) => { + let val: i32 = val; + PrismaValue::Int(val as i64) + } + None => PrismaValue::Null, + }, + _ => PrismaValue::Int(row.try_get(i)?), + }, + TypeIdentifier::Boolean => match row.try_get(i)? { + Some(val) => PrismaValue::Boolean(val), + None => PrismaValue::Null, + }, + TypeIdentifier::Enum => match row.try_get(i)? { + Some(val) => PrismaValue::Enum(val), + None => PrismaValue::Null, + }, + TypeIdentifier::Json => match row.try_get(i)? 
{ + Some(val) => { + let j_str: &str = val; + PrismaValue::Json(serde_json::from_str(j_str)?) + } + None => PrismaValue::Null, + }, + TypeIdentifier::UUID => match row.try_get(i)? { + Some(val) => PrismaValue::Uuid(val), + None => PrismaValue::Null, + }, + TypeIdentifier::DateTime => match row.try_get(i)? { + Some(val) => { + let ts: NaiveDateTime = val; + PrismaValue::DateTime(DateTime::::from_utc(ts, Utc)) + } + None => PrismaValue::Null, + }, + }; + + Ok(result) + } + + let mut row = SqlRow::default(); + + for (i, typid) in idents.into_iter().enumerate() { + row.values.push(convert(self, i, typid)?); + } + + Ok(row) + } +} + +impl PostgreSql { + fn new(config: Config, connections: u32) -> SqlResult { + let mut tls_builder = TlsConnector::builder(); + tls_builder.danger_accept_invalid_certs(true); // For Heroku + + let tls = MakeTlsConnector::new(tls_builder.build()?); + + let manager = PostgresConnectionManager::new(config, tls); + let pool = r2d2::Pool::builder().max_size(connections).build(manager)?; + + Ok(PostgreSql { pool }) + } + + fn with_client(&self, f: F) -> SqlResult + where + F: FnOnce(&mut Client) -> SqlResult, + { + let mut client = self.pool.get()?; + let result = f(&mut client); + result + } +} diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/database/sqlite.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/database/sqlite.rs index 7b768c4dc2..4b1ac1bd42 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/database/sqlite.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/database/sqlite.rs @@ -1,13 +1,18 @@ -use crate::*; -use connector::*; -use prisma_models::{ProjectRef, TypeIdentifier}; +use crate::{MutationBuilder, RawQuery, SqlId, SqlResult, SqlRow, ToSqlRow, Transaction, Transactional}; +use chrono::{DateTime, Utc}; +use prisma_models::{GraphqlId, PrismaValue, ProjectRef, TypeIdentifier}; use prisma_query::{ ast::{Query, Select}, visitor::{self, Visitor}, }; use 
r2d2_sqlite::SqliteConnectionManager; -use rusqlite::{Connection, Transaction as SqliteTransaction, NO_PARAMS}; +use rusqlite::{ + types::{FromSql, FromSqlResult, Type as SqliteType, ValueRef}, + Connection, Error as SqliteError, Row as SqliteRow, Transaction as SqliteTransaction, NO_PARAMS, +}; +use serde_json::{Map, Number, Value}; use std::collections::HashSet; +use uuid::Uuid; type Pool = r2d2::Pool; @@ -20,9 +25,9 @@ pub struct Sqlite { } impl Transactional for Sqlite { - fn with_transaction(&self, db: &str, f: F) -> ConnectorResult + fn with_transaction(&self, db: &str, f: F) -> SqlResult where - F: FnOnce(&mut Transaction) -> ConnectorResult, + F: FnOnce(&mut Transaction) -> SqlResult, { self.with_connection(db, |ref mut conn| { let mut tx = conn.transaction()?; @@ -40,17 +45,16 @@ impl Transactional for Sqlite { } impl<'a> Transaction for SqliteTransaction<'a> { - fn write(&mut self, q: Query) -> ConnectorResult { + fn write(&mut self, q: Query) -> SqlResult> { let (sql, params) = dbg!(visitor::Sqlite::build(q)); + let mut stmt = self.prepare_cached(&sql)?; + stmt.execute(params)?; - Ok(WriteItems { - count: stmt.execute(params)? 
as usize, - last_id: self.last_insert_rowid() as usize, - }) + Ok(Some(GraphqlId::Int(self.last_insert_rowid() as usize))) } - fn filter(&mut self, q: Select, idents: &[TypeIdentifier]) -> ConnectorResult> { + fn filter(&mut self, q: Select, idents: &[TypeIdentifier]) -> SqlResult> { let (sql, params) = dbg!(visitor::Sqlite::build(q)); let mut stmt = self.prepare_cached(&sql)?; @@ -64,7 +68,7 @@ impl<'a> Transaction for SqliteTransaction<'a> { Ok(result) } - fn truncate(&mut self, project: ProjectRef) -> ConnectorResult<()> { + fn truncate(&mut self, project: ProjectRef) -> SqlResult<()> { self.write(Query::from("PRAGMA foreign_keys = OFF"))?; for delete in MutationBuilder::truncate_tables(project) { @@ -75,11 +79,127 @@ impl<'a> Transaction for SqliteTransaction<'a> { Ok(()) } + + fn raw(&mut self, q: RawQuery) -> SqlResult { + let columns: Vec = self + .prepare_cached(&q.0)? + .column_names() + .into_iter() + .map(ToString::to_string) + .collect(); + + let mut stmt = self.prepare_cached(&q.0)?; + + if q.is_select() { + let mut rows = stmt.query(NO_PARAMS)?; + let mut result = Vec::new(); + + while let Some(row) = rows.next() { + let mut object = Map::new(); + let row = row?; + + for (i, column) in columns.iter().enumerate() { + let value = match row.get_raw(i) { + ValueRef::Null => Value::Null, + ValueRef::Integer(i) => Value::Number(Number::from(i)), + ValueRef::Real(f) => Value::Number(Number::from_f64(f).unwrap()), + ValueRef::Text(s) => Value::String(String::from(s)), + ValueRef::Blob(b) => Value::String(String::from_utf8(b.to_vec()).unwrap()), + }; + + object.insert(String::from(column.as_ref()), value); + } + + result.push(Value::Object(object)); + } + + Ok(Value::Array(result)) + } else { + let changes = stmt.execute(NO_PARAMS)?; + + Ok(Value::Number(Number::from(changes))) + } + } +} + +impl FromSql for SqlId { + fn column_result(value: ValueRef<'_>) -> FromSqlResult { + value + .as_str() + .and_then(|strval| { + let res = 
Uuid::from_slice(strval.as_bytes()) + .map(|uuid| SqlId::UUID(uuid)) + .unwrap_or_else(|_| SqlId::String(strval.to_string())); + + Ok(res) + }) + .or_else(|_| value.as_i64().map(|intval| SqlId::Int(intval as usize))) + } +} + +impl<'a, 'stmt> ToSqlRow for SqliteRow<'a, 'stmt> { + fn to_prisma_row<'b, T>(&'b self, idents: T) -> SqlResult + where + T: IntoIterator, + { + fn convert(row: &SqliteRow, i: usize, typid: &TypeIdentifier) -> SqlResult { + let result = match typid { + TypeIdentifier::String => row.get_checked(i).map(|val| PrismaValue::String(val)), + TypeIdentifier::GraphQLID | TypeIdentifier::Relation => row.get_checked(i).map(|val| { + let id: SqlId = val; + PrismaValue::GraphqlId(GraphqlId::from(id)) + }), + TypeIdentifier::Float => row.get_checked(i).map(|val| PrismaValue::Float(val)), + TypeIdentifier::Int => row.get_checked(i).map(|val| PrismaValue::Int(val)), + TypeIdentifier::Boolean => row.get_checked(i).map(|val| PrismaValue::Boolean(val)), + TypeIdentifier::Enum => row.get_checked(i).map(|val| PrismaValue::Enum(val)), + TypeIdentifier::Json => row.get_checked(i).and_then(|val| { + let val: String = val; + serde_json::from_str(&val).map(|r| PrismaValue::Json(r)).map_err(|err| { + SqliteError::FromSqlConversionFailure(i as usize, SqliteType::Text, Box::new(err)) + }) + }), + TypeIdentifier::UUID => { + let result: Result = row.get_checked(i); + + if let Ok(val) = result { + let uuid = Uuid::parse_str(val.as_ref())?; + + Ok(PrismaValue::Uuid(uuid)) + } else { + result.map(|s| PrismaValue::String(s)) + } + } + TypeIdentifier::DateTime => row.get_checked(i).map(|ts: i64| { + let nsecs = ((ts % 1000) * 1_000_000) as u32; + let secs = (ts / 1000) as i64; + let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); + let datetime: DateTime = DateTime::from_utc(naive, Utc); + + PrismaValue::DateTime(datetime) + }), + }; + + match result { + Ok(pv) => Ok(pv), + Err(rusqlite::Error::InvalidColumnType(_, rusqlite::types::Type::Null)) => 
Ok(PrismaValue::Null), + Err(e) => Err(e.into()), + } + } + + let mut row = SqlRow::default(); + + for (i, typid) in idents.into_iter().enumerate() { + row.values.push(convert(self, i, typid)?); + } + + Ok(row) + } } impl Sqlite { /// Creates a new SQLite pool connected into local memory. - pub fn new(databases_folder_path: String, connection_limit: u32, test_mode: bool) -> ConnectorResult { + pub fn new(databases_folder_path: String, connection_limit: u32, test_mode: bool) -> SqlResult { let pool = r2d2::Pool::builder() .max_size(connection_limit) .build(SqliteConnectionManager::memory())?; @@ -95,7 +215,7 @@ impl Sqlite { /// or created to the configured database file. /// /// The database is then attached to the memory with an alias of `{db_name}`. - fn attach_database(&self, conn: &mut Connection, db_name: &str) -> ConnectorResult<()> { + fn attach_database(&self, conn: &mut Connection, db_name: &str) -> SqlResult<()> { let mut stmt = conn.prepare("PRAGMA database_list")?; let databases: HashSet = stmt @@ -117,9 +237,9 @@ impl Sqlite { Ok(()) } - fn with_connection(&self, db: &str, f: F) -> ConnectorResult + fn with_connection(&self, db: &str, f: F) -> SqlResult where - F: FnOnce(&mut Connection) -> ConnectorResult, + F: FnOnce(&mut Connection) -> SqlResult, { let mut conn = self.pool.get()?; self.attach_database(&mut conn, db)?; diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/error.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/error.rs new file mode 100644 index 0000000000..d800748a24 --- /dev/null +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/error.rs @@ -0,0 +1,210 @@ +use connector::error::*; +use failure::{Error, Fail}; +use prisma_models::prelude::DomainError; + +#[cfg(feature = "sqlite")] +use rusqlite; + +#[cfg(feature = "sqlite")] +use libsqlite3_sys as ffi; + +#[derive(Debug, Fail)] +pub enum SqlError { + #[fail(display = "Unique constraint failed: {}", field_name)] + 
UniqueConstraintViolation { field_name: String }, + + #[fail(display = "Node does not exist.")] + NodeDoesNotExist, + + #[fail(display = "Column does not exist")] + ColumnDoesNotExist, + + #[fail(display = "Error creating a database connection.")] + ConnectionError(Error), + + #[fail(display = "Error querying the database: {}", _0)] + QueryError(Error), + + #[fail(display = "The provided arguments are not supported.")] + InvalidConnectionArguments, + + #[fail(display = "The column value was different from the model")] + ColumnReadFailure(Error), + + #[fail(display = "Field cannot be null: {}", field)] + FieldCannotBeNull { field: String }, + + #[fail(display = "{}", _0)] + DomainError(DomainError), + + #[fail(display = "Node not found: {}", _0)] + NodeNotFoundForWhere(NodeSelectorInfo), + + #[fail( + display = "Violating a relation {} between {} and {}", + relation_name, model_a_name, model_b_name + )] + RelationViolation { + relation_name: String, + model_a_name: String, + model_b_name: String, + }, + + #[fail( + display = "The relation {} has no node for the model {} connected to a Node for the model {} on your mutation path.", + relation_name, parent_name, child_name + )] + NodesNotConnected { + relation_name: String, + parent_name: String, + parent_where: Option, + child_name: String, + child_where: Option, + }, + + #[fail(display = "Conversion error: {}", _0)] + ConversionError(Error), + + #[fail(display = "Database creation error: {}", _0)] + DatabaseCreationError(&'static str), +} + +impl From for ConnectorError { + fn from(sql: SqlError) -> Self { + match sql { + SqlError::UniqueConstraintViolation { field_name } => { + ConnectorError::UniqueConstraintViolation { field_name } + } + SqlError::NodeDoesNotExist => ConnectorError::NodeDoesNotExist, + SqlError::ColumnDoesNotExist => ConnectorError::ColumnDoesNotExist, + SqlError::ConnectionError(e) => ConnectorError::ConnectionError(e), + SqlError::InvalidConnectionArguments => 
ConnectorError::InvalidConnectionArguments, + SqlError::ColumnReadFailure(e) => ConnectorError::ColumnReadFailure(e), + SqlError::FieldCannotBeNull { field } => ConnectorError::FieldCannotBeNull { field }, + SqlError::DomainError(e) => ConnectorError::DomainError(e), + SqlError::NodeNotFoundForWhere(info) => ConnectorError::NodeNotFoundForWhere(info), + SqlError::RelationViolation { + relation_name, + model_a_name, + model_b_name, + } => ConnectorError::RelationViolation { + relation_name, + model_a_name, + model_b_name, + }, + SqlError::NodesNotConnected { + relation_name, + parent_name, + parent_where, + child_name, + child_where, + } => ConnectorError::NodesNotConnected { + relation_name, + parent_name, + parent_where, + child_name, + child_where, + }, + SqlError::ConversionError(e) => ConnectorError::ConversionError(e), + SqlError::DatabaseCreationError(e) => ConnectorError::DatabaseCreationError(e), + SqlError::QueryError(e) => ConnectorError::QueryError(e), + } + } +} + +impl From for SqlError { + fn from(e: DomainError) -> SqlError { + SqlError::DomainError(e) + } +} + +impl From for SqlError { + fn from(e: serde_json::error::Error) -> SqlError { + SqlError::ConversionError(e.into()) + } +} + +impl From for SqlError { + fn from(e: r2d2::Error) -> SqlError { + SqlError::ConnectionError(e.into()) + } +} + +#[cfg(feature = "sqlite")] +impl From for SqlError { + fn from(e: rusqlite::Error) -> SqlError { + match e { + rusqlite::Error::QueryReturnedNoRows => SqlError::NodeDoesNotExist, + + rusqlite::Error::SqliteFailure( + ffi::Error { + code: ffi::ErrorCode::ConstraintViolation, + extended_code: 2067, + }, + Some(description), + ) => { + let splitted: Vec<&str> = description.split(": ").collect(); + + SqlError::UniqueConstraintViolation { + field_name: splitted[1].into(), + } + } + + rusqlite::Error::SqliteFailure( + ffi::Error { + code: ffi::ErrorCode::ConstraintViolation, + extended_code: 1555, + }, + Some(description), + ) => { + let splitted: Vec<&str> = 
description.split(": ").collect(); + + SqlError::UniqueConstraintViolation { + field_name: splitted[1].into(), + } + } + + e => SqlError::QueryError(e.into()), + } + } +} + +impl From for SqlError { + fn from(e: uuid::parser::ParseError) -> SqlError { + SqlError::ColumnReadFailure(e.into()) + } +} + +#[cfg(feature = "postgresql")] +impl From for SqlError { + fn from(e: tokio_postgres::error::Error) -> SqlError { + use tokio_postgres::error::DbError; + + match e.code().map(|c| c.code()) { + // Don't look at me, I'm hideous ;(( + Some("23505") => { + let error = e.into_source().unwrap(); // boom + let db_error = error.downcast_ref::().unwrap(); // BOOM + + let table = db_error.table().unwrap(); // BOOM + let detail = db_error.detail().unwrap(); // KA-BOOM + + let splitted: Vec<&str> = detail.split(")=(").collect(); + let splitted: Vec<&str> = splitted[0].split(" (").collect(); + let field = splitted[1].replace("\"", ""); + + SqlError::UniqueConstraintViolation { + field_name: format!("{}.{}", table, field), + } + } + _ => SqlError::QueryError(e.into()), + } + } +} + +#[cfg(feature = "postgresql")] +impl From for SqlError { + fn from(e: native_tls::Error) -> SqlError { + SqlError::ConnectionError(e.into()) + } +} diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/filter_conversion.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/filter_conversion.rs index 4f9ef220fc..acb97027fa 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/filter_conversion.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/filter_conversion.rs @@ -56,17 +56,6 @@ impl Alias { /// A string representation of the current alias. The current mode can be /// overridden by defining the `mode_override`. 
- /// - /// ```rust - /// # use sql_connector::{Alias, AliasMode}; - /// - /// let alias = Alias::default(); - /// - /// assert_eq!(String::from("t0"), alias.to_string(None)); - /// assert_eq!(String::from("t1"), alias.inc(AliasMode::Table).to_string(None)); - /// assert_eq!(String::from("j1"), alias.inc(AliasMode::Join).to_string(None)); - /// assert_eq!(String::from("j0"), alias.to_string(Some(AliasMode::Join))); - /// ``` pub fn to_string(&self, mode_override: Option) -> String { match mode_override.unwrap_or(self.mode) { AliasMode::Table => format!("t{}", self.counter), @@ -95,35 +84,6 @@ trait AliasedSelect { impl AliasedCondition for Filter { /// Conversion from a `Filter` to a query condition tree. Aliased when in a nested `SELECT`. - /// - /// ```rust - /// # use sql_connector::*; - /// # use connector::*; - /// # use prisma_models::*; - /// # use connector::*; - /// # use prisma_query::ast::*; - /// # use serde_json; - /// # use std::{fs::File, sync::Arc}; - /// # - /// # let template: SchemaTemplate = serde_json::from_reader(File::open("./test_schema.json").unwrap()).unwrap(); - /// let schema = template.build(String::from("test")); - /// let model = schema.find_model("User").unwrap(); - /// let field = model.fields().find_from_scalar("name").unwrap(); - /// - /// // Without aliasing: - /// - /// let cond = field.equals("foo").aliased_cond(None); - /// let expected: ConditionTree = (("test", "User"), "name").equals("foo").into(); - /// - /// assert_eq!(expected, cond); - /// - /// // With aliasing: - /// - /// let cond = field.equals("foo").aliased_cond(Some(Alias::default())); - /// let expected: ConditionTree = ("t0", "name").equals("foo").into(); - /// - /// assert_eq!(expected, cond); - /// ``` fn aliased_cond(self, alias: Option) -> ConditionTree { match self { Filter::And(mut filters) => match filters.pop() { @@ -176,33 +136,6 @@ impl AliasedCondition for Filter { impl AliasedCondition for ScalarFilter { /// Conversion from a `ScalarFilter` to a 
query condition tree. Aliased when in a nested `SELECT`. - /// - /// ```rust - /// # use sql_connector::*; - /// # use prisma_models::*; - /// # use connector::{*, filter::*}; - /// # use prisma_query::ast::*; - /// # use serde_json; - /// # use std::{fs::File, sync::Arc}; - /// # - /// # let template: SchemaTemplate = serde_json::from_reader(File::open("./test_schema.json").unwrap()).unwrap(); - /// let schema = template.build(String::from("test")); - /// let model = schema.find_model("User").unwrap(); - /// let field = model.fields().find_from_scalar("name").unwrap(); - /// - /// let sf = ScalarFilter { - /// field: Arc::clone(&field), - /// condition: ScalarCondition::Equals(PrismaValue::from("foo")) - /// }; - /// - /// let expected: ConditionTree = (("test", "User"), "name").equals("foo").into(); - /// assert_eq!(expected, sf.clone().aliased_cond(None)); - /// - /// // With aliasing: - /// - /// let expected: ConditionTree = ("t0", "name").equals("foo").into(); - /// assert_eq!(expected, sf.aliased_cond(Some(Alias::default()))); - /// ``` fn aliased_cond(self, alias: Option) -> ConditionTree { let column = match alias { Some(ref alias) => self.field.as_column().table(alias.to_string(None)), @@ -244,92 +177,6 @@ impl AliasedCondition for ScalarFilter { impl AliasedCondition for RelationFilter { /// Conversion from a `RelationFilter` to a query condition tree. Aliased when in a nested `SELECT`. 
- /// - /// ```rust - /// # use sql_connector::*; - /// # use prisma_models::*; - /// # use connector::*; - /// # use prisma_query::ast::*; - /// # use serde_json; - /// # use std::{fs::File, sync::Arc}; - /// # - /// # let template: SchemaTemplate = serde_json::from_reader(File::open("./test_schema.json").unwrap()).unwrap(); - /// let schema = template.build(String::from("test")); - /// let user = schema.find_model("User").unwrap(); - /// let site = schema.find_model("Site").unwrap(); - /// - /// let rf = user.fields().find_from_relation_fields("sites").unwrap(); - /// let site_name = site.fields().find_from_scalar("name").unwrap(); - /// - /// // Every related records matches: - /// { - /// let cond = rf - /// .clone() - /// .every_related(site_name.clone().equals("Blog")) - /// .aliased_cond(None); - /// - /// let join_data = ("test", "Site") - /// .alias("j0") - /// .on(("j0", "id").equals(Column::from(("t0", "A")))); - /// - /// let sub_cond: ConditionTree = ("j0", "name").equals("Blog").into(); - /// let sub_select = Select::from_table(Table::from("_UserToSites").alias("t0")) - /// .column(("t0", "B")) - /// .so_that(sub_cond.not()) - /// .inner_join(join_data); - /// - /// let expected: ConditionTree = (("test", "User"), "id") - /// .not_in_selection(sub_select) - /// .into(); - /// - /// assert_eq!(expected, cond); - /// } - /// - /// // No related record matches: - /// { - /// let cond = rf - /// .clone() - /// .no_related(site_name.clone().equals("Blog")) - /// .aliased_cond(None); - /// - /// let join_data = ("test", "Site") - /// .alias("j0") - /// .on(("j0", "id").equals(Column::from(("t0", "A")))); - /// - /// let sub_select = Select::from_table(Table::from("_UserToSites").alias("t0")) - /// .column(("t0", "B")) - /// .so_that(("j0", "name").equals("Blog")) - /// .inner_join(join_data); - /// - /// let expected: ConditionTree = (("test", "User"), "id") - /// .not_in_selection(sub_select) - /// .into(); - /// - /// assert_eq!(expected, cond); - /// } - 
/// - /// // At least one related record matches: - /// { - /// let cond = rf - /// .at_least_one_related(site_name.equals("Blog")) - /// .aliased_cond(None); - /// - /// let join_data = ("test", "Site") - /// .alias("j0") - /// .on(("j0", "id").equals(Column::from(("t0", "A")))); - /// - /// let sub_select = Select::from_table(Table::from("_UserToSites").alias("t0")) - /// .column(("t0", "B")) - /// .so_that(("j0", "name").equals("Blog")) - /// .inner_join(join_data); - /// - /// let expected: ConditionTree = (("test", "User"), "id") - /// .in_selection(sub_select) - /// .into(); - /// - /// assert_eq!(expected, cond); - /// } - /// ``` fn aliased_cond(self, alias: Option) -> ConditionTree { let id = self.field.model().id_column(); @@ -429,35 +276,6 @@ impl AliasedSelect for RelationFilter { impl AliasedCondition for OneRelationIsNullFilter { /// Conversion from a `OneRelationIsNullFilter` to a query condition tree. Aliased when in a nested `SELECT`. - /// - /// ```rust - /// # use sql_connector::*; - /// # use prisma_models::*; - /// # use connector::*; - /// # use prisma_query::ast::*; - /// # use serde_json; - /// # use std::{fs::File, sync::Arc}; - /// # - /// # let template: SchemaTemplate = serde_json::from_reader(File::open("./test_schema.json").unwrap()).unwrap(); - /// let schema = template.build(String::from("test")); - /// let user = schema.find_model("User").unwrap(); - /// - /// // Not inlined in parent... 
- /// - /// let rf = user.fields().find_from_relation_fields("sites").unwrap(); - /// - /// let expected = { - /// let compare = Column::from((("test", "User"), "id")) - /// .not_in_selection(Select::from_table("_UserToSites").column("B")); - /// - /// ConditionTree::single(compare) - /// }; - /// - /// assert_eq!( - /// expected, - /// rf.one_relation_is_null().aliased_cond(None), - /// ); - /// ``` fn aliased_cond(self, alias: Option) -> ConditionTree { let alias = alias.map(|a| a.to_string(None)); diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/lib.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/lib.rs index 6bfc39a94f..97c3a93dcc 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/lib.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/lib.rs @@ -7,17 +7,22 @@ //! data. mod cursor_condition; +mod database; +mod error; mod filter_conversion; mod mutaction; mod ordering; mod query_builder; +mod raw_query; mod row; mod transactional; -pub mod database; +use filter_conversion::*; +use mutaction::*; +use raw_query::*; +use row::*; -pub use filter_conversion::*; -pub use mutaction::*; -pub use query_builder::SelectDefinition; -pub use row::*; +pub use database::*; pub use transactional::*; + +type SqlResult = Result; diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/builder.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/builder.rs index 6601ad4d9d..4339aae246 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/builder.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/builder.rs @@ -1,7 +1,7 @@ use prisma_models::prelude::*; use prisma_query::ast::*; -use connector::{error::ConnectorError, ConnectorResult}; +use crate::{error::SqlError, SqlResult}; pub struct MutationBuilder; @@ -31,15 +31,15 @@ impl MutationBuilder { let fields = fields .iter() - .map(|field| (field.name(), 
args.take_field_value(field.name()).unwrap())); + .map(|field| (field.db_name(), args.take_field_value(field.name()).unwrap())); let base = Insert::single_into(model.table()); let insert = fields .into_iter() - .fold(base, |acc, (name, value)| acc.value(name, value)); + .fold(base, |acc, (name, value)| acc.value(name.into_owned(), value)); - (insert.into(), return_id) + (Insert::from(insert).returning(vec![model_id.as_column()]), return_id) } pub fn create_relation(field: RelationFieldRef, parent_id: &GraphqlId, child_id: &GraphqlId) -> Query { @@ -114,11 +114,11 @@ impl MutationBuilder { Some(result) } - pub fn update_one(model: ModelRef, id: &GraphqlId, args: &PrismaArgs) -> ConnectorResult> { + pub fn update_one(model: ModelRef, id: &GraphqlId, args: &PrismaArgs) -> SqlResult> { Self::update_many(model, &[id; 1], args).map(|updates| updates.into_iter().next()) } - pub fn update_many(model: ModelRef, ids: &[&GraphqlId], args: &PrismaArgs) -> ConnectorResult> { + pub fn update_many(model: ModelRef, ids: &[&GraphqlId], args: &PrismaArgs) -> SqlResult> { if args.args.is_empty() || ids.is_empty() { return Ok(Vec::new()); } @@ -130,7 +130,7 @@ impl MutationBuilder { let field = fields.find_from_scalar(&name).unwrap(); if field.is_required && value.is_null() { - return Err(ConnectorError::FieldCannotBeNull { + return Err(SqlError::FieldCannotBeNull { field: field.name.clone(), }); } @@ -200,11 +200,11 @@ impl MutationBuilder { } pub fn truncate_tables(project: ProjectRef) -> Vec { - let models = project.schema().models(); + let models = project.internal_data_model().models(); let mut deletes = Vec::new(); deletes = project - .schema() + .internal_data_model() .relations() .iter() .map(|r| r.relation_table()) diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/delete_actions.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/delete_actions.rs index d814d99dc1..3e32a90736 100644 --- 
a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/delete_actions.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/delete_actions.rs @@ -1,4 +1,4 @@ -use connector::{error::ConnectorError, ConnectorResult}; +use crate::{error::SqlError, SqlResult}; use prisma_models::prelude::*; use prisma_query::ast::*; @@ -14,11 +14,11 @@ impl DeleteActions { /// connector, giving the connector the possibility to return an optional /// `GraphqlID` from the database, such as trying to read a row from the /// `SELECT`. - pub fn check_relation_violations(model: ModelRef, ids: &[&GraphqlId], mut f: F) -> ConnectorResult<()> + pub fn check_relation_violations(model: ModelRef, ids: &[&GraphqlId], mut f: F) -> SqlResult<()> where - F: FnMut(Select) -> ConnectorResult>, + F: FnMut(Select) -> SqlResult>, { - for rf in model.schema().fields_requiring_model(model) { + for rf in model.internal_data_model().fields_requiring_model(model) { let relation = rf.relation(); let condition = rf @@ -31,7 +31,7 @@ impl DeleteActions { .so_that(condition); if let Some(_) = f(select)? 
{ - return Err(ConnectorError::RelationViolation { + return Err(SqlError::RelationViolation { relation_name: relation.name.clone(), model_a_name: relation.model_a().name.clone(), model_b_name: relation.model_b().name.clone(), diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions.rs index 0b3e3d42ef..7d42ba14ca 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions.rs @@ -11,23 +11,24 @@ pub use nested_disconnect::*; pub use nested_set::*; use crate::query_builder::QueryBuilder; -use connector::{error::*, filter::NodeSelector, ConnectorResult}; +use crate::{error::*, SqlResult}; +use connector::{error::NodeSelectorInfo, filter::NodeSelector}; use prisma_models::*; use prisma_query::ast::*; // TODO: Replace me with FnBox from std when it's stabilized in 1.35. // https://doc.rust-lang.org/std/boxed/trait.FnBox.html pub trait FnBox { - fn call_box(self: Box, exists: bool) -> ConnectorResult<()>; + fn call_box(self: Box, exists: bool) -> SqlResult<()>; } // TODO: Replace me with FnBox from std when it's stabilized in 1.35. 
// https://doc.rust-lang.org/std/boxed/trait.FnBox.html impl FnBox for F where - F: FnOnce(bool) -> ConnectorResult<()>, + F: FnOnce(bool) -> SqlResult<()>, { - fn call_box(self: Box, exists: bool) -> ConnectorResult<()> { + fn call_box(self: Box, exists: bool) -> SqlResult<()> { (*self)(exists) } } @@ -35,7 +36,7 @@ where pub type ResultCheck = Box; pub trait NestedActions { - fn required_check(&self, parent_id: &GraphqlId) -> ConnectorResult>; + fn required_check(&self, parent_id: &GraphqlId) -> SqlResult>; fn parent_removal(&self, parent_id: &GraphqlId) -> Option; fn child_removal(&self, child_id: &GraphqlId) -> Option; @@ -43,23 +44,23 @@ pub trait NestedActions { fn relation_field(&self) -> RelationFieldRef; fn relation(&self) -> RelationRef; - fn relation_violation(&self) -> ConnectorError { + fn relation_violation(&self) -> SqlError { let relation = self.relation(); - ConnectorError::RelationViolation { + SqlError::RelationViolation { relation_name: relation.name.clone(), model_a_name: relation.model_a().name.clone(), model_b_name: relation.model_b().name.clone(), } } - fn nodes_not_connected(&self, parent_id: Option, child_id: Option) -> ConnectorError { + fn nodes_not_connected(&self, parent_id: Option, child_id: Option) -> SqlError { let rf = self.relation_field(); let parent_where = parent_id.map(|parent_id| NodeSelectorInfo::for_id(rf.model(), &parent_id)); let child_where = child_id.map(|child_id| NodeSelectorInfo::for_id(rf.model(), &child_id)); - ConnectorError::NodesNotConnected { + SqlError::NodesNotConnected { relation_name: rf.relation().name.clone(), parent_name: rf.model().name.clone(), parent_where, diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_connect.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_connect.rs index 907f2e3fe8..12db337df8 100644 --- 
a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_connect.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_connect.rs @@ -1,5 +1,6 @@ use super::*; -use connector::{mutaction::NestedConnect, ConnectorResult}; +use crate::SqlResult; +use connector::mutaction::NestedConnect; use prisma_models::*; use prisma_query::ast::*; use std::sync::Arc; @@ -13,7 +14,7 @@ impl NestedActions for NestedConnect { self.relation_field().relation() } - fn required_check(&self, parent_id: &GraphqlId) -> ConnectorResult> { + fn required_check(&self, parent_id: &GraphqlId) -> SqlResult> { let p = Arc::clone(&self.relation_field); let c = p.related_field(); diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_create_node.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_create_node.rs index f8cc4694f2..f9f0409501 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_create_node.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_create_node.rs @@ -1,5 +1,6 @@ use super::*; -use connector::{mutaction::NestedCreateNode, ConnectorResult}; +use crate::SqlResult; +use connector::mutaction::NestedCreateNode; use prisma_models::*; use prisma_query::ast::*; use std::sync::Arc; @@ -13,7 +14,7 @@ impl NestedActions for NestedCreateNode { self.relation_field().relation() } - fn required_check(&self, parent_id: &GraphqlId) -> ConnectorResult> { + fn required_check(&self, parent_id: &GraphqlId) -> SqlResult> { if self.top_is_create { return Ok(None); } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete.rs index 12e9114d0c..49608de76e 100644 --- 
a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete.rs @@ -1,5 +1,6 @@ use super::*; -use connector::{mutaction::NestedCreateNode, ConnectorResult}; +use crate::SqlResult; +use connector::mutaction::NestedCreateNode; use prisma_models::*; use prisma_query::ast::*; use std::sync::Arc; @@ -13,7 +14,7 @@ impl NestedActions for NestedDeleteNode { self.relation_field().relation() } - fn required_check(&self, _: &GraphqlId) -> ConnectorResult> { + fn required_check(&self, _: &GraphqlId) -> SqlResult> { Ok(None) } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete_node.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete_node.rs index d162f3c4be..488e97f96d 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete_node.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_delete_node.rs @@ -1,5 +1,6 @@ use super::*; -use connector::{mutaction::NestedDeleteNode, ConnectorResult}; +use crate::SqlResult; +use connector::mutaction::NestedDeleteNode; use prisma_models::*; use prisma_query::ast::*; @@ -12,7 +13,7 @@ impl NestedActions for NestedDeleteNode { self.relation_field().relation() } - fn required_check(&self, _: &GraphqlId) -> ConnectorResult> { + fn required_check(&self, _: &GraphqlId) -> SqlResult> { Ok(None) } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_disconnect.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_disconnect.rs index e1a15d59bc..f037832942 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_disconnect.rs +++ 
b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_disconnect.rs @@ -1,5 +1,6 @@ use super::*; -use connector::{mutaction::NestedDisconnect, ConnectorResult}; +use crate::SqlResult; +use connector::mutaction::NestedDisconnect; use prisma_models::*; use prisma_query::ast::*; use std::sync::Arc; @@ -13,7 +14,7 @@ impl NestedActions for NestedDisconnect { self.relation_field().relation() } - fn required_check(&self, _: &GraphqlId) -> ConnectorResult> { + fn required_check(&self, _: &GraphqlId) -> SqlResult> { let p = Arc::clone(&self.relation_field); let c = p.related_field(); diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_set.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_set.rs index 3e553140cb..a5e653dbd0 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_set.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/mutaction/nested_actions/nested_set.rs @@ -1,5 +1,6 @@ use super::*; -use connector::{mutaction::NestedSet, ConnectorResult}; +use crate::SqlResult; +use connector::mutaction::NestedSet; use prisma_models::*; use prisma_query::ast::*; @@ -12,7 +13,7 @@ impl NestedActions for NestedSet { self.relation_field().relation() } - fn required_check(&self, _: &GraphqlId) -> ConnectorResult> { + fn required_check(&self, _: &GraphqlId) -> SqlResult> { Ok(None) } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/mod.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/mod.rs index 45a402d7ca..3ead47a7b5 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/mod.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/mod.rs @@ -107,13 +107,12 @@ impl QueryBuilder { } pub fn get_scalar_list_values_by_node_ids(list_field: ScalarFieldRef, 
node_ids: Vec) -> Select { - let model = list_field.model(); - let table_name = format!("{}_{}", model.db_name(), list_field.name); + let table = list_field.scalar_list_table().table(); - // I vant to suk your blaad... - Vlad the Impaler + // I vant to saak your blaad... - Vlad the Impaler let vhere = "nodeId".in_selection(node_ids); - let query = Select::from_table(table_name) + let query = Select::from_table(table) .column("nodeId") .column("value") .so_that(vhere); diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/related_nodes.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/related_nodes.rs index 7264ba460b..e1f4ca8c90 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/related_nodes.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/query_builder/related_nodes.rs @@ -77,7 +77,15 @@ impl<'a> RelatedNodesQueryBuilder<'a> { .and(cursor_condition); let base_with_conditions = match self.order_by { - Some(order_by) => base_query.column(order_by.field.as_column()).so_that(conditions), + Some(order_by) => { + let column = order_by.field.as_column(); + + if self.selected_fields.columns().contains(&column) { + base_query.so_that(conditions) + } else { + base_query.column(order_by.field.as_column()).so_that(conditions) + } + } None => base_query.so_that(conditions), }; diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/raw_query.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/raw_query.rs new file mode 100644 index 0000000000..460b0c5343 --- /dev/null +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/raw_query.rs @@ -0,0 +1,20 @@ +pub struct RawQuery(pub String); + +impl RawQuery { + pub fn is_select(&self) -> bool { + let splitted: Vec<&str> = self.0.split(" ").collect(); + splitted + .first() + .map(|t| t.to_uppercase().trim() == "SELECT") + .unwrap_or(false) + } +} + +impl From for RawQuery +where + T: 
Into, +{ + fn from(s: T) -> Self { + RawQuery(s.into()) + } +} diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/row.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/row.rs index 7fa47506e6..a8e64e9bdb 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/row.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/row.rs @@ -1,88 +1,58 @@ -use chrono::{DateTime, Utc}; -use connector::ConnectorResult; -use prisma_models::{Node, PrismaValue, TypeIdentifier}; -use serde_json; +use crate::SqlResult; +use prisma_models::{GraphqlId, Node, PrismaValue, TypeIdentifier}; +use prisma_query::ast::DatabaseValue; use uuid::Uuid; -#[cfg(feature = "sqlite")] -use rusqlite::{types::Type as SqliteType, Error as SqliteError, Row as SqliteRow}; - /// An allocated representation of a `Row` returned from the database. #[derive(Debug, Clone, Default)] -pub struct PrismaRow { +pub struct SqlRow { pub values: Vec, } -impl From for Node { - fn from(row: PrismaRow) -> Node { +impl From for Node { + fn from(row: SqlRow) -> Node { Node::new(row.values) } } -pub trait ToPrismaRow { - /// Conversion from a database specific row to an allocated `PrismaRow`. To +pub trait ToSqlRow { + /// Conversion from a database specific row to an allocated `SqlRow`. To /// help deciding the right types, the provided `TypeIdentifier`s should map /// to the returned columns in the right order. 
- fn to_prisma_row<'b, T>(&'b self, idents: T) -> ConnectorResult + fn to_prisma_row<'b, T>(&'b self, idents: T) -> SqlResult where T: IntoIterator; } -#[cfg(feature = "sqlite")] -impl<'a, 'stmt> ToPrismaRow for SqliteRow<'a, 'stmt> { - fn to_prisma_row<'b, T>(&'b self, idents: T) -> ConnectorResult - where - T: IntoIterator, - { - fn convert(row: &SqliteRow, i: usize, typid: &TypeIdentifier) -> ConnectorResult { - let result = match typid { - TypeIdentifier::String => row.get_checked(i).map(|val| PrismaValue::String(val)), - TypeIdentifier::GraphQLID => row.get_checked(i).map(|val| PrismaValue::GraphqlId(val)), - TypeIdentifier::Float => row.get_checked(i).map(|val| PrismaValue::Float(val)), - TypeIdentifier::Relation => row.get_checked(i).map(|val| PrismaValue::GraphqlId(val)), - TypeIdentifier::Int => row.get_checked(i).map(|val| PrismaValue::Int(val)), - TypeIdentifier::Boolean => row.get_checked(i).map(|val| PrismaValue::Boolean(val)), - TypeIdentifier::Enum => row.get_checked(i).map(|val| PrismaValue::Enum(val)), - TypeIdentifier::Json => row.get_checked(i).and_then(|val| { - let val: String = val; - serde_json::from_str(&val).map(|r| PrismaValue::Json(r)).map_err(|err| { - SqliteError::FromSqlConversionFailure(i as usize, SqliteType::Text, Box::new(err)) - }) - }), - TypeIdentifier::UUID => { - let result: Result = row.get_checked(i); - - if let Ok(val) = result { - let uuid = Uuid::parse_str(val.as_ref())?; - - Ok(PrismaValue::Uuid(uuid)) - } else { - result.map(|s| PrismaValue::String(s)) - } - } - TypeIdentifier::DateTime => row.get_checked(i).map(|ts: i64| { - let nsecs = ((ts % 1000) * 1_000_000) as u32; - let secs = (ts / 1000) as i64; - let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); - let datetime: DateTime = DateTime::from_utc(naive, Utc); - - PrismaValue::DateTime(datetime) - }), - }; +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub enum SqlId { + String(String), + Int(usize), + UUID(Uuid), +} - match result { - Ok(pv) => Ok(pv), 
- Err(rusqlite::Error::InvalidColumnType(_, rusqlite::types::Type::Null)) => Ok(PrismaValue::Null), - Err(e) => Err(e.into()), - } +impl From for GraphqlId { + fn from(sql_id: SqlId) -> Self { + match sql_id { + SqlId::String(s) => GraphqlId::String(s), + SqlId::Int(i) => GraphqlId::Int(i), + SqlId::UUID(u) => GraphqlId::UUID(u), } + } +} - let mut row = PrismaRow::default(); - - for (i, typid) in idents.into_iter().enumerate() { - row.values.push(convert(self, i, typid)?); +impl From for DatabaseValue { + fn from(id: SqlId) -> DatabaseValue { + match id { + SqlId::String(s) => s.into(), + SqlId::Int(i) => (i as i64).into(), + SqlId::UUID(u) => u.into(), } + } +} - Ok(row) +impl From<&SqlId> for DatabaseValue { + fn from(id: &SqlId) -> DatabaseValue { + id.clone().into() } } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/data_resolver.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/data_resolver.rs index 8b4093b4af..f1fd5acfd6 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/data_resolver.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/data_resolver.rs @@ -1,4 +1,4 @@ -use crate::{database::SqlDatabase, query_builder::QueryBuilder, Transactional}; +use crate::{database::SqlDatabase, error::SqlError, query_builder::QueryBuilder, Transactional}; use connector::{error::ConnectorError, filter::NodeSelector, *}; use itertools::Itertools; use prisma_models::*; @@ -18,7 +18,7 @@ where node_selector: &NodeSelector, selected_fields: &SelectedFields, ) -> ConnectorResult> { - let db_name = &node_selector.field.model().schema().db_name; + let db_name = &node_selector.field.model().internal_data_model().db_name; let query = QueryBuilder::get_nodes(node_selector.field.model(), selected_fields, node_selector); let field_names = selected_fields.names(); let idents = selected_fields.type_identifiers(); @@ -27,7 +27,7 @@ where .executor 
.with_transaction(db_name, |conn| match conn.find(query, idents.as_slice()) { Ok(result) => Ok(Some(result)), - Err(_e @ ConnectorError::NodeNotFoundForWhere(_)) => Ok(None), + Err(_e @ SqlError::NodeNotFoundForWhere(_)) => Ok(None), Err(e) => Err(e), })? .map(Node::from) @@ -42,7 +42,7 @@ where query_arguments: QueryArguments, selected_fields: &SelectedFields, ) -> ConnectorResult { - let db_name = &model.schema().db_name; + let db_name = &model.internal_data_model().db_name; let field_names = selected_fields.names(); let idents = selected_fields.type_identifiers(); let query = QueryBuilder::get_nodes(model, selected_fields, query_arguments); @@ -64,7 +64,7 @@ where query_arguments: QueryArguments, selected_fields: &SelectedFields, ) -> ConnectorResult { - let db_name = &from_field.model().schema().db_name; + let db_name = &from_field.model().internal_data_model().db_name; let idents = selected_fields.type_identifiers(); let field_names = selected_fields.names(); let query = QueryBuilder::get_related_nodes(from_field, from_node_ids, query_arguments, selected_fields); @@ -94,19 +94,26 @@ where } fn count_by_model(&self, model: ModelRef, query_arguments: QueryArguments) -> ConnectorResult { - let db_name = &model.schema().db_name; + let db_name = &model.internal_data_model().db_name; let query = QueryBuilder::count_by_model(model, query_arguments); - self.executor + let result = self + .executor .with_transaction(db_name, |conn| conn.find_int(query)) - .map(|count| count as usize) + .map(|count| count as usize)?; + + Ok(result) } fn count_by_table(&self, database: &str, table: &str) -> ConnectorResult { let query = QueryBuilder::count_by_table(database, table); - self.executor + + let result = self + .executor .with_transaction(database, |conn| conn.find_int(query)) - .map(|count| count as usize) + .map(|count| count as usize)?; + + Ok(result) } fn get_scalar_list_values_by_node_ids( @@ -114,7 +121,7 @@ where list_field: ScalarFieldRef, node_ids: Vec, ) -> 
ConnectorResult> { - let db_name = &list_field.model().schema().db_name; + let db_name = &list_field.model().internal_data_model().db_name; let type_identifier = list_field.type_identifier; let query = QueryBuilder::get_scalar_list_values_by_node_ids(list_field, node_ids); @@ -125,8 +132,8 @@ where .map(|row| { let mut iter = row.values.into_iter(); - let node_id = iter.next().ok_or(ConnectorError::ColumnDoesNotExist)?; - let value = iter.next().ok_or(ConnectorError::ColumnDoesNotExist)?; + let node_id = iter.next().ok_or(SqlError::ColumnDoesNotExist)?; + let value = iter.next().ok_or(SqlError::ColumnDoesNotExist)?; Ok(ScalarListElement { node_id: GraphqlId::try_from(node_id)?, diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mod.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mod.rs index be04e4e962..be04e867c6 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mod.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mod.rs @@ -4,14 +4,14 @@ mod mutaction_executor; pub use data_resolver::*; pub use mutaction_executor::*; -use crate::{query_builder::QueryBuilder, AliasedCondition, PrismaRow}; +use crate::{error::*, query_builder::QueryBuilder, AliasedCondition, RawQuery, SqlResult, SqlRow}; use connector::{ - error::*, + error::NodeSelectorInfo, filter::{Filter, NodeSelector}, - ConnectorResult, }; use prisma_models::*; use prisma_query::ast::*; +use serde_json::Value; use std::{convert::TryFrom, sync::Arc}; /// A `Transactional` presents a database able to spawn transactions, execute @@ -21,14 +21,9 @@ pub trait Transactional { /// Wrap a closure into a transaction. All actions done through the /// `Transaction` are commited automatically, or rolled back in case of any /// error. 
- fn with_transaction(&self, db: &str, f: F) -> ConnectorResult + fn with_transaction(&self, db: &str, f: F) -> SqlResult where - F: FnOnce(&mut Transaction) -> ConnectorResult; -} - -pub struct WriteItems { - pub count: usize, - pub last_id: usize, + F: FnOnce(&mut Transaction) -> SqlResult; } /// Abstraction of a database transaction. Start, commit and rollback should be @@ -36,32 +31,39 @@ pub struct WriteItems { /// different databases. pub trait Transaction { /// Burn them. BURN THEM ALL! - fn truncate(&mut self, project: ProjectRef) -> ConnectorResult<()>; + fn truncate(&mut self, project: ProjectRef) -> SqlResult<()>; /// Write to the database, returning the change count and last id inserted. - fn write(&mut self, q: Query) -> ConnectorResult; + fn write(&mut self, q: Query) -> SqlResult>; /// Select multiple rows from the database. - fn filter(&mut self, q: Select, idents: &[TypeIdentifier]) -> ConnectorResult>; + fn filter(&mut self, q: Select, idents: &[TypeIdentifier]) -> SqlResult>; + + /// Executes a raw query string with no parameterization or safety, + /// resulting a Json value. Do not use internally anywhere in the code. + /// Provides user an escape hatch for using the database directly. + fn raw(&mut self, q: RawQuery) -> SqlResult; /// Insert to the database. On success returns the last insert row id. - fn insert(&mut self, q: Insert) -> ConnectorResult { - Ok(self.write(q.into())?.last_id) + fn insert(&mut self, q: Insert) -> SqlResult> { + Ok(self.write(q.into())?) } /// Update the database. On success returns the number of rows updated. - fn update(&mut self, q: Update) -> ConnectorResult { - Ok(self.write(q.into())?.count) + fn update(&mut self, q: Update) -> SqlResult<()> { + self.write(q.into())?; + Ok(()) } /// Delete from the database. On success returns the number of rows deleted. 
- fn delete(&mut self, q: Delete) -> ConnectorResult { - Ok(self.write(q.into())?.count) + fn delete(&mut self, q: Delete) -> SqlResult<()> { + self.write(q.into())?; + Ok(()) } /// Find one full record selecting all scalar fields. - fn find_record(&mut self, node_selector: &NodeSelector) -> ConnectorResult { - use ConnectorError::*; + fn find_record(&mut self, node_selector: &NodeSelector) -> SqlResult { + use SqlError::*; let model = node_selector.field.model(); let selected_fields = SelectedFields::from(Arc::clone(&model)); @@ -79,15 +81,15 @@ pub trait Transaction { } /// Select one row from the database. - fn find(&mut self, q: Select, idents: &[TypeIdentifier]) -> ConnectorResult { + fn find(&mut self, q: Select, idents: &[TypeIdentifier]) -> SqlResult { self.filter(q.limit(1), idents)? .into_iter() .next() - .ok_or(ConnectorError::NodeDoesNotExist) + .ok_or(SqlError::NodeDoesNotExist) } /// Read the first column from the first row as an integer. - fn find_int(&mut self, q: Select) -> ConnectorResult { + fn find_int(&mut self, q: Select) -> SqlResult { // UNWRAP: A dataset will always have at least one column, even if it contains no data. let id = self.find(q, &[TypeIdentifier::Int])?.values.into_iter().next().unwrap(); @@ -95,7 +97,7 @@ pub trait Transaction { } /// Read the first column from the first row as an `GraphqlId`. - fn find_id(&mut self, node_selector: &NodeSelector) -> ConnectorResult { + fn find_id(&mut self, node_selector: &NodeSelector) -> SqlResult { let model = node_selector.field.model(); let filter = Filter::from(node_selector.clone()); @@ -103,13 +105,13 @@ pub trait Transaction { .filter_ids(model, filter)? 
.into_iter() .next() - .ok_or_else(|| ConnectorError::NodeNotFoundForWhere(NodeSelectorInfo::from(node_selector)))?; + .ok_or_else(|| SqlError::NodeNotFoundForWhere(NodeSelectorInfo::from(node_selector)))?; Ok(id) } /// Read the all columns as an `GraphqlId` - fn filter_ids(&mut self, model: ModelRef, filter: Filter) -> ConnectorResult> { + fn filter_ids(&mut self, model: ModelRef, filter: Filter) -> SqlResult> { let select = Select::from_table(model.table()) .column(model.fields().id().as_column()) .so_that(filter.aliased_cond(None)); @@ -117,7 +119,7 @@ pub trait Transaction { self.select_ids(select) } - fn select_ids(&mut self, select: Select) -> ConnectorResult> { + fn select_ids(&mut self, select: Select) -> SqlResult> { let mut rows = self.filter(select, &[TypeIdentifier::GraphQLID])?; let mut result = Vec::new(); @@ -137,23 +139,20 @@ pub trait Transaction { parent_field: RelationFieldRef, parent_id: &GraphqlId, selector: &Option, - ) -> ConnectorResult { + ) -> SqlResult { let ids = self.filter_ids_by_parents( Arc::clone(&parent_field), vec![parent_id], selector.clone().map(Filter::from), )?; - let id = ids - .into_iter() - .next() - .ok_or_else(|| ConnectorError::NodesNotConnected { - relation_name: parent_field.relation().name.clone(), - parent_name: parent_field.model().name.clone(), - parent_where: None, - child_name: parent_field.related_model().name.clone(), - child_where: selector.as_ref().map(NodeSelectorInfo::from), - })?; + let id = ids.into_iter().next().ok_or_else(|| SqlError::NodesNotConnected { + relation_name: parent_field.relation().name.clone(), + parent_name: parent_field.model().name.clone(), + parent_where: None, + child_name: parent_field.related_model().name.clone(), + child_where: selector.as_ref().map(NodeSelectorInfo::from), + })?; Ok(id) } @@ -165,7 +164,7 @@ pub trait Transaction { parent_field: RelationFieldRef, parent_ids: Vec<&GraphqlId>, selector: Option, - ) -> ConnectorResult> { + ) -> SqlResult> { let related_model = 
parent_field.related_model(); let relation = parent_field.relation(); let child_id_field = relation.column_for_relation_side(parent_field.relation_side.opposite()); diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/create.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/create.rs index 84d13dc7d7..f5e8c627b3 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/create.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/create.rs @@ -1,8 +1,7 @@ use crate::{ mutaction::{MutationBuilder, NestedActions}, - Transaction, + SqlResult, Transaction, }; -use connector::ConnectorResult; use prisma_models::{GraphqlId, ModelRef, PrismaArgs, PrismaListValue, RelationFieldRef}; use std::sync::Arc; @@ -12,7 +11,7 @@ pub fn execute( model: ModelRef, non_list_args: &PrismaArgs, list_args: &[(S, PrismaListValue)], -) -> ConnectorResult +) -> SqlResult where S: AsRef, { @@ -21,7 +20,7 @@ where let id = match returned_id { Some(id) => id, - None => GraphqlId::Int(last_id), + None => last_id.unwrap(), }; for (field_name, list_value) in list_args { @@ -45,7 +44,7 @@ pub fn execute_nested( relation_field: RelationFieldRef, non_list_args: &PrismaArgs, list_args: &[(S, PrismaListValue)], -) -> ConnectorResult +) -> SqlResult where S: AsRef, { diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete.rs index e6d7833b04..305f5890a8 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete.rs @@ -1,12 +1,9 @@ use crate::{ + error::SqlError, mutaction::{DeleteActions, 
MutationBuilder, NestedActions}, - Transaction, -}; -use connector::{ - error::{ConnectorError, NodeSelectorInfo}, - filter::NodeSelector, - ConnectorResult, + SqlResult, Transaction, }; +use connector::{error::NodeSelectorInfo, filter::NodeSelector}; use prisma_models::{GraphqlId, RelationFieldRef, SingleNode}; use std::sync::Arc; @@ -14,17 +11,17 @@ use std::sync::Arc; /// non-existing record will cause an error. /// /// Will return the deleted record if the delete was successful. -pub fn execute(conn: &mut Transaction, node_selector: &NodeSelector) -> ConnectorResult { +pub fn execute(conn: &mut Transaction, node_selector: &NodeSelector) -> SqlResult { let model = node_selector.field.model(); let record = conn.find_record(node_selector)?; let id = record.get_id_value(Arc::clone(&model)).unwrap(); - DeleteActions::check_relation_violations(Arc::clone(&model), &[id], |select| { + DeleteActions::check_relation_violations(Arc::clone(&model), &[&id], |select| { let ids = conn.select_ids(select)?; Ok(ids.into_iter().next()) })?; - for delete in MutationBuilder::delete_many(model, &[id]) { + for delete in MutationBuilder::delete_many(model, &[&id]) { conn.delete(delete)?; } @@ -46,7 +43,7 @@ pub fn execute_nested( actions: &NestedActions, node_selector: &Option, relation_field: RelationFieldRef, -) -> ConnectorResult<()> { +) -> SqlResult<()> { if let Some(ref node_selector) = node_selector { conn.find_id(node_selector)?; }; @@ -54,7 +51,7 @@ pub fn execute_nested( let child_id = conn .find_id_by_parent(Arc::clone(&relation_field), parent_id, node_selector) .map_err(|e| match e { - ConnectorError::NodesNotConnected { + SqlError::NodesNotConnected { relation_name, parent_name, parent_where: _, @@ -63,7 +60,7 @@ pub fn execute_nested( } => { let model = Arc::clone(&relation_field.model()); - ConnectorError::NodesNotConnected { + SqlError::NodesNotConnected { relation_name: relation_name, parent_name: parent_name, parent_where: Some(NodeSelectorInfo::for_id(model, 
parent_id)), diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete_many.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete_many.rs index c4a61986b3..de78c058ba 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete_many.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete_many.rs @@ -1,8 +1,8 @@ use crate::{ mutaction::{DeleteActions, MutationBuilder}, - Transaction, + SqlResult, Transaction, }; -use connector::{filter::Filter, ConnectorResult}; +use connector::filter::Filter; use prisma_models::{GraphqlId, ModelRef, RelationFieldRef}; use std::sync::Arc; @@ -10,11 +10,15 @@ use std::sync::Arc; /// any relations will cause an error. /// /// Will return the number records deleted. -pub fn execute(conn: &mut Transaction, model: ModelRef, filter: &Filter) -> ConnectorResult { +pub fn execute(conn: &mut Transaction, model: ModelRef, filter: &Filter) -> SqlResult { let ids = conn.filter_ids(Arc::clone(&model), filter.clone())?; let ids: Vec<&GraphqlId> = ids.iter().map(|id| &*id).collect(); let count = ids.len(); + if count == 0 { + return Ok(count); + } + DeleteActions::check_relation_violations(Arc::clone(&model), ids.as_slice(), |select| { let ids = conn.select_ids(select)?; Ok(ids.into_iter().next()) @@ -35,10 +39,14 @@ pub fn execute_nested( parent_id: &GraphqlId, filter: &Option, relation_field: RelationFieldRef, -) -> ConnectorResult { +) -> SqlResult { let ids = conn.filter_ids_by_parents(Arc::clone(&relation_field), vec![parent_id], filter.clone())?; let count = ids.len(); + if count == 0 { + return Ok(count); + } + let ids: Vec<&GraphqlId> = ids.iter().map(|id| &*id).collect(); let model = relation_field.model(); diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/mod.rs 
b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/mod.rs index b56d98df8f..f97aff4665 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/mod.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/mod.rs @@ -6,8 +6,8 @@ mod relation; mod update; mod update_many; -use crate::{database::SqlDatabase, Transaction, Transactional}; -use connector::{error::ConnectorError, mutaction::*, ConnectorResult, DatabaseMutactionExecutor}; +use crate::{database::SqlDatabase, error::SqlError, RawQuery, SqlResult, Transaction, Transactional}; +use connector::{mutaction::*, ConnectorResult, DatabaseMutactionExecutor}; use serde_json::Value; use std::sync::Arc; @@ -20,8 +20,8 @@ where db_name: String, mutaction: TopLevelDatabaseMutaction, ) -> ConnectorResult { - self.executor.with_transaction(&db_name, |conn: &mut Transaction| { - fn create(conn: &mut Transaction, cn: &CreateNode) -> ConnectorResult { + let result = self.executor.with_transaction(&db_name, |conn: &mut Transaction| { + fn create(conn: &mut Transaction, cn: &CreateNode) -> SqlResult { let parent_id = create::execute(conn, Arc::clone(&cn.model), &cn.non_list_args, &cn.list_args)?; nested::execute(conn, &cn.nested_mutactions, &parent_id)?; @@ -31,7 +31,7 @@ where }) } - fn update(conn: &mut Transaction, un: &UpdateNode) -> ConnectorResult { + fn update(conn: &mut Transaction, un: &UpdateNode) -> SqlResult { let parent_id = update::execute(conn, &un.where_, &un.non_list_args, &un.list_args)?; nested::execute(conn, &un.nested_mutactions, &parent_id)?; @@ -42,12 +42,12 @@ where } match mutaction { - TopLevelDatabaseMutaction::CreateNode(ref cn) => create(conn, cn), - TopLevelDatabaseMutaction::UpdateNode(ref un) => update(conn, un), + TopLevelDatabaseMutaction::CreateNode(ref cn) => Ok(create(conn, cn)?), + TopLevelDatabaseMutaction::UpdateNode(ref un) => Ok(update(conn, un)?), 
TopLevelDatabaseMutaction::UpsertNode(ref ups) => match conn.find_id(&ups.where_) { - Err(_e @ ConnectorError::NodeNotFoundForWhere { .. }) => create(conn, &ups.create), - Err(e) => return Err(e), - Ok(_) => update(conn, &ups.update), + Err(_e @ SqlError::NodeNotFoundForWhere { .. }) => Ok(create(conn, &ups.create)?), + Err(e) => return Err(e.into()), + Ok(_) => Ok(update(conn, &ups.update)?), }, TopLevelDatabaseMutaction::UpdateNodes(ref uns) => { let count = update_many::execute( @@ -88,10 +88,16 @@ where }) } } - }) + })?; + + Ok(result) } - fn execute_raw(&self, _query: String) -> ConnectorResult { - Ok(Value::String("hello world!".to_string())) + fn execute_raw(&self, db_name: String, query: String) -> ConnectorResult { + let result = self + .executor + .with_transaction(&db_name, |conn: &mut Transaction| conn.raw(RawQuery::from(query)))?; + + Ok(result) } } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/nested.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/nested.rs index c581f79141..b8ea01c364 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/nested.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/nested.rs @@ -1,11 +1,11 @@ use super::{create, delete, delete_many, relation, update, update_many}; -use crate::Transaction; -use connector::{error::ConnectorError, mutaction::*, ConnectorResult}; +use crate::{error::SqlError, SqlResult, Transaction}; +use connector::mutaction::*; use prisma_models::GraphqlId; use std::sync::Arc; -pub fn execute(conn: &mut Transaction, mutactions: &NestedMutactions, parent_id: &GraphqlId) -> ConnectorResult<()> { - fn create(conn: &mut Transaction, parent_id: &GraphqlId, cn: &NestedCreateNode) -> ConnectorResult<()> { +pub fn execute(conn: &mut Transaction, mutactions: &NestedMutactions, parent_id: &GraphqlId) -> 
SqlResult<()> { + fn create(conn: &mut Transaction, parent_id: &GraphqlId, cn: &NestedCreateNode) -> SqlResult<()> { let parent_id = create::execute_nested( conn, parent_id, @@ -20,7 +20,7 @@ pub fn execute(conn: &mut Transaction, mutactions: &NestedMutactions, parent_id: Ok(()) } - fn update(conn: &mut Transaction, parent_id: &GraphqlId, un: &NestedUpdateNode) -> ConnectorResult<()> { + fn update(conn: &mut Transaction, parent_id: &GraphqlId, un: &NestedUpdateNode) -> SqlResult<()> { let parent_id = update::execute_nested( conn, parent_id, @@ -48,7 +48,7 @@ pub fn execute(conn: &mut Transaction, mutactions: &NestedMutactions, parent_id: match id_opt { Ok(_) => update(conn, parent_id, &upsert_node.update)?, - Err(_e @ ConnectorError::NodesNotConnected { .. }) => create(conn, parent_id, &upsert_node.create)?, + Err(_e @ SqlError::NodesNotConnected { .. }) => create(conn, parent_id, &upsert_node.create)?, Err(e) => return Err(e), } } diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/relation.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/relation.rs index 25af246ba3..83579b610f 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/relation.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/relation.rs @@ -1,8 +1,8 @@ use crate::{ mutaction::{MutationBuilder, NestedActions}, - Transaction, + SqlResult, Transaction, }; -use connector::{filter::NodeSelector, ConnectorResult}; +use connector::filter::NodeSelector; use prisma_models::{GraphqlId, RelationFieldRef}; use std::sync::Arc; @@ -31,7 +31,7 @@ pub fn connect( actions: &NestedActions, node_selector: &NodeSelector, relation_field: RelationFieldRef, -) -> ConnectorResult<()> { +) -> SqlResult<()> { if let Some((select, check)) = actions.required_check(parent_id)? 
{ let ids = conn.select_ids(select)?; check.call_box(ids.into_iter().next().is_some())? @@ -69,7 +69,7 @@ pub fn disconnect( parent_id: &GraphqlId, actions: &NestedActions, node_selector: &Option, -) -> ConnectorResult<()> { +) -> SqlResult<()> { if let Some((select, check)) = actions.required_check(parent_id)? { let ids = conn.select_ids(select)?; check.call_box(ids.into_iter().next().is_some())? @@ -106,7 +106,7 @@ pub fn set( actions: &NestedActions, node_selectors: &Vec, relation_field: RelationFieldRef, -) -> ConnectorResult<()> { +) -> SqlResult<()> { if let Some((select, check)) = actions.required_check(parent_id)? { let ids = conn.select_ids(select)?; check.call_box(ids.into_iter().next().is_some())? diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update.rs index ca6e8a042b..45aa59f92a 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update.rs @@ -1,5 +1,5 @@ -use crate::{mutaction::MutationBuilder, Transaction}; -use connector::{filter::NodeSelector, ConnectorResult}; +use crate::{mutaction::MutationBuilder, SqlResult, Transaction}; +use connector::filter::NodeSelector; use prisma_models::{GraphqlId, ModelRef, PrismaArgs, PrismaListValue, RelationFieldRef}; use std::sync::Arc; @@ -9,7 +9,7 @@ pub fn execute( node_selector: &NodeSelector, non_list_args: &PrismaArgs, list_args: &[(S, PrismaListValue)], -) -> ConnectorResult +) -> SqlResult where S: AsRef, { @@ -34,7 +34,7 @@ pub fn execute_nested( relation_field: RelationFieldRef, non_list_args: &PrismaArgs, list_args: &[(S, PrismaListValue)], -) -> ConnectorResult +) -> SqlResult where S: AsRef, { @@ -54,7 +54,7 @@ pub fn update_list_args( ids: &[GraphqlId], model: ModelRef, list_args: 
&[(S, PrismaListValue)], -) -> ConnectorResult<()> +) -> SqlResult<()> where S: AsRef, { diff --git a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update_many.rs b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update_many.rs index 4334cea664..68b30b532f 100644 --- a/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update_many.rs +++ b/server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update_many.rs @@ -1,6 +1,6 @@ use super::update; -use crate::{mutaction::MutationBuilder, Transaction}; -use connector::{filter::Filter, ConnectorResult}; +use crate::{mutaction::MutationBuilder, SqlResult, Transaction}; +use connector::filter::Filter; use prisma_models::{GraphqlId, ModelRef, PrismaArgs, PrismaListValue, RelationFieldRef}; use std::sync::Arc; @@ -14,13 +14,17 @@ pub fn execute( filter: &Filter, non_list_args: &PrismaArgs, list_args: &[(S, PrismaListValue)], -) -> ConnectorResult +) -> SqlResult where S: AsRef, { let ids = conn.filter_ids(Arc::clone(&model), filter.clone())?; let count = ids.len(); + if count == 0 { + return Ok(count); + } + let updates = { let ids: Vec<&GraphqlId> = ids.iter().map(|id| &*id).collect(); MutationBuilder::update_many(Arc::clone(&model), ids.as_slice(), non_list_args)? @@ -44,13 +48,17 @@ pub fn execute_nested( relation_field: RelationFieldRef, non_list_args: &PrismaArgs, list_args: &[(S, PrismaListValue)], -) -> ConnectorResult +) -> SqlResult where S: AsRef, { let ids = conn.filter_ids_by_parents(Arc::clone(&relation_field), vec![parent_id], filter.clone())?; let count = ids.len(); + if count == 0 { + return Ok(count); + } + let updates = { let ids: Vec<&GraphqlId> = ids.iter().map(|id| &*id).collect(); MutationBuilder::update_many(relation_field.related_model(), ids.as_slice(), non_list_args)? 
diff --git a/server/prisma-rs/query-engine/core/Cargo.toml b/server/prisma-rs/query-engine/core/Cargo.toml index 81d600f264..6cbd05e21c 100644 --- a/server/prisma-rs/query-engine/core/Cargo.toml +++ b/server/prisma-rs/query-engine/core/Cargo.toml @@ -7,6 +7,7 @@ edition = "2018" [dependencies] graphql-parser = "0.2.2" prisma-models = { path = "../../prisma-models" } +prisma-inflector = { path = "../../libs/prisma-inflector" } connector = { path = "../connectors/connector" } failure = "0.1" failure_derive = "0.1" @@ -15,6 +16,9 @@ uuid = "0.7" indexmap = "1.0" itertools = "0.8" serde_json = "1.0" +chrono = "0.4" +env_logger = "0.6" +log = "0.4" [dependencies.rust-inflector] version = "0.11" diff --git a/server/prisma-rs/query-engine/core/src/builders/filters.rs b/server/prisma-rs/query-engine/core/src/builders/filters.rs index 492debe8f5..1d44863c34 100644 --- a/server/prisma-rs/query-engine/core/src/builders/filters.rs +++ b/server/prisma-rs/query-engine/core/src/builders/filters.rs @@ -4,7 +4,7 @@ use graphql_parser::query::Value; use prisma_models::{Field, ModelRef, PrismaListValue, PrismaValue}; use std::{collections::BTreeMap, convert::TryFrom, sync::Arc}; -#[derive(PartialEq)] +#[derive(Debug, PartialEq)] enum FilterOp { In, NotIn, @@ -146,16 +146,28 @@ pub fn extract_filter(map: &BTreeMap, model: ModelRef) -> CoreRes } Field::Relation(r) => { let value = match v { - Value::Object(o) => o, - _ => panic!("Expected object value"), + Value::Object(o) => Some(o), + _ => None, // This handles `Null` values which might be valid! 
}; - Ok(match op { - FilterOp::Some => r.at_least_one_related(extract_filter(value, r.related_model())?), - FilterOp::None => r.no_related(extract_filter(value, r.related_model())?), - FilterOp::Every => r.every_related(extract_filter(value, r.related_model())?), - FilterOp::Field => r.to_one_related(extract_filter(value, r.related_model())?), - _ => unreachable!(), + Ok(match (op, value) { + (FilterOp::Some, Some(value)) => { + r.at_least_one_related(extract_filter(value, r.related_model())?) + } + (FilterOp::None, Some(value)) => { + r.no_related(extract_filter(value, r.related_model())?) + } + (FilterOp::Every, Some(value)) => { + r.every_related(extract_filter(value, r.related_model())?) + } + (FilterOp::Field, Some(value)) => { + r.to_one_related(extract_filter(value, r.related_model())?) + } + (FilterOp::Field, None) => r.one_relation_is_null(), + (op, val) => Err(CoreError::QueryValidationError(format!( + "Invalid filter: Operation {:?} with {:?}", + op, val + )))?, }) } } diff --git a/server/prisma-rs/query-engine/core/src/builders/inflector.rs b/server/prisma-rs/query-engine/core/src/builders/inflector.rs index d6cafb71d7..dcbbbe59c9 100644 --- a/server/prisma-rs/query-engine/core/src/builders/inflector.rs +++ b/server/prisma-rs/query-engine/core/src/builders/inflector.rs @@ -2,12 +2,16 @@ use lazy_static::lazy_static; use rust_inflector::Inflector as RustInflector; use std::collections::HashMap; -/// This is a remnant from the Scala inflector +// This is a remnant from the Scala inflector lazy_static! 
{ pub static ref SINGULARIZE_EXCEPTIONS: HashMap<&'static str, &'static str> = - vec![("todoes", "todo"), ("children", "child")].into_iter().collect(); + vec![("todoes", "todo"), ("children", "child"), ("campuses", "campus")] + .into_iter() + .collect(); pub static ref PLURALIZE_EXCEPTIONS: HashMap<&'static str, &'static str> = - vec![("todo", "todoes"), ("child", "children")].into_iter().collect(); + vec![("todo", "todoes"), ("child", "children"), ("campus", "campuses")] + .into_iter() + .collect(); } pub struct Inflector; diff --git a/server/prisma-rs/query-engine/core/src/builders/many.rs b/server/prisma-rs/query-engine/core/src/builders/many.rs index 8add233b2d..2c99256983 100644 --- a/server/prisma-rs/query-engine/core/src/builders/many.rs +++ b/server/prisma-rs/query-engine/core/src/builders/many.rs @@ -34,7 +34,7 @@ impl<'f> BuilderExt for ManyBuilder<'f> { } .expect("`ManyQuery` builder not properly initialised!"); - let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.schema())?; + let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.internal_data_model())?; let nested = Self::build_nested_queries(nested_builders)?; let selected_fields = Self::collect_selected_fields(Arc::clone(&model), field, None)?; diff --git a/server/prisma-rs/query-engine/core/src/builders/many_rel.rs b/server/prisma-rs/query-engine/core/src/builders/many_rel.rs index b77f65b336..e4b07ac5b2 100644 --- a/server/prisma-rs/query-engine/core/src/builders/many_rel.rs +++ b/server/prisma-rs/query-engine/core/src/builders/many_rel.rs @@ -36,7 +36,7 @@ impl<'f> BuilderExt for ManyRelationBuilder<'f> { } .expect("`ManyRelatedRecordsQuery` builder not properly initialized!"); - let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.schema())?; + let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.internal_data_model())?; let nested = 
Self::build_nested_queries(nested_builders)?; let parent_field = Arc::clone(parent); diff --git a/server/prisma-rs/query-engine/core/src/builders/mod.rs b/server/prisma-rs/query-engine/core/src/builders/mod.rs index fae276d10e..974d456fe8 100644 --- a/server/prisma-rs/query-engine/core/src/builders/mod.rs +++ b/server/prisma-rs/query-engine/core/src/builders/mod.rs @@ -1,25 +1,30 @@ //! Query execution builders module -mod filters; +pub mod filters; mod inflector; mod many; mod many_rel; mod one_rel; mod root; mod single; +mod mutations; + +pub(crate) mod utils; pub use many::*; pub use many_rel::*; pub use one_rel::*; pub use root::*; pub use single::*; +pub use mutations::*; + +pub use self::inflector::Inflector; -use self::inflector::Inflector; use crate::{CoreError, CoreResult, ReadQuery}; -use connector::{filter::NodeSelector, QueryArguments}; +use connector::QueryArguments; use graphql_parser::query::{Field, Selection, Value}; use prisma_models::{ - Field as ModelField, GraphqlId, ModelRef, OrderBy, PrismaValue, RelationFieldRef, SchemaRef, SelectedField, + Field as ModelField, GraphqlId, ModelRef, OrderBy, RelationFieldRef, InternalDataModelRef, SelectedField, SelectedFields, SelectedRelationField, SelectedScalarField, SortOrder, }; use rust_inflector::Inflector as RustInflector; @@ -37,9 +42,9 @@ pub enum Builder<'field> { } impl<'a> Builder<'a> { - fn new(schema: SchemaRef, root_field: &'a Field) -> CoreResult { - // Find model for field - this is a temporary workaround before we have a data model definition (/ schema builder). - let builder: Option = schema + fn new(internal_data_model: InternalDataModelRef, root_field: &'a Field) -> CoreResult { + // Find model for field - this is a temporary workaround before we have a data model definition (/ internal_data_model builder). 
+ let builder: Option = internal_data_model .models() .iter() .filter_map(|model| Builder::infer(model, root_field, None)) @@ -73,7 +78,7 @@ impl<'a> Builder<'a> { } else { let normalized = match model.name.as_str() { "AUser" => "aUser".to_owned(), // FIXME *quietly sobbing* - name => name.to_camel_case() + name => name.to_camel_case(), }; if field.name == normalized { @@ -106,38 +111,6 @@ pub trait BuilderExt { /// Last step that invokes query building fn build(self) -> CoreResult; - /// Get node selector from field and model - fn extract_node_selector(field: &Field, model: ModelRef) -> CoreResult { - println!("NODE SELECT"); - // FIXME: this expects at least one query arg... - let (_, value) = field.arguments.first().expect("no arguments found"); - match value { - Value::Object(obj) => { - let (field_name, value) = obj.iter().next().expect("object was empty"); - let field = model.fields().find_from_scalar(field_name).unwrap(); - let value = Self::value_to_prisma_value(value); - - Ok(NodeSelector { - field: Arc::clone(&field), - value: value, - }) - } - _ => unimplemented!(), - } - } - - /// Turning a GraphQL value to a PrismaValue - fn value_to_prisma_value(val: &Value) -> PrismaValue { - match val { - Value::String(ref s) => match serde_json::from_str(s) { - Ok(val) => PrismaValue::Json(val), - _ => PrismaValue::String(s.clone()), - }, - Value::Int(i) => PrismaValue::Int(i.as_i64().unwrap()), - _ => unimplemented!(), - } - } - fn extract_query_args(field: &Field, model: ModelRef) -> CoreResult { field .arguments @@ -258,7 +231,7 @@ pub trait BuilderExt { fn collect_nested_queries<'field>( model: ModelRef, ast_field: &'field Field, - _schema: SchemaRef, + _internal_data_model: InternalDataModelRef, ) -> CoreResult>> { ast_field .selection_set diff --git a/server/prisma-rs/query-engine/core/src/builders/mutations/ast.rs b/server/prisma-rs/query-engine/core/src/builders/mutations/ast.rs new file mode 100644 index 0000000000..ee889ffb51 --- /dev/null +++ 
b/server/prisma-rs/query-engine/core/src/builders/mutations/ast.rs @@ -0,0 +1,105 @@ +//! Simple wrapper for WriteQueries + +use crate::{builders::utils, BuilderExt, ManyBuilder, ReadQuery, SingleBuilder}; +use connector::mutaction::{ + DatabaseMutactionResult as MutationResult, NestedDatabaseMutaction as NestedMutation, + TopLevelDatabaseMutaction as RootMutation, Identifier, +}; +use graphql_parser::query::Field; +use prisma_models::ModelRef; +use std::sync::Arc; + +/// A top-level write query (mutation) +#[derive(Debug, Clone)] +pub struct WriteQuery { + /// The actual mutation object being built + pub inner: RootMutation, + + /// Required to create following ReadQuery + pub field: Field, + + /// Nested mutations + pub nested: Vec, +} + +/// Nested mutations are slightly different than top-level mutations. +#[derive(Debug, Clone)] +pub struct NestedWriteQuery { + /// The nested mutation being built + pub inner: NestedMutation, + + /// Required to create following ReadQuery + pub field: Field, + + /// NestedWriteQueries can only have nested children + pub nested: Vec, +} + +impl WriteQuery { + pub fn model(&self) -> ModelRef { + match self.inner { + RootMutation::CreateNode(ref node) => Arc::clone(&node.model), + RootMutation::UpdateNode(ref node) => node.where_.field.model.upgrade().unwrap(), + RootMutation::DeleteNode(ref node) => node.where_.field.model.upgrade().unwrap(), + RootMutation::UpsertNode(ref node) => node.where_.field.model.upgrade().unwrap(), + RootMutation::UpdateNodes(ref nodes) => Arc::clone(&nodes.model), + RootMutation::DeleteNodes(ref nodes) => Arc::clone(&nodes.model), + _ => unimplemented!(), + } + } + + /// This function generates a pre-fetch `ReadQuery` for appropriate `WriteQuery` types + pub fn generate_prefetch(&self) -> Option { + match self.inner { + RootMutation::DeleteNode(_) => SingleBuilder::new() + .setup(self.model(), &self.field) + .build() + .ok() + .map(|q| ReadQuery::RecordQuery(q)), + RootMutation::DeleteNodes(_) => 
ManyBuilder::new() + .setup(self.model(), &self.field) + .build() + .ok() + .map(|q| ReadQuery::ManyRecordsQuery(q)), + _ => None, + } + } + + /// Generate a `ReadQuery` from the encapsulated `WriteQuery` + #[warn(warnings)] + pub fn generate_read(&self, res: MutationResult) -> Option { + let field = match res.identifier { + Identifier::Id(gql_id) => utils::derive_field(&self.field, self.model(), gql_id), + _ => unimplemented!(), + }; + + match self.inner { + // We ignore Deletes because they were already handled + RootMutation::DeleteNode(_) | RootMutation::DeleteNodes(_) => None, + RootMutation::CreateNode(_) => SingleBuilder::new() + .setup(self.model(), &field) + .build() + .ok() + .map(|q| ReadQuery::RecordQuery(q)), + + RootMutation::UpdateNode(_) => SingleBuilder::new() + .setup(self.model(), &field) + .build() + .ok() + .map(|q| ReadQuery::RecordQuery(q)), + _ => unimplemented!(), + } + } +} + +impl NestedWriteQuery { + pub fn model(&self) -> ModelRef { + match self.inner { + NestedMutation::CreateNode(ref node) => node.relation_field.model.upgrade().unwrap(), + NestedMutation::UpdateNode(ref node) => node.relation_field.model.upgrade().unwrap(), + NestedMutation::UpsertNode(ref node) => node.relation_field.model.upgrade().unwrap(), + NestedMutation::DeleteNode(ref node) => node.relation_field.model.upgrade().unwrap(), + _ => unimplemented!(), + } + } +} diff --git a/server/prisma-rs/query-engine/core/src/builders/mutations/builder.rs b/server/prisma-rs/query-engine/core/src/builders/mutations/builder.rs new file mode 100644 index 0000000000..f8cdefd5da --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/builders/mutations/builder.rs @@ -0,0 +1,181 @@ +//! 
Providing an interface to build WriteQueries + +use crate::{builders::utils, CoreError, CoreResult, WriteQuery}; +use connector::mutaction::{CreateNode, DeleteNode, DeleteNodes, TopLevelDatabaseMutaction, UpdateNode, UpsertNode}; +use graphql_parser::query::{Field, Value}; +use prisma_models::{InternalDataModelRef, ModelRef, PrismaArgs, PrismaValue}; + +use crate::Inflector; +use rust_inflector::Inflector as RustInflector; + +use std::collections::BTreeMap; +use std::sync::Arc; + +/// A TopLevelMutation builder +/// +/// It takes a graphql field and internal_data_model +/// and builds a mutation tree from it +#[derive(Debug)] +pub struct MutationBuilder<'field> { + field: &'field Field, + internal_data_model: InternalDataModelRef, +} + +type PrismaListArgs = Vec<(String, Option>)>; + +impl<'field> MutationBuilder<'field> { + pub fn new(internal_data_model: InternalDataModelRef, field: &'field Field) -> Self { + Self { + field, + internal_data_model, + } + } + + pub fn build(self) -> CoreResult { + let (non_list_args, list_args) = get_mutation_args(&self.field.arguments); + let (op, model) = parse_model_action( + self.field.alias.as_ref().unwrap_or_else(|| &self.field.name), + Arc::clone(&self.internal_data_model), + )?; + + let inner = match op { + Operation::Create => TopLevelDatabaseMutaction::CreateNode(CreateNode { + model, + non_list_args, + list_args, + nested_mutactions: Default::default(), + }), + Operation::Update => TopLevelDatabaseMutaction::UpdateNode(UpdateNode { + where_: utils::extract_node_selector(self.field, Arc::clone(&model))?, + non_list_args, + list_args, + nested_mutactions: Default::default(), + }), + Operation::Delete => TopLevelDatabaseMutaction::DeleteNode(DeleteNode { + where_: utils::extract_node_selector(self.field, Arc::clone(&model))?, + }), + Operation::DeleteMany => TopLevelDatabaseMutaction::DeleteNodes(DeleteNodes { + model, + filter: unsafe { std::mem::uninitialized() }, // BOOM + }), + Operation::Upsert => 
TopLevelDatabaseMutaction::UpsertNode(UpsertNode { + where_: utils::extract_node_selector(self.field, Arc::clone(&model))?, + create: CreateNode { + model: Arc::clone(&model), + non_list_args: non_list_args.clone(), + list_args: list_args.clone(), + nested_mutactions: Default::default(), + }, + update: UpdateNode { + where_: utils::extract_node_selector(self.field, Arc::clone(&model))?, + non_list_args, + list_args, + nested_mutactions: Default::default(), + }, + }), + _ => unimplemented!(), + }; + + // FIXME: Cloning is unethical and should be avoided + Ok(WriteQuery { + inner, + field: self.field.clone(), + nested: vec![], + }) + } +} + +/// Extract String-Value pairs into usable mutation arguments +fn get_mutation_args(args: &Vec<(String, Value)>) -> (PrismaArgs, PrismaListArgs) { + let (args, lists) = args + .iter() + .fold((BTreeMap::new(), vec![]), |(mut map, mut vec), (_, v)| { + match v { + Value::Object(o) => o.iter().for_each(|(k, v)| { + // If the child is an object, we are probably dealing with ScalarList values + match v { + Value::Object(o) if o.contains_key("set") => { + vec.push(( + k.clone(), + match o.get("set") { + Some(Value::List(l)) => Some( + l.iter() + .map(|v| PrismaValue::from_value(v)) + .collect::>(), + ), + None => None, + _ => unimplemented!(), // or unreachable? dunn duuuuun! + }, + )); + } + v => { + map.insert(k.clone(), PrismaValue::from_value(v)); + } + } + }), + _ => panic!("Unknown argument structure!"), + } + + (map, vec) + }); + (args.into(), lists) +} + +/// A simple enum to discriminate top-level actions +#[allow(dead_code)] // FIXME: Remove! 
+enum Operation { + Create, + Update, + Delete, + Upsert, + UpdateMany, + DeleteMany, + Reset, +} + +impl From<&str> for Operation { + fn from(s: &str) -> Self { + match s { + "create" => Operation::Create, + "update" => Operation::Update, + "updateMany" => Operation::UpdateMany, + "delete" => Operation::Delete, + "deleteMany" => Operation::DeleteMany, + "upsert" => Operation::Upsert, + _ => unimplemented!(), + } + } +} + +/// Parse the mutation name into an action and the model it should operate on +fn parse_model_action(name: &String, internal_data_model: InternalDataModelRef) -> CoreResult<(Operation, ModelRef)> { + let actions = vec!["create", "updateMany", "update", "deleteMany", "delete", "upsert"]; + + let action = match actions.iter().find(|action| name.starts_with(*action)) { + Some(a) => a, + None => return Err(CoreError::QueryValidationError(format!("Unknown action: {}", name))), + }; + let split: Vec<&str> = name.split(action).collect(); + let model_name = match split.get(1) { + Some(mn) => mn, + None => { + return Err(CoreError::QueryValidationError(format!( + "No model name for action `{}`", + name + ))) + } + }; + + let normalized = dbg!(Inflector::singularize(model_name).to_pascal_case()); + let model = match internal_data_model.models().iter().find(|m| m.name == normalized) { + Some(m) => m, + None => { + return Err(CoreError::QueryValidationError(format!( + "Model not found for mutation {}", + name + ))) + } + }; + + Ok((Operation::from(*action), Arc::clone(&model))) +} diff --git a/server/prisma-rs/query-engine/core/src/builders/mutations/mod.rs b/server/prisma-rs/query-engine/core/src/builders/mutations/mod.rs new file mode 100644 index 0000000000..b075ad43e3 --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/builders/mutations/mod.rs @@ -0,0 +1,9 @@ +//! 
Mutation builder module + +mod ast; +mod builder; +mod results; + +pub use ast::*; +pub use builder::*; +pub use results::*; diff --git a/server/prisma-rs/query-engine/core/src/builders/mutations/results.rs b/server/prisma-rs/query-engine/core/src/builders/mutations/results.rs new file mode 100644 index 0000000000..3f4ad03669 --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/builders/mutations/results.rs @@ -0,0 +1,16 @@ +//! WriteQuery results are kinda special + +use crate::ReadQuery; +use connector::mutaction::DatabaseMutactionResult; + +/// A structure that encodes the results from a database mutation +pub struct WriteQueryResult { + /// The immediate mutation return + pub inner: DatabaseMutactionResult, + + /// Nested mutation results + pub nested: Vec, + + /// Associated selection-set for this level + pub query: ReadQuery, +} diff --git a/server/prisma-rs/query-engine/core/src/builders/one_rel.rs b/server/prisma-rs/query-engine/core/src/builders/one_rel.rs index 0ecbd0f993..95a6a59870 100644 --- a/server/prisma-rs/query-engine/core/src/builders/one_rel.rs +++ b/server/prisma-rs/query-engine/core/src/builders/one_rel.rs @@ -36,7 +36,7 @@ impl<'f> BuilderExt for OneRelationBuilder<'f> { } .expect("`RelatedRecordQuery` builder not properly initialised!"); - let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.schema())?; + let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.internal_data_model())?; let nested = Self::build_nested_queries(nested_builders)?; let parent_field = Arc::clone(parent); diff --git a/server/prisma-rs/query-engine/core/src/builders/root.rs b/server/prisma-rs/query-engine/core/src/builders/root.rs index a6d40ee164..30df56c81c 100644 --- a/server/prisma-rs/query-engine/core/src/builders/root.rs +++ b/server/prisma-rs/query-engine/core/src/builders/root.rs @@ -1,19 +1,19 @@ use super::Builder; -use crate::{CoreResult, ReadQuery}; +use crate::{CoreResult, Query as 
PrismaQuery, MutationBuilder}; use graphql_parser::query::*; -use prisma_models::SchemaRef; +use prisma_models::InternalDataModelRef; use std::sync::Arc; #[derive(Debug)] pub struct RootBuilder { pub query: Document, - pub schema: SchemaRef, + pub internal_data_model: InternalDataModelRef, pub operation_name: Option, } impl RootBuilder { // FIXME: Find op name and only execute op! - pub fn build(self) -> CoreResult> { + pub fn build(self) -> CoreResult> { self.query .definitions .iter() @@ -31,19 +31,40 @@ impl RootBuilder { directives: _, selection_set, })) => self.build_query(&selection_set.items), + + Definition::Operation(OperationDefinition::Mutation(Mutation { + position: _, + name: _, + variable_definitions: _, + directives: _, + selection_set, + })) => self.build_mutation(&selection_set.items), _ => unimplemented!(), }) - .collect::>>>() // Collect all the "query trees" + .collect::>>>() // Collect all the "query trees" .map(|v| v.into_iter().flatten().collect()) } - fn build_query(&self, root_fields: &Vec) -> CoreResult> { + fn build_query(&self, root_fields: &Vec) -> CoreResult> { + root_fields + .iter() + .map(|item| { + // First query-level fields map to a model in our internal_data_model, either a plural or singular + match item { + Selection::Field(root_field) => Builder::new(Arc::clone(&self.internal_data_model), root_field)?.build().map(|q| PrismaQuery::Read(q)), + _ => unimplemented!(), + } + }) + .collect() + } + + /// Mutations do something to the database and then follow-up with a query + fn build_mutation(&self, root_fields: &Vec) -> CoreResult> { root_fields .iter() .map(|item| { - // First query-level fields map to a model in our schema, either a plural or singular match item { - Selection::Field(root_field) => Builder::new(Arc::clone(&self.schema), root_field)?.build(), + Selection::Field(root_field) => MutationBuilder::new(Arc::clone(&self.internal_data_model), root_field).build().map(|q| PrismaQuery::Write(q)), _ => unimplemented!(), } }) 
diff --git a/server/prisma-rs/query-engine/core/src/builders/single.rs b/server/prisma-rs/query-engine/core/src/builders/single.rs index 5fe21e773a..54089f1ece 100644 --- a/server/prisma-rs/query-engine/core/src/builders/single.rs +++ b/server/prisma-rs/query-engine/core/src/builders/single.rs @@ -1,4 +1,4 @@ -use super::BuilderExt; +use super::{BuilderExt, utils}; use crate::{query_ast::RecordQuery, CoreResult}; use graphql_parser::query::Field; @@ -34,11 +34,11 @@ impl<'f> BuilderExt for SingleBuilder<'f> { } .expect("`RecordQuery` builder not properly initialised!"); - let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.schema())?; + let nested_builders = Self::collect_nested_queries(Arc::clone(&model), field, model.internal_data_model())?; let nested = Self::build_nested_queries(nested_builders)?; let selected_fields = Self::collect_selected_fields(Arc::clone(&model), field, None)?; - let selector = Self::extract_node_selector(&field, Arc::clone(&model))?; + let selector = utils::extract_node_selector(&field, Arc::clone(&model))?; let name = field.alias.as_ref().unwrap_or(&field.name).clone(); let fields = Self::collect_selection_order(&field); diff --git a/server/prisma-rs/query-engine/core/src/builders/utils.rs b/server/prisma-rs/query-engine/core/src/builders/utils.rs new file mode 100644 index 0000000000..99a81d9e92 --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/builders/utils.rs @@ -0,0 +1,52 @@ +//! A set of utilities to build (read & write) queries + +use graphql_parser::query::{Field, Value}; +use prisma_models::{ModelRef, PrismaValue, GraphqlId}; +use connector::filter::NodeSelector; +use crate::CoreResult; + +use std::sync::Arc; +use std::collections::BTreeMap; + +/// Get node selector from field and model +pub(crate) fn extract_node_selector(field: &Field, model: ModelRef) -> CoreResult { + + // FIXME: this expects at least one query arg... 
+ let (_, value) = field.arguments.first().expect("no arguments found"); + match value { + Value::Object(obj) => { + let (field_name, value) = obj.iter().next().expect("object was empty"); + let field = model.fields().find_from_scalar(field_name).unwrap(); + let value = PrismaValue::from_value(value); + + Ok(NodeSelector { + field: Arc::clone(&field), + value: value, + }) + } + _ => unimplemented!(), + } +} + +/// A function that derives a field given a field +/// +/// This function is used when creating ReadQueries after a WriteQuery has succeeded +pub(crate) fn derive_field(field: &Field, model: ModelRef, id: GraphqlId) -> Field { + let mut new = field.clone(); + + // Remove alias and override Name + new.name = model.name.to_lowercase(); + new.alias = None; + + // Create a selection set for this ID + let id_name = model.fields().id().name.clone(); + let mut map = BTreeMap::new(); + map.insert(id_name, id.to_value()); + + // Then override the existing arguments + new.arguments = vec![ + ("where".into(), Value::Object(map)) + ]; + + new +} \ No newline at end of file diff --git a/server/prisma-rs/query-engine/core/src/executor/mod.rs b/server/prisma-rs/query-engine/core/src/executor/mod.rs new file mode 100644 index 0000000000..da63d66829 --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/executor/mod.rs @@ -0,0 +1,75 @@ +//! 
A slightly more generic interface over executing read and write queries + +#![allow(warnings)] + +mod pipeline; +mod read; +mod write; + +use self::pipeline::*; + +pub use read::ReadQueryExecutor; +pub use write::WriteQueryExecutor; + +use crate::{ + BuilderExt, CoreError, CoreResult, Query, ReadQuery, ReadQueryResult, RecordQuery, SingleBuilder, WriteQuery, + WriteQueryResult, +}; +use connector::{filter::NodeSelector, QueryArguments}; +use connector::{ + mutaction::{DatabaseMutactionResult, TopLevelDatabaseMutaction}, + ConnectorResult, +}; + +use std::sync::Arc; + +use graphql_parser::query::{Field, Selection, Value}; +use prisma_models::{ + Field as ModelField, GraphqlId, InternalDataModelRef, ModelRef, OrderBy, PrismaValue, RelationFieldRef, + SelectedField, SelectedFields, SelectedRelationField, SelectedScalarField, SortOrder, +}; + +/// A wrapper around QueryExecutor +pub struct Executor { + pub read_exec: ReadQueryExecutor, + pub write_exec: WriteQueryExecutor, +} + +type FoldResult = ConnectorResult>; + +impl Executor { + /// Can be given a list of both ReadQueries and WriteQueries + /// + /// Will execute WriteQueries first, then all ReadQueries, while preserving order. 
+ pub fn exec_all(&self, queries: Vec) -> CoreResult> { + // Give all queries to the pipeline module + let mut pipeline = QueryPipeline::from(queries); + + // Execute prefetch queries for destructive writes + let (idx, queries): (Vec<_>, Vec<_>) = pipeline.prefetch().into_iter().unzip(); + let results = self.read_exec.execute(&queries)?; + pipeline.store_prefetch(idx.into_iter().zip(results).collect()); + + // Execute write queries and generate required read queries + let (mut idx, mut queries) = (vec![], vec![]); + for (index, write) in pipeline.get_writes() { + let res = self.write_exec.execute(write.inner.clone())?; + + // Execute reads if they are required to be executed + if let (Some(index), Some(read)) = (index, write.generate_read(res)) { + idx.push(index); + queries.push(read); + } + } + let results = self.read_exec.execute(&queries)?; + pipeline.store_reads(idx.into_iter().zip(results.into_iter()).collect()); + + // Now execute all remaining reads + let (idx, queries): (Vec<_>, Vec<_>) = pipeline.get_reads().into_iter().unzip(); + let results = self.read_exec.execute(&queries)?; + pipeline.store_reads(idx.into_iter().zip(results).collect()); + + // Consume pipeline into return value + Ok(pipeline.consume()) + } +} diff --git a/server/prisma-rs/query-engine/core/src/executor/pipeline.rs b/server/prisma-rs/query-engine/core/src/executor/pipeline.rs new file mode 100644 index 0000000000..6d236cc0df --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/executor/pipeline.rs @@ -0,0 +1,204 @@ +//! Handles query pipelines with mixed read-write queries +//! +//! The general information flow is as follows: +//! +//! - Run pre-fetch queries for every delete +//! - Run mutations +//! - Run other queries +//! +//! When running pre-fetch queries, the results need to be cached +//! and also those queries need to first be derived from the WriteQuery +//! they are based on. +//! +//! The `pipeline` module itself doesn't do this and relies on the +//! 
mutation builders for a lot of this. But the general lifecycle +//! of queries is implemented here + +#![allow(warnings)] + +use crate::{Query, ReadQuery, ReadQueryResult, WriteQuery}; +use indexmap::IndexMap; +use std::mem::replace; + +/// Represents the lifecycle of a query +/// +/// The way that queries are handled is encoded in the +/// enum variants. Check the module documentation on +/// more detail for what order queries get executed in. +/// +/// This type is to be considered an implementation detail +/// of the `QueryPipeline` type defined below. +/// +// TODO: maybe rename? +#[derive(Debug)] +enum Stage { + /// Stores a simple read query + Read(usize, ReadQuery), + /// Acts as a placeholder for when read queries are executed + ReadMark(usize), + /// Store a write query and an index + Write(usize, WriteQuery), + /// Stores the intermediate result of pre-feteching records + /// before executing destructive writes (i.e. deletes) + PreFetched(WriteQuery, ReadQueryResult), + /// Encodes the end-result of a local pipeline + Done(ReadQueryResult), +} + +/// A list of Queries and their stage that need to be processed +/// +/// Generally the order to call the associated functions in is +/// +/// 1. `prefetch()` +/// 2. `store_prefetch()` +/// 3. `get_writes()` +/// 4. `store_write_returns()` +/// 5. `get_reads()` +/// 6. `store_reads()` +/// 7. `consume()` +pub struct QueryPipeline(Vec); + +impl From> for QueryPipeline { + fn from(vec: Vec) -> Self { + Self( + vec.into_iter() + .zip(0..) + .map(|(q, idx)| match q { + Query::Write(query) => Stage::Write(idx, query), + Query::Read(query) => Stage::Read(idx, query), + }) + .collect(), + ) + } +} + +impl QueryPipeline { + /// Returns all queries that need pre-fetching data + /// + /// Under the hood this generates a new `ReadQuery` for every + /// `WriteQuery` which destructively acts on the database (i.e. deletes). 
+ /// + /// It's recommended to iterate over the map, without disturbing key entries + /// because these are used later on to re-associate data into the pipeline. + /// + /// **Remember:** you need to call `store_prefetch` with the results + pub fn prefetch(&self) -> IndexMap { + self.0.iter().fold(IndexMap::new(), |mut map, query| { + if let Stage::Write(idx, query) = query { + if let Some(fetch) = query.generate_prefetch() { + map.insert(*idx, fetch); + } + } + map + }) + } + + /// Takes the set of pre-fetched results and re-associates it into the pipeline + pub fn store_prefetch(&mut self, mut data: IndexMap) { + self.0 = replace(&mut self.0, vec![]) // A small hack around ownership + .into_iter() + .map(|stage| match stage { + Stage::Write(idx, query) => match data.remove(&idx) { + Some(result) => Stage::PreFetched(query, result), + None => Stage::Write(idx, query), + }, + stage => stage, + }) + .collect(); + + // This _should_ never happen but we should warn-log it anyway + if data.len() != 0 { + warn!("Unused pre-fetch results in query pipeline!"); + } + } + + /// Get all write queries to execute + /// + /// Some of them will have an index associated to them. This is because + /// they will return a ReadQuery which has not yet been executed. + /// + /// This marker should also be used to determine which WriteQuery + /// must result in another ReadQuery and the pipeline then uses this + /// information to re-associate data to be in the expected order. 
+ pub fn get_writes(&mut self) -> Vec<(Option, WriteQuery)> { + let (rest, writes) = replace(&mut self.0, vec![]) // A small hack around ownership + .into_iter() + .fold((vec![], vec![]), |(mut rest, mut writes), stage| { + match stage { + Stage::Write(idx, query) => { + rest.push(Stage::ReadMark(idx)); + writes.push((Some(idx), query)); + } + Stage::PreFetched(query, data) => { + rest.push(Stage::Done(data)); + writes.push((None, query)); + } + Stage::Read(idx, query) => rest.push(Stage::Read(idx, query)), + stage => panic!("Unexpected pipeline stage {:?} in function `get_writes`", stage), + }; + (rest, writes) + }); + + self.0 = rest; + writes + } + + /// Store read results at placeholder locations in the pipeline + /// + /// This function is invoked both after what the execution engines + /// does with the result of `get_writes()` and normal reads provided + /// by `get_reads()`. + pub fn store_reads(&mut self, mut data: IndexMap) { + self.0 = replace(&mut self.0, vec![]) // A small hack around ownership + .into_iter() + .map(|stage| match stage { + Stage::ReadMark(idx) => match data.remove(&idx) { + Some(result) => Stage::Done(result), + None => panic!("Expected data entry for index `{}`, but `None` was found!", idx), + }, + stage => stage, + }) + .collect(); + + // This _should_ never happen but we should warn-log it anyway + if data.len() != 0 { + warn!("Unused pre-fetch results in query pipeline!"); + } + } + + /// Get all remaining read queries and their pipeline indices + /// + /// Be sure to call `store_reads()` with query results! 
+ pub fn get_reads(&mut self) -> Vec<(usize, ReadQuery)> { + let (rest, reads) = replace(&mut self.0, vec![]) // A small hack around ownership + .into_iter() + .fold((vec![], vec![]), |(mut rest, mut reads), stage| { + match stage { + Stage::Read(idx, query) => { + rest.push(Stage::ReadMark(idx)); + reads.push((idx, query)); + } + Stage::Done(data) => rest.push(Stage::Done(data)), + stage => panic!("Unexpected pipeline stage {:?} in function `get_reads`", stage), + }; + (rest, reads) + }); + + self.0 = rest; + reads + } + + /// Consumes the pipeline into a list of results + pub fn consume(self) -> Vec { + self.0 + .into_iter() + .map(|stage| match stage { + Stage::Done(data) => data, + stage => panic!( + "Called `consume` on non-final pipeline containing {:?} stage items!", + stage + ), + }) + .collect() + } +} diff --git a/server/prisma-rs/query-engine/core/src/read_query_executor.rs b/server/prisma-rs/query-engine/core/src/executor/read.rs similarity index 90% rename from server/prisma-rs/query-engine/core/src/read_query_executor.rs rename to server/prisma-rs/query-engine/core/src/executor/read.rs index 1dbc9c4fa2..dab4944182 100644 --- a/server/prisma-rs/query-engine/core/src/read_query_executor.rs +++ b/server/prisma-rs/query-engine/core/src/executor/read.rs @@ -4,19 +4,19 @@ use prisma_models::{GraphqlId, ScalarField, SelectedFields, SingleNode}; use query_ast::*; use std::{convert::TryFrom, sync::Arc}; +// Todo We could eliminate the trait object with enums. 
pub struct ReadQueryExecutor { - pub data_resolver: Arc, + pub data_resolver: Arc, } impl ReadQueryExecutor { pub fn execute(&self, queries: &[ReadQuery]) -> CoreResult> { + dbg!(queries); self.execute_internal(queries, vec![]) } - #[warn(warnings)] fn execute_internal(&self, queries: &[ReadQuery], parent_ids: Vec) -> CoreResult> { let mut results = vec![]; - dbg!(&queries); for query in queries { match query { @@ -34,6 +34,7 @@ impl ReadQueryExecutor { let list_fields = selected_fields.scalar_lists(); let lists = self.resolve_scalar_list_fields(ids.clone(), list_fields)?; let nested = self.execute_internal(&query.nested, ids)?; + let result = SingleReadQueryResult { name: query.name.clone(), fields: query.fields.clone(), @@ -71,12 +72,12 @@ impl ReadQueryExecutor { ReadQuery::RelatedRecordQuery(query) => { let selected_fields = Self::inject_required_fields(query.selected_fields.clone()); - let result = dbg!(self.data_resolver.get_related_nodes( + let result = self.data_resolver.get_related_nodes( Arc::clone(&query.parent_field), &parent_ids, query.args.clone(), &selected_fields, - )?); + )?; // If our result set contains more than one entry // we need to handle all of them! 
@@ -84,7 +85,7 @@ impl ReadQueryExecutor { for node in result.nodes.into_iter() { let single = SingleNode { node, - field_names: query.fields.clone() + field_names: result.field_names.clone(), }; let ids = vec![single.get_id_value(query.parent_field.related_model())?.clone()]; @@ -119,6 +120,15 @@ impl ReadQueryExecutor { lists, }; results.push(ReadQueryResult::Single(result)); + } else { + results.push(ReadQueryResult::Single(SingleReadQueryResult { + name: query.name.clone(), + fields: query.fields.clone(), + scalars: None, + nested: vec![], + selected_fields, + lists: vec![], + })); } } ReadQuery::ManyRelatedRecordsQuery(query) => { @@ -135,10 +145,7 @@ impl ReadQueryExecutor { let ids = scalars.get_id_values(Arc::clone(&query.parent_field.related_model()))?; let list_fields = selected_fields.scalar_lists(); let lists = self.resolve_scalar_list_fields(ids.clone(), list_fields)?; - let nested = scalars.nodes.iter().fold(vec![], |mut vec, _| { - vec.append(&mut self.execute_internal(&query.nested, ids.clone()).unwrap()); - vec - }); + let nested = self.execute_internal(&query.nested, ids.clone())?; results.push(ReadQueryResult::Many(ManyReadQueryResults::new( query.name.clone(), diff --git a/server/prisma-rs/query-engine/core/src/executor/write.rs b/server/prisma-rs/query-engine/core/src/executor/write.rs new file mode 100644 index 0000000000..c2c325dac6 --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/executor/write.rs @@ -0,0 +1,15 @@ +use connector::mutaction::{DatabaseMutactionResult, TopLevelDatabaseMutaction}; +use connector::{ConnectorResult, DatabaseMutactionExecutor}; +use std::sync::Arc; + +/// A small wrapper around running WriteQueries +pub struct WriteQueryExecutor { + pub db_name: String, + pub write_executor: Arc, +} + +impl WriteQueryExecutor { + pub fn execute(&self, mutaction: TopLevelDatabaseMutaction) -> ConnectorResult { + self.write_executor.execute(self.db_name.clone(), mutaction) + } +} diff --git 
a/server/prisma-rs/query-engine/core/src/ir/lists.rs b/server/prisma-rs/query-engine/core/src/ir/lists.rs index 8611d24936..1f748a8409 100644 --- a/server/prisma-rs/query-engine/core/src/ir/lists.rs +++ b/server/prisma-rs/query-engine/core/src/ir/lists.rs @@ -1,9 +1,12 @@ //! Process a set of records into an IR List -use super::{maps::build_map, Item, List, Map}; +use super::{maps::build_map, trim_records, Item, List, Map}; use crate::{ManyReadQueryResults, ReadQueryResult}; use prisma_models::{GraphqlId, PrismaValue}; -use std::{collections::HashMap, sync::Arc}; +use std::{ + collections::{hash_map::IterMut, HashMap}, + sync::Arc, +}; #[derive(Debug)] enum ParentsWithRecords { @@ -12,6 +15,13 @@ enum ParentsWithRecords { } impl ParentsWithRecords { + pub fn iter_mut(&mut self) -> IterMut> { + match self { + ParentsWithRecords::Single(_) => panic!("Can't call iter_mut on single parent with record"), + ParentsWithRecords::Many(m) => m.iter_mut(), + } + } + pub fn contains_key(&self, key: &GraphqlId) -> bool { match self { ParentsWithRecords::Single(m) => m.contains_key(key), @@ -54,8 +64,6 @@ pub fn build_list(mut result: ManyReadQueryResults) -> List { // We need the ParentsWithRecords indirection to preserve information if the nesting is to-one or to-many. let mut nested_fields_to_groups: HashMap = HashMap::new(); - // todo: The code below might have issues with empty results. To test. - // Group nested results by parent ids and move them into the grouped map. 
nested.into_iter().for_each(|nested_result| match nested_result { ReadQueryResult::Single(single) => { @@ -73,8 +81,10 @@ pub fn build_list(mut result: ManyReadQueryResults) -> List { .get_mut(&single.name) .expect("Parents with records mapping must contain entries for all nested queries.");; - let nested_build = build_map(single); - parents_with_records.insert(parent_id.clone(), vec![Item::Map(Some(parent_id), nested_build)]); + match build_map(single) { + Some(m) => parents_with_records.insert(parent_id.clone(), vec![Item::Map(Some(parent_id), m)]), + None => parents_with_records.insert(parent_id.clone(), vec![Item::Value(PrismaValue::Null)]), + }; } } ReadQueryResult::Many(many) => { @@ -86,7 +96,9 @@ pub fn build_list(mut result: ManyReadQueryResults) -> List { .get_mut(&many.name) .expect("Parents with records mapping must contain entries for all nested queries."); + let query_args = many.query_arguments.clone(); let nested_build = build_list(many); + nested_build.into_iter().for_each(|item| match item { Item::Map(parent_opt, i) => { let parent_id = parent_opt @@ -105,6 +117,11 @@ pub fn build_list(mut result: ManyReadQueryResults) -> List { } _ => unreachable!(), }); + + // Post process results for this query + parents_with_records.iter_mut().for_each(|(_, v)| { + trim_records(v, &query_args); + }); } }); diff --git a/server/prisma-rs/query-engine/core/src/ir/maps.rs b/server/prisma-rs/query-engine/core/src/ir/maps.rs index d7d0de33c7..6afe3dd732 100644 --- a/server/prisma-rs/query-engine/core/src/ir/maps.rs +++ b/server/prisma-rs/query-engine/core/src/ir/maps.rs @@ -1,10 +1,10 @@ //! 
Process a record into an IR Map -use super::{lists::build_list, Item, Map}; +use super::{lists::build_list, trim_records, Item, Map}; use crate::{ReadQueryResult, SingleReadQueryResult}; use prisma_models::PrismaValue; -pub fn build_map(result: SingleReadQueryResult) -> Map { +pub fn build_map(result: SingleReadQueryResult) -> Option { // Build selected fields first let mut outer = match &result.scalars { Some(single) => single @@ -15,7 +15,7 @@ pub fn build_map(result: SingleReadQueryResult) -> Map { map.insert(name.clone(), Item::Value(val.clone())); map }), - None => panic!("No result found"), // FIXME: Can this ever happen? + None => return None, }; // Parent id for nested queries has to be the id of this record. @@ -25,9 +25,22 @@ pub fn build_map(result: SingleReadQueryResult) -> Map { outer = result.nested.into_iter().fold(outer, |mut map, query| { match query { ReadQueryResult::Single(nested) => { - map.insert(nested.name.clone(), Item::Map(parent_id.clone(), build_map(nested))) + let nested_name = nested.name.clone(); + match build_map(nested) { + Some(m) => map.insert(nested_name, Item::Map(parent_id.clone(), m)), + None => map.insert(nested_name, Item::Value(PrismaValue::Null)), + } + } + ReadQueryResult::Many(nested) => { + let query_name = nested.name.clone(); + let query_args = nested.query_arguments.clone(); + let mut nested_result = build_list(nested); + + // Trim excess data from the processed result set + trim_records(&mut nested_result, &query_args); + + map.insert(query_name, Item::List(nested_result)) } - ReadQueryResult::Many(nested) => map.insert(nested.name.clone(), Item::List(build_list(nested))), }; map @@ -44,11 +57,11 @@ pub fn build_map(result: SingleReadQueryResult) -> Map { // Re-order fields to be in-line with what the query specified // This also removes implicit fields - result.fields.iter().fold(Map::new(), |mut map, field| { + Some(result.fields.iter().fold(Map::new(), |mut map, field| { map.insert( field.clone(), 
outer.remove(field).expect("[Map]: Missing required field"), ); map - }) + })) } diff --git a/server/prisma-rs/query-engine/core/src/ir/mod.rs b/server/prisma-rs/query-engine/core/src/ir/mod.rs index 68f30c6d6f..33f47eedd9 100644 --- a/server/prisma-rs/query-engine/core/src/ir/mod.rs +++ b/server/prisma-rs/query-engine/core/src/ir/mod.rs @@ -7,12 +7,12 @@ //! //! This IR (intermediate representation) is meant for general //! processing and storage. It can also be easily serialised. -#![warn(warnings)] mod lists; mod maps; use crate::ReadQueryResult; +use connector::QueryArguments; use indexmap::IndexMap; use prisma_models::GraphqlId; use prisma_models::PrismaValue; @@ -23,6 +23,7 @@ pub type ResponseSet = Vec; /// A response can either be some `key-value` data representation /// or an error that occured. +#[derive(Debug)] pub enum Response { /// A data item has a name it will be returned under, and and actual item. Data(String, Item), @@ -64,13 +65,51 @@ impl Builder { self.0.into_iter().fold(vec![], |mut vec, res| { vec.push(match res { ReadQueryResult::Single(query) => { - Response::Data(query.name.clone(), Item::Map(None, maps::build_map(query))) + let query_name = query.name.clone(); + match maps::build_map(query) { + Some(m) => Response::Data(query_name, Item::Map(None, m)), + None => Response::Data(query_name, Item::Value(PrismaValue::Null)), + } } ReadQueryResult::Many(query) => { - Response::Data(query.name.clone(), Item::List(lists::build_list(query))) + let query_name = query.name.clone(); + let query_args = query.query_arguments.clone(); + let mut result = lists::build_list(query); + + // Trim excess data from the processed result set + trim_records(&mut result, &query_args); + Response::Data(query_name, Item::List(result)) } }); vec }) } } + +/// Removes the excess records added to by the database query layer based on the query arguments +/// This would be the right place to add pagination markers (has next page, etc.). 
+pub fn trim_records(data: &mut Vec, query_args: &QueryArguments) { + // The query engine reverses lists when querying for `last`, so we need to reverse again to have the intended order. + let reversed = query_args.last.is_some(); + if reversed { + data.reverse(); + } + + match (query_args.first, query_args.last) { + (Some(f), _) if data.len() > f as usize => drop_right(data, 1), + (_, Some(l)) if data.len() > l as usize => drop_left(data, 1), + _ => (), + }; +} + +/// Drops x records on the end of the wrapped records in place. +fn drop_right(vec: &mut Vec, x: u32) { + vec.truncate(vec.len() - x as usize); +} + +/// Drops x records on the start of the wrapped records in place. +fn drop_left(vec: &mut Vec, x: u32) { + vec.reverse(); + drop_right(vec, x); + vec.reverse(); +} diff --git a/server/prisma-rs/query-engine/core/src/lib.rs b/server/prisma-rs/query-engine/core/src/lib.rs index 17927a158a..5959e69627 100644 --- a/server/prisma-rs/query-engine/core/src/lib.rs +++ b/server/prisma-rs/query-engine/core/src/lib.rs @@ -1,10 +1,14 @@ #![deny(warnings)] +#[macro_use] +extern crate log; + mod builders; mod error; mod query_ast; mod query_results; -mod read_query_executor; +mod executor; +mod schema; pub mod ir; @@ -12,6 +16,14 @@ pub use builders::*; pub use error::*; pub use query_ast::*; pub use query_results::*; -pub use read_query_executor::*; +pub use executor::*; +pub use schema::*; pub type CoreResult = Result; + +/// A type wrapper around read and write queries +#[derive(Debug, Clone)] +pub enum Query { + Read(ReadQuery), + Write(WriteQuery), +} \ No newline at end of file diff --git a/server/prisma-rs/query-engine/core/src/query_ast.rs b/server/prisma-rs/query-engine/core/src/query_ast.rs index 80ed84f806..41a61cf01c 100644 --- a/server/prisma-rs/query-engine/core/src/query_ast.rs +++ b/server/prisma-rs/query-engine/core/src/query_ast.rs @@ -17,7 +17,6 @@ pub struct RecordQuery { pub selector: NodeSelector, pub selected_fields: SelectedFields, pub nested: 
Vec, - // TODO: rename to something more obvious maybe? pub fields: Vec, } @@ -28,7 +27,6 @@ pub struct ManyRecordsQuery { pub args: QueryArguments, pub selected_fields: SelectedFields, pub nested: Vec, - // TODO: rename to something more obvious maybe? pub fields: Vec, } @@ -39,7 +37,6 @@ pub struct RelatedRecordQuery { pub args: QueryArguments, pub selected_fields: SelectedFields, pub nested: Vec, - // TODO: rename to something more obvious maybe? pub fields: Vec, } @@ -50,6 +47,5 @@ pub struct ManyRelatedRecordsQuery { pub args: QueryArguments, pub selected_fields: SelectedFields, pub nested: Vec, - // TODO: rename to something more obvious maybe? pub fields: Vec, } diff --git a/server/prisma-rs/query-engine/core/src/query_results.rs b/server/prisma-rs/query-engine/core/src/query_results.rs index 854f42fe8a..54301cf85a 100644 --- a/server/prisma-rs/query-engine/core/src/query_results.rs +++ b/server/prisma-rs/query-engine/core/src/query_results.rs @@ -97,7 +97,7 @@ impl ManyReadQueryResults { query_arguments: QueryArguments, selected_fields: SelectedFields, ) -> Self { - let mut result = Self { + let result = Self { name, fields, scalars, @@ -108,7 +108,7 @@ impl ManyReadQueryResults { __inhibit: (), }; - result.remove_excess_records(); + // result.remove_excess_records(); result } @@ -119,22 +119,6 @@ impl ManyReadQueryResults { self.selected_fields.get_implicit_fields() } - /// Removes the excess records added to by the database query layer based on the query arguments - /// This would be the right place to add pagination markers (has next page, etc.). - pub fn remove_excess_records(&mut self) { - // The query engine reverses lists when querying for `last`, so we need to reverse again to have the intended order. 
- let reversed = self.query_arguments.last.is_some(); - if reversed { - self.scalars.reverse(); - } - - match (self.query_arguments.first, self.query_arguments.last) { - (Some(f), _) if self.scalars.nodes.len() > f as usize => self.scalars.drop_right(1), - (_, Some(l)) if self.scalars.nodes.len() > l as usize => self.scalars.drop_left(1), - _ => (), - }; - } - /// Get all IDs from a query result pub fn find_ids(&self) -> Option> { let id_position: usize = self.scalars.field_names.iter().position(|name| name == "id")?; diff --git a/server/prisma-rs/query-engine/core/src/schema/builder.rs b/server/prisma-rs/query-engine/core/src/schema/builder.rs new file mode 100644 index 0000000000..cafefde1e1 --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/schema/builder.rs @@ -0,0 +1,13 @@ +use prisma_inflector; +use prisma_models::InternalDataModelRef; + +pub struct SchemaBuilder; + +impl SchemaBuilder { + pub fn build(data_model: InternalDataModelRef) { + data_model.models().into_iter().for_each(|m| { + let candidate = prisma_inflector::default().pluralize(&m.name); + println!("{} -> {:?}", &m.name, candidate); + }); + } +} diff --git a/server/prisma-rs/query-engine/core/src/schema/mod.rs b/server/prisma-rs/query-engine/core/src/schema/mod.rs new file mode 100644 index 0000000000..4049988380 --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/schema/mod.rs @@ -0,0 +1,5 @@ +mod builder; +mod schema; + +pub use builder::*; +pub use schema::*; \ No newline at end of file diff --git a/server/prisma-rs/query-engine/core/src/schema/schema.rs b/server/prisma-rs/query-engine/core/src/schema/schema.rs new file mode 100644 index 0000000000..5f08d31c1e --- /dev/null +++ b/server/prisma-rs/query-engine/core/src/schema/schema.rs @@ -0,0 +1,35 @@ +// pub struct QuerySchema { +// query: ObjectType, // read(s)? +// mutation: ObjectType, // write(s)? +// } + +// // enum for Optional input types, list types? 
+// // Could also be a flag on the structs + +// impl QuerySchema {} + +// struct ObjectType {} + +// // On schema construction checks: +// // - field name uniqueness +// // - val NameRegexp = """^[_a-zA-Z][_a-zA-Z0-9]*$""".r match +// // - + +// enum InputType { +// EnumType, +// InputObjectType, +// ListInputType, +// OptionInputType, +// ScalarType, +// } + +// // enum OutputType { +// // EnumType, +// // ListType(OutputType), +// // ObjectType(ObjectType), +// // OptionType(OutputType), +// // ScalarType, +// // } + +// // Possible: +// // InputType(OptionType(StringType)) diff --git a/server/prisma-rs/query-engine/native-bridge/Cargo.toml b/server/prisma-rs/query-engine/native-bridge/Cargo.toml index 284f6f8c90..dbc76518b8 100644 --- a/server/prisma-rs/query-engine/native-bridge/Cargo.toml +++ b/server/prisma-rs/query-engine/native-bridge/Cargo.toml @@ -20,7 +20,7 @@ connector = { path = "../connectors/connector" } sql-connector = { path = "../connectors/sql-connector" } prisma-common = { path = "../../libs/prisma-common" } prisma-models = { path = "../../prisma-models" } -prisma-query = { path = "../../libs/prisma-query" } +prisma-query = { git = "https://github.com/prisma/prisma-query.git" } chrono = { version = "0.4", features = ["serde"] } failure = "0.1" failure_derive = "0.1" diff --git a/server/prisma-rs/query-engine/native-bridge/src/protobuf/interface.rs b/server/prisma-rs/query-engine/native-bridge/src/protobuf/interface.rs index 320ff9c727..dc8e79c40f 100644 --- a/server/prisma-rs/query-engine/native-bridge/src/protobuf/interface.rs +++ b/server/prisma-rs/query-engine/native-bridge/src/protobuf/interface.rs @@ -7,8 +7,8 @@ use connector::{error::ConnectorError, filter::NodeSelector, DataResolver, Datab use prisma_common::config::*; use prisma_models::prelude::*; use prost::Message; -use sql_connector::{database::SqlDatabase, database::Sqlite}; -use std::sync::Arc; +use sql_connector::{PostgreSql, SqlDatabase, Sqlite}; +use std::{convert::TryFrom, 
sync::Arc}; pub struct ProtoBufInterface { data_resolver: Arc, @@ -17,21 +17,33 @@ pub struct ProtoBufInterface { impl ProtoBufInterface { pub fn new(config: &PrismaConfig) -> ProtoBufInterface { - let connector = match config.databases.get("default") { + match config.databases.get("default") { Some(PrismaDatabase::Explicit(ref config)) if config.connector == "sqlite-native" || config.connector == "native-integration-tests" => { let server_root = std::env::var("SERVER_ROOT").expect("Env var SERVER_ROOT required but not found."); let sqlite = Sqlite::new(format!("{}/db", server_root).into(), config.limit(), true).unwrap(); - Arc::new(SqlDatabase::new(sqlite)) - } - _ => panic!("Database connector is not supported, use sqlite with a file for now!"), - }; + let connector = Arc::new(SqlDatabase::new(sqlite)); - ProtoBufInterface { - data_resolver: connector.clone(), - database_mutaction_executor: connector, + ProtoBufInterface { + data_resolver: connector.clone(), + database_mutaction_executor: connector, + } + } + Some(database) => match database.connector() { + "postgres-native" => { + let postgres = PostgreSql::try_from(database).unwrap(); + let connector = Arc::new(SqlDatabase::new(postgres)); + + ProtoBufInterface { + data_resolver: connector.clone(), + database_mutaction_executor: connector, + } + } + connector => panic!("Unsupported connector {}", connector), + }, + _ => panic!("Unsupported connector config"), } } @@ -48,7 +60,7 @@ impl ProtoBufInterface { response_payload } _ => { - let error_response = prisma::RpcResponse::error(error); + let error_response = prisma::RpcResponse::error(dbg!(error)); let mut payload = Vec::new(); error_response.encode(&mut payload).unwrap(); @@ -73,7 +85,7 @@ impl ExternalInterface for ProtoBufInterface { let project_template: ProjectTemplate = serde_json::from_reader(input.project_json.as_slice())?; let project: ProjectRef = project_template.into(); - let model = project.schema().find_model(&input.model_name)?; + let model = 
project.internal_data_model().find_model(&input.model_name)?; let selected_fields = input.selected_fields.into_selected_fields(model.clone(), None); let value: PrismaValue = input.value.into(); @@ -104,7 +116,7 @@ impl ExternalInterface for ProtoBufInterface { let project_template: ProjectTemplate = serde_json::from_reader(input.project_json.as_slice())?; let project: ProjectRef = project_template.into(); - let model = project.schema().find_model(&input.model_name)?; + let model = project.internal_data_model().find_model(&input.model_name)?; let selected_fields = input.selected_fields.into_selected_fields(model.clone(), None); let query_arguments = into_model_query_arguments(model.clone(), input.query_arguments); @@ -132,7 +144,7 @@ impl ExternalInterface for ProtoBufInterface { let project_template: ProjectTemplate = serde_json::from_reader(input.project_json.as_slice())?; let project: ProjectRef = project_template.into(); - let model = project.schema().find_model(&input.model_name)?; + let model = project.internal_data_model().find_model(&input.model_name)?; let from_field = model.fields().find_from_relation_fields(&input.from_field)?; let from_node_ids: Vec = input.from_node_ids.into_iter().map(GraphqlId::from).collect(); @@ -172,7 +184,7 @@ impl ExternalInterface for ProtoBufInterface { let project_template: ProjectTemplate = serde_json::from_reader(input.project_json.as_slice())?; let project: ProjectRef = project_template.into(); - let model = project.schema().find_model(&input.model_name)?; + let model = project.internal_data_model().find_model(&input.model_name)?; let list_field = model.fields().find_from_scalar(&input.list_field)?; let node_ids: Vec = input.node_ids.into_iter().map(GraphqlId::from).collect(); @@ -205,7 +217,7 @@ impl ExternalInterface for ProtoBufInterface { let project_template: ProjectTemplate = serde_json::from_reader(input.project_json.as_slice())?; let project: ProjectRef = project_template.into(); - let model = 
project.schema().find_model(&input.model_name)?; + let model = project.internal_data_model().find_model(&input.model_name)?; let query_arguments = into_model_query_arguments(model.clone(), input.query_arguments); let count = self.data_resolver.count_by_model(model, query_arguments)?; @@ -227,13 +239,13 @@ impl ExternalInterface for ProtoBufInterface { let project_template: ProjectTemplate = serde_json::from_reader(input.project_json.as_slice())?; let project: ProjectRef = project_template.into(); - let count = match project.schema().find_model(&input.model_name) { + let count = match project.internal_data_model().find_model(&input.model_name) { Ok(model) => self .data_resolver - .count_by_table(project.schema().db_name.as_ref(), model.db_name()), + .count_by_table(project.internal_data_model().db_name.as_ref(), model.db_name()), Err(_) => self .data_resolver - .count_by_table(project.schema().db_name.as_ref(), &input.model_name), + .count_by_table(project.internal_data_model().db_name.as_ref(), &input.model_name), }?; let response = RpcResponse::ok(count); @@ -248,7 +260,11 @@ impl ExternalInterface for ProtoBufInterface { fn execute_raw(&self, payload: &mut [u8]) -> Vec { Self::protobuf_result(|| { let input = ExecuteRawInput::decode(payload)?; - let json = self.database_mutaction_executor.execute_raw(input.query)?; + + let json = self + .database_mutaction_executor + .execute_raw(input.db_name, input.query)?; + let json_as_string = serde_json::to_string(&json)?; let response = RpcResponse::ok_raw(prisma::ExecuteRawResult { json: json_as_string }); @@ -267,7 +283,7 @@ impl ExternalInterface for ProtoBufInterface { let project: ProjectRef = project_template.into(); let mutaction = convert_mutaction(input, Arc::clone(&project)); - let db_name = project.schema().db_name.to_string(); + let db_name = project.internal_data_model().db_name.to_string(); let result = self.database_mutaction_executor.execute(db_name, mutaction)?; let response = 
RpcResponse::ok_mutaction(convert_mutaction_result(result)); diff --git a/server/prisma-rs/query-engine/native-bridge/src/protobuf/mod.rs b/server/prisma-rs/query-engine/native-bridge/src/protobuf/mod.rs index c97e5370d8..e24685b24e 100644 --- a/server/prisma-rs/query-engine/native-bridge/src/protobuf/mod.rs +++ b/server/prisma-rs/query-engine/native-bridge/src/protobuf/mod.rs @@ -155,7 +155,7 @@ impl From for GraphqlId { match id.id_value.unwrap() { id::IdValue::String(s) => GraphqlId::String(s), id::IdValue::Int(i) => GraphqlId::Int(i as usize), - id::IdValue::Uuid(s) => GraphqlId::String(s), + id::IdValue::Uuid(s) => GraphqlId::UUID(Uuid::parse_str(&s).unwrap()), // BAM! } } } diff --git a/server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs b/server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs index dd10b8f368..363b2c1af6 100644 --- a/server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs +++ b/server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs @@ -27,7 +27,7 @@ pub fn convert_create_envelope( } pub fn convert_create(m: crate::protobuf::prisma::CreateNode, project: ProjectRef) -> CreateNode { - let model = project.schema().find_model(&m.model_name).unwrap(); + let model = project.internal_data_model().find_model(&m.model_name).unwrap(); CreateNode { model: model, non_list_args: convert_prisma_args(m.non_list_args), @@ -129,7 +129,7 @@ pub fn convert_nested_update(m: crate::protobuf::prisma::NestedUpdateNode, proje } pub fn convert_update_nodes(m: crate::protobuf::prisma::UpdateNodes, project: ProjectRef) -> TopLevelDatabaseMutaction { - let model = project.schema().find_model(&m.model_name).unwrap(); + let model = project.internal_data_model().find_model(&m.model_name).unwrap(); let update_nodes = UpdateNodes { model: Arc::clone(&model), filter: m.filter.into_filter(model), @@ -186,7 +186,7 @@ pub fn convert_nested_delete(m: crate::protobuf::prisma::NestedDeleteNode, proje } pub fn 
convert_delete_nodes(m: crate::protobuf::prisma::DeleteNodes, project: ProjectRef) -> TopLevelDatabaseMutaction { - let model = project.schema().find_model(&m.model_name).unwrap(); + let model = project.internal_data_model().find_model(&m.model_name).unwrap(); let delete_nodes = DeleteNodes { model: Arc::clone(&model), filter: m.filter.into_filter(model), @@ -212,7 +212,7 @@ pub fn convert_reset(_: crate::protobuf::prisma::ResetData, project: ProjectRef) pub fn convert_nested_connect(m: crate::protobuf::prisma::NestedConnect, project: ProjectRef) -> NestedConnect { let relation_field = project - .schema() + .internal_data_model() .find_model(&m.model_name) .unwrap() .fields() @@ -231,7 +231,7 @@ pub fn convert_nested_disconnect( project: ProjectRef, ) -> NestedDisconnect { let relation_field = project - .schema() + .internal_data_model() .find_model(&m.model_name) .unwrap() .fields() @@ -246,7 +246,7 @@ pub fn convert_nested_disconnect( pub fn convert_nested_set(m: crate::protobuf::prisma::NestedSet, project: ProjectRef) -> NestedSet { let relation_field = project - .schema() + .internal_data_model() .find_model(&m.model_name) .unwrap() .fields() @@ -264,7 +264,7 @@ pub fn convert_nested_set(m: crate::protobuf::prisma::NestedSet, project: Projec } pub fn convert_node_select(selector: crate::protobuf::prisma::NodeSelector, project: ProjectRef) -> NodeSelector { - let model = project.schema().find_model(&selector.model_name).unwrap(); + let model = project.internal_data_model().find_model(&selector.model_name).unwrap(); let field = model.fields().find_from_scalar(&selector.field_name).unwrap(); let value: PrismaValue = selector.value.into(); NodeSelector { field, value } @@ -290,7 +290,7 @@ pub fn convert_list_args(proto: crate::protobuf::prisma::PrismaArgs) -> Vec<(Str pub fn find_relation_field(project: ProjectRef, model: String, field: String) -> Arc { project - .schema() + .internal_data_model() .find_model(&model) .unwrap() .fields() diff --git 
a/server/prisma-rs/query-engine/prisma/Cargo.toml b/server/prisma-rs/query-engine/prisma/Cargo.toml index 5e42afa85f..596b1c1135 100644 --- a/server/prisma-rs/query-engine/prisma/Cargo.toml +++ b/server/prisma-rs/query-engine/prisma/Cargo.toml @@ -14,6 +14,7 @@ actix-web = "0.7.18" actix = "0.7.5" lazy_static = "1.3" prisma-common = { path = "../../libs/prisma-common" } +prisma-inflector = { path = "../../libs/prisma-inflector" } prisma-models = { path = "../../prisma-models" } core = { path = "../core" } connector = { path = "../connectors/connector" } diff --git a/server/prisma-rs/query-engine/prisma/src/context.rs b/server/prisma-rs/query-engine/prisma/src/context.rs index 379995eba7..94153007df 100644 --- a/server/prisma-rs/query-engine/prisma/src/context.rs +++ b/server/prisma-rs/query-engine/prisma/src/context.rs @@ -1,52 +1,35 @@ -use crate::{data_model, PrismaResult}; -use core::ReadQueryExecutor; -use prisma_common::config::{self, ConnectionLimit, PrismaConfig, PrismaDatabase}; -use prisma_models::SchemaRef; -use std::sync::Arc; - -#[cfg(feature = "sql")] -use sql_connector::{database::SqlDatabase, database::Sqlite}; +use crate::{data_model, exec_loader, PrismaResult}; +use core::{Executor, SchemaBuilder}; +use prisma_common::config::{self, PrismaConfig}; +use prisma_models::InternalDataModelRef; #[derive(DebugStub)] pub struct PrismaContext { pub config: PrismaConfig, - pub schema: SchemaRef, + pub internal_data_model: InternalDataModelRef, - #[debug_stub = "#QueryExecutor#"] - pub read_query_executor: ReadQueryExecutor, + #[debug_stub = "#Executor#"] + pub executor: Executor, } impl PrismaContext { pub fn new() -> PrismaResult { + // Load config and executors let config = config::load().unwrap(); - let data_resolver = match config.databases.get("default") { - Some(PrismaDatabase::File(ref config)) if config.connector == "sqlite-native" => { - let db_name = config.db_name(); - let db_folder = config - .database_file - .trim_end_matches(&format!("{}.db", 
db_name)) - .trim_end_matches("/"); - - let sqlite = Sqlite::new(db_folder.to_owned(), config.limit(), false).unwrap(); - Arc::new(SqlDatabase::new(sqlite)) - } - _ => panic!("Database connector is not supported, use sqlite with a file for now!"), - }; + let executor = exec_loader::load(&config); - let read_query_executor: ReadQueryExecutor = ReadQueryExecutor { data_resolver }; + // Find db name. This right here influences how + let db = config.databases.get("default").unwrap(); + let db_name = db.schema().or_else(|| db.db_name()).unwrap_or_else(|| "prisma".into()); - let db_name = config - .databases - .get("default") - .unwrap() - .db_name() - .expect("database was not set"); + // Load internal data model + let internal_data_model = data_model::load(db_name)?; + // let _ = SchemaBuilder::build(internal_data_model.clone()); - let schema = data_model::load(db_name)?; Ok(Self { - config: config, - schema: schema, - read_query_executor, + config, + internal_data_model, + executor, }) } } diff --git a/server/prisma-rs/query-engine/prisma/src/data_model.rs b/server/prisma-rs/query-engine/prisma/src/data_model.rs index 9f09aac4af..83d7bf2aa5 100644 --- a/server/prisma-rs/query-engine/prisma/src/data_model.rs +++ b/server/prisma-rs/query-engine/prisma/src/data_model.rs @@ -1,6 +1,6 @@ use crate::{utilities, PrismaError, PrismaResult}; use graphql_parser::query; -use prisma_models::{SchemaRef, SchemaTemplate}; +use prisma_models::{InternalDataModelRef, InternalDataModelTemplate}; use serde::Serialize; use serde_json; use std::{ @@ -23,80 +23,118 @@ pub trait Validatable { fn validate(&self, doc: &query::Document) -> Result<(), ValidationError>; } -impl Validatable for SchemaRef { +impl Validatable for InternalDataModelRef { fn validate(&self, _: &query::Document) -> Result<(), ValidationError> { // It's not really ok 😭 Ok(()) } } -/// Loads and builds the internal schema from the data model -pub fn load(db_name: String) -> PrismaResult { +/// Loads and builds the 
internal data model from the data model JSON. +pub fn load(db_name: String) -> PrismaResult { let data_model_json = load_string()?; - Ok(serde_json::from_str::(&data_model_json)?.build(db_name)) + Ok(serde_json::from_str::(&data_model_json)?.build(db_name)) } -/// Loads the config as unparsed json string. -/// Attempts to resolve the data model from env and from file, see `load_from_env` and `load_from_file`. +/// Attempts to load the config as unparsed JSON string. pub fn load_string() -> PrismaResult { - load_from_env().or_else(|_| load_from_file()).map_err(|err| { - PrismaError::ConfigurationError(format!("Unable to resolve Prisma data model. Last error: {}", err)) - }) + load_internal_from_env() + .or_else(|_| load_sdl_string().and_then(|sdl| resolve_internal_data_model_json(sdl))) + .map_err(|err| { + PrismaError::ConfigurationError(format!( + "Unable to construct internal Prisma data model from any source. Last error: {}", + err + )) + }) } /// Attempts to resolve the internal data model from an env var. -/// Note that the content of the env var has to be base64 encoded JSON. -pub fn load_from_env() -> PrismaResult { - debug!("Trying to load data model from env..."); +/// Note that the content of the env var has to be base64 encoded. +/// Returns: Internal data model JSON string. 
+pub fn load_internal_from_env() -> PrismaResult { + debug!("Trying to load internal data model from env..."); + + utilities::get_env("PRISMA_INTERNAL_DATA_MODEL_JSON").and_then(|internal_data_model_b64| { + let bytes = base64::decode(&internal_data_model_b64)?; + let internal_data_model_json = String::from_utf8(bytes)?; + + debug!("Loaded internal data model from env."); + Ok(internal_data_model_json) + }) +} + +// let inferrer = resolve_internal_data_model_json(sdl)?; - utilities::get_env("PRISMA_INTERNAL_DATA_MODEL_JSON").and_then(|schema| { - let bytes = base64::decode(&schema)?; - let schema_json = String::from_utf8(bytes)?; +/// Attempts to load a Prisma SDL string from either env or file. +pub fn load_sdl_string() -> PrismaResult { + load_sdl_from_env().or_else(|_| load_sdl_from_file()).map_err(|err| { + PrismaError::ConfigurationError(format!("Unable to load SDL from any source. Last error: {}", err)) + }) +} - debug!("Loaded schema from env."); - Ok(schema_json) +/// Attempts to load a Prisma SDL string from env. +/// Note that the content of the env var can be base64 encoded if necessary. +/// Returns: (Decoded) Prisma SDL string. +fn load_sdl_from_env() -> PrismaResult { + debug!("Trying to load Prisma SDL from env..."); + utilities::get_env("PRISMA_SDL").and_then(|sdl_b64| { + let sdl = match base64::decode(&sdl_b64) { + Ok(bytes) => { + trace!("Decoded SDL from Base64."); + String::from_utf8(bytes)? + } + Err(e) => { + trace!("Error decoding SDL Base64: {:?}", e); + sdl_b64 + } + }; + + debug!("Loaded Prisma SDL from env."); + Ok(sdl) }) } -/// Attempts to resolve the internal data model from a Prisma SDL (DataModel) file. -/// The contents of that file are processed by the external schema inferrer (until we have a Rust equivalent), -/// which produces the internal data model JSON string. 
-pub fn load_from_file() -> PrismaResult { - debug!("Trying to load data model from file..."); - let data_model = load_sdl_string()?; +/// Attempts to load a Prisma SDL string from file. +/// Returns: Decoded Prisma SDL string. +pub fn load_sdl_from_file() -> PrismaResult { + debug!("Trying to load Prisma SDL from file..."); + + let path = utilities::get_env("PRISMA_SDL_PATH")?; + let mut f = File::open(&path)?; + let mut sdl = String::new(); + + f.read_to_string(&mut sdl)?; + debug!( + "Loaded Prisma SDL from file: {}.", + utilities::get_env("PRISMA_SDL_PATH")? + ); + + Ok(sdl) +} + +/// Transforms an SDL string into stringified JSON of the internal data model. +fn resolve_internal_data_model_json(sdl: String) -> PrismaResult { #[derive(Serialize)] #[serde(rename_all = "camelCase")] - struct SchemaInferrerJson { + struct InternalDataModelInferrerJson { data_model: String, } - let schema_inferrer = utilities::get_env("SCHEMA_INFERRER_PATH")?; - let mut child = Command::new(schema_inferrer) + let internal_data_model_inferrer = utilities::get_env("SCHEMA_INFERRER_PATH")?; + let mut child = Command::new(internal_data_model_inferrer) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; + let compacted = sdl.replace('\n', " "); let child_in = child.stdin.as_mut().unwrap(); - let json = serde_json::to_string(&SchemaInferrerJson { data_model })?; + let json = serde_json::to_string(&InternalDataModelInferrerJson { data_model: compacted })?; child_in.write_all(json.as_bytes()).expect("Failed to write to stdin"); let output = child.wait_with_output()?; let inferred = String::from_utf8(output.stdout)?; - debug!( - "Loaded data model from file: {}.", - utilities::get_env("PRISMA_DATA_MODEL_PATH")? 
- ); Ok(inferred) } - -pub fn load_sdl_string() -> PrismaResult { - let path = utilities::get_env("PRISMA_DATA_MODEL_PATH")?; - let mut f = File::open(&path)?; - let mut data_model = String::new(); - - f.read_to_string(&mut data_model)?; - Ok(data_model) -} diff --git a/server/prisma-rs/query-engine/prisma/src/exec_loader.rs b/server/prisma-rs/query-engine/prisma/src/exec_loader.rs new file mode 100644 index 0000000000..f0741f99f1 --- /dev/null +++ b/server/prisma-rs/query-engine/prisma/src/exec_loader.rs @@ -0,0 +1,59 @@ +use core::{Executor, ReadQueryExecutor, WriteQueryExecutor}; +use prisma_common::config::{ConnectionLimit, FileConfig, PrismaConfig, PrismaDatabase}; +use std::convert::TryFrom; +use std::sync::Arc; + +#[cfg(feature = "sql")] +use sql_connector::{PostgreSql, SqlDatabase, Sqlite, Transactional}; + +pub fn load(config: &PrismaConfig) -> Executor { + match config.databases.get("default") { + #[cfg(feature = "sql")] + Some(PrismaDatabase::File(ref config)) if config.connector == "sqlite-native" => sqlite(config), + + #[cfg(feature = "sql")] + Some(config) if config.connector() == "postgres-native" => postgres(config), + _ => panic!("Database connector is not supported. 
Supported"), + } +} + +#[cfg(feature = "sql")] +fn sqlite(config: &FileConfig) -> Executor { + let db_name = config.db_name(); + let db_folder = config + .database_file + .trim_end_matches(&format!("{}.db", db_name)) + .trim_end_matches("/"); + + let sqlite = Sqlite::new(db_folder.to_owned(), config.limit(), false).unwrap(); + // let arc = Arc::new(SqlDatabase::new(sqlite)); + let wat = SqlDatabase::new(sqlite); + + // sql_executor(db_name.clone(), Arc::clone(&arc), arc) + sql_executor(db_name.clone(), wat) +} + +#[cfg(feature = "sql")] +fn postgres(config: &PrismaDatabase) -> Executor { + let postgres = PostgreSql::try_from(config).unwrap(); + let connector = SqlDatabase::new(postgres); + + sql_executor("".into(), connector) +} + +#[cfg(feature = "sql")] +fn sql_executor(db_name: String, connector: SqlDatabase) -> Executor +where + T: Transactional + Send + Sync + 'static, +{ + let arc = Arc::new(connector); + let read_exec: ReadQueryExecutor = ReadQueryExecutor { + data_resolver: arc.clone(), + }; + let write_exec: WriteQueryExecutor = WriteQueryExecutor { + db_name: db_name, + write_executor: arc, + }; + + Executor { read_exec, write_exec } +} diff --git a/server/prisma-rs/query-engine/prisma/src/main.rs b/server/prisma-rs/query-engine/prisma/src/main.rs index fa97a1572f..eb6af1fd7f 100644 --- a/server/prisma-rs/query-engine/prisma/src/main.rs +++ b/server/prisma-rs/query-engine/prisma/src/main.rs @@ -10,6 +10,7 @@ extern crate debug_stub_derive; mod context; mod data_model; mod error; +mod exec_loader; mod req_handlers; mod serializer; mod utilities; @@ -60,7 +61,7 @@ fn main() { .unwrap() .start(); - println!("Started http server: {}:{}", address.0, address.1); + println!("Started http server on {}:{}", address.0, address.1); let _ = sys.run(); } diff --git a/server/prisma-rs/query-engine/prisma/src/req_handlers/graphql.rs b/server/prisma-rs/query-engine/prisma/src/req_handlers/graphql.rs index f80520a753..d3591c0989 100644 --- 
a/server/prisma-rs/query-engine/prisma/src/req_handlers/graphql.rs +++ b/server/prisma-rs/query-engine/prisma/src/req_handlers/graphql.rs @@ -1,6 +1,9 @@ use super::{PrismaRequest, RequestHandler}; use crate::{context::PrismaContext, data_model::Validatable, error::PrismaError, PrismaResult}; -use core::{ir::Builder, ReadQuery, RootBuilder}; +use core::{ + ir::{self, Builder}, + RootBuilder, +}; use graphql_parser as gql; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -40,29 +43,37 @@ impl RequestHandler for GraphQlRequestHandler { fn handle_safely(req: PrismaRequest, ctx: &PrismaContext) -> PrismaResult { debug!("Incoming GQL query: {:?}", &req.body.query); + let query_doc = match gql::parse_query(&req.body.query) { Ok(doc) => doc, Err(e) => return Err(PrismaError::QueryParsingError(format!("{:?}", e))), }; // Let's validate the schema! - if let Err(_) = ctx.schema.validate(&query_doc) { + if let Err(_) = ctx.internal_data_model.validate(&query_doc) { return Err(PrismaError::QueryValidationError( - "Schema validation failed for unknown reasons".into(), + "InternalDataModel validation failed for unknown reasons".into(), )); } let rb = RootBuilder { query: query_doc, - schema: ctx.schema.clone(), + internal_data_model: ctx.internal_data_model.clone(), operation_name: req.body.operation_name, }; - let queries: Vec = rb.build()?; - let ir = dbg!(ctx.read_query_executor.execute(&queries)?) 
- .into_iter() - .fold(Builder::new(), |builder, result| builder.add(result)) - .build(); + let queries = rb.build(); + + let ir = match queries { + Ok(q) => match dbg!(ctx.executor.exec_all(q)) { + Ok(results) => results + .into_iter() + .fold(Builder::new(), |builder, result| builder.add(result)) + .build(), + Err(err) => vec![ir::Response::Error(format!("{:?}", err))], // This is merely a workaround + }, + Err(err) => vec![ir::Response::Error(format!("{:?}", err))], // This is merely a workaround + }; Ok(json::serialize(ir)) } diff --git a/server/prisma-rs/query-engine/prisma/src/serializer/json.rs b/server/prisma-rs/query-engine/prisma/src/serializer/json.rs index 0b4822880d..cc99bc87fa 100644 --- a/server/prisma-rs/query-engine/prisma/src/serializer/json.rs +++ b/server/prisma-rs/query-engine/prisma/src/serializer/json.rs @@ -1,4 +1,4 @@ -//! Json serialisation endpoint from IR +//! Json serialisation endpoint for IR use crate::{PrismaError, PrismaResult}; use core::ir::{Item, Response, ResponseSet}; @@ -10,7 +10,7 @@ type JsonMap = Map; type JsonVec = Vec; macro_rules! envelope { - ($name:ident, $producer:expr) => {{ + ($name:expr, $producer:expr) => {{ let mut m = JsonMap::new(); m.insert($name, $producer); Value::Object(m) @@ -20,23 +20,31 @@ macro_rules! 
envelope { pub fn serialize(resp: ResponseSet) -> Value { let mut map = Map::new(); - let vals: Vec = resp - .into_iter() - .map(|res| match res { - Response::Data(name, Item::List(list)) => envelope!(name, Value::Array(serialize_list(list))), - Response::Data(name, Item::Map(_parent, map)) => envelope!(name, Value::Object(serialize_map(map))), - _ => unreachable!(), - }) - .collect(); + // Error workaround + if let Response::Error(err) = resp.first().unwrap() { + map.insert( + "errors".into(), + Value::Array(vec![envelope!("error".into(), Value::String(err.to_string()))]), + ); + } else { + let vals: Vec = resp + .into_iter() + .map(|res| match res { + Response::Data(name, Item::List(list)) => envelope!(name, Value::Array(serialize_list(list))), + Response::Data(name, Item::Map(_parent, map)) => envelope!(name, Value::Object(serialize_map(map))), + _ => unreachable!(), + }) + .collect(); - map.insert( - "data".into(), - if vals.len() == 1 { - vals.first().unwrap().clone() - } else { - Value::Array(vals) - }, - ); + map.insert( + "data".into(), + if vals.len() == 1 { + vals.first().unwrap().clone() + } else { + Value::Array(vals) + }, + ); + } Value::Object(map) } diff --git a/server/prisma-rs/query-engine/prisma/src/serializer/mod.rs b/server/prisma-rs/query-engine/prisma/src/serializer/mod.rs index 28da90e305..72d8d4eda1 100644 --- a/server/prisma-rs/query-engine/prisma/src/serializer/mod.rs +++ b/server/prisma-rs/query-engine/prisma/src/serializer/mod.rs @@ -1,6 +1,3 @@ //! A modular query response serializer //! -//! It parses PrismaQueraResults into an intermediate representation. -//! This is then used to feed different encoders (json, ...) - pub mod json; diff --git a/server/prisma-rs/schema.prisma b/server/prisma-rs/schema.prisma index f302bd93aa..02f85e4ed8 100644 --- a/server/prisma-rs/schema.prisma +++ b/server/prisma-rs/schema.prisma @@ -1,42 +1,102 @@ -type Artist { - id: ID! @id - ArtistId: Int! @unique - Name: String! - Aliases: [String!]! 
@scalarList(strategy: RELATION) - Albums: [Album!]! -} - type Album { - id: ID! @id - AlbumId: Int! @unique + id: Int! @id @db(name:"AlbumId") Title: String! - Artist: Artist! @relation(link: TABLE) - Tracks: [Track!]! + Artist: Artist! @db(name:"ArtistId") + Tracks: [Track] } type Track { - id: ID! @id - TrackId: Int! @unique + id: Int! @id @db(name:"TrackId") Name: String! - Album: Album! @relation(link: TABLE) - MediaType: MediaType! @relation(link: TABLE) - Genre: Genre! @relation(link: TABLE) + Album: Album @db(name: "AlbumId") + AlbumId: Int + MediaType: MediaType! @db(name: "MediaTypeId") + Genre: Genre @db(name: "GenreId") Composer: String Milliseconds: Int! - Bytes: Int! UnitPrice: Float! + Playlists: [Playlist] @relation(name:"PlaylistTrack") +} + +type MediaType { + id: Int! @id @db(name:"MediaTypeId") + Name: String } type Genre { - id: ID! @id - GenreId: Int! @unique - Name: String! - Tracks: [Track!]! + id: Int! @id @db(name:"GenreId") + Name: String + Tracks: [Track] } -type MediaType { - id: ID! @id - MediaTypeId: Int! @unique - Name: String! - Tracks: [Track!]! +type Artist { + id: Int! @id @db(name:"ArtistId") + Name: String + Albums: [Album] +} + +type Customer { + id: Int! @id @db(name:"CustomerId") + FirstName: String! + LastName: String! + Company: String + Address: String + City: String + State: String + Country: String + PostalCode: String + Phone: String + Fax: String + Email: String! + SupportRep: Employee @db(name: "SupportRepId") +} + +type Employee { + id: Int! @id @db(name:"EmployeeId") + FirstName: String! + LastName: String! + Title: String + ReportsTo: Employee + BirthDate: DateTime + HireDate: DateTime + Address: String + City: String + State: String + Country: String + PostalCode: String + Phone: String + Fax: String + Email: String +} + +type Invoice { + id: Int! @id @db(name:"InvoiceId") + Customer: Customer! @db(name: "CustomerId") + InvoiceDate: DateTime! 
+ BillingAddress: String + BillingCity: String + BillingState: String + BillingCountry: String + BillingPostalCode: String + Total: Float! + Lines: [InvoiceLine] +} + +type InvoiceLine { + id: Int! @id @db(name:"InvoiceLineId") + Invoice: Invoice! @db(name: "InvoiceId") + Track: Track! @db(name: "TrackId") + UnitPrice: Float! + Quantity: Int! +} + +type Playlist { + id: Int! @id @db(name:"PlaylistId") + Name: String + Tracks: [Track] @relation(name:"PlaylistTrack") +} + +type PlaylistTrack @relationTable { + PlaylistId: Playlist + TrackId: Track } \ No newline at end of file diff --git a/server/prisma-rs/test.sh b/server/prisma-rs/test.sh index 4c1d03aa9f..e8a319bd2d 100755 --- a/server/prisma-rs/test.sh +++ b/server/prisma-rs/test.sh @@ -2,4 +2,4 @@ set -e git submodule update --init || true -cargo test \ No newline at end of file +cargo test -- --test-threads 1 \ No newline at end of file diff --git a/server/protobuf/protocol.proto b/server/protobuf/protocol.proto index bf9acdfd8e..161cd37eed 100644 --- a/server/protobuf/protocol.proto +++ b/server/protobuf/protocol.proto @@ -151,8 +151,9 @@ message GetScalarListValuesByNodeIds { } message ExecuteRawInput { - required Header header = 1; - required string query = 2; + required Header header = 1; + required string db_name = 2; + required string query = 3; } message CountByModelInput { diff --git a/server/servers/api/src/test/scala/com/prisma/api/ApiTestServer.scala b/server/servers/api/src/test/scala/com/prisma/api/ApiTestServer.scala index 6ee405d1b0..1bce11bbc3 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/ApiTestServer.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/ApiTestServer.scala @@ -1,15 +1,13 @@ package com.prisma.api +import java.io.{BufferedReader, InputStreamReader} import java.net.{HttpURLConnection, URL} import java.nio.charset.StandardCharsets import java.util.Base64 -import java.io.BufferedReader -import java.io.InputStreamReader import 
com.prisma.api.schema.{ApiUserContext, PrivateSchemaBuilder, SchemaBuilder} import com.prisma.graphql.{GraphQlClient, GraphQlResponse} import com.prisma.shared.models.Project -import com.prisma.shared.models.{Schema => SchemaModel} import com.prisma.utils.json.PlayJsonExtensions import play.api.libs.json._ import sangria.parser.QueryParser @@ -19,7 +17,6 @@ import sangria.schema.Schema import scala.concurrent.duration.Duration import scala.concurrent.{Await, Awaitable, Future} import scala.reflect.io.File -import scala.sys.process.{Process, ProcessLogger} trait ApiTestServer extends PlayJsonExtensions { System.setProperty("org.jooq.no-logo", "true") @@ -86,8 +83,8 @@ trait ApiTestServer extends PlayJsonExtensions { } case class ExternalApiTestServer()(implicit val dependencies: ApiDependencies) extends ApiTestServer { - import dependencies.system.dispatcher import com.prisma.shared.models.ProjectJsonFormatter._ + import dependencies.system.dispatcher implicit val system = dependencies.system implicit val materializer = dependencies.materializer @@ -208,8 +205,8 @@ case class ExternalApiTestServer()(implicit val dependencies: ApiDependencies) e ) result.foreach(x => println(s"""Request Result: - |$x - """.stripMargin)) + |$x + """.stripMargin)) result } else { val prismaProcess = startPrismaProcess(project) diff --git a/server/servers/api/src/test/scala/com/prisma/api/filters/PortedFiltersSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/filters/PortedFiltersSpec.scala index 96b0616df4..e025f1f2b2 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/filters/PortedFiltersSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/filters/PortedFiltersSpec.scala @@ -65,7 +65,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { //region Recursion "A filter query" should "support the AND filter in one recursion level" in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") 
createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -84,7 +83,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the AND filter in two recursion levels" in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -108,7 +106,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the OR filter in one recursion level" taggedAs (IgnoreMongo) in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -123,7 +120,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the OR filter in two recursion levels" taggedAs (IgnoreMongo) in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -143,7 +139,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { //region null "A filter query" should "support filtering on null" in { - createTest("id1", optString = null, 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", optString = "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", optString = null, 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -178,7 +173,6 @@ class PortedFiltersSpec extends 
FlatSpec with Matchers with ApiSpecBase { //region String "A filter query" should "support the equality filter on strings" in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -191,7 +185,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not-equality filter on strings" in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -204,7 +197,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the contains filter on strings" in { - createTest("id1", "bara", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -217,7 +209,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not_contains filter on strings" in { - createTest("id1", "bara", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -230,7 +221,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the starts_with filter on strings" in { - createTest("id1", "bara", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, 
"A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -243,7 +233,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not_starts_with filter on strings" in { - createTest("id1", "bara", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -256,7 +245,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the ends_with filter on strings" in { - createTest("id1", "bara", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -269,7 +257,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not_ends_with filter on strings" in { - createTest("id1", "bara", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -282,7 +269,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lt filter on strings" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -295,7 +281,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lte filter on strings" in { - 
createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -308,7 +293,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the gt filter on strings" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -321,7 +305,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the gte filter on strings" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -334,7 +317,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the in filter on strings" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -361,7 +343,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not_in filter on strings" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -380,7 +361,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { //region Integer "A filter query" 
should "support the equality filter on integers" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -393,7 +373,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not equality filter on integers" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -406,7 +385,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lt filter on integers" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -419,7 +397,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lte filter on integers" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -432,7 +409,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the gt filter on integers" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -445,7 +421,6 @@ class PortedFiltersSpec extends FlatSpec with 
Matchers with ApiSpecBase { } "A filter query" should "support the gte filter on integers" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -458,7 +433,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the in filter on integers" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -471,7 +445,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not_in filter on integers" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 2, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 3, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -487,7 +460,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { //region Float "A filter query" should "support the equality filter on float" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -500,7 +472,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not equality filter on float" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 3, 3, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ 
-513,7 +484,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lt filter on floats" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -526,7 +496,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lte filter on floats" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -539,7 +508,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the gt filter on floats" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -552,7 +520,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the gte filter on floats" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -565,7 +532,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the in filter on floats" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 3, 3, optBoolean = false, "A", 
"2016-09-23T12:29:32.342") @@ -578,7 +544,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not_in filter on floats" in { - createTest("id1", "a", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "ab", 2, 2, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "abc", 3, 3, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -594,7 +559,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { // region Boolean "A filter query" should "support the equality filter on booleans" in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -607,7 +571,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not-equality filter on booleans" in { - createTest("id1", "bar", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "foo bar", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") createTest("id3", "foo bar barz", 1, 1, optBoolean = false, "A", "2016-09-23T12:29:32.342") @@ -623,7 +586,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { //region DateTime "A filter query" should "support the equality filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -636,7 +598,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not equality filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") 
createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -649,7 +610,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lt filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -662,7 +622,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the lte filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -675,7 +634,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the gt filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -688,7 +646,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the gte filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -701,7 +658,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the in filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", 
"2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -714,7 +670,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not_in filter on DateTime" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "A", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "A", "2016-09-25T12:29:32.342") @@ -730,7 +685,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { //region Enum "A filter query" should "support the equality filter on Enum" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "B", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "B", "2016-09-25T12:29:32.342") @@ -743,7 +697,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the not equality filter on Enum" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "B", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "B", "2016-09-25T12:29:32.342") @@ -756,7 +709,6 @@ class PortedFiltersSpec extends FlatSpec with Matchers with ApiSpecBase { } "A filter query" should "support the in filter on Enum" in { - createTest("id1", "1", 1, 1, optBoolean = true, "A", "2016-09-23T12:29:32.342") createTest("id2", "2", 2, 2, optBoolean = false, "B", "2016-09-24T12:29:32.342") createTest("id3", "3", 3, 3, optBoolean = false, "B", "2016-09-25T12:29:32.342") diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/BringYourOwnIdSpec.scala 
b/server/servers/api/src/test/scala/com/prisma/api/mutations/BringYourOwnIdSpec.scala index cea0609b67..1ee0fdad63 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/BringYourOwnIdSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/BringYourOwnIdSpec.scala @@ -1,6 +1,6 @@ package com.prisma.api.mutations -import com.prisma.{ConnectorTag, IgnoreSQLite} +import com.prisma.{ConnectorTag, IgnorePostgres, IgnoreSQLite} import com.prisma.ConnectorTag.{MySqlConnectorTag, PostgresConnectorTag, SQLiteConnectorTag} import com.prisma.api.ApiSpecBase import com.prisma.api.mutations.nonEmbedded.nestedMutations.SchemaBaseV11 @@ -69,7 +69,7 @@ class BringYourOwnIdSpec extends FlatSpec with Matchers with ApiSpecBase with Sc } } - "A Create Mutation" should "error for id that is invalid 3" taggedAs IgnoreSQLite in { + "A Create Mutation" should "error for id that is invalid 3" taggedAs (IgnoreSQLite, IgnorePostgres) in { schemaP1optToC1opt.test { dataModel => val project = SchemaDsl.fromStringV11() { dataModel } database.setup(project) @@ -110,7 +110,7 @@ class BringYourOwnIdSpec extends FlatSpec with Matchers with ApiSpecBase with Sc } } - "A Nested Create Mutation" should "error with invalid id" taggedAs IgnoreSQLite in { // TODO: Should we really validate this + "A Nested Create Mutation" should "error with invalid id" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Should we really validate this schemaP1optToC1opt.test { dataModel => val project = SchemaDsl.fromStringV11() { dataModel } database.setup(project) @@ -147,7 +147,7 @@ class BringYourOwnIdSpec extends FlatSpec with Matchers with ApiSpecBase with Sc } } - "An Upsert Mutation" should "error with id that is too long" taggedAs IgnoreSQLite in { + "An Upsert Mutation" should "error with id that is too long" taggedAs (IgnoreSQLite, IgnorePostgres) in { schemaP1optToC1opt.test { dataModel => val project = SchemaDsl.fromStringV11() { dataModel } database.setup(project) diff 
--git a/server/servers/api/src/test/scala/com/prisma/api/mutations/CascadingDeleteSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/CascadingDeleteSpec.scala index d578945521..b5ea6188f8 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/CascadingDeleteSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/CascadingDeleteSpec.scala @@ -1,7 +1,7 @@ package com.prisma.api.mutations import akka.http.scaladsl.settings.ParserSettings.IllegalResponseHeaderValueProcessingMode.Ignore -import com.prisma.IgnoreSQLite +import com.prisma.{IgnorePostgres, IgnoreSQLite} import com.prisma.api.ApiSpecBase import com.prisma.shared.models.ConnectorCapability._ import com.prisma.shared.models._ @@ -14,7 +14,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { //region TOP LEVEL DELETE - "P1!-C1! relation deleting the parent" should "work if parent is marked marked cascading" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1!-C1! 
relation deleting the parent" should "work if parent is marked marked cascading" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove ignore when cascading again // P-C val project = SchemaDsl.fromStringV11() { """ @@ -42,7 +42,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "PM-CM relation deleting the parent" should "delete all children if the parent is marked cascading" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "PM-CM relation deleting the parent" should "delete all children if the parent is marked cascading" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C val project = SchemaDsl.fromStringV11() { """ @@ -71,7 +71,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "PM-CM relation deleting the parent" should "succeed if both sides are marked cascading although that is a circle" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "PM-CM relation deleting the parent" should "succeed if both sides are marked cascading although that is a circle" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C val project = SchemaDsl.fromStringV11() { """ @@ -98,7 +98,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "P1!-C1! relation deleting the parent" should "work if both sides are marked marked cascading" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1!-C1! relation deleting the parent" should "work if both sides are marked marked cascading" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C val project = SchemaDsl.fromStringV11() { """ @@ -125,7 +125,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "P1!-C1! 
relation deleting the parent" should "error if only child is marked marked cascading" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1!-C1! relation deleting the parent" should "error if only child is marked marked cascading" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C val project = SchemaDsl.fromStringV11() { """ @@ -153,7 +153,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "P1!-C1!-C1!-GC! relation deleting the parent and child and grandchild if marked cascading" should "work" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1!-C1!-C1!-GC! relation deleting the parent and child and grandchild if marked cascading" should "work" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC val project = SchemaDsl.fromStringV11() { """ @@ -190,7 +190,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "P1!-C1!-C1-GC relation deleting the parent and child marked cascading" should "work but preserve the grandchild" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1!-C1!-C1-GC relation deleting the parent and child marked cascading" should "work but preserve the grandchild" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC val project = SchemaDsl.fromStringV11() { """ @@ -229,7 +229,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "P1!-C1! relation deleting the parent marked cascading" should "error if the child is required in another non-cascading relation" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1!-C1! 
relation deleting the parent marked cascading" should "error if the child is required in another non-cascading relation" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC val project = SchemaDsl.fromStringV11() { """ @@ -264,7 +264,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "If the parent is not cascading nothing on the path" should "be deleted except for the parent" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "If the parent is not cascading nothing on the path" should "be deleted except for the parent" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC val project = SchemaDsl.fromStringV11() { """ @@ -299,7 +299,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "P1!-C1! PM-SC1! relation deleting the parent marked cascading" should "work" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1!-C1! PM-SC1! 
relation deleting the parent marked cascading" should "work" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P // / \ // C SC @@ -340,7 +340,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "P!->C PM->SC relation without backrelations" should "work when deleting the parent marked cascading" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P!->C PM->SC relation without backrelations" should "work when deleting the parent marked cascading" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P // / \ not a real circle since from the children there are no backrelations to the parent // C - SC @@ -383,7 +383,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "A path that is interrupted since there are nodes missing" should "only cascade up until the gap" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "A path that is interrupted since there are nodes missing" should "only cascade up until the gap" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC-|-D-E val project = SchemaDsl.fromStringV11() { """ @@ -437,7 +437,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "A deep uninterrupted path" should "cascade all the way down" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "A deep uninterrupted path" should "cascade all the way down" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC-D-E val project = SchemaDsl.fromStringV11() { """ @@ -491,7 +491,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "A deep uninterrupted path" should "error on a required relation violation at the end" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore 
when cascading again + "A deep uninterrupted path" should "error on a required relation violation at the end" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC-D-E-F! val project = SchemaDsl.fromStringV11() { """ @@ -559,7 +559,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "A required relation violation anywhere on the path" should "error and roll back all of the changes" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "A required relation violation anywhere on the path" should "error and roll back all of the changes" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again /** A If cascading all the way down to D from A is fine, but deleting C would * / violate a required relation on E that is not cascading then this should @@ -621,7 +621,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { ) } - "A required relation violation on the parent" should "roll back all cascading deletes on the path" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "A required relation violation on the parent" should "roll back all cascading deletes on the path" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again /** A If A!<->D! ia not marked cascading an existing D should cause all the deletes to fail * / | : even if A<->B, A<->C and C<->E could successfully cascade. 
@@ -685,7 +685,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "Several relations between the same model" should "be handled correctly" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "Several relations between the same model" should "be handled correctly" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again /** A If there are two relations between B and C and only one of them is marked * / cascading, then only the nodes connected to C's which are connected to B @@ -777,7 +777,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { ) } - "P1-C1-C1!-GC! relation updating the parent to delete the child and grandchild if marked cascading" should "work" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "P1-C1-C1!-GC! relation updating the parent to delete the child and grandchild if marked cascading" should "work" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again // P-C-GC val project = SchemaDsl.fromStringV11() { """ @@ -853,7 +853,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } //endregion - "Self Relations" should "work" taggedAs IgnoreSQLite in { + "Self Relations" should "work" taggedAs (IgnoreSQLite, IgnorePostgres) in { val project = SchemaDsl.fromStringV11() { """type Folder { | id: ID! @id | name: String! 
@unique @@ -886,7 +886,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { server.query("""query{folders{name}}""", project).toString should be("""{"data":{"folders":[]}}""") } - "Self Relations" should "work 2" taggedAs (IgnoreSQLite) in { // FIXME: Eats all the RAM // TODO: Remove SQLite ignore when cascading again + "Self Relations" should "work 2" taggedAs (IgnoreSQLite, IgnorePostgres) in { // FIXME: Eats all the RAM // TODO: Remove SQLite ignore when cascading again val project = SchemaDsl.fromStringV11() { """type Folder { | id: ID! @id | name: String! @unique @@ -919,7 +919,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { server.query("""query{folders{name}}""", project).toString should be("""{"data":{"folders":[]}}""") } - "Self Relations" should "work 3" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "Self Relations" should "work 3" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again val project = SchemaDsl.fromStringV11() { """type Folder { | id: ID! @id | name: String! @unique @@ -950,7 +950,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { server.query("""query{folders{name}}""", project).toString should be("""{"data":{"folders":[]}}""") } - "Cascade on both sides" should "halt" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "Cascade on both sides" should "halt" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again val project = SchemaDsl.fromStringV11() { """type User { | id: ID! @id | name: String! 
@unique @@ -985,7 +985,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "A deleteMany " should " work with cascading delete" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "A deleteMany " should " work with cascading delete" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again val project: Project = setupForDeleteManys @@ -999,7 +999,7 @@ class CascadingDeleteSpec extends FlatSpec with Matchers with ApiSpecBase { } - "A nested deleteMany " should " work with cascading delete" taggedAs IgnoreSQLite in { // TODO: Remove SQLite ignore when cascading again + "A nested deleteMany " should " work with cascading delete" taggedAs (IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again val project: Project = setupForDeleteManys diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationListSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationListSpec.scala index 859139c2cf..f4cbb03d96 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationListSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationListSpec.scala @@ -5,6 +5,7 @@ import com.prisma.api.util.TroubleCharacters import com.prisma.shared.models.ConnectorCapability.ScalarListsCapability import com.prisma.shared.schema_dsl.SchemaDsl import org.scalatest.{FlatSpec, Matchers} +import play.api.libs.json.{JsValue, Json} class CreateMutationListSpec extends FlatSpec with Matchers with ApiSpecBase { @@ -57,10 +58,10 @@ class CreateMutationListSpec extends FlatSpec with Matchers with ApiSpecBase { res.toString should be( 
s"""{"data":{"createScalarModel":{"optEnums":["A","A"],"optBooleans":[true,false],"optDateTimes":["2016-07-31T23:59:01.000Z","2017-07-31T23:59:01.000Z"],"optStrings":["lala${TroubleCharacters.value}"],"optInts":[1337,12],"optJsons":[[1,2,3]],"optFloats":[1.234,1.45]}}}""") - val queryRes = server.query("""{ scalarModels{optStrings, optInts, optFloats, optBooleans, optEnums, optDateTimes, optJsons}}""", project = project) + val queryRes: JsValue = server.query("""{ scalarModels{optStrings, optInts, optFloats, optBooleans, optEnums, optDateTimes, optJsons}}""", project = project) - queryRes.toString should be( - s"""{"data":{"scalarModels":[{"optEnums":["A","A"],"optBooleans":[true,false],"optDateTimes":["2016-07-31T23:59:01.000Z","2017-07-31T23:59:01.000Z"],"optStrings":["lala${TroubleCharacters.value}"],"optInts":[1337,12],"optJsons":[[1,2,3]],"optFloats":[1.234,1.45]}]}}""") + queryRes should be( + Json.parse(s"""{"data":{"scalarModels":[{"optEnums":["A","A"],"optBooleans":[true,false],"optDateTimes":["2016-07-31T23:59:01.000Z","2017-07-31T23:59:01.000Z"],"optStrings":["lala${TroubleCharacters.value}"],"optInts":[1337,12],"optJsons":[[1,2,3]],"optFloats":[1.234,1.45]}]}}""")) } "A Create Mutation" should "create and return items with empty listvalues" in { diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationSpec.scala index 277ad0ae76..c8aae7e753 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/CreateMutationSpec.scala @@ -56,8 +56,8 @@ class CreateMutationSpec extends FlatSpec with Matchers with ApiSpecBase { val queryRes = server.query("""{ scalarModels{optString, optInt, optFloat, optBoolean, optEnum, optDateTime, optJson}}""", project = project) - queryRes.toString should be( - 
s"""{"data":{"scalarModels":[{"optJson":[1,2,3],"optInt":1337,"optBoolean":true,"optDateTime":"2016-07-31T23:59:01.000Z","optString":"lala${TroubleCharacters.value}","optEnum":"A","optFloat":1.234}]}}""") + queryRes should be(Json.parse( + s"""{"data":{"scalarModels":[{"optJson":[1,2,3],"optInt":1337,"optBoolean":true,"optDateTime":"2016-07-31T23:59:01.000Z","optString":"lala${TroubleCharacters.value}","optEnum":"A","optFloat":1.234}]}}""")) } "A Create Mutation" should "create and return item with empty string" in { diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/DateTimeSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/DateTimeSpec.scala index b2893ab46a..b2f53368f6 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/DateTimeSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/DateTimeSpec.scala @@ -1,6 +1,6 @@ package com.prisma.api.mutations -import com.prisma.{IgnoreMySql, IgnoreSQLite} +import com.prisma.{IgnoreMySql, IgnorePostgres, IgnoreSQLite} import com.prisma.api.ApiSpecBase import com.prisma.shared.schema_dsl.SchemaDsl import org.scalatest.{FlatSpec, Matchers} @@ -47,14 +47,14 @@ class DateTimeSpec extends FlatSpec with Matchers with ApiSpecBase { // https://tools.ietf.org/html/rfc3339 doesn't support 5-digit years. Therefore Rust date libraries will give a parse // error here. 
- "Using a date after 10000" should "work" taggedAs (IgnoreMySql, IgnoreSQLite) in { + "Using a date after 10000" should "work" taggedAs (IgnoreMySql, IgnoreSQLite, IgnorePostgres) in { server.query(s"""mutation {createPerson(data: {name: "Fifth", born: "11979-01-01T10:33:59Z"}){name}}""", project) val res = server.query(s"""query {person(where:{name: "Fifth"}){name, born}}""", project) res.toString should be("""{"data":{"person":{"name":"Fifth","born":"11979-01-01T10:33:59.000Z"}}}""") } - "Using milliseconds in a date after 10000" should "work" taggedAs (IgnoreMySql, IgnoreSQLite) in { + "Using milliseconds in a date after 10000" should "work" taggedAs (IgnoreMySql, IgnoreSQLite, IgnorePostgres) in { server.query(s"""mutation {createPerson(data: {name: "Sixth", born: "11979-01-01T10:33:59.828Z"}){name}}""", project) val res = server.query(s"""query {person(where:{name: "Sixth"}){name, born}}""", project) diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/ExecuteRawSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/ExecuteRawSpec.scala index bea9216544..2c4f7bbdd2 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/ExecuteRawSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/ExecuteRawSpec.scala @@ -1,8 +1,9 @@ package com.prisma.api.mutations +import com.prisma.{IgnoreMySql, IgnorePostgres, IgnoreSQLite} import com.prisma.api.ApiSpecBase import com.prisma.api.connector.jdbc.impl.JdbcDatabaseMutactionExecutor -import com.prisma.api.connector.sqlite.native.SQLiteDatabaseMutactionExecutor +import com.prisma.api.connector.native.NativeDatabaseMutactionExecutor import com.prisma.shared.models.ConnectorCapability.{JoinRelationLinksCapability, RawAccessCapability} import com.prisma.shared.models.{ConnectorCapability, Project} import com.prisma.shared.schema_dsl.SchemaDsl @@ -42,7 +43,7 @@ class ExecuteRawSpec extends WordSpecLike with Matchers with ApiSpecBase { lazy val slickDatabase 
= testDependencies.databaseMutactionExecutor match { case m: JdbcDatabaseMutactionExecutor => m.slickDatabase - case m: SQLiteDatabaseMutactionExecutor => m.slickDatabaseArg + case m: NativeDatabaseMutactionExecutor => m.slickDatabaseArg } lazy val isMySQL = slickDatabase.isMySql @@ -137,7 +138,7 @@ class ExecuteRawSpec extends WordSpecLike with Matchers with ApiSpecBase { } } - "syntactic errors should bubble through to the user" in { + "syntactic errors should bubble through to the user" taggedAs (IgnoreSQLite, IgnoreMySql, IgnorePostgres) in { val (errorCode, errorContains) = () match { case _ if isPostgres => (0, "syntax error at end of input") case _ if isMySQL => (1064, "check the manual that corresponds to your MySQL server version for the right syntax to use near") @@ -156,7 +157,7 @@ class ExecuteRawSpec extends WordSpecLike with Matchers with ApiSpecBase { ) } - "other errors should also bubble through to the user" in { + "other errors should also bubble through to the user" taggedAs (IgnoreSQLite, IgnoreMySql, IgnorePostgres) in { val id = createTodo("title") val (errorCode, errorContains) = () match { case _ if isPostgres => (0, "duplicate key value violates unique constraint") diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/SameModelSelfRelationWithoutBackRelationSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/SameModelSelfRelationWithoutBackRelationSpec.scala index 6ba2a2bf1f..8878e087e3 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/SameModelSelfRelationWithoutBackRelationSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/SameModelSelfRelationWithoutBackRelationSpec.scala @@ -76,11 +76,9 @@ class SameModelSelfRelationWithoutBackRelationSpec extends FlatSpec with Matcher testDataModels.testV11 { project => server.query("mutation{createPost(data:{identifier: 1}){identifier}}", project) server.query("mutation{createPost(data:{identifier: 2}){identifier}}", 
project) - server.query( """mutation { | updatePost ( - | | where:{identifier: 1} | data: { | related: { diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/NonEmbeddedDeleteScalarListsSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/NonEmbeddedDeleteScalarListsSpec.scala index daa297891a..e85de0884a 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/NonEmbeddedDeleteScalarListsSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/NonEmbeddedDeleteScalarListsSpec.scala @@ -1,6 +1,6 @@ package com.prisma.api.mutations.nonEmbedded -import com.prisma.{IgnoreMongo, IgnoreSQLite} +import com.prisma.{IgnoreMongo, IgnorePostgres, IgnoreSQLite} import com.prisma.api.ApiSpecBase import com.prisma.shared.models.ConnectorCapability.{JoinRelationLinksCapability, NonEmbeddedScalarListCapability, ScalarListsCapability} import com.prisma.shared.models.Project @@ -52,7 +52,7 @@ class NonEmbeddedDeleteScalarListsSpec extends FlatSpec with Matchers with ApiSp res.toString should be("""{"data":{"updateTop":{"name":"test","bottom":null}}}""") } - "A cascading delete mutation" should "also delete ListTable entries" taggedAs (IgnoreMongo, IgnoreSQLite) in { // TODO: Remove SQLite ignore when cascading again + "A cascading delete mutation" should "also delete ListTable entries" taggedAs (IgnoreMongo, IgnoreSQLite, IgnorePostgres) in { // TODO: Remove SQLite ignore when cascading again val project: Project = SchemaDsl.fromStringV11() { s"""type Top { diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/WhereAndDateTimeSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/WhereAndDateTimeSpec.scala index b56696f330..cda03f2968 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/WhereAndDateTimeSpec.scala +++ 
b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/WhereAndDateTimeSpec.scala @@ -82,9 +82,8 @@ class WhereAndDateTimeSpec extends FlatSpec with Matchers with ApiSpecBase { } "Using the same input in an update using where as used during creation of the item" should "work with the same time for inner and outer" in { - - val outerWhere = """"2018-01-03T11:27:38+00:00"""" - val innerWhere = """"2018-01-03T11:27:38+00:00"""" + val outerWhere = """"2018-01-03T11:27:38.000Z"""" + val innerWhere = """"2018-01-03T11:27:38.000Z"""" database.setup(project) diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedConnectMutationInsideUpdateSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedConnectMutationInsideUpdateSpec.scala index e0c1e96991..16149b4eef 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedConnectMutationInsideUpdateSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedConnectMutationInsideUpdateSpec.scala @@ -51,8 +51,7 @@ class NestedConnectMutationInsideUpdateSpec extends FlatSpec with Matchers with ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(2) } server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | updateParent( | where: {id: "$parentId2"} | data:{ @@ -117,8 +116,7 @@ class NestedConnectMutationInsideUpdateSpec extends FlatSpec with Matchers with ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(2) } server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | updateParent( | where: {id: "$parentId2"} | data:{ @@ -687,8 +685,7 @@ class NestedConnectMutationInsideUpdateSpec extends FlatSpec with Matchers with ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(2) } 
server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | updateParent( | where: {p: "p2"} | data:{ diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDeleteMutationInsideUpsertSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDeleteMutationInsideUpsertSpec.scala index 97ab704537..2f597f79c3 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDeleteMutationInsideUpsertSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDeleteMutationInsideUpsertSpec.scala @@ -36,8 +36,7 @@ class NestedDeleteMutationInsideUpsertSpec extends FlatSpec with Matchers with A ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) } server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where: {id: "$parentId"} | update:{ @@ -90,8 +89,7 @@ class NestedDeleteMutationInsideUpsertSpec extends FlatSpec with Matchers with A ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) } server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where: {id: "$parentId"} | update:{ @@ -201,8 +199,7 @@ class NestedDeleteMutationInsideUpsertSpec extends FlatSpec with Matchers with A ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(0) } val res = server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where:{id: "$parent1Id"} | update:{ @@ -400,8 +397,7 @@ class NestedDeleteMutationInsideUpsertSpec extends FlatSpec with Matchers with A ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) } server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where: {p: "p1"} | update:{ @@ -577,8 +573,7 @@ class 
NestedDeleteMutationInsideUpsertSpec extends FlatSpec with Matchers with A ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) } server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where: { p: "p1"} | update:{ @@ -1036,8 +1031,7 @@ class NestedDeleteMutationInsideUpsertSpec extends FlatSpec with Matchers with A val noteId = createResult.pathAsString("data.createNote.id") val result = server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertNote( | where: {id: "$noteId"} | update: { diff --git a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDisconnectMutationInsideUpsertSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDisconnectMutationInsideUpsertSpec.scala index 286cd27c9d..be2643a830 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDisconnectMutationInsideUpsertSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedDisconnectMutationInsideUpsertSpec.scala @@ -94,8 +94,7 @@ class NestedDisconnectMutationInsideUpsertSpec extends FlatSpec with Matchers wi ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(0) } val res = server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where:{id: "$parent1Id"} | update:{ @@ -144,8 +143,7 @@ class NestedDisconnectMutationInsideUpsertSpec extends FlatSpec with Matchers wi ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) } server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where: {p: "p1"} | update:{ @@ -191,8 +189,7 @@ class NestedDisconnectMutationInsideUpsertSpec extends FlatSpec with Matchers wi ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) } 
server.queryThatMustFail( - s""" - |mutation { + s"""mutation { | upsertParent( | where: {p: "p1"} | update:{ diff --git a/server/servers/api/src/test/scala/com/prisma/api/queries/ExtendedPaginationSpec.scala b/server/servers/api/src/test/scala/com/prisma/api/queries/ExtendedPaginationSpec.scala index 2f7bf433b6..bb62ccd8c9 100644 --- a/server/servers/api/src/test/scala/com/prisma/api/queries/ExtendedPaginationSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/api/queries/ExtendedPaginationSpec.scala @@ -5,6 +5,7 @@ import com.prisma.shared.models.ConnectorCapability.JoinRelationLinksCapability import com.prisma.shared.models.{ConnectorCapability, Project} import com.prisma.shared.schema_dsl.SchemaDsl import org.scalatest.{FlatSpec, Matchers} +import play.api.libs.json.Json class ExtendedPaginationSpec extends FlatSpec with Matchers with ApiSpecBase { @@ -65,8 +66,8 @@ class ExtendedPaginationSpec extends FlatSpec with Matchers with ApiSpecBase { project ) - result.toString() should be( - """{"data":{"tops":[{"t":"T1","middles":[{"m":"M11","bottoms":[{"b":"B111"},{"b":"B112"},{"b":"B113"}]},{"m":"M12","bottoms":[{"b":"B121"},{"b":"B122"},{"b":"B123"}]},{"m":"M13","bottoms":[{"b":"B131"},{"b":"B132"},{"b":"B133"}]}]},{"t":"T2","middles":[{"m":"M21","bottoms":[{"b":"B211"},{"b":"B212"},{"b":"B213"}]},{"m":"M22","bottoms":[{"b":"B221"},{"b":"B222"},{"b":"B223"}]},{"m":"M23","bottoms":[{"b":"B231"},{"b":"B232"},{"b":"B233"}]}]},{"t":"T3","middles":[{"m":"M31","bottoms":[{"b":"B311"},{"b":"B312"},{"b":"B313"}]},{"m":"M32","bottoms":[{"b":"B321"},{"b":"B322"},{"b":"B323"}]},{"m":"M33","bottoms":[{"b":"B331"},{"b":"B332"},{"b":"B333"}]}]}]}}""") + result should be( + 
Json.parse("""{"data":{"tops":[{"t":"T1","middles":[{"m":"M11","bottoms":[{"b":"B111"},{"b":"B112"},{"b":"B113"}]},{"m":"M12","bottoms":[{"b":"B121"},{"b":"B122"},{"b":"B123"}]},{"m":"M13","bottoms":[{"b":"B131"},{"b":"B132"},{"b":"B133"}]}]},{"t":"T2","middles":[{"m":"M21","bottoms":[{"b":"B211"},{"b":"B212"},{"b":"B213"}]},{"m":"M22","bottoms":[{"b":"B221"},{"b":"B222"},{"b":"B223"}]},{"m":"M23","bottoms":[{"b":"B231"},{"b":"B232"},{"b":"B233"}]}]},{"t":"T3","middles":[{"m":"M31","bottoms":[{"b":"B311"},{"b":"B312"},{"b":"B313"}]},{"m":"M32","bottoms":[{"b":"B321"},{"b":"B322"},{"b":"B323"}]},{"m":"M33","bottoms":[{"b":"B331"},{"b":"B332"},{"b":"B333"}]}]}]}}""")) } } @@ -216,8 +217,8 @@ class ExtendedPaginationSpec extends FlatSpec with Matchers with ApiSpecBase { project ) - result.toString() should be( - """{"data":{"tops":[{"middles":[{"bottoms":[{"b":"B111"},{"b":"B112"},{"b":"B113"}]},{"bottoms":[{"b":"B121"},{"b":"B122"},{"b":"B123"}]},{"bottoms":[{"b":"B131"},{"b":"B132"},{"b":"B133"}]}]},{"middles":[{"bottoms":[{"b":"B211"},{"b":"B212"},{"b":"B213"}]},{"bottoms":[{"b":"B221"},{"b":"B222"},{"b":"B223"}]},{"bottoms":[{"b":"B231"},{"b":"B232"},{"b":"B233"}]}]},{"middles":[{"bottoms":[{"b":"B311"},{"b":"B312"},{"b":"B313"}]},{"bottoms":[{"b":"B321"},{"b":"B322"},{"b":"B323"}]},{"bottoms":[{"b":"B331"},{"b":"B332"},{"b":"B333"}]}]}]}}""") + result should be( + Json.parse("""{"data":{"tops":[{"middles":[{"bottoms":[{"b":"B111"},{"b":"B112"},{"b":"B113"}]},{"bottoms":[{"b":"B121"},{"b":"B122"},{"b":"B123"}]},{"bottoms":[{"b":"B131"},{"b":"B132"},{"b":"B133"}]}]},{"middles":[{"bottoms":[{"b":"B211"},{"b":"B212"},{"b":"B213"}]},{"bottoms":[{"b":"B221"},{"b":"B222"},{"b":"B223"}]},{"bottoms":[{"b":"B231"},{"b":"B232"},{"b":"B233"}]}]},{"middles":[{"bottoms":[{"b":"B311"},{"b":"B312"},{"b":"B313"}]},{"bottoms":[{"b":"B321"},{"b":"B322"},{"b":"B323"}]},{"bottoms":[{"b":"B331"},{"b":"B332"},{"b":"B333"}]}]}]}}""")) } } @@ -233,8 +234,8 @@ class ExtendedPaginationSpec 
extends FlatSpec with Matchers with ApiSpecBase { project ) - result.toString() should be( - """{"data":{"tops":[{"middles":[{"bottoms":[{"b":"B112"},{"b":"B113"}]},{"bottoms":[{"b":"B122"},{"b":"B123"}]},{"bottoms":[{"b":"B132"},{"b":"B133"}]}]},{"middles":[{"bottoms":[{"b":"B212"},{"b":"B213"}]},{"bottoms":[{"b":"B222"},{"b":"B223"}]},{"bottoms":[{"b":"B232"},{"b":"B233"}]}]},{"middles":[{"bottoms":[{"b":"B312"},{"b":"B313"}]},{"bottoms":[{"b":"B322"},{"b":"B323"}]},{"bottoms":[{"b":"B332"},{"b":"B333"}]}]}]}}""") + result should be( + Json.parse("""{"data":{"tops":[{"middles":[{"bottoms":[{"b":"B112"},{"b":"B113"}]},{"bottoms":[{"b":"B122"},{"b":"B123"}]},{"bottoms":[{"b":"B132"},{"b":"B133"}]}]},{"middles":[{"bottoms":[{"b":"B212"},{"b":"B213"}]},{"bottoms":[{"b":"B222"},{"b":"B223"}]},{"bottoms":[{"b":"B232"},{"b":"B233"}]}]},{"middles":[{"bottoms":[{"b":"B312"},{"b":"B313"}]},{"bottoms":[{"b":"B322"},{"b":"B323"}]},{"bottoms":[{"b":"B332"},{"b":"B333"}]}]}]}}""")) } } diff --git a/server/servers/api/src/test/scala/com/prisma/subscriptions/NonEmbeddedServerSideSubscriptionSpec.scala b/server/servers/api/src/test/scala/com/prisma/subscriptions/NonEmbeddedServerSideSubscriptionSpec.scala index 8bffc4e9e1..7817f50ba0 100644 --- a/server/servers/api/src/test/scala/com/prisma/subscriptions/NonEmbeddedServerSideSubscriptionSpec.scala +++ b/server/servers/api/src/test/scala/com/prisma/subscriptions/NonEmbeddedServerSideSubscriptionSpec.scala @@ -1,7 +1,7 @@ package com.prisma.subscriptions import com.prisma.ConnectorTag -import com.prisma.ConnectorTag.SQLiteConnectorTag +import com.prisma.ConnectorTag.{PostgresConnectorTag, SQLiteConnectorTag} import com.prisma.api.ApiSpecBase import com.prisma.shared.models.ConnectorCapability.JoinRelationLinksCapability import com.prisma.shared.models._ @@ -12,7 +12,7 @@ import org.scalatest.{FlatSpec, Matchers} class NonEmbeddedServerSideSubscriptionSpec extends FlatSpec with Matchers with ApiSpecBase with ScalaFutures { override 
def runOnlyForCapabilities: Set[ConnectorCapability] = Set(JoinRelationLinksCapability) - override def doNotRunForConnectors: Set[ConnectorTag] = Set(SQLiteConnectorTag) + override def doNotRunForConnectors: Set[ConnectorTag] = Set(SQLiteConnectorTag, PostgresConnectorTag) val webhookTestKit = testDependencies.webhookPublisher diff --git a/server/servers/deploy/src/test/scala/com/prisma/shared/schema_dsl/SchemaDsl.scala b/server/servers/deploy/src/test/scala/com/prisma/shared/schema_dsl/SchemaDsl.scala index 038bd987b4..0f2c063532 100644 --- a/server/servers/deploy/src/test/scala/com/prisma/shared/schema_dsl/SchemaDsl.scala +++ b/server/servers/deploy/src/test/scala/com/prisma/shared/schema_dsl/SchemaDsl.scala @@ -71,8 +71,8 @@ object SchemaDsl extends AwaitUtils { val schema = SchemaInferrer(capabilities).infer(emptyBaseSchema, emptySchemaMapping, prismaSdl) val withBackRelationsAdded = MissingBackRelations.add(schema) val manifestation = ConfigLoader.load().databases.head.connector match { - case x if x == "postgres" => ProjectManifestation(database = Some(id + "_DB"), schema = Some(id + "_S"), x) - case y => ProjectManifestation(database = Some(id + "_DB"), schema = None, y) + case x if x == "postgres" || x == "postgres-native" => ProjectManifestation(database = Some(id + "_DB"), schema = Some(id + "_S"), x) + case y => ProjectManifestation(database = Some(id + "_DB"), schema = None, y) } TestProject.emptyV11.copy(id = id, schema = withBackRelationsAdded, manifestation = manifestation) } diff --git a/server/servers/servers-shared/src/test/scala/com/prisma/ConnectorAwareTest.scala b/server/servers/servers-shared/src/test/scala/com/prisma/ConnectorAwareTest.scala index 0f7c02a9cb..031fbc1abe 100644 --- a/server/servers/servers-shared/src/test/scala/com/prisma/ConnectorAwareTest.scala +++ b/server/servers/servers-shared/src/test/scala/com/prisma/ConnectorAwareTest.scala @@ -48,7 +48,7 @@ trait ConnectorAwareTest extends SuiteMixin { self: Suite => lazy val 
connectorTag = connector.connector match { case "mongo" => ConnectorTag.MongoConnectorTag case "mysql" => ConnectorTag.MySqlConnectorTag - case "postgres" => ConnectorTag.PostgresConnectorTag + case "postgres" | "postgres-native" => ConnectorTag.PostgresConnectorTag case "sqlite" | "sqlite-native" | "native-integration-tests" => ConnectorTag.SQLiteConnectorTag } private lazy val isPrototype: Boolean = prismaConfig.isPrototype diff --git a/server/shared-models/src/main/scala/com/prisma/shared/models/Project.scala b/server/shared-models/src/main/scala/com/prisma/shared/models/Project.scala index 2aaf508a90..d4285ad784 100644 --- a/server/shared-models/src/main/scala/com/prisma/shared/models/Project.scala +++ b/server/shared-models/src/main/scala/com/prisma/shared/models/Project.scala @@ -21,9 +21,9 @@ case class Project( val serverSideSubscriptionFunctions = functions.collect { case x: ServerSideSubscriptionFunction => x } val dbName: String = manifestation match { - case ProjectManifestation(Some(_), Some(schema), "postgres") => schema + case ProjectManifestation(Some(_), Some(schema), "postgres" | "postgres-native") => schema case ProjectManifestation(Some(_), Some(schema), _) => sys.error("Only Postgres allows schema + database.") - case ProjectManifestation(Some(_), None, "postgres") => id + case ProjectManifestation(Some(_), None, "postgres" | "postgres-native") => id case ProjectManifestation(Some(database), None, "mongo" | "mysql" | "sqlite" | "sqlite-native" | "native-integration-tests") => database case ProjectManifestation(Some(database), None, _) => sys.error("We only have four connectors atm.") case ProjectManifestation(None, Some(_), _) => sys.error("You cannot provide a schema only.")