Skip to content

Commit

Permalink
Test fixes, array agg, docs
Browse files Browse the repository at this point in the history
  • Loading branch information
Katrix committed Aug 4, 2024
1 parent d641e7c commit 59a6d48
Show file tree
Hide file tree
Showing 11 changed files with 219 additions and 37 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -892,8 +892,5 @@ trait SqlQueries extends SqlQueriesBase { platform: SqlQueryPlatform =>
}

extension [A](query: Query[[F[_]] =>> F[A]])
// TODO: Make use of an implicit conversion here?
@targetName("queryAsMany") override def asMany: Many[A] = query.asDbValue.unsafeDbValAsMany

@targetName("queryAsDbValue") override def asDbValue: DbValue[A] = SqlDbValue.SubSelect(query).lift
}
Original file line number Diff line number Diff line change
Expand Up @@ -130,13 +130,10 @@ trait SqlQueriesBase extends SqlQueryPlatformBase, SqlDbValuesBase { platform =>
end SqlQueryCompanion

extension [A](query: Query[[F[_]] =>> F[A]])
// TODO: Make use of an implicit conversion here?
@targetName("queryAsMany") def asMany: Many[A]

@targetName("queryAsDbValue") def asDbValue: DbValue[A]

type Api <: SqlQueryApi & SqlDbValueApi & QueryApi
trait SqlQueryApi {
export platform.{asDbValue, asMany}
export platform.asDbValue
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,12 @@ trait SqlArrays extends SqlDbValuesBase { platform =>
}
}

extension [A](many: Many[A])
  // Aggregates the grouped values into a single SQL array (array_agg).
  // NOTE(review): the element type is taken from the underlying DbValue's tpe;
  // platform-specific array type mapping is handled by arrayOfType.
  @targetName("sqlArrayArrayAgg") def arrayAgg: DbValue[Seq[A]] =
    val v = Many.unsafeAsDbValue(many)
    Impl.function(SqlExpr.FunctionName.ArrayAgg, Seq(v.asAnyDbVal), arrayOfType(v.tpe))

type Impl <: SqlArraysImpl & SqlValuesBaseImpl & SqlBaseImpl
trait SqlArraysImpl {
def queryFunction[A[_[_]]: ApplyKC: TraverseKC](
Expand All @@ -141,6 +147,7 @@ trait SqlArrays extends SqlDbValuesBase { platform =>
trait SqlArraysApi {
export platform.DbArrayLike
export platform.DbArrayLike.given
export platform.arrayAgg

type DbArrayCompanion = platform.DbArrayCompanion
inline def DbArray: DbArrayCompanion = platform.DbArray
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -582,7 +582,7 @@ trait SqlDbValues extends SqlDbValuesBase { platform: SqlQueryPlatform =>
.Function(SqlExpr.FunctionName.Count, Seq(many.unsafeAsDbValue.asAnyDbVal), AnsiTypes.bigint.notNull)
.lift

inline def unsafeAsDbValue: DbValue[A] = many.asInstanceOf[DbValue[A]]
def unsafeAsDbValue: DbValue[A] = many.asInstanceOf[DbValue[A]]

def map[B](f: DbValue[A] => DbValue[B]): Many[B] = f(many.asInstanceOf[DbValue[A]]).asInstanceOf[DbValue[Any]]
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,7 @@ trait SqlDbValuesBase extends SqlQueryPlatformBase { platform =>
// TODO: Check that the return type is indeed Long on all platforms
def count: DbValue[Long]

inline def unsafeAsDbValue: DbValue[A]
def unsafeAsDbValue: DbValue[A]

def map[B](f: DbValue[A] => DbValue[B]): Many[B]
}
Expand Down Expand Up @@ -283,7 +283,7 @@ trait SqlDbValuesBase extends SqlQueryPlatformBase { platform =>
val Case: CaseCompanion
type CaseCompanion <: SqlCaseCompanion

trait SqlCaseCompanion {
trait SqlCaseCompanion {
def apply[A](v: DbValue[A]): ValueCase0[A]
def when[A](whenCond: DbValue[Boolean])(thenV: DbValue[A]): ConditionCase[A]
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,7 @@ class AstRenderer[Codec[_]](ansiTypes: AnsiTypes[Codec], getCodecTypeName: [A] =
case SqlExpr.FunctionName.ArrayContains => normal("array_contains")
case SqlExpr.FunctionName.TrimArray => normal("trim_array")
case SqlExpr.FunctionName.Unnest => normal("unnest")
case SqlExpr.FunctionName.ArrayAgg => normal("array_agg")

case SqlExpr.FunctionName.Custom(f) => normal(f)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,7 @@ object SqlExpr {
case ArrayContains
case TrimArray
case Unnest
case ArrayAgg

case Custom(f: String)

Expand Down
21 changes: 14 additions & 7 deletions common/src/test/scala/dataprism/PlatformArraysSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -116,21 +116,28 @@ trait PlatformArraysSuite[Codec0[_], Platform <: SqlQueryPlatform { type Codec[A
val arr2Type: Type[Seq[Seq[A]]] = platform.arrayOfType(arr1Type)
Select(
Query.of(
// Seq().as(arr1Type),
// Seq(v1).as(arr1Type),
// Seq(v1, v2).as(arr1Type),
Seq().as(arr1Type),
Seq(v1).as(arr1Type),
Seq(v1, v2).as(arr1Type),
Seq(Seq(v1, v2), Seq(v3, v4)).as(arr2Type)
)
)
.runOne[F]
.map: /*r1, r2, r3,*/ r4 =>
.map: (r1, r2, r3, r4) =>
expect.all(
// r1 == Seq(),
// r2 == Seq(v1),
// r3 == Seq(v1, v2),
r1 == Seq(),
r2 == Seq(v1),
r3 == Seq(v1, v2),
r4 == Seq(Seq(v1, v2), Seq(v3, v4))
)

typeTest("ArrayAgg", tpe):
configuredForall(genNel(gen)): (v, vs) =>
Select(Query.values(tpe)(v, vs*).mapSingleGrouped(v => v.arrayAgg))
.runOne[F]
.map: r =>
expect(r == (v +: vs))

end testArrays

def testArrayUnnest[A: Show](tpe: Type[A], gen: Gen[A], opV: A => A, opDb: DbValue[A] => DbValue[A])(
Expand Down
18 changes: 2 additions & 16 deletions docs/_docs/guide/02_queries.md
Original file line number Diff line number Diff line change
Expand Up @@ -133,15 +133,9 @@ traditional `groupBy` function, as a `groupMap` function maps better to DataPris
The second function does the aggregation, given both the extracted value and the values of the
query, which are now wrapped in `Many`. Here are some examples:

Arrays are currently out of commission.

```scala 3 sc-compile-with:User.scala
/*
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

//Needed for arrayAgg currently
import dataprism.jdbc.sql.PostgresJdbcTypes.ArrayMapping.given_ArrayMapping_A
val q: Query[UserK] = Query.from(UserK.table)

val q1: Query[[F[_]] =>> (F[String], F[Seq[String]])] =
Expand All @@ -154,7 +148,6 @@ val q2: Query[[F[_]] =>> (F[Option[String]], F[String], F[Seq[String]])] =
(t: (DbValue[Option[String]], DbValue[String]), v: UserK[Many]) =>
(t._1, t._2, v.email.arrayAgg)
)
*/
```

Note how you don't have to directly return a column from the grouping function. For example, in `q3`
Expand All @@ -173,8 +166,8 @@ want flatMap for a database which does not support `LATERAL`, create your own pl
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

//TODO: Does not compile for some reason. Fix MapRes
//val q1: Query[[F[_]] =>> (UserK[F], UserK[F])] =
// Query.from(UserK.table).flatMap(u1 => Query.from(UserK.table).map(u2 => (u1, u2)))
val q1: Query[[F[_]] =>> (UserK[F], UserK[F])] =
Query.from(UserK.table).flatMap(u1 => Query.from(UserK.table).map(u2 => (u1, u2)))

val q2: Query[UserK] = for
u <- Query.from(UserK.table)
Expand All @@ -190,15 +183,9 @@ and similar. The HKD used to define a table is not special. Any HKD (or not even
with `perspective.ApplyKC` and `perspective.TraverseKC` instances can be used as a result type in
functions like `map`. Here's one example:

Arrays are currently out of commission.

```scala 3 sc-compile-with:User.scala
/*
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

//Needed for arrayAgg currently
import dataprism.jdbc.sql.PostgresJdbcTypes.ArrayMapping.given_ArrayMapping_A
case class UsersWithEmailK[F[_]](email: F[String], usernames: F[Seq[String]])

object UsersWithEmailK:
Expand All @@ -209,7 +196,6 @@ val q1: Query[UsersWithEmailK] =
Query.from(UserK.table).groupMap((v: UserK[DbValue]) => v.email)(
(email: DbValue[String], v: UserK[Many]) => UsersWithEmailK(email, v.username.arrayAgg)
)
*/
```

For more info, see [MapRes and Exotic data](07_mapres_exotic_data.md)
190 changes: 188 additions & 2 deletions docs/_docs/guide/05_dbvalue_expressions.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,22 +19,208 @@ do `Some("a").as(text.nullable)` or more simply `"a".asNullable(text)`.

## Expressions

TODO
Here are some operations that can be done on expressions. This is not a comprehensive list.

### Primitive operators and functions

Here are some of the operators and functions found on all db values:

All DbValues can be compared for equality. This is done using the `===` and `!==` operators.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

val a: DbValue[Boolean] = ???
val b: DbValue[Boolean] = ???

val eq = a === b
val neq = a !== b
```

DbValues also support casting and wrapping in `Some`. For casting you need a `CastType`. For most platforms, these are
the normal types used elsewhere in DataPrism. For MySQL platforms, they are special types found at `MySqlJdbcTypes.castType`.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}
import dataprism.jdbc.sql.PostgresJdbcTypes
val a: DbValue[Boolean] = ???
val asSome = a.asSome //Wrapping in Some
val casted = a.cast(PostgresJdbcTypes.integer)
```

### Booleans

Normal boolean operations work with DbValues.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

val a: DbValue[Boolean] = ???
val b: DbValue[Boolean] = ???

val and = a && b
val or = a || b
val not = !a
```

### Numerics

Most DataPrism numeric operators are found in the `SqlNumeric` typeclass.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

val a: DbValue[Double] = ???
val b: DbValue[Double] = ???

val plus = a + b
val minus = a - b
val times = a * b
val div = a / b
val mod = a % b
val neg = -a
```

### Math functions

DataPrism puts a lot of math functions in the `DbMath` object.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}
import dataprism.jdbc.sql.PostgresJdbcTypes

val a: DbValue[Double] = ???
val b: DbValue[Double] = ???

val pow = DbMath.pow(a, b)
val sqrt = DbMath.sqrt(a)
val ceil = DbMath.ceil(a)
val floor = DbMath.floor(a)
val log = DbMath.log(a, b)
val sign = DbMath.sign(a)
val pi = DbMath.pi(PostgresJdbcTypes.doublePrecision) //Cast type here
val random = DbMath.random(PostgresJdbcTypes.doublePrecision) //Cast type here

val sin = DbMath.sin(a)
val cos = DbMath.cos(a)
val tan = DbMath.tan(a)
```

### Nullable values
Here are operations that can be used on nullable values.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

val a: DbValue[Option[Double]] = ???
val b: DbValue[Option[Double]] = ???
val c: DbValue[Double] = ???

val map = a.map(v => v + c)
val filter = a.filter(v => v > c)
val flatMap = a.flatMap(_ => b)

val isEmpty = a.isEmpty
val isDefined = a.isDefined

val orElse = a.orElse(b)
val getOrElse = a.getOrElse(c)

val mapN = (a, b).mapNullableN((v1, v2) => v1 + v2)
```

### Many
`Many` is the type used to represent a group of values, usually produced by grouping operations like `groupMap` or used in `having` clauses.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

val v: DbValue[Double] = ???
val m1: Many[Double] = ???
val m2: Many[Double] = ???

val map = m1.map(_ + v)
val count = m2.count

val mapN = (m1, m2).mapManyN((a, b) => a + b)
```

### String operations
Here are some SQL string operations. These are found in the `SqlString` typeclass.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}
import dataprism.jdbc.sql.PostgresJdbcTypes

val a: DbValue[String] = ???
val b: DbValue[String] = ???
val c: DbValue[String] = ???

val concat = a ++ b
val repeat = a * 5.as(PostgresJdbcTypes.integer)
val length = a.length
val lower = a.toLowerCase
val upper = a.toUpperCase
val like = a.like(b)
val startsWith = a.startsWith(b)
val endsWith = a.endsWith(b)
val replace = a.replace(b, c)

val concat2 = SqlString.concat(a, b)
val concatWs = SqlString.concatWs(" ".as(PostgresJdbcTypes.text), a, b)
```

### In
The SQL `IN` operator looks like this.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}
import dataprism.jdbc.sql.PostgresJdbcTypes

val a: DbValue[Boolean] = ???
val b: DbValue[Boolean] = ???
val c: DbValue[Boolean] = ???

val in1 = a.in(b, c)
val in2 = a.notIn(b, c)
val in3 = a.in(Query.of(b))
val in4 = a.notIn(Query.of(b))
val in5 = a.inAs(Seq(true, false), PostgresJdbcTypes.boolean)
val in6 = a.notInAs(Seq(true, false), PostgresJdbcTypes.boolean)
```

### Case
DataPrism supports `Case` both as a chain of if-checks and as a pattern-match-like operator.

```scala 3
import dataprism.jdbc.platform.PostgresJdbcPlatform.Api.{*, given}

val v: DbValue[Double] = ???
val w1: DbValue[Double] = ???
val w2: DbValue[Double] = ???

val t1: DbValue[Int] = ???
val t2: DbValue[Int] = ???
val t3: DbValue[Int] = ???

Case(v)
.when(w1)(t1)
.when(w2)(t2)
.otherwise(t3)

Case
.when(v === w1)(t1)
.when(v === w2)(t2)
.otherwise(t3)
```

### Custom SQL

Custom SQL can be created using the functions `DbValue.raw` and `DbValue.rawK`.

### Custom SQL functions
When a custom SQL function is needed, there exist helpers called `DbValue.function` and `DbValue.functionK` to help
create these functions.

### Custom SQL strings

Loading

0 comments on commit 59a6d48

Please sign in to comment.