Simplify Mirror Codec

parent 1bc6baad68
commit 17e97495b7

17 changed files with 275 additions and 112 deletions
@@ -1,7 +1,14 @@
 name := "minisql"
 
-scalaVersion := "3.7.0"
+scalaVersion := "3.7.1"
 
 libraryDependencies ++= Seq(
   "org.scalameta" %% "munit" % "1.0.3" % Test
 )
+
+scalacOptions ++= Seq(
+  "-deprecation",
+  "-feature",
+  "-source:3.7-migration",
+  "-rewrite"
+)
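The new compiler flags explain most of the mechanical edits further down in this commit: -source:3.7-migration combined with -rewrite lets the compiler rewrite old-style call sites and eta-expansions in place. A hypothetical sketch (not from this repo) of the two rewrites that show up repeatedly below:

    // Hypothetical sketch (not from this repo) of the rewrites performed by
    // -source:3.7-migration together with -rewrite.
    object MigrationSketch {
      def render(ast: String)(implicit indent: Int): String = " " * indent + ast

      // Old call site: render("select *")(2)
      // Rewritten form, as seen throughout this diff:
      val rewritten: String = render("select *")(using 2)

      // Old method value: identity[String] _
      // Rewritten form (the trailing underscore is dropped):
      val f: String => String = identity[String]
    }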
@@ -1,14 +1,23 @@
 package minisql
 
 import minisql.context.mirror.*
+import minisql.util.Messages.fail
+import scala.reflect.ClassTag
 
 class MirrorContext[Idiom <: idiom.Idiom, Naming <: NamingStrategy](
     val idiom: Idiom,
     val naming: Naming
-) extends context.Context[Idiom, Naming] {
+) extends context.Context[Idiom, Naming]
+    with MirrorCodecs {
 
-  type DBRow = Row
+  type DBRow = IArray[Any] *: EmptyTuple
+  type DBResultSet = Iterable[DBRow]
+  type DBStatement = Map[Int, Any]
 
-  type DBResultSet = ResultSet
+  extension (r: DBRow) {
+    def data: IArray[Any] = r._1
+    def add(value: Any): DBRow = (r.data :+ value) *: EmptyTuple
+  }
 
 }
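The mirror row is no longer an opaque type defined in the mirror package; it is now a one-element tuple wrapping an IArray[Any], declared directly on the context. A minimal standalone sketch of how the new representation behaves, using only the definitions visible in this hunk:

    // Standalone copy for illustration of the DBRow shape and its extensions.
    type DBRow = IArray[Any] *: EmptyTuple

    extension (r: DBRow) {
      def data: IArray[Any] = r._1
      def add(value: Any): DBRow = (r.data :+ value) *: EmptyTuple
    }

    val empty: DBRow = IArray.empty[Any] *: EmptyTuple
    val row: DBRow   = empty.add(1L).add("foo")
    // row.data.toList == List(1L, "foo")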
@@ -1,71 +1,139 @@
 package minisql.context.mirror
 
-import minisql.{MirrorContext, NamingStrategy, ParamEncoder, ColumnDecoder}
-import minisql.idiom.Idiom
+import minisql.MirrorContext
+import java.time.LocalDate
+import java.util.{Date, UUID}
+import minisql.{ParamEncoder, ColumnDecoder}
 import minisql.util.Messages.fail
+import scala.util.{Failure, Success, Try}
 import scala.util.Try
 import scala.reflect.ClassTag
 
-/**
- * No extra class defined
- */
-opaque type Row = IArray[Any] *: EmptyTuple
-opaque type ResultSet = Iterable[Row]
-opaque type Statement = Map[Int, Any]
-
-extension (r: Row) {
-
-  def data: IArray[Any] = r._1
-
-  def add(value: Any): Row = (r.data :+ value) *: EmptyTuple
-
-  def apply[T](idx: Int)(using t: ClassTag[T]): T = {
-    r.data(idx) match {
-      case v: T => v
-      case other =>
-        fail(
-          s"Invalid column type. Expected '${t.runtimeClass}', but got '$other'"
-        )
-    }
-  }
-}
-
-type Encoder[E] = ParamEncoder[E] {
-  type Stmt = Statement
-}
-
-private def encoder[V]: Encoder[V] = new ParamEncoder[V] {
-  type Stmt = Map[Int, Any]
-  def setParam(s: Stmt, idx: Int, v: V): Stmt = {
-    s + (idx -> v)
-  }
-}
-
-given Encoder[Long] = encoder[Long]
-
-type Decoder[A] = ColumnDecoder[A] {
-  type DBRow = Row
-}
-
-private def apply[X](conv: Any => Option[X]): Decoder[X] =
-  new ColumnDecoder[X] {
-    type DBRow = Row
-    def decode(row: Row, idx: Int): Try[X] = {
-      row._1
-        .lift(idx)
-        .flatMap { x =>
-          conv(x)
-        }
-        .toRight(new Exception(s"Cannot convert value at ${idx}"))
-        .toTry
-    }
-  }
-
-given Decoder[Long] = apply(x =>
-  x match {
-    case l: Long => Some(l)
-    case _ => None
-  }
-)
+trait MirrorCodecs {
+  ctx: MirrorContext[?, ?] =>
+
+  final protected def mirrorEncoder[V]: Encoder[V] = new ParamEncoder[V] {
+    type Stmt = ctx.DBStatement
+    def setParam(s: Stmt, idx: Int, v: V): Stmt = {
+      s + (idx -> v)
+    }
+  }
+
+  final protected def mirrorColumnDecoder[X](
+      conv: Any => Option[X]
+  ): Decoder[X] =
+    new ColumnDecoder[X] {
+      type DBRow = ctx.DBRow
+      def decode(row: DBRow, idx: Int): Try[X] = {
+        row.data
+          .lift(idx)
+          .flatMap { x =>
+            conv(x)
+          }
+          .toRight(new Exception(s"Cannot convert value at ${idx}"))
+          .toTry
+      }
+    }
+
+  given optionDecoder[T](using d: Decoder[T]): Decoder[Option[T]] = {
+    new ColumnDecoder[Option[T]] {
+      type DBRow = ctx.DBRow
+      override def decode(row: DBRow, idx: Int): Try[Option[T]] =
+        row.data.lift(idx) match {
+          case Some(null)  => Success(None)
+          case Some(value) => d.decode(row, idx).map(Some(_))
+          case None        => Success(None)
+        }
+    }
+  }
+
+  given optionEncoder[T](using e: Encoder[T]): Encoder[Option[T]] =
+    new ParamEncoder[Option[T]] {
+      type Stmt = ctx.DBStatement
+      override def setParam(
+          s: Stmt,
+          idx: Int,
+          v: Option[T]
+      ): Stmt =
+        v match {
+          case Some(value) => e.setParam(s, idx, value)
+          case None =>
+            s + (idx -> null)
+        }
+    }
+
+  // Implement all required decoders using mirrorColumnDecoder from MirrorCodecs
+  given stringDecoder: Decoder[String] = mirrorColumnDecoder[String](x =>
+    x match { case s: String => Some(s); case _ => None }
+  )
+  given bigDecimalDecoder: Decoder[BigDecimal] =
+    mirrorColumnDecoder[BigDecimal](x =>
+      x match {
+        case bd: BigDecimal => Some(bd); case i: Int => Some(BigDecimal(i));
+        case l: Long => Some(BigDecimal(l));
+        case d: Double => Some(BigDecimal(d)); case _ => None
+      }
+    )
+  given booleanDecoder: Decoder[Boolean] = mirrorColumnDecoder[Boolean](x =>
+    x match { case b: Boolean => Some(b); case _ => None }
+  )
+  given byteDecoder: Decoder[Byte] = mirrorColumnDecoder[Byte](x =>
+    x match {
+      case b: Byte => Some(b); case i: Int => Some(i.toByte); case _ => None
+    }
+  )
+  given shortDecoder: Decoder[Short] = mirrorColumnDecoder[Short](x =>
+    x match {
+      case s: Short => Some(s); case i: Int => Some(i.toShort); case _ => None
+    }
+  )
+  given intDecoder: Decoder[Int] = mirrorColumnDecoder[Int](x =>
+    x match { case i: Int => Some(i); case _ => None }
+  )
+  given longDecoder: Decoder[Long] = mirrorColumnDecoder[Long](x =>
+    x match {
+      case l: Long => Some(l); case i: Int => Some(i.toLong); case _ => None
+    }
+  )
+  given floatDecoder: Decoder[Float] = mirrorColumnDecoder[Float](x =>
+    x match {
+      case f: Float => Some(f); case d: Double => Some(d.toFloat);
+      case _ => None
+    }
+  )
+  given doubleDecoder: Decoder[Double] = mirrorColumnDecoder[Double](x =>
+    x match {
+      case d: Double => Some(d); case f: Float => Some(f.toDouble);
+      case _ => None
+    }
+  )
+  given byteArrayDecoder: Decoder[Array[Byte]] =
+    mirrorColumnDecoder[Array[Byte]](x =>
+      x match { case ba: Array[Byte] => Some(ba); case _ => None }
+    )
+  given dateDecoder: Decoder[Date] = mirrorColumnDecoder[Date](x =>
+    x match { case d: Date => Some(d); case _ => None }
+  )
+  given localDateDecoder: Decoder[LocalDate] =
+    mirrorColumnDecoder[LocalDate](x =>
+      x match { case ld: LocalDate => Some(ld); case _ => None }
+    )
+  given uuidDecoder: Decoder[UUID] = mirrorColumnDecoder[UUID](x =>
+    x match { case uuid: UUID => Some(uuid); case _ => None }
+  )
+
+  // Implement all required encoders using mirrorEncoder from MirrorCodecs
+  given stringEncoder: Encoder[String] = mirrorEncoder[String]
+  given bigDecimalEncoder: Encoder[BigDecimal] = mirrorEncoder[BigDecimal]
+  given booleanEncoder: Encoder[Boolean] = mirrorEncoder[Boolean]
+  given byteEncoder: Encoder[Byte] = mirrorEncoder[Byte]
+  given shortEncoder: Encoder[Short] = mirrorEncoder[Short]
+  given intEncoder: Encoder[Int] = mirrorEncoder[Int]
+  given longEncoder: Encoder[Long] = mirrorEncoder[Long]
+  given floatEncoder: Encoder[Float] = mirrorEncoder[Float]
+  given doubleEncoder: Encoder[Double] = mirrorEncoder[Double]
+  given byteArrayEncoder: Encoder[Array[Byte]] = mirrorEncoder[Array[Byte]]
+  given dateEncoder: Encoder[Date] = mirrorEncoder[Date]
+  given localDateEncoder: Encoder[LocalDate] = mirrorEncoder[LocalDate]
+  given uuidEncoder: Encoder[UUID] = mirrorEncoder[UUID]
+}
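The package-level opaque types and one-off Long codecs are replaced by a MirrorCodecs trait whose mirrorEncoder/mirrorColumnDecoder helpers produce the full set of primitive codecs. A hedged sketch (not part of the commit) of extending the same helpers for a custom wrapper type; UserId and MyCodecs are made-up names, the helper signatures are taken from the diff:

    import minisql.MirrorContext
    import minisql.context.mirror.MirrorCodecs

    final case class UserId(value: Long)

    // Subtrait of MirrorCodecs so the protected helpers stay accessible.
    trait MyCodecs extends MirrorCodecs { ctx: MirrorContext[?, ?] =>
      given userIdDecoder: Decoder[UserId] = mirrorColumnDecoder[UserId] {
        case l: Long => Some(UserId(l))
        case i: Int  => Some(UserId(i.toLong))
        case _       => None
      }
      given userIdEncoder: Encoder[UserId] = mirrorEncoder[UserId]
    }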
src/main/scala/minisql/context/sql/MirrorSqlContext.scala (new file, 24 lines)
@@ -0,0 +1,24 @@
+package minisql.context.sql
+
+import minisql.{NamingStrategy, MirrorContext}
+import minisql.context.Context
+import minisql.idiom.Idiom // Changed from minisql.idiom.* to avoid ambiguity with Statement
+import minisql.context.mirror.MirrorCodecs
+import minisql.context.ReturningClauseSupported
+import minisql.context.ReturningCapability
+
+class MirrorSqlIdiom extends idiom.SqlIdiom {
+  override def concatFunction: String = "CONCAT"
+  override def idiomReturningCapability: ReturningCapability =
+    ReturningClauseSupported
+
+  // Implementations previously provided by MirrorIdiomBase
+  override def prepareForProbing(string: String): String = string
+  override def liftingPlaceholder(index: Int): String = "?"
+}
+object MirrorSqlIdiom extends MirrorSqlIdiom
+
+class MirrorSqlContext[N <: NamingStrategy](naming: N)
+    extends MirrorContext[MirrorSqlIdiom, N](MirrorSqlIdiom, naming)
+    with SqlContext[MirrorSqlIdiom, N]
+    with MirrorCodecs {}
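A hedged usage sketch (not part of the commit), built only from names that appear elsewhere in this diff (MirrorSqlContext, SnakeCase, query/filter and ctx.io); whether the macro accepts a locally defined case class is an assumption:

    import minisql.*
    import minisql.context.sql.MirrorSqlContext

    case class Person(id: Long, name: String)

    object MirrorSqlExample {
      val ctx = new MirrorSqlContext(SnakeCase)
      import ctx.given

      // Mirrors the shape of the updated QuotedSuite test further down.
      val plan = ctx.io(query[Person]("person").filter(_.id > 0))
    }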
@@ -1,4 +1,4 @@
-package minisql
+package minisql.context.sql
 
 import minisql.context.{
   CanReturnClause,
@@ -20,13 +20,13 @@ trait OnConflictSupport {
     }
 
     val customAstTokenizer =
-      Tokenizer.withFallback[Ast](self.astTokenizer(_, strategy)) {
+      Tokenizer.withFallback[Ast](self.astTokenizer(using _, strategy)) {
        case _: OnConflict.Excluded => stmt"EXCLUDED"
        case OnConflict.Existing(a) => stmt"${a.token}"
        case a: Action =>
          self
            .actionTokenizer(customEntityTokenizer)(
-              actionAstTokenizer,
+              using actionAstTokenizer,
              strategy
            )
            .token(a)
@@ -37,7 +37,7 @@ trait OnConflictSupport {
     def doUpdateStmt(i: Token, t: Token, u: Update) = {
       val assignments = u.assignments
         .map(a =>
-          stmt"${actionAstTokenizer.token(a.property)} = ${scopedTokenizer(a.value)(customAstTokenizer)}"
+          stmt"${actionAstTokenizer.token(a.property)} = ${scopedTokenizer(a.value)(using customAstTokenizer)}"
        )
        .mkStmt()
 
@@ -65,6 +65,6 @@ trait OnConflictSupport {
       case OnConflict(i, p: Properties, Ignore) => doNothingStmt(i, p.token)
     }
 
-    tokenizer(customAstTokenizer)
+    tokenizer(using customAstTokenizer)
   }
 }
src/main/scala/minisql/context/sql/SqlContext.scala (new file, 44 lines)
@@ -0,0 +1,44 @@
+package minisql.context.sql
+
+import java.time.LocalDate
+
+import minisql.idiom.{Idiom => BaseIdiom}
+import java.util.{Date, UUID}
+
+import minisql.context.Context
+import minisql.NamingStrategy
+
+trait SqlContext[Idiom <: BaseIdiom, Naming <: NamingStrategy]
+    extends Context[Idiom, Naming] {
+
+  given optionDecoder[T](using d: Decoder[T]): Decoder[Option[T]]
+  given optionEncoder[T](using d: Encoder[T]): Encoder[Option[T]]
+
+  given stringDecoder: Decoder[String]
+  given bigDecimalDecoder: Decoder[BigDecimal]
+  given booleanDecoder: Decoder[Boolean]
+  given byteDecoder: Decoder[Byte]
+  given shortDecoder: Decoder[Short]
+  given intDecoder: Decoder[Int]
+  given longDecoder: Decoder[Long]
+  given floatDecoder: Decoder[Float]
+  given doubleDecoder: Decoder[Double]
+  given byteArrayDecoder: Decoder[Array[Byte]]
+  given dateDecoder: Decoder[Date]
+  given localDateDecoder: Decoder[LocalDate]
+  given uuidDecoder: Decoder[UUID]
+
+  given stringEncoder: Encoder[String]
+  given bigDecimalEncoder: Encoder[BigDecimal]
+  given booleanEncoder: Encoder[Boolean]
+  given byteEncoder: Encoder[Byte]
+  given shortEncoder: Encoder[Short]
+  given intEncoder: Encoder[Int]
+  given longEncoder: Encoder[Long]
+  given floatEncoder: Encoder[Float]
+  given doubleEncoder: Encoder[Double]
+  given byteArrayEncoder: Encoder[Array[Byte]]
+  given dateEncoder: Encoder[Date]
+  given localDateEncoder: Encoder[LocalDate]
+  given uuidEncoder: Encoder[UUID]
+}
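SqlContext only declares the codecs abstractly; MirrorSqlContext satisfies them through MirrorCodecs. A hedged sketch (not repo code) of generic code written against the trait; codecsFor is a made-up name and the path-dependent summoning is an assumption about how the abstract givens are meant to be consumed:

    import minisql.NamingStrategy
    import minisql.idiom.Idiom
    import minisql.context.sql.SqlContext

    def codecsFor[I <: Idiom, N <: NamingStrategy](
        ctx: SqlContext[I, N]
    ): (ctx.Decoder[Long], ctx.Encoder[String]) = {
      import ctx.given
      // Both resolve through the abstract givens declared by SqlContext;
      // for MirrorSqlContext they come from MirrorCodecs.
      (summon[ctx.Decoder[Long]], summon[ctx.Encoder[String]])
    }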
@@ -26,9 +26,7 @@ trait SqlIdiom extends Idiom {
 
   protected def concatBehavior: ConcatBehavior = AnsiConcat
   protected def equalityBehavior: EqualityBehavior = AnsiEquality
-
   protected def actionAlias: Option[Ident] = None
-
   override def format(queryString: String): String = queryString
 
   def querifyAst(ast: Ast) = SqlQuery(ast)
@@ -67,7 +65,7 @@ trait SqlIdiom extends Idiom {
 
   def defaultTokenizer(implicit naming: NamingStrategy): Tokenizer[Ast] =
     new Tokenizer[Ast] {
-      private val stableTokenizer = astTokenizer(this, naming)
+      private val stableTokenizer = astTokenizer(using this, naming)
 
       extension (v: Ast) {
        def token = stableTokenizer.token(v)
@@ -249,7 +247,9 @@ trait SqlIdiom extends Idiom {
     }
 
     val customAstTokenizer =
-      Tokenizer.withFallback[Ast](SqlIdiom.this.astTokenizer(_, strategy)) {
+      Tokenizer.withFallback[Ast](
+        SqlIdiom.this.astTokenizer(using _, strategy)
+      ) {
        case Aggregation(op, Ident(_) | Tuple(_)) => stmt"${op.token}(*)"
        case Aggregation(op, Distinct(ast)) =>
          stmt"${op.token}(DISTINCT ${ast.token})"
@@ -257,7 +257,7 @@ trait SqlIdiom extends Idiom {
        case Aggregation(op, ast) => stmt"${op.token}(${ast.token})"
      }
 
-    tokenizer(customAstTokenizer)
+    tokenizer(using customAstTokenizer)
   }
 
   implicit def operationTokenizer(implicit
@@ -528,14 +528,14 @@ trait SqlIdiom extends Idiom {
      case Entity.Opinionated(name, _, renameable) =>
        stmt"INTO ${tokenizeTable(strategy, name, renameable).token}"
     }
-    actionTokenizer(insertEntityTokenizer)(actionAstTokenizer, strategy)
+    actionTokenizer(insertEntityTokenizer)(using actionAstTokenizer, strategy)
   }
 
   protected def actionAstTokenizer(implicit
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ) =
-    Tokenizer.withFallback[Ast](SqlIdiom.this.astTokenizer(_, strategy)) {
+    Tokenizer.withFallback[Ast](SqlIdiom.this.astTokenizer(using _, strategy)) {
      case q: Query => astTokenizer.token(q)
      case Property(Property.Opinionated(_, name, renameable, _), "isEmpty") =>
        stmt"${renameable.fixedOr(name)(tokenizeColumn(strategy, name, renameable)).token} IS NULL"
@@ -557,14 +557,16 @@ trait SqlIdiom extends Idiom {
     strategy: NamingStrategy
   ): Tokenizer[List[Ast]] = {
     val customAstTokenizer =
-      Tokenizer.withFallback[Ast](SqlIdiom.this.astTokenizer(_, strategy)) {
+      Tokenizer.withFallback[Ast](
+        SqlIdiom.this.astTokenizer(using _, strategy)
+      ) {
        case sq: Query =>
          stmt"(${tokenizer.token(sq)})"
      }
 
     Tokenizer[List[Ast]] {
      case list =>
-        list.mkStmt(", ")(customAstTokenizer)
+        list.mkStmt(", ")(using customAstTokenizer)
     }
   }
 
@@ -653,7 +655,7 @@ object SqlIdiom {
   private[minisql] def copyIdiom(
     parent: SqlIdiom,
     newActionAlias: Option[Ident]
-  ) =
+  ): SqlIdiom =
     new SqlIdiom {
      override protected def actionAlias: Option[Ident] = newActionAlias
      override def prepareForProbing(string: String): String =
@@ -678,10 +680,10 @@ object SqlIdiom {
     val idiom = copyIdiom(parentIdiom, Some(query.alias))
     import idiom._
 
-    implicit val stableTokenizer: Tokenizer[Ast] = idiom.astTokenizer(
+    implicit val stableTokenizer: Tokenizer[Ast] = idiom.astTokenizer(using
      new Tokenizer[Ast] { self =>
        extension (v: Ast) {
-          def token = astTokenizer(self, strategy).token(v)
+          def token = astTokenizer(using self, strategy).token(v)
        }
      },
      strategy
@@ -695,6 +697,8 @@ object SqlIdiom {
        stmt"${action.token} RETURNING ${returnListTokenizer.token(
          ExpandReturning(r)(idiom, strategy).map(_._1)
        )}"
+      case r =>
+        fail(s"Unsupported Returning construct: $r")
     }
   }
 }
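Apart from the new fallback case for unsupported RETURNING constructs and the explicit SqlIdiom return type on copyIdiom, every hunk in this file is the same mechanical change: explicit arguments for implicit parameters are now passed with `using`. A self-contained illustration (not repo code):

    trait Tok[A] { def token(a: A): String }

    def render[A](value: A)(implicit tok: Tok[A]): String = tok.token(value)

    val intTok: Tok[Int] = (a: Int) => a.toString

    // Old call style (as the removed lines did): render(42)(intTok)
    // New call style (as the added lines do):
    val rendered: String = render(42)(using intTok)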
@@ -19,7 +19,7 @@ case class FlatJoinContext(t: JoinType, a: FromContext, on: Ast)
 
 sealed trait SqlQuery {
   override def toString = {
-    import minisql.MirrorSqlDialect._
+    import MirrorSqlDialect.*
     import minisql.idiom.StatementInterpolator.*
     given Tokenizer[SqlQuery] = sqlQueryTokenizer(using
       defaultTokenizer(using Literal),
@@ -66,7 +66,7 @@ class ExpandNestedQueries(strategy: NamingStrategy) {
 
       // Need to unhide properties that were used during the query
       def replaceProps(ast: Ast) =
-        BetaReduction(ast, replacedRefs: _*)
+        BetaReduction(ast, replacedRefs*)
       def replacePropsOption(ast: Option[Ast]) =
        ast.map(replaceProps(_))
 
@@ -37,7 +37,7 @@ case class FlattenGroupByAggregation(agg: Ident) extends StatelessTransformer {
        super.apply(other)
     }
 
-  private[this] def isGroupByAggregation(ast: Ast): Boolean =
+  private def isGroupByAggregation(ast: Ast): Boolean =
     ast match {
       case Aggregation(a, b) => isGroupByAggregation(b)
       case Map(a, b, c) => isGroupByAggregation(a)
@@ -22,31 +22,31 @@ class SqlNormalize(
 ) {
 
   private val normalize =
-    (identity[Ast] _)
+    (identity[Ast])
      .andThen(trace("original"))
-      .andThen(DemarcateExternalAliases.apply _)
+      .andThen(DemarcateExternalAliases.apply)
      .andThen(trace("DemarcateReturningAliases"))
-      .andThen(new FlattenOptionOperation(concatBehavior).apply _)
+      .andThen(new FlattenOptionOperation(concatBehavior).apply)
      .andThen(trace("FlattenOptionOperation"))
-      .andThen(new SimplifyNullChecks(equalityBehavior).apply _)
+      .andThen(new SimplifyNullChecks(equalityBehavior).apply)
      .andThen(trace("SimplifyNullChecks"))
-      .andThen(Normalize.apply _)
+      .andThen(Normalize.apply)
      .andThen(trace("Normalize"))
      // Need to do RenameProperties before ExpandJoin which normalizes-out all the tuple indexes
      // on which RenameProperties relies
-      .andThen(RenameProperties.apply _)
+      .andThen(RenameProperties.apply)
      .andThen(trace("RenameProperties"))
-      .andThen(ExpandDistinct.apply _)
+      .andThen(ExpandDistinct.apply)
      .andThen(trace("ExpandDistinct"))
-      .andThen(NestImpureMappedInfix.apply _)
+      .andThen(NestImpureMappedInfix.apply)
      .andThen(trace("NestMappedInfix"))
-      .andThen(Normalize.apply _)
+      .andThen(Normalize.apply)
      .andThen(trace("Normalize"))
-      .andThen(ExpandJoin.apply _)
+      .andThen(ExpandJoin.apply)
      .andThen(trace("ExpandJoin"))
-      .andThen(ExpandMappedInfix.apply _)
+      .andThen(ExpandMappedInfix.apply)
      .andThen(trace("ExpandMappedInfix"))
-      .andThen(Normalize.apply _)
+      .andThen(Normalize.apply)
      .andThen(trace("Normalize"))
 
   def apply(ast: Ast) = normalize(ast)
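The SqlNormalize pipeline itself is untouched; only the `_` suffixes for method values are dropped, since Scala 3 eta-expands method references automatically (and -rewrite removes the redundant underscores). An illustration (not repo code):

    object Trim      { def apply(s: String): String = s.trim }
    object Lowercase { def apply(s: String): String = s.toLowerCase }

    val normalizeText: String => String =
      (identity[String])          // was: (identity[String] _)
        .andThen(Trim.apply)      // was: .andThen(Trim.apply _)
        .andThen(Lowercase.apply)

    val out: String = normalizeText("  Hello  ")   // "hello"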
@@ -85,16 +85,16 @@ private class ExpandSelect(
 
      val orderedSelect = ref match {
        case pp @ Property(ast: Property, TupleIndex(idx)) =>
-          trace"Reference is a sub-property of a tuple index: $idx. Walking inside." andReturn
+          trace"Reference is a sub-property of a tuple index: $idx. Walking inside." `andReturn`
            expandReference(ast) match {
              case OrderedSelect(o, SelectValue(Tuple(elems), alias, c)) =>
-                trace"Expressing Element $idx of $elems " andReturn
+                trace"Expressing Element $idx of $elems " `andReturn`
                  OrderedSelect(
                    o :+ idx,
                    SelectValue(elems(idx), concat(alias, idx), c)
                  )
              case OrderedSelect(o, SelectValue(ast, alias, c)) =>
-                trace"Appending $idx to $alias " andReturn
+                trace"Appending $idx to $alias " `andReturn`
                  OrderedSelect(o, SelectValue(ast, concat(alias, idx), c))
            }
        case pp @ Property.Opinionated(
@@ -103,7 +103,7 @@ private class ExpandSelect(
              renameable,
              visible
            ) =>
-          trace"Reference is a sub-property. Walking inside." andReturn
+          trace"Reference is a sub-property. Walking inside." `andReturn`
            expandReference(ast) match {
              case OrderedSelect(o, SelectValue(ast, nested, c)) =>
                // Alias is the name of the column after the naming strategy
@@ -121,7 +121,7 @@ private class ExpandSelect(
                // this may need to change based on how distinct appends table names instead of just tuple indexes
                // into the property path.
 
-                trace"...inside walk completed, continuing to return: " andReturn
+                trace"...inside walk completed, continuing to return: " `andReturn`
                  OrderedSelect(
                    o,
                    SelectValue(
@@ -135,7 +135,7 @@ private class ExpandSelect(
                  )
            }
        case pp @ Property(_, TupleIndex(idx)) =>
-          trace"Reference is a tuple index: $idx from $select." andReturn
+          trace"Reference is a tuple index: $idx from $select." `andReturn`
            select(idx) match {
              case OrderedSelect(o, SelectValue(ast, alias, c)) =>
                OrderedSelect(o, SelectValue(ast, concat(alias, idx), c))
@@ -155,13 +155,13 @@ private class ExpandSelect(
                    s"Cannot find element $name in $cc"
                  )
              }
-              trace"Reference is a case class member: " andReturn
+              trace"Reference is a case class member: " `andReturn`
                OrderedSelect(
                  o :+ index,
                  SelectValue(ast, Some(expandColumn(name, renameable)), c)
                )
            case List(OrderedSelect(o, SelectValue(i: Ident, _, c))) =>
-              trace"Reference is an identifier: " andReturn
+              trace"Reference is an identifier: " `andReturn`
                OrderedSelect(
                  o,
                  SelectValue(
@@ -171,7 +171,7 @@ private class ExpandSelect(
                  )
                )
            case other =>
-              trace"Reference is unidentified: $other returning:" andReturn
+              trace"Reference is unidentified: $other returning:" `andReturn`
                OrderedSelect(
                  Integer.MAX_VALUE,
                  SelectValue(
@@ -191,7 +191,7 @@ private class ExpandSelect(
                  )
                )
 
-      trace"Expanded $ref into $orderedSelect then Normalized to $normalizedOrderedSelect" andReturn
+      trace"Expanded $ref into $orderedSelect then Normalized to $normalizedOrderedSelect" `andReturn`
        normalizedOrderedSelect
     }
 
@@ -201,7 +201,7 @@ private class ExpandSelect(
          _,
          sv @ SelectValue(Property(Ident(_), propName), Some(alias), _)
        ) if (propName == alias) =>
-        trace"Detected select value with un-needed alias: $os removing it:" andReturn
+        trace"Detected select value with un-needed alias: $os removing it:" `andReturn`
          os.copy(selectValue = sv.copy(alias = None))
      case _ => os
     }
@@ -224,7 +224,7 @@ private class ExpandSelect(
     // are there any selects that have infix values which we have not already selected? We need to include
     // them because they could be doing essential things e.g. RANK ... ORDER BY
     val remainingSelectsWithInfixes =
-      trace"Searching Selects with Infix:" andReturn
+      trace"Searching Selects with Infix:" `andReturn`
        new FindUnexpressedInfixes(select)(mappedRefs)
 
     implicit val ordering: scala.math.Ordering[List[Int]] =
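Every change in this file (and the one below) backticks the `andReturn`/`andContinue` calls: alphanumeric methods used in infix position are flagged under newer -source levels unless the method is declared `infix`, and -rewrite switches the call sites to backticks. An illustration (not repo code):

    final case class TraceMsg(msg: String) {
      def andReturn[A](a: A): A = { println(msg); a }
    }

    val t = TraceMsg("expanding reference")

    // Flagged form:   t andReturn 42
    // Accepted forms:
    val a: Int = t `andReturn` 42
    val b: Int = t.andReturn(42)
    // Alternative fix: declare the method as `infix def andReturn[A](a: A): A`.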
@@ -47,7 +47,7 @@ class FindUnexpressedInfixes(select: List[OrderedSelect]) {
     ): List[(Ast, List[Int])] = {
       trace"Searching for infix: $ast in the sub-path $parentOrder".andLog()
       if (pathExists(parentOrder))
-        trace"No infixes found" andContinue
+        trace"No infixes found" `andContinue`
          List()
       else
         ast match {
@@ -1,8 +1,9 @@
-package minisql
+package minisql.idiom
 
+import minisql.NamingStrategy
 import minisql.ast.Renameable.{ByStrategy, Fixed}
 import minisql.ast.Visibility.Hidden
-import minisql.ast._
+import minisql.ast.*
 import minisql.context.CanReturnClause
 import minisql.idiom.{Idiom, SetContainsToken, Statement}
 import minisql.idiom.StatementInterpolator.*
@@ -1,20 +1,20 @@
-package minisql.parsing
+package minisql.context.mirror
 
 import minisql.*
 import minisql.ast.*
 import minisql.idiom.*
 import minisql.NamingStrategy
 import minisql.MirrorContext
-import minisql.MirrorIdiom
 import minisql.context.mirror.{*, given}
 
 class QuotedSuite extends munit.FunSuite {
-  val ctx = new MirrorContext(MirrorIdiom, SnakeCase)
 
   case class Foo(id: Long)
 
+  import mirrorContext.given
+
   test("SimpleQuery") {
-    val o = ctx.io(query[Foo]("foo").filter(_.id > 0))
+    val o = mirrorContext.io(query[Foo]("foo").filter(_.id > 0))
     println("============" + o)
     o
   }
src/test/scala/minisql/mirror/context.scala (new file, 6 lines)
@@ -0,0 +1,6 @@
+package minisql.context.mirror
+
+import minisql.*
+import minisql.idiom.MirrorIdiom
+
+val mirrorContext = new MirrorContext(MirrorIdiom, Literal)
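The test suites now share this single top-level mirrorContext instead of each constructing their own. A hedged sketch (not part of the commit) of another test reusing it; Bar and its table name are made up, and the file is assumed to sit in package minisql.context.mirror next to the definition above:

    import minisql.*

    class SharedContextSuite extends munit.FunSuite {
      import mirrorContext.given

      case class Bar(id: Long, name: String)

      test("filter by id") {
        val io = mirrorContext.io(query[Bar]("bar").filter(_.id > 0))
        assert(io != null)
      }
    }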