Convert to using

parent 3a9d15f015
commit 24f7f6aec0
14 changed files with 112 additions and 59 deletions
@@ -6,6 +6,8 @@ libraryDependencies ++= Seq(
   "org.scalameta" %% "munit" % "1.0.3" % Test
 )
 
+javaOptions ++= Seq("-Xss16m")
+
 scalacOptions ++= Seq(
   "-deprecation",
   "-feature",
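One note on the new javaOptions setting: sbt only passes javaOptions to forked JVMs. If the larger stack is also meant for the test run, a forked Test configuration along these lines would be needed; this is an assumption about intent, not part of the commit:

// Hypothetical companion setting, not in this commit: sbt passes javaOptions only to
// forked JVMs, so the larger -Xss16m stack takes effect for tests only if they fork.
Test / fork := true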
@@ -39,8 +39,8 @@ private def quotedLiftImpl[X: Type](
     e: Expr[ParamEncoder[X]]
 )(using Quotes): Expr[ast.ScalarValueLift] = {
   import quotes.reflect.*
-  val name = x.asTerm.symbol.fullName
-  val liftId = x.asTerm.symbol.owner.fullName + "@" + name
+  val name = x.asTerm.show
+  val liftId = liftIdOfExpr(x)
   '{
     ast.ScalarValueLift(
       ${ Expr(name) },
@@ -84,23 +84,19 @@ object EntityQuery {
     }
 
     inline def insert(v: E)(using m: Mirror.ProductOf[E]): Insert = {
-      val entity = e.asInstanceOf[ast.Entity]
-      val assignments = transformCaseClassToAssignments[E](v, entity.name)
-      ast.Insert(entity, assignments)
+      ast.Insert(e, transformCaseClassToAssignments[E](v))
     }
   }
 }
 
 private inline def transformCaseClassToAssignments[E](
-    v: E,
-    entityName: String
+    v: E
 )(using m: Mirror.ProductOf[E]): List[ast.Assignment] = ${
-  transformCaseClassToAssignmentsImpl[E]('v, 'entityName)
+  transformCaseClassToAssignmentsImpl[E]('v)
 }
 
 private def transformCaseClassToAssignmentsImpl[E: Type](
-    v: Expr[E],
-    entityName: Expr[String]
+    v: Expr[E]
 )(using Quotes): Expr[List[ast.Assignment]] = {
   import quotes.reflect.*
 
@@ -115,10 +111,10 @@ private def transformCaseClassToAssignmentsImpl[E: Type](
       case '[t] =>
         '{
           ast.Assignment(
-            ast.Ident($entityName),
-            ast.Property(ast.Ident($entityName), ${ Expr(fieldName) }),
-            quotedLift[t](${ Select.unique(v.asTerm, fieldName).asExprOf[t] })(
-              using summonInline[ParamEncoder[t]]
+            ast.Ident("v"),
+            ast.Property(ast.Ident("v"), ${ Expr(fieldName) }),
+            quotedLift[t](${ Select(v.asTerm, field).asExprOf[t] })(using
+              summonInline[ParamEncoder[t]]
             )
           )
         }
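The macro above builds one ast.Assignment per case-class field. As a point of reference, this minimal, self-contained sketch (illustrative only, not code from this repository) derives the same field names at the type level from the product Mirror:

import scala.deriving.Mirror
import scala.compiletime.constValueTuple

// Read the field names of a case class from its Mirror; the macro above does the
// analogous thing before building one ast.Assignment per field.
inline def fieldNames[E](using m: Mirror.ProductOf[E]): List[String] =
  constValueTuple[m.MirroredElemLabels].toList.map(_.toString)

case class Foo(id: Long, name: String)

@main def fieldNamesDemo(): Unit =
  println(fieldNames[Foo]) // List(id, name)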
@@ -186,8 +182,10 @@ private def compileImpl[I <: Idiom, N <: NamingStrategy](
     n: Expr[N]
 )(using Quotes, Type[I], Type[N]): Expr[Statement] = {
   import quotes.reflect.*
+  println(s"Start q.value")
   q.value match {
     case Some(ast) =>
+      println(s"Finish q.value: ${ast}")
       val idiom = LoadObject[I].getOrElse(
         report.errorAndAbort(s"Idiom not known at compile")
       )
@@ -46,8 +46,7 @@ private given FromExpr[ScalarValueLift] with {
   def unapply(x: Expr[ScalarValueLift])(using Quotes): Option[ScalarValueLift] =
     x match {
       case '{ ScalarValueLift(${ Expr(n) }, ${ Expr(id) }, $y) } =>
-        // don't cared about value here, a little tricky
-        Some(ScalarValueLift(n, id, null))
+        Some(ScalarValueLift(n, id, None))
     }
 }
 
@@ -1,13 +1,14 @@
 package minisql.context
 
-import scala.deriving.*
-import scala.compiletime.*
-import scala.util.Try
 import minisql.util.*
 import minisql.idiom.{Idiom, Statement, ReifyStatement}
 import minisql.{NamingStrategy, ParamEncoder}
 import minisql.ColumnDecoder
 import minisql.ast.{Ast, ScalarValueLift, CollectAst}
+import scala.deriving.*
+import scala.compiletime.*
+import scala.util.Try
+import scala.annotation.targetName
 
 trait RowExtract[A, Row] {
   def extract(row: Row): Try[A]
@@ -89,6 +90,30 @@ trait Context[I <: Idiom, N <: NamingStrategy] { selft =>
     )
   }
 
+  @targetName("ioAction")
+  inline def io[E](inline q: minisql.Action[E]): DBIO[E] = {
+    val extractor = summonFrom {
+      case e: RowExtract[E, DBRow] => e
+      case e: ColumnDecoder.Aux[DBRow, E] =>
+        RowExtract.single(e)
+    }
+
+    val lifts = q.liftMap
+    val stmt = minisql.compile(q, idiom, naming)
+    val (sql, params) = stmt.expand(lifts)
+    (
+      sql = sql,
+      params = params.map(_.value.get.asInstanceOf[(Any, Encoder[?])]),
+      mapper = (rows) =>
+        rows
+          .traverse(extractor.extract)
+          .flatMap(
+            _.headOption.toRight(new Exception(s"No value return")).toTry
+          )
+    )
+  }
+
+  @targetName("ioQuery")
   inline def io[E](
     inline q: minisql.Query[E]
   ): DBIO[IArray[E]] = {
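Two Scala 3 features carry this new overload: @targetName gives the two inline io methods distinct binary names, and summonFrom picks whichever extractor given is available at the expansion site. A minimal, self-contained sketch of both, with generic names that are not this repository's API:

import scala.annotation.targetName
import scala.compiletime.summonFrom

trait Decoder[A]:
  def decode(s: String): A

given Decoder[Int] with
  def decode(s: String): Int = s.toInt

// summonFrom tries each case in order, falling through when no matching given exists.
inline def decodeOrEcho[A](s: String): Any =
  summonFrom {
    case d: Decoder[A] => d.decode(s)
    case _             => s
  }

// Overloads whose parameter types erase to the same JVM signature get distinct
// binary names via @targetName.
@targetName("sumInts")
def combine(xs: List[Int]): Int = xs.sum
@targetName("joinStrings")
def combine(xs: List[String]): String = xs.mkString

@main def ioSketch(): Unit =
  println(decodeOrEcho[Int]("42"))      // 42 (Decoder[Int] found)
  println(decodeOrEcho[Boolean]("yes")) // yes (no Decoder[Boolean], falls through)
  println(combine(List(1, 2, 3)))       // 6
  println(combine(List("a", "b")))      // ab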
@@ -31,13 +31,13 @@ trait SqlIdiom extends Idiom {
 
   def querifyAst(ast: Ast) = SqlQuery(ast)
 
-  private def doTranslate(ast: Ast, cached: Boolean)(implicit
+  private def doTranslate(ast: Ast, cached: Boolean)(using
       naming: NamingStrategy
   ): (Ast, Statement) = {
     val normalizedAst =
       SqlNormalize(ast, concatBehavior, equalityBehavior)
 
-    implicit val tokernizer: Tokenizer[Ast] = defaultTokenizer
+    given Tokenizer[Ast] = defaultTokenizer
 
     val token =
       normalizedAst match {
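This hunk shows the conversion the commit title refers to: implicit parameters become using clauses and implicit vals become givens. A minimal, self-contained before/after sketch with generic names (not this repository's types):

trait Naming:
  def column(s: String): String

object SnakeCase extends Naming:
  def column(s: String): String =
    s.replaceAll("([a-z])([A-Z])", "$1_$2").toLowerCase

// Scala 2 style, as removed by this commit:
//   def render(name: String)(implicit naming: Naming): String = naming.column(name)
//   implicit val naming: Naming = SnakeCase

// Scala 3 style, as introduced by this commit:
def render(name: String)(using naming: Naming): String = naming.column(name)
given Naming = SnakeCase

@main def usingSketch(): Unit =
  println(render("createdAt")) // created_at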
@@ -63,7 +63,7 @@ trait SqlIdiom extends Idiom {
     doTranslate(ast, false)
   }
 
-  def defaultTokenizer(implicit naming: NamingStrategy): Tokenizer[Ast] =
+  def defaultTokenizer(using naming: NamingStrategy): Tokenizer[Ast] =
     new Tokenizer[Ast] {
       private val stableTokenizer = astTokenizer(using this, naming)
 
@@ -73,7 +73,7 @@ trait SqlIdiom extends Idiom {
 
   }
 
-  def astTokenizer(implicit
+  def astTokenizer(using
       astTokenizer: Tokenizer[Ast],
       strategy: NamingStrategy
   ): Tokenizer[Ast] =
@@ -305,7 +305,7 @@ trait MirrorIdiomBase extends Idiom {
     Tokenizer[OnConflict.Target] {
       case OnConflict.NoTarget => stmt""
       case OnConflict.Properties(props) =>
-        val listTokens = listTokenizer(using astTokenizer).token(props)
+        val listTokens = props.token
         stmt"(${listTokens})"
     }
 
@@ -16,11 +16,12 @@ object ReifyStatement {
     liftMap: SMap[String, (Any, ParamEncoder[?])]
   ): (String, List[ScalarValueLift]) = {
     val expanded = expandLiftings(statement, emptySetContainsToken, liftMap)
-    token2string(expanded, liftingPlaceholder)
+    token2string(expanded, liftMap, liftingPlaceholder)
   }
 
   private def token2string(
     token: Token,
+    liftMap: SMap[String, (Any, ParamEncoder[?])],
     liftingPlaceholder: Int => String
   ): (String, List[ScalarValueLift]) = {
 
@@ -44,7 +45,7 @@ object ReifyStatement {
         )
       case ScalarLiftToken(lift: ScalarValueLift) =>
         sqlBuilder ++= liftingPlaceholder(liftingSize)
-        liftBuilder += lift
+        liftBuilder += lift.copy(value = liftMap.get(lift.liftId))
         loop(tail, liftingSize + 1)
       case ScalarLiftToken(o) =>
         throw new Exception(s"Cannot tokenize ScalarQueryLift: ${o}")
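Taken together, the two ReifyStatement hunks mean token2string now attaches each lift's runtime value by looking up its liftId in the map that is threaded through. A self-contained sketch of that idea with simplified stand-in types (not the library's own):

// Simplified stand-ins for Token / ScalarValueLift, for illustration only.
final case class Lift(liftId: String, value: Option[Any])

def reify(
    tokens: List[Either[String, Lift]], // Left = raw SQL text, Right = a lift token
    liftMap: Map[String, Any],          // runtime values keyed by liftId
    placeholder: Int => String
): (String, List[Lift]) =
  val sql   = StringBuilder()
  val lifts = List.newBuilder[Lift]
  var i = 0
  tokens.foreach {
    case Left(text) => sql ++= text
    case Right(l) =>
      sql ++= placeholder(i)                         // e.g. "?" or "$1"
      lifts += l.copy(value = liftMap.get(l.liftId)) // attach the runtime value
      i += 1
  }
  (sql.result(), lifts.result())

@main def reifySketch(): Unit =
  val tokens = List(Left("SELECT * FROM foo WHERE id > "), Right(Lift("foo.id@file:1:1", None)))
  println(reify(tokens, Map("foo.id@file:1:1" -> 0L), _ => "?"))
  // (SELECT * FROM foo WHERE id > ?, List(Lift(foo.id@file:1:1,Some(0))))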
@@ -8,12 +8,20 @@ import scala.collection.mutable.ListBuffer
 
 object StatementInterpolator {
 
+  extension [T](list: List[T]) {
+    private[minisql] def mkStmt(
+        sep: String = ", "
+    )(using tokenize: Tokenizer[T]) = {
+      val l1 = list.map(_.token)
+      val l2 = List.fill(l1.size - 1)(StringToken(sep))
+      Statement(Interleave(l1, l2))
+    }
+  }
   trait Tokenizer[T] {
     extension (v: T) {
       def token: Token
     }
   }
 
   object Tokenizer {
     def apply[T](f: T => Token): Tokenizer[T] = new Tokenizer[T] {
       extension (v: T) {
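mkStmt interleaves the list's tokens with a separator token before wrapping them in a Statement. A tiny, self-contained sketch of the interleaving step alone, using plain strings instead of the library's Token type:

// Interleave items with a separator: List(a, b, c) and ", " become List(a, ", ", b, ", ", c).
def interleave[T](items: List[T], sep: T): List[T] =
  items match
    case Nil          => Nil
    case head :: tail => head :: tail.flatMap(t => List(sep, t))

@main def mkStmtSketch(): Unit =
  println(interleave(List("a", "b", "c"), ", ").mkString) // a, b, c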
@@ -31,37 +39,29 @@ object StatementInterpolator {
     }
   }
 
-  implicit class TokenImplicit[T](v: T)(implicit tokenizer: Tokenizer[T]) {
+  extension [T](v: T)(using tokenizer: Tokenizer[T]) {
     def token = tokenizer.token(v)
   }
 
-  implicit def stringTokenizer: Tokenizer[String] =
+  given stringTokenizer: Tokenizer[String] =
     Tokenizer[String] {
       case string => StringToken(string)
     }
 
-  implicit def liftTokenizer: Tokenizer[Lift] =
+  given liftTokenizer: Tokenizer[Lift] =
     Tokenizer[Lift] {
       case lift: ScalarLift => ScalarLiftToken(lift)
     }
 
-  implicit def tokenTokenizer: Tokenizer[Token] = Tokenizer[Token](identity)
+  given tokenTokenizer: Tokenizer[Token] = Tokenizer[Token](identity)
-  implicit def statementTokenizer: Tokenizer[Statement] =
+  given statementTokenizer: Tokenizer[Statement] =
     Tokenizer[Statement](identity)
-  implicit def stringTokenTokenizer: Tokenizer[StringToken] =
+  given stringTokenTokenizer: Tokenizer[StringToken] =
     Tokenizer[StringToken](identity)
-  implicit def liftingTokenTokenizer: Tokenizer[ScalarLiftToken] =
+  given liftingTokenTokenizer: Tokenizer[ScalarLiftToken] =
     Tokenizer[ScalarLiftToken](identity)
 
-  extension [T](list: List[T]) {
-    def mkStmt(sep: String = ", ")(implicit tokenize: Tokenizer[T]) = {
-      val l1 = list.map(_.token)
-      val l2 = List.fill(l1.size - 1)(StringToken(sep))
-      Statement(Interleave(l1, l2))
-    }
-  }
-
-  implicit def listTokenizer[T](implicit
+  given listTokenizer[T](using
     tokenize: Tokenizer[T]
   ): Tokenizer[List[T]] =
     Tokenizer[List[T]] {
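The same modernisation again, this time from an implicit class to an extension method and from implicit defs to named givens. A minimal, self-contained sketch of the pattern (not this repository's Tokenizer):

trait Show[T]:
  def show(t: T): String

// Scala 2 style, as removed here:
//   implicit class ShowOps[T](v: T)(implicit s: Show[T]) { def shown: String = s.show(v) }
//   implicit def intShow: Show[Int] = (i: Int) => i.toString

// Scala 3 style, as introduced here:
extension [T](v: T)(using s: Show[T])
  def shown: String = s.show(v)

given intShow: Show[Int] = (i: Int) => i.toString

@main def extensionSketch(): Unit =
  println(42.shown) // 42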
@@ -4,13 +4,14 @@ import scala.quoted.*
 import minisql.ParamEncoder
 import minisql.ast
 import minisql.*
+import minisql.util.*
 
 private[parsing] def liftParsing(
     astParser: => Parser[ast.Ast]
 )(using Quotes): Parser[ast.Lift] = {
   case '{ lift[t](${ x })(using $e: ParamEncoder[t]) } =>
     import quotes.reflect.*
-    val name = x.asTerm.symbol.fullName
-    val liftId = x.asTerm.symbol.owner.fullName + "@" + name
+    val name = x.show
+    val liftId = liftIdOfExpr(x)
     '{ ast.ScalarValueLift(${ Expr(name) }, ${ Expr(liftId) }, Some($x -> $e)) }
 }
 
@@ -20,7 +20,7 @@ extension [A](xs: Iterable[A]) {
   }
 }
 
-object CollectTry {
+private[minisql] object CollectTry {
   def apply[T](list: List[Try[T]]): Try[List[T]] =
     list.foldLeft(Try(List.empty[T])) {
       case (list, t) =>
src/main/scala/minisql/util/QuotesHelper.scala (new file, 24 lines)
@@ -0,0 +1,24 @@
+package minisql.util
+
+import scala.quoted.*
+
+private[minisql] def splicePkgPath(using Quotes) = {
+  import quotes.reflect.*
+  def recurse(sym: Symbol): String =
+    sym match {
+      case s if s.isPackageDef => s.fullName
+      case s if s.isNoSymbol => ""
+      case _ =>
+        recurse(sym.maybeOwner)
+    }
+  recurse(Symbol.spliceOwner)
+}
+
+private[minisql] def liftIdOfExpr(x: Expr[?])(using Quotes) = {
+  import quotes.reflect.*
+  val name = x.asTerm.show
+  val packageName = splicePkgPath
+  val pos = x.asTerm.pos
+  val fileName = pos.sourceFile.name
+  s"${name}@${packageName}.${fileName}:${pos.startLine}:${pos.startColumn}"
+}
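liftIdOfExpr derives a stable identifier from the spliced expression's source text plus its splice position; ReifyStatement later uses that string as the lookup key into the lift map. A value-level sketch of the format only, with example values that are assumptions rather than real macro output:

// Same string shape as liftIdOfExpr, with the position pieces passed in explicitly.
def liftIdFormat(exprSource: String, pkg: String, file: String, line: Int, col: Int): String =
  s"${exprSource}@${pkg}.${file}:${line}:${col}"

@main def liftIdSketch(): Unit =
  // Hypothetical values for a lift spliced inside a test file.
  println(liftIdFormat("v.id", "minisql.context.sql", "QuotedSuite.scala", 20, 35))
  // v.id@minisql.context.sql.QuotedSuite.scala:20:35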
@@ -1,4 +1,4 @@
-package minisql.context.mirror
+package minisql.context.sql
 
 import minisql.*
 import minisql.ast.*
@@ -9,23 +9,27 @@ import minisql.context.mirror.{*, given}
 
 class QuotedSuite extends munit.FunSuite {
 
-  case class Foo(id: Long)
+  case class Foo(id: Long, name: String)
 
-  import mirrorContext.given
+  inline def Foos = query[Foo]("foo")
 
+  import testContext.given
+
   test("SimpleQuery") {
-    val o = mirrorContext.io(
+    val o = testContext.io(
       query[Foo](
         "foo",
         alias("id", "id1")
       ).filter(_.id > 0)
     )
-    println("============" + o)
-    o
+    println(o)
   }
 
   test("Insert") {
+    val v: Foo = Foo(0L, "foo")
+
+    val o = testContext.io(Foos.insert(v))
+    println(o)
     ???
   }
 
 }
src/test/scala/minisql/context/sql/context.scala (new file, 5 lines)
@@ -0,0 +1,5 @@
+package minisql.context.sql
+
+import minisql.*
+
+val testContext = new MirrorSqlContext(Literal)
@@ -1,6 +0,0 @@
-package minisql.context.mirror
-
-import minisql.*
-import minisql.idiom.MirrorIdiom
-
-val mirrorContext = new MirrorContext(MirrorIdiom, Literal)