Convert implicits to Scala 3 `using`/`given` syntax
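
Replace the Scala 2 style implicit def/implicit val tokenizers in SqlIdiom with Scala 3 given definitions and using clauses, and turn Tokenizer[T] from an extension-method trait into a plain `def token(v: T): Token` interface. A new Tokenizer.withSelf constructor passes the instance being defined back into its own body as a context parameter, which lets sqlQueryTokenizer tokenize nested set-operation queries; SqlQuery#toString likewise now assembles its tokenizer from plain givens.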

jilen 2025-07-15 18:47:34 +08:00
parent 14f15da10c
commit ed736783fc
3 changed files with 120 additions and 122 deletions

@@ -65,15 +65,12 @@ trait SqlIdiom extends Idiom {
def defaultTokenizer(using naming: NamingStrategy): Tokenizer[Ast] =
new Tokenizer[Ast] {
private val stableTokenizer = astTokenizer(using this, naming)
extension (v: Ast) {
def token = stableTokenizer.token(v)
}
private def stableTokenizer = astTokenizer(using this, naming)
def token(v: Ast) = stableTokenizer.token(v)
}
def astTokenizer(using
def astTokenizer(implicit
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Ast] =
@@ -98,7 +95,7 @@ trait SqlIdiom extends Idiom {
fail(s"Malformed or unsupported construct: $a.")
}
implicit def ifTokenizer(implicit
given ifTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[If] = Tokenizer[If] {
@@ -185,10 +182,28 @@ trait SqlIdiom extends Idiom {
def apply = stmt"SELECT $withLimitOffset"
}
implicit def sqlQueryTokenizer(implicit
given unaryOperatorTokenizer: Tokenizer[UnaryOperator] =
Tokenizer[UnaryOperator] {
case NumericOperator.`-` => stmt"-"
case BooleanOperator.`!` => stmt"NOT"
case StringOperator.`toUpperCase` => stmt"UPPER"
case StringOperator.`toLowerCase` => stmt"LOWER"
case StringOperator.`toLong` => stmt"" // cast is implicit
case StringOperator.`toInt` => stmt"" // cast is implicit
case SetOperator.`isEmpty` => stmt"NOT EXISTS"
case SetOperator.`nonEmpty` => stmt"EXISTS"
}
given setOperationTokenizer: Tokenizer[SetOperation] =
Tokenizer[SetOperation] {
case UnionOperation => stmt"UNION"
case UnionAllOperation => stmt"UNION ALL"
}
given sqlQueryTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[SqlQuery] = Tokenizer[SqlQuery] {
): Tokenizer[SqlQuery] = Tokenizer.withSelf[SqlQuery] {
case q: FlattenSqlQuery =>
new FlattenSqlQueryTokenizerHelper(q).apply
case SetOperationSqlQuery(a, op, b) =>
@@ -220,7 +235,16 @@ trait SqlIdiom extends Idiom {
protected def tokenizeAlias(strategy: NamingStrategy, table: String) =
strategy.default(table)
implicit def selectValueTokenizer(implicit
given aggregationOperatorTokenizer: Tokenizer[AggregationOperator] =
Tokenizer[AggregationOperator] {
case AggregationOperator.`min` => stmt"MIN"
case AggregationOperator.`max` => stmt"MAX"
case AggregationOperator.`avg` => stmt"AVG"
case AggregationOperator.`sum` => stmt"SUM"
case AggregationOperator.`size` => stmt"COUNT"
}
given selectValueTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[SelectValue] = {
@@ -260,7 +284,39 @@ trait SqlIdiom extends Idiom {
tokenizer(using customAstTokenizer)
}
implicit def operationTokenizer(implicit
given binaryOperatorTokenizer: Tokenizer[BinaryOperator] =
Tokenizer[BinaryOperator] {
case EqualityOperator.`==` => stmt"="
case EqualityOperator.`!=` => stmt"<>"
case BooleanOperator.`&&` => stmt"AND"
case BooleanOperator.`||` => stmt"OR"
case StringOperator.`concat` => stmt"||"
case StringOperator.`startsWith` =>
fail("bug: this code should be unreachable")
case StringOperator.`split` => stmt"SPLIT"
case NumericOperator.`-` => stmt"-"
case NumericOperator.`+` => stmt"+"
case NumericOperator.`*` => stmt"*"
case NumericOperator.`>` => stmt">"
case NumericOperator.`>=` => stmt">="
case NumericOperator.`<` => stmt"<"
case NumericOperator.`<=` => stmt"<="
case NumericOperator.`/` => stmt"/"
case NumericOperator.`%` => stmt"%"
case SetOperator.`contains` => stmt"IN"
}
given optionOperationTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[OptionOperation] = Tokenizer[OptionOperation] {
case OptionIsEmpty(ast) => stmt"${ast.token} IS NULL"
case OptionNonEmpty(ast) => stmt"${ast.token} IS NOT NULL"
case OptionIsDefined(ast) => stmt"${ast.token} IS NOT NULL"
case other => fail(s"Malformed or unsupported construct: $other.")
}
given operationTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Operation] = Tokenizer[Operation] {
@@ -296,25 +352,9 @@ trait SqlIdiom extends Idiom {
case e: FunctionApply => fail(s"Can't translate the ast to sql: '$e'")
}
implicit def optionOperationTokenizer(implicit
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[OptionOperation] = Tokenizer[OptionOperation] {
case OptionIsEmpty(ast) => stmt"${ast.token} IS NULL"
case OptionNonEmpty(ast) => stmt"${ast.token} IS NOT NULL"
case OptionIsDefined(ast) => stmt"${ast.token} IS NOT NULL"
case other => fail(s"Malformed or unsupported construct: $other.")
}
implicit val setOperationTokenizer: Tokenizer[SetOperation] =
Tokenizer[SetOperation] {
case UnionOperation => stmt"UNION"
case UnionAllOperation => stmt"UNION ALL"
}
protected def limitOffsetToken(
query: Statement
)(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
)(using astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
Tokenizer[(Option[Ast], Option[Ast])] {
case (None, None) => query
case (Some(limit), None) => stmt"$query LIMIT ${limit.token}"
@@ -325,10 +365,25 @@ trait SqlIdiom extends Idiom {
protected def tokenOrderBy(
criterias: List[OrderByCriteria]
)(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
)(using astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
stmt"ORDER BY ${criterias.token}"
implicit def sourceTokenizer(implicit
given entityTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Entity] = Tokenizer[Entity] {
case Entity.Opinionated(name, _, renameable) =>
tokenizeTable(strategy, name, renameable).token
}
given joinTypeTokenizer: Tokenizer[JoinType] = Tokenizer[JoinType] {
case JoinType.InnerJoin => stmt"INNER JOIN"
case JoinType.LeftJoin => stmt"LEFT JOIN"
case JoinType.RightJoin => stmt"RIGHT JOIN"
case JoinType.FullJoin => stmt"FULL JOIN"
}
given sourceTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[FromContext] = Tokenizer[FromContext] {
@@ -341,18 +396,12 @@ trait SqlIdiom extends Idiom {
case InfixContext(infix, alias) =>
stmt"(${(infix: Ast).token}) AS ${strategy.default(alias).token}"
case JoinContext(t, a, b, on) =>
stmt"${a.token} ${t.token} ${b.token} ON ${on.token}"
case FlatJoinContext(t, a, on) => stmt"${t.token} ${a.token} ON ${on.token}"
stmt"${sourceTokenizer.token(a)} ${t.token} ${sourceTokenizer.token(b)} ON ${on.token}"
case FlatJoinContext(t, a, on) =>
stmt"${t.token} ${sourceTokenizer.token(a)} ON ${on.token}"
}
implicit val joinTypeTokenizer: Tokenizer[JoinType] = Tokenizer[JoinType] {
case JoinType.InnerJoin => stmt"INNER JOIN"
case JoinType.LeftJoin => stmt"LEFT JOIN"
case JoinType.RightJoin => stmt"RIGHT JOIN"
case JoinType.FullJoin => stmt"FULL JOIN"
}
implicit def orderByCriteriaTokenizer(implicit
given orderByCriteriaTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[OrderByCriteria] = Tokenizer[OrderByCriteria] {
@@ -370,50 +419,7 @@ trait SqlIdiom extends Idiom {
stmt"${scopedTokenizer(ast)} DESC NULLS LAST"
}
implicit val unaryOperatorTokenizer: Tokenizer[UnaryOperator] =
Tokenizer[UnaryOperator] {
case NumericOperator.`-` => stmt"-"
case BooleanOperator.`!` => stmt"NOT"
case StringOperator.`toUpperCase` => stmt"UPPER"
case StringOperator.`toLowerCase` => stmt"LOWER"
case StringOperator.`toLong` => stmt"" // cast is implicit
case StringOperator.`toInt` => stmt"" // cast is implicit
case SetOperator.`isEmpty` => stmt"NOT EXISTS"
case SetOperator.`nonEmpty` => stmt"EXISTS"
}
implicit val aggregationOperatorTokenizer: Tokenizer[AggregationOperator] =
Tokenizer[AggregationOperator] {
case AggregationOperator.`min` => stmt"MIN"
case AggregationOperator.`max` => stmt"MAX"
case AggregationOperator.`avg` => stmt"AVG"
case AggregationOperator.`sum` => stmt"SUM"
case AggregationOperator.`size` => stmt"COUNT"
}
implicit val binaryOperatorTokenizer: Tokenizer[BinaryOperator] =
Tokenizer[BinaryOperator] {
case EqualityOperator.`==` => stmt"="
case EqualityOperator.`!=` => stmt"<>"
case BooleanOperator.`&&` => stmt"AND"
case BooleanOperator.`||` => stmt"OR"
case StringOperator.`concat` => stmt"||"
case StringOperator.`startsWith` =>
fail("bug: this code should be unreachable")
case StringOperator.`split` => stmt"SPLIT"
case NumericOperator.`-` => stmt"-"
case NumericOperator.`+` => stmt"+"
case NumericOperator.`*` => stmt"*"
case NumericOperator.`>` => stmt">"
case NumericOperator.`>=` => stmt">="
case NumericOperator.`<` => stmt"<"
case NumericOperator.`<=` => stmt"<="
case NumericOperator.`/` => stmt"/"
case NumericOperator.`%` => stmt"%"
case SetOperator.`contains` => stmt"IN"
}
implicit def propertyTokenizer(implicit
given propertyTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Property] = {
@@ -480,7 +486,7 @@ trait SqlIdiom extends Idiom {
}
}
implicit def valueTokenizer(implicit
given valueTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Value] = Tokenizer[Value] {
@@ -492,7 +498,7 @@ trait SqlIdiom extends Idiom {
case CaseClass(values) => stmt"${values.map(_._2).token}"
}
implicit def infixTokenizer(implicit
given infixTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Infix] = Tokenizer[Infix] {
@@ -502,19 +508,19 @@ trait SqlIdiom extends Idiom {
Statement(Interleave(pt, pr))
}
implicit def identTokenizer(implicit
given identTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Ident] =
Tokenizer[Ident](e => strategy.default(e.name).token)
implicit def externalIdentTokenizer(implicit
given externalIdentTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[ExternalIdent] =
Tokenizer[ExternalIdent](e => strategy.default(e.name).token)
implicit def assignmentTokenizer(implicit
given (using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Assignment] = Tokenizer[Assignment] {
@@ -522,7 +528,7 @@ trait SqlIdiom extends Idiom {
stmt"${prop.token} = ${scopedTokenizer(value)}"
}
implicit def defaultAstTokenizer(implicit
given defaultAstTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Action] = {
@@ -533,7 +539,7 @@ trait SqlIdiom extends Idiom {
actionTokenizer(insertEntityTokenizer)(using actionAstTokenizer, strategy)
}
protected def actionAstTokenizer(implicit
protected def actionAstTokenizer(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
) =
@@ -554,7 +560,7 @@ trait SqlIdiom extends Idiom {
)
}
def returnListTokenizer(implicit
def returnListTokenizer(using
tokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[List[Ast]] = {
@@ -574,7 +580,7 @@ trait SqlIdiom extends Idiom {
protected def actionTokenizer(
insertEntityTokenizer: Tokenizer[Entity]
)(implicit
)(using
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Action] =
@@ -636,15 +642,7 @@ trait SqlIdiom extends Idiom {
fail(s"Action ast can't be translated to sql: '$other'")
}
implicit def entityTokenizer(implicit
astTokenizer: Tokenizer[Ast],
strategy: NamingStrategy
): Tokenizer[Entity] = Tokenizer[Entity] {
case Entity.Opinionated(name, _, renameable) =>
tokenizeTable(strategy, name, renameable).token
}
protected def scopedTokenizer(ast: Ast)(implicit tokenizer: Tokenizer[Ast]) =
protected def scopedTokenizer(ast: Ast)(using tokenizer: Tokenizer[Ast]) =
ast match {
case _: Query => stmt"(${ast.token})"
case _: BinaryOperation => stmt"(${ast.token})"
@@ -680,13 +678,12 @@ object SqlIdiom {
query: ReturningAction
)(implicit strategy: NamingStrategy) = {
val idiom = copyIdiom(parentIdiom, Some(query.alias))
import idiom._
import idiom.{*, given}
implicit val stableTokenizer: Tokenizer[Ast] = idiom.astTokenizer(using
given Tokenizer[Ast] = idiom.astTokenizer(using
new Tokenizer[Ast] { self =>
extension (v: Ast) {
def token = astTokenizer(using self, strategy).token(v)
}
def token(v: Ast) = astTokenizer(using self, strategy).token(v)
},
strategy
)

@@ -19,12 +19,10 @@ case class FlatJoinContext(t: JoinType, a: FromContext, on: Ast)
sealed trait SqlQuery {
override def toString = {
import MirrorSqlDialect.*
import MirrorSqlDialect.{*, given}
import minisql.idiom.StatementInterpolator.*
given Tokenizer[SqlQuery] = sqlQueryTokenizer(using
defaultTokenizer(using Literal),
Literal
)
given NamingStrategy = Literal
given Tokenizer[Ast] = defaultTokenizer(using Literal)
summon[Tokenizer[SqlQuery]].token(this).toString()
}
}

@@ -18,24 +18,27 @@ object StatementInterpolator {
}
}
trait Tokenizer[T] {
extension (v: T) {
def token: Token
}
def token(v: T): Token
}
object Tokenizer {
def apply[T](f: T => Token): Tokenizer[T] = new Tokenizer[T] {
extension (v: T) {
def token: Token = f(v)
def withSelf[T](f: Tokenizer[T] ?=> (T => Token)): Tokenizer[T] =
new Tokenizer[T] { self =>
def token(v: T): Token = f(using self)(v)
}
def apply[T](f: T => Token): Tokenizer[T] = new Tokenizer[T] {
def token(v: T): Token = f(v)
}
def withFallback[T](
fallback: Tokenizer[T] => Tokenizer[T]
)(pf: PartialFunction[T, Token]) =
new Tokenizer[T] {
extension (v: T) {
private def stable = fallback(this)
override def token = pf.applyOrElse(v, stable.token)
new Tokenizer[T] { self =>
override def token(v: T): Token = {
def stable = fallback(self)
pf.applyOrElse(v, stable.token)
}
}
}
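
For reference, a minimal, self-contained sketch of the new Tokenizer encoding and of how withSelf ties the recursive knot. This is an illustration only: Token is simplified to String, and Expr, exprTokenizer, and demo are hypothetical names standing in for a recursive type such as SqlQuery; the real Token/Statement machinery lives in StatementInterpolator.

// Sketch only: minisql's real Token is a Statement AST, not a String.
type Token = String

trait Tokenizer[T] {
  def token(v: T): Token
}

object Tokenizer {
  // Wrap a plain function as a Tokenizer.
  def apply[T](f: T => Token): Tokenizer[T] = new Tokenizer[T] {
    def token(v: T): Token = f(v)
  }

  // Pass the instance under construction back into the body as a context
  // parameter, so recursive cases can tokenize sub-terms with itself.
  def withSelf[T](f: Tokenizer[T] ?=> (T => Token)): Tokenizer[T] =
    new Tokenizer[T] { self =>
      def token(v: T): Token = f(using self)(v)
    }
}

// Hypothetical recursive type, standing in for nested queries.
enum Expr:
  case Lit(n: Int)
  case Neg(e: Expr)

val exprTokenizer: Tokenizer[Expr] = Tokenizer.withSelf[Expr] { self ?=>
  {
    case Expr.Lit(n) => n.toString
    case Expr.Neg(e) => s"-(${self.token(e)})"
  }
}

@main def demo(): Unit =
  println(exprTokenizer.token(Expr.Neg(Expr.Lit(3)))) // prints -(3)

The context-function parameter replaces the old pattern of reaching for `this` from inside an extension block, which is what sqlQueryTokenizer relies on above to tokenize the sub-queries of a SetOperationSqlQuery.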