Convert to Scala 3 given/using syntax

commit ed736783fc
parent 14f15da10c
Author: jilen
Date:   2025-07-15 18:47:34 +08:00

3 changed files with 120 additions and 122 deletions


@@ -65,15 +65,12 @@ trait SqlIdiom extends Idiom {
   def defaultTokenizer(using naming: NamingStrategy): Tokenizer[Ast] =
     new Tokenizer[Ast] {
-      private val stableTokenizer = astTokenizer(using this, naming)
-      extension (v: Ast) {
-        def token = stableTokenizer.token(v)
-      }
+      private def stableTokenizer = astTokenizer(using this, naming)
+      def token(v: Ast) = stableTokenizer.token(v)
     }

-  def astTokenizer(using
+  def astTokenizer(implicit
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Ast] =
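
Note: the pattern above repeats throughout this file: implicit def / implicit val instances become givens, implicit parameter clauses become using clauses, and the Tokenizer contract changes from an extension block to a plain token(v) method. A minimal sketch of the shape, with hypothetical names (Foo, intTokenizer) that are not code from this commit:

type Token = String // stand-in for the real Token type
trait Tokenizer[T] { def token(v: T): Token }
case class Foo(n: Int)

// before (Scala 2 style):
//   implicit def fooTokenizer(implicit it: Tokenizer[Int]): Tokenizer[Foo] = ...
// after (Scala 3 style): a conditional given with a using clause
given intTokenizer: Tokenizer[Int] = v => v.toString
given fooTokenizer(using it: Tokenizer[Int]): Tokenizer[Foo] = v => it.token(v.n)

// explicit application supplies the context argument with `using`,
// which Scala 3 also accepts for old-style `implicit` parameter lists:
val t: Token = fooTokenizer(using intTokenizer).token(Foo(1)) // "1"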
@@ -98,7 +95,7 @@ trait SqlIdiom extends Idiom {
         fail(s"Malformed or unsupported construct: $a.")
     }

-  implicit def ifTokenizer(implicit
+  given ifTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[If] = Tokenizer[If] {
@@ -185,10 +182,28 @@ trait SqlIdiom extends Idiom {
     def apply = stmt"SELECT $withLimitOffset"
   }

-  implicit def sqlQueryTokenizer(implicit
+  given unaryOperatorTokenizer: Tokenizer[UnaryOperator] =
+    Tokenizer[UnaryOperator] {
+      case NumericOperator.`-` => stmt"-"
+      case BooleanOperator.`!` => stmt"NOT"
+      case StringOperator.`toUpperCase` => stmt"UPPER"
+      case StringOperator.`toLowerCase` => stmt"LOWER"
+      case StringOperator.`toLong` => stmt"" // cast is implicit
+      case StringOperator.`toInt` => stmt"" // cast is implicit
+      case SetOperator.`isEmpty` => stmt"NOT EXISTS"
+      case SetOperator.`nonEmpty` => stmt"EXISTS"
+    }
+
+  given setOperationTokenizer: Tokenizer[SetOperation] =
+    Tokenizer[SetOperation] {
+      case UnionOperation => stmt"UNION"
+      case UnionAllOperation => stmt"UNION ALL"
+    }
+
+  given sqlQueryTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
-  ): Tokenizer[SqlQuery] = Tokenizer[SqlQuery] {
+  ): Tokenizer[SqlQuery] = Tokenizer.withSelf[SqlQuery] {
     case q: FlattenSqlQuery =>
       new FlattenSqlQueryTokenizerHelper(q).apply
     case SetOperationSqlQuery(a, op, b) =>
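
Note: Tokenizer.withSelf (added in StatementInterpolator, last file below) exists because a SqlQuery can contain sub-queries of the same type: the SetOperationSqlQuery branch must tokenize a and b with the very tokenizer being defined. withSelf threads the instance under construction back into the body as a context value. A self-contained sketch of the mechanism, with Show and Tree as stand-ins for Tokenizer and SqlQuery:

trait Show[T] { def show(v: T): String }
object Show {
  // Same shape as Tokenizer.withSelf: the new instance is handed to the body.
  def withSelf[T](f: Show[T] ?=> (T => String)): Show[T] =
    new Show[T] { self => def show(v: T): String = f(using self)(v) }
}

enum Tree { case Leaf(n: Int); case Node(l: Tree, r: Tree) }

val showTree: Show[Tree] = Show.withSelf[Tree] {
  case Tree.Leaf(n) => n.toString
  case Tree.Node(l, r) =>
    // summon finds the instance under construction, enabling recursion
    s"(${summon[Show[Tree]].show(l)} ${summon[Show[Tree]].show(r)})"
}
// showTree.show(Tree.Node(Tree.Leaf(1), Tree.Leaf(2))) == "(1 2)"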
@@ -220,7 +235,16 @@ trait SqlIdiom extends Idiom {
   protected def tokenizeAlias(strategy: NamingStrategy, table: String) =
     strategy.default(table)

-  implicit def selectValueTokenizer(implicit
+  given aggregationOperatorTokenizer: Tokenizer[AggregationOperator] =
+    Tokenizer[AggregationOperator] {
+      case AggregationOperator.`min` => stmt"MIN"
+      case AggregationOperator.`max` => stmt"MAX"
+      case AggregationOperator.`avg` => stmt"AVG"
+      case AggregationOperator.`sum` => stmt"SUM"
+      case AggregationOperator.`size` => stmt"COUNT"
+    }
+
+  given selectValueTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[SelectValue] = {
@@ -260,7 +284,39 @@ trait SqlIdiom extends Idiom {
       tokenizer(using customAstTokenizer)
   }

-  implicit def operationTokenizer(implicit
+  given binaryOperatorTokenizer: Tokenizer[BinaryOperator] =
+    Tokenizer[BinaryOperator] {
+      case EqualityOperator.`==` => stmt"="
+      case EqualityOperator.`!=` => stmt"<>"
+      case BooleanOperator.`&&` => stmt"AND"
+      case BooleanOperator.`||` => stmt"OR"
+      case StringOperator.`concat` => stmt"||"
+      case StringOperator.`startsWith` =>
+        fail("bug: this code should be unreachable")
+      case StringOperator.`split` => stmt"SPLIT"
+      case NumericOperator.`-` => stmt"-"
+      case NumericOperator.`+` => stmt"+"
+      case NumericOperator.`*` => stmt"*"
+      case NumericOperator.`>` => stmt">"
+      case NumericOperator.`>=` => stmt">="
+      case NumericOperator.`<` => stmt"<"
+      case NumericOperator.`<=` => stmt"<="
+      case NumericOperator.`/` => stmt"/"
+      case NumericOperator.`%` => stmt"%"
+      case SetOperator.`contains` => stmt"IN"
+    }
+
+  given optionOperationTokenizer(using
+    astTokenizer: Tokenizer[Ast],
+    strategy: NamingStrategy
+  ): Tokenizer[OptionOperation] = Tokenizer[OptionOperation] {
+    case OptionIsEmpty(ast) => stmt"${ast.token} IS NULL"
+    case OptionNonEmpty(ast) => stmt"${ast.token} IS NOT NULL"
+    case OptionIsDefined(ast) => stmt"${ast.token} IS NOT NULL"
+    case other => fail(s"Malformed or unsupported construct: $other.")
+  }
+
+  given operationTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Operation] = Tokenizer[Operation] {
@@ -296,25 +352,9 @@ trait SqlIdiom extends Idiom {
     case e: FunctionApply => fail(s"Can't translate the ast to sql: '$e'")
   }

-  implicit def optionOperationTokenizer(implicit
-    astTokenizer: Tokenizer[Ast],
-    strategy: NamingStrategy
-  ): Tokenizer[OptionOperation] = Tokenizer[OptionOperation] {
-    case OptionIsEmpty(ast) => stmt"${ast.token} IS NULL"
-    case OptionNonEmpty(ast) => stmt"${ast.token} IS NOT NULL"
-    case OptionIsDefined(ast) => stmt"${ast.token} IS NOT NULL"
-    case other => fail(s"Malformed or unsupported construct: $other.")
-  }
-
-  implicit val setOperationTokenizer: Tokenizer[SetOperation] =
-    Tokenizer[SetOperation] {
-      case UnionOperation => stmt"UNION"
-      case UnionAllOperation => stmt"UNION ALL"
-    }
-
   protected def limitOffsetToken(
     query: Statement
-  )(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
+  )(using astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
     Tokenizer[(Option[Ast], Option[Ast])] {
       case (None, None) => query
       case (Some(limit), None) => stmt"$query LIMIT ${limit.token}"
@@ -325,10 +365,25 @@ trait SqlIdiom extends Idiom {
   protected def tokenOrderBy(
     criterias: List[OrderByCriteria]
-  )(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
+  )(using astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
     stmt"ORDER BY ${criterias.token}"

-  implicit def sourceTokenizer(implicit
+  given entityTokenizer(using
+    astTokenizer: Tokenizer[Ast],
+    strategy: NamingStrategy
+  ): Tokenizer[Entity] = Tokenizer[Entity] {
+    case Entity.Opinionated(name, _, renameable) =>
+      tokenizeTable(strategy, name, renameable).token
+  }
+
+  given joinTypeTokenizer: Tokenizer[JoinType] = Tokenizer[JoinType] {
+    case JoinType.InnerJoin => stmt"INNER JOIN"
+    case JoinType.LeftJoin => stmt"LEFT JOIN"
+    case JoinType.RightJoin => stmt"RIGHT JOIN"
+    case JoinType.FullJoin => stmt"FULL JOIN"
+  }
+
+  given sourceTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[FromContext] = Tokenizer[FromContext] {
@@ -341,18 +396,12 @@ trait SqlIdiom extends Idiom {
     case InfixContext(infix, alias) =>
       stmt"(${(infix: Ast).token}) AS ${strategy.default(alias).token}"
     case JoinContext(t, a, b, on) =>
-      stmt"${a.token} ${t.token} ${b.token} ON ${on.token}"
-    case FlatJoinContext(t, a, on) => stmt"${t.token} ${a.token} ON ${on.token}"
+      stmt"${sourceTokenizer.token(a)} ${t.token} ${sourceTokenizer.token(b)} ON ${on.token}"
+    case FlatJoinContext(t, a, on) =>
+      stmt"${t.token} ${sourceTokenizer.token(a)} ON ${on.token}"
   }

-  implicit val joinTypeTokenizer: Tokenizer[JoinType] = Tokenizer[JoinType] {
-    case JoinType.InnerJoin => stmt"INNER JOIN"
-    case JoinType.LeftJoin => stmt"LEFT JOIN"
-    case JoinType.RightJoin => stmt"RIGHT JOIN"
-    case JoinType.FullJoin => stmt"FULL JOIN"
-  }
-
-  implicit def orderByCriteriaTokenizer(implicit
+  given orderByCriteriaTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[OrderByCriteria] = Tokenizer[OrderByCriteria] {
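
Note: a consequence of dropping the extension method shows up in the join branches above: inside the body of sourceTokenizer no given Tokenizer[FromContext] is in scope yet (it is still being defined), so the code recurses by naming the given method explicitly instead of writing a.token. The same move in miniature, with assumed names:

trait Show[T] { def show(v: T): String }
object Show { def apply[T](f: T => String): Show[T] = v => f(v) }

given intsShow: Show[List[Int]] = Show {
  case Nil    => "[]"
  case h :: t => s"$h :: ${intsShow.show(t)}" // recurse via the given's name
}
// summon[Show[List[Int]]].show(List(1, 2)) == "1 :: 2 :: []"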
@@ -370,50 +419,7 @@ trait SqlIdiom extends Idiom {
       stmt"${scopedTokenizer(ast)} DESC NULLS LAST"
   }

-  implicit val unaryOperatorTokenizer: Tokenizer[UnaryOperator] =
-    Tokenizer[UnaryOperator] {
-      case NumericOperator.`-` => stmt"-"
-      case BooleanOperator.`!` => stmt"NOT"
-      case StringOperator.`toUpperCase` => stmt"UPPER"
-      case StringOperator.`toLowerCase` => stmt"LOWER"
-      case StringOperator.`toLong` => stmt"" // cast is implicit
-      case StringOperator.`toInt` => stmt"" // cast is implicit
-      case SetOperator.`isEmpty` => stmt"NOT EXISTS"
-      case SetOperator.`nonEmpty` => stmt"EXISTS"
-    }
-
-  implicit val aggregationOperatorTokenizer: Tokenizer[AggregationOperator] =
-    Tokenizer[AggregationOperator] {
-      case AggregationOperator.`min` => stmt"MIN"
-      case AggregationOperator.`max` => stmt"MAX"
-      case AggregationOperator.`avg` => stmt"AVG"
-      case AggregationOperator.`sum` => stmt"SUM"
-      case AggregationOperator.`size` => stmt"COUNT"
-    }
-
-  implicit val binaryOperatorTokenizer: Tokenizer[BinaryOperator] =
-    Tokenizer[BinaryOperator] {
-      case EqualityOperator.`==` => stmt"="
-      case EqualityOperator.`!=` => stmt"<>"
-      case BooleanOperator.`&&` => stmt"AND"
-      case BooleanOperator.`||` => stmt"OR"
-      case StringOperator.`concat` => stmt"||"
-      case StringOperator.`startsWith` =>
-        fail("bug: this code should be unreachable")
-      case StringOperator.`split` => stmt"SPLIT"
-      case NumericOperator.`-` => stmt"-"
-      case NumericOperator.`+` => stmt"+"
-      case NumericOperator.`*` => stmt"*"
-      case NumericOperator.`>` => stmt">"
-      case NumericOperator.`>=` => stmt">="
-      case NumericOperator.`<` => stmt"<"
-      case NumericOperator.`<=` => stmt"<="
-      case NumericOperator.`/` => stmt"/"
-      case NumericOperator.`%` => stmt"%"
-      case SetOperator.`contains` => stmt"IN"
-    }
-
-  implicit def propertyTokenizer(implicit
+  given propertyTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Property] = {
@@ -480,7 +486,7 @@ trait SqlIdiom extends Idiom {
     }
   }

-  implicit def valueTokenizer(implicit
+  given valueTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Value] = Tokenizer[Value] {
@@ -492,7 +498,7 @@ trait SqlIdiom extends Idiom {
     case CaseClass(values) => stmt"${values.map(_._2).token}"
   }

-  implicit def infixTokenizer(implicit
+  given infixTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Infix] = Tokenizer[Infix] {
@@ -502,19 +508,19 @@ trait SqlIdiom extends Idiom {
     Statement(Interleave(pt, pr))
   }

-  implicit def identTokenizer(implicit
+  given identTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Ident] =
     Tokenizer[Ident](e => strategy.default(e.name).token)

-  implicit def externalIdentTokenizer(implicit
+  given externalIdentTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[ExternalIdent] =
     Tokenizer[ExternalIdent](e => strategy.default(e.name).token)

-  implicit def assignmentTokenizer(implicit
+  given (using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Assignment] = Tokenizer[Assignment] {
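
Note: the assignment tokenizer becomes an anonymous given: given (using ...): Tokenizer[Assignment] has no name and is summoned by type alone. A tiny illustration of anonymous givens, with a hypothetical Greeter type:

trait Greeter { def greet: String }

given (using name: String): Greeter = new Greeter { def greet = s"hi $name" }
given String = "jilen"

val g = summon[Greeter].greet // "hi jilen"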
@@ -522,7 +528,7 @@ trait SqlIdiom extends Idiom {
       stmt"${prop.token} = ${scopedTokenizer(value)}"
   }

-  implicit def defaultAstTokenizer(implicit
+  given defaultAstTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Action] = {
@@ -533,7 +539,7 @@ trait SqlIdiom extends Idiom {
     actionTokenizer(insertEntityTokenizer)(using actionAstTokenizer, strategy)
   }

-  protected def actionAstTokenizer(implicit
+  protected def actionAstTokenizer(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ) =
@@ -554,7 +560,7 @@ trait SqlIdiom extends Idiom {
       )
   }

-  def returnListTokenizer(implicit
+  def returnListTokenizer(using
     tokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[List[Ast]] = {
@@ -574,7 +580,7 @@ trait SqlIdiom extends Idiom {
   protected def actionTokenizer(
     insertEntityTokenizer: Tokenizer[Entity]
-  )(implicit
+  )(using
     astTokenizer: Tokenizer[Ast],
     strategy: NamingStrategy
   ): Tokenizer[Action] =
@@ -636,15 +642,7 @@ trait SqlIdiom extends Idiom {
         fail(s"Action ast can't be translated to sql: '$other'")
     }

-  implicit def entityTokenizer(implicit
-    astTokenizer: Tokenizer[Ast],
-    strategy: NamingStrategy
-  ): Tokenizer[Entity] = Tokenizer[Entity] {
-    case Entity.Opinionated(name, _, renameable) =>
-      tokenizeTable(strategy, name, renameable).token
-  }
-
-  protected def scopedTokenizer(ast: Ast)(implicit tokenizer: Tokenizer[Ast]) =
+  protected def scopedTokenizer(ast: Ast)(using tokenizer: Tokenizer[Ast]) =
     ast match {
       case _: Query => stmt"(${ast.token})"
       case _: BinaryOperation => stmt"(${ast.token})"
@@ -680,13 +678,12 @@ object SqlIdiom {
     query: ReturningAction
   )(implicit strategy: NamingStrategy) = {
     val idiom = copyIdiom(parentIdiom, Some(query.alias))
-    import idiom._
+    import idiom.{*, given}

-    implicit val stableTokenizer: Tokenizer[Ast] = idiom.astTokenizer(using
+    given Tokenizer[Ast] = idiom.astTokenizer(using
       new Tokenizer[Ast] { self =>
-        extension (v: Ast) {
-          def token = astTokenizer(using self, strategy).token(v)
-        }
+        def token(v: Ast) = astTokenizer(using self, strategy).token(v)
       },
       strategy
     )
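
Note: import idiom._ had to become import idiom.{*, given} because Scala 3 wildcard imports no longer bring given instances into scope; a separate given selector is required. Illustration with a made-up Dialect object:

object Dialect {
  val name: String = "mirror"
  given strategy: Int = 1 // stand-in for a given NamingStrategy
}

import Dialect.*          // imports `name` but NOT the given
import Dialect.given      // imports given instances only
// import Dialect.{*, given} imports both, as the diff above does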


@@ -19,12 +19,10 @@ case class FlatJoinContext(t: JoinType, a: FromContext, on: Ast)
 sealed trait SqlQuery {
   override def toString = {
-    import MirrorSqlDialect.*
+    import MirrorSqlDialect.{*, given}
     import minisql.idiom.StatementInterpolator.*
-    given Tokenizer[SqlQuery] = sqlQueryTokenizer(using
-      defaultTokenizer(using Literal),
-      Literal
-    )
+    given NamingStrategy = Literal
+    given Tokenizer[Ast] = defaultTokenizer(using Literal)
     summon[Tokenizer[SqlQuery]].token(this).toString()
   }
 }
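
Note: the rewritten toString no longer builds a Tokenizer[SqlQuery] by hand. It declares the two leaf givens (NamingStrategy and Tokenizer[Ast]) and lets summon resolve sqlQueryTokenizer's using clause automatically. The same resolution in miniature, with assumed Enc names:

trait Enc[T] { def enc(v: T): String }

given intEnc: Enc[Int] = v => v.toString
given listEnc[T](using e: Enc[T]): Enc[List[T]] =
  vs => vs.map(e.enc).mkString("[", ",", "]")

// summon assembles listEnc(using intEnc) automatically:
val s = summon[Enc[List[Int]]].enc(List(1, 2, 3)) // "[1,2,3]"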


@@ -18,24 +18,27 @@ object StatementInterpolator {
     }
   }

   trait Tokenizer[T] {
-    extension (v: T) {
-      def token: Token
-    }
+    def token(v: T): Token
   }

   object Tokenizer {
+    def withSelf[T](f: Tokenizer[T] ?=> (T => Token)): Tokenizer[T] =
+      new Tokenizer[T] { self =>
+        def token(v: T): Token = f(using self)(v)
+      }
+
     def apply[T](f: T => Token): Tokenizer[T] = new Tokenizer[T] {
-      extension (v: T) {
-        def token: Token = f(v)
-      }
+      def token(v: T): Token = f(v)
     }

     def withFallback[T](
       fallback: Tokenizer[T] => Tokenizer[T]
     )(pf: PartialFunction[T, Token]) =
-      new Tokenizer[T] {
-        extension (v: T) {
-          private def stable = fallback(this)
-          override def token = pf.applyOrElse(v, stable.token)
-        }
+      new Tokenizer[T] { self =>
+        override def token(v: T): Token = {
+          def stable = fallback(self)
+          pf.applyOrElse(v, stable.token)
+        }
       }
   }
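
Note: taken together, the new Tokenizer is a plain single-method typeclass plus combinators. A self-contained approximation with String standing in for Token, exercising apply and withFallback (withSelf is sketched after the sqlQueryTokenizer hunk above):

trait Tok[T] { def token(v: T): String }

object Tok {
  def apply[T](f: T => String): Tok[T] = new Tok[T] {
    def token(v: T): String = f(v)
  }
  // Tries the partial function first, then a tokenizer derived from `self`.
  def withFallback[T](fallback: Tok[T] => Tok[T])(pf: PartialFunction[T, String]): Tok[T] =
    new Tok[T] { self =>
      def token(v: T): String = pf.applyOrElse(v, fallback(self).token)
    }
}

@main def demo(): Unit = {
  val base  = Tok[Int](_.toString)
  val named = Tok.withFallback[Int](_ => base) { case 0 => "zero" }
  println(named.token(0)) // zero (handled by the partial function)
  println(named.token(7)) // 7    (delegated to the fallback)
}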