factorize antlr related code
ericvergnaud committed Nov 15, 2024
1 parent 444edfe commit e857f0d
Showing 3 changed files with 51 additions and 52 deletions.
AntlrPlanParser.scala (new file)
@@ -0,0 +1,38 @@
+package com.databricks.labs.remorph.parsers
+
+import com.databricks.labs.remorph.{BuildingAst, KoResult, Parsing, Transformation, WorkflowStage}
+import com.databricks.labs.remorph.intermediate.{LogicalPlan, ParsingErrors, PlanGenerationFailure}
+import org.antlr.v4.runtime.{Parser, ParserRuleContext}
+
+import scala.util.control.NonFatal
+
+trait AntlrPlanParser extends PlanParser {
+
+  def setErrorListener(parser: Parser, listener: ProductionErrorCollector): ProductionErrorCollector = {
+    parser.removeErrorListeners()
+    parser.addErrorListener(listener)
+    listener
+  }
+
+  def generatePlan(
+      context: ParserRuleContext,
+      createPlan: () => LogicalPlan,
+      errListener: ProductionErrorCollector): Transformation[LogicalPlan] = {
+    if (errListener.errorCount > 0) {
+      lift(KoResult(stage = WorkflowStage.PARSE, ParsingErrors(errListener.errors)))
+    } else {
+      update {
+        case p: Parsing => BuildingAst(context, Some(p))
+        case _ => BuildingAst(context)
+      }.flatMap { _ =>
+        try {
+          ok(createPlan())
+        } catch {
+          case NonFatal(e) =>
+            lift(KoResult(stage = WorkflowStage.PLAN, PlanGenerationFailure(e)))
+        }
+      }
+    }
+  }
+
+}
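For orientation, the two dialect parsers below reduce to the following shape once they mix in the trait. This is a minimal sketch, not part of the commit: the MyDialect* names, the parse signature, and the ??? plan builder are hypothetical stand-ins, and the full PlanParser contract (dialect, optimizer hooks) is only partly visible in this diff.

    import com.databricks.labs.remorph.intermediate.LogicalPlan
    import com.databricks.labs.remorph.parsers.{AntlrPlanParser, PlanParser, ProductionErrorCollector}
    import com.databricks.labs.remorph.{Parsing, Transformation}
    import org.antlr.v4.runtime.tree.ParseTree
    import org.antlr.v4.runtime.{CharStreams, CommonTokenStream}

    // Sketch only: MyDialectLexer, MyDialectParser and myDialectFile() are hypothetical.
    class MyDialectPlanParser extends PlanParser with AntlrPlanParser {

      def parse(parsing: Parsing): Transformation[LogicalPlan] = {
        val lexer = new MyDialectLexer(CharStreams.fromString(parsing.source))
        val tokens = new CommonTokenStream(lexer)
        val parser = new MyDialectParser(tokens)
        // setErrorListener replaces the default listeners with the given ProductionErrorCollector
        val errListener = setErrorListener(parser, new ProductionErrorCollector(parsing.source, parsing.filename))
        val tree = parser.myDialectFile()
        // generatePlan fails fast on syntax errors; otherwise it switches the state to
        // BuildingAst, runs the plan builder, and wraps any exception in a PlanGenerationFailure
        generatePlan(tree, () => createPlan(tokens, tree), errListener)
      }

      private def createPlan(tokens: CommonTokenStream, tree: ParseTree): LogicalPlan = ??? // dialect AST builder
    }

Both SnowflakePlanParser and TSqlPlanParser in the remaining two files follow exactly this pattern.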
SnowflakePlanParser.scala
@@ -1,15 +1,13 @@
 package com.databricks.labs.remorph.parsers.snowflake

-import com.databricks.labs.remorph.intermediate.{LogicalPlan, ParsingErrors, PlanGenerationFailure}
-import com.databricks.labs.remorph.parsers.{PlanParser, ProductionErrorCollector}
+import com.databricks.labs.remorph.intermediate.LogicalPlan
+import com.databricks.labs.remorph.parsers.{AntlrPlanParser, PlanParser, ProductionErrorCollector}
 import com.databricks.labs.remorph.parsers.snowflake.rules._
-import com.databricks.labs.remorph.{BuildingAst, KoResult, Parsing, Transformation, WorkflowStage, intermediate => ir}
+import com.databricks.labs.remorph.{Parsing, Transformation, intermediate => ir}
 import org.antlr.v4.runtime.tree.ParseTree
 import org.antlr.v4.runtime.{CharStreams, CommonTokenStream}

-import scala.util.control.NonFatal
-
-class SnowflakePlanParser extends PlanParser {
+class SnowflakePlanParser extends PlanParser with AntlrPlanParser {

   private val vc = new SnowflakeVisitorCoordinator(SnowflakeParser.VOCABULARY, SnowflakeParser.ruleNames)

@@ -19,27 +17,9 @@ class SnowflakePlanParser extends PlanParser {
     val tokens = new CommonTokenStream(lexer)
     val parser = new SnowflakeParser(tokens)
     parser.setErrorHandler(new SnowflakeErrorStrategy)
-    val errListener = new ProductionErrorCollector(parsing.source, parsing.filename)
-    parser.removeErrorListeners()
-    parser.addErrorListener(errListener)
+    val errListener = setErrorListener(parser, new ProductionErrorCollector(parsing.source, parsing.filename))
     val tree = parser.snowflakeFile()
-    if (errListener.errorCount > 0) {
-      lift(KoResult(stage = WorkflowStage.PARSE, ParsingErrors(errListener.errors)))
-    } else {
-      update {
-        case p: Parsing => BuildingAst(tree, Some(p))
-        case _ => BuildingAst(tree)
-      }.flatMap { _ =>
-        try {
-          ok(createPlan(tokens, tree))
-        } catch {
-          case NonFatal(e) =>
-            lift(KoResult(stage = WorkflowStage.PLAN, PlanGenerationFailure(e)))
-        }
-      }
-
-    }
-
+    generatePlan(tree, () => createPlan(tokens, tree), errListener)
   }

   private def createPlan(tokens: CommonTokenStream, tree: ParseTree): LogicalPlan = {
TSqlPlanParser.scala
@@ -1,15 +1,13 @@
 package com.databricks.labs.remorph.parsers.tsql

-import com.databricks.labs.remorph.intermediate.{LogicalPlan, ParsingErrors, PlanGenerationFailure}
-import com.databricks.labs.remorph.parsers.{PlanParser, ProductionErrorCollector}
+import com.databricks.labs.remorph.intermediate.LogicalPlan
+import com.databricks.labs.remorph.parsers.{AntlrPlanParser, PlanParser, ProductionErrorCollector}
 import com.databricks.labs.remorph.parsers.tsql.rules.{PullLimitUpwards, TSqlCallMapper, TopPercentToLimitSubquery, TrapInsertDefaultsAction}
-import com.databricks.labs.remorph.{BuildingAst, KoResult, Parsing, Transformation, WorkflowStage, intermediate => ir}
+import com.databricks.labs.remorph.{Parsing, Transformation, intermediate => ir}
 import org.antlr.v4.runtime._
 import org.antlr.v4.runtime.tree.ParseTree

-import scala.util.control.NonFatal
-
-class TSqlPlanParser extends PlanParser {
+class TSqlPlanParser extends PlanParser with AntlrPlanParser {

   val vc = new TSqlVisitorCoordinator(TSqlParser.VOCABULARY, TSqlParser.ruleNames)

@@ -19,33 +17,16 @@ class TSqlPlanParser extends PlanParser {
     val tokens = new CommonTokenStream(lexer)
     val parser = new TSqlParser(tokens)
     parser.setErrorHandler(new TSqlErrorStrategy)
-    val errListener = new ProductionErrorCollector(parsing.source, parsing.filename)
-    parser.removeErrorListeners()
-    parser.addErrorListener(errListener)
+    val errListener = setErrorListener(parser, new ProductionErrorCollector(parsing.source, parsing.filename))
     val tree = parser.tSqlFile()
-    if (errListener.errorCount > 0) {
-      lift(KoResult(stage = WorkflowStage.PARSE, ParsingErrors(errListener.errors)))
-    } else {
-      update {
-        case p: Parsing => BuildingAst(tree, Some(p))
-        case _ => BuildingAst(tree)
-      }.flatMap { _ =>
-        try {
-          ok(createPlan(tokens, tree))
-        } catch {
-          case NonFatal(e) =>
-            lift(KoResult(stage = WorkflowStage.PLAN, PlanGenerationFailure(e)))
-        }
-      }
-
-    }
-
+    generatePlan(tree, () => createPlan(tokens, tree), errListener)
   }

   private def createPlan(tokens: CommonTokenStream, tree: ParseTree): LogicalPlan = {
     val plan = vc.astBuilder.visit(tree)
     plan
   }

   def dialect: String = "tsql"

   // TODO: Note that this is not the correct place for the optimizer, but it is here for now