flink-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From fhueske <...@git.apache.org>
Subject [GitHub] flink pull request #3829: [FLINK-6442] [table] Extend TableAPI Support Sink ...
Date Mon, 18 Sep 2017 20:52:13 GMT
Github user fhueske commented on a diff in the pull request:

    https://github.com/apache/flink/pull/3829#discussion_r139526993
  
    --- Diff: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala
---
    @@ -502,23 +521,130 @@ abstract class TableEnvironment(val config: TableConfig) {
         * @param query The SQL query to evaluate.
         * @return The result of the query as Table.
         */
    +  @deprecated("use [[sqlQuery()]] instead")
       def sql(query: String): Table = {
    +    sqlQuery(query)
    +  }
    +
    +  /**
    +    * Evaluates a SQL Select query on registered tables and retrieves the result as a
    +    * [[Table]].
    +    *
    +    * All tables referenced by the query must be registered in the TableEnvironment. But
    +    * [[Table.toString]] will automatically register a unique table name and return the
    +    * table name. So it allows to call SQL directly on tables like this:
    +    *
    +    * {{{
    +    *   val table: Table = ...
    +    *   // the table is not registered to the table environment
    +    *   tEnv.sqlQuery(s"SELECT * FROM $table")
    +    * }}}
    +    *
    +    * @param query The SQL query to evaluate.
    +    * @return The result of the query as Table
    +    */
    +  def sqlQuery(query: String): Table = {
         val planner = new FlinkPlannerImpl(getFrameworkConfig, getPlanner, getTypeFactory)
         // parse the sql query
         val parsed = planner.parse(query)
    -    // validate the sql query
    -    val validated = planner.validate(parsed)
    -    // transform to a relational tree
    -    val relational = planner.rel(validated)
    +    if (null != parsed && parsed.getKind.belongsTo(SqlKind.QUERY)) {
    +      // validate the sql query
    +      val validated = planner.validate(parsed)
    +      // transform to a relational tree
    +      val relational = planner.rel(validated)
    +      new Table(this, LogicalRelNode(relational.rel))
    +    } else {
    +      throw new TableException(
    +        "Unsupported SQL query! sqlQuery() only accepts SQL queries of type SELECT, UNION, " +
    +          "INTERSECT, EXCEPT, VALUES, WITH, ORDER_BY, and EXPLICIT_TABLE.")
    +    }
    +  }
    +
    +  /**
    +    * Evaluates a SQL statement such as INSERT, UPDATE or DELETE, or a DDL statement.
    +    * Currently only SQL INSERT statements on registered tables are supported.
    +    *
    +    * All tables referenced by the query must be registered in the TableEnvironment. But
    +    * [[Table.toString]] will automatically register a unique table name and return the
    +    * table name, except for table sink tables (table sink tables are not automatically
    +    * registered via [[Table.toString]]). So it allows to call SQL directly on tables like this:
    +    *
    +    * {{{
    +    *   // should register the table sink which will be inserted into
    +    *   tEnv.registerTableSink("target_table", fieldNames, fieldsTypes, tableSink)
    +    *   val sourceTable: Table = ...
    +    *   // sourceTable is not registered to the table environment
    +    *   tEnv.sqlUpdate(s"INSERT INTO target_table SELECT * FROM $sourceTable")
    +    * }}}
    +    *
    +    * @param stmt The SQL statement to evaluate.
    +    */
    +  def sqlUpdate(stmt: String): Unit = {
    +    sqlUpdate(stmt, this.queryConfig)
    +  }
    +
    +  /**
    +    * Evaluates a SQL statement such as INSERT, UPDATE or DELETE, or a DDL statement.
    +    * Currently only SQL INSERT statements on registered tables are supported.
    +    *
    +    * All tables referenced by the query must be registered in the TableEnvironment. But
    +    * [[Table.toString]] will automatically register a unique table name and return the
    +    * table name, except for table sink tables (table sink tables are not automatically
    +    * registered via [[Table.toString]]). So it allows to call SQL directly on tables like this:
    +    *
    +    * {{{
    +    *   // should register the table sink which will be inserted into
    +    *   tEnv.registerTableSink("target_table", fieldNames, fieldsTypes, tableSink)
    +    *   val sourceTable: Table = ...
    +    *   // sourceTable is not registered to the table environment
    +    *   tEnv.sqlUpdate(s"INSERT INTO target_table SELECT * FROM $sourceTable")
    +    * }}}
    +    *
    +    * @param stmt The SQL statement to evaluate.
    +    * @param config The [[QueryConfig]] to use.
    +    */
    +  def sqlUpdate(stmt: String, config: QueryConfig): Unit = {
    +    val planner = new FlinkPlannerImpl(getFrameworkConfig, getPlanner, getTypeFactory)
    +    // parse the sql query
    +    val parsed = planner.parse(stmt)
    +    parsed match {
    +      case insert: SqlInsert => {
    +        // validate the sql query
    +        planner.validate(parsed)
    +
    +        // validate sink table
    +        val targetName = insert.getTargetTable.asInstanceOf[SqlIdentifier].names.get(0)
    +        val targetTable = getTable(targetName)
    +        if (null == targetTable || !targetTable.isInstanceOf[TableSinkTable[_]]) {
    +          throw new TableException("SQL INSERT operation needs a registered TableSink Table!")
    +        }
    +        // validate unsupported partial insertion to sink table
    +        val sinkTable = targetTable.asInstanceOf[TableSinkTable[_]]
    +        if (null != insert.getTargetColumnList && insert.getTargetColumnList.size() !=
    +          sinkTable.tableSink.getFieldTypes.length) {
    +
    +          throw new TableException(
    +            "SQL INSERT requires that the schema of the inserted records exactly matches the " +
    +              "schema of the target table. Record schema:${} Target table schema:${}")
    --- End diff --
    
    Please insert the actual record and target-table schemas here, and make this an interpolated string (`s"..."`) — as a plain string literal the `${}` placeholders are never filled in.


---

Mime
View raw message