Exploring Flink + Hive

Reference

https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/connectors/table/hive/overview/#dependencies

Description

Process summary

  • The whole exploration can be summed up as "pain mixed with joy".
  1. Pain 1: the official docs list a few dependencies outright, but after setting up the environment as instructed, they turned out not to work and threw all kinds of bizarre errors.
  2. Pain 2: there was no test environment, so everything had to be built from scratch.

Enough preamble; on to the useful part.

The pom below may not be the most precise set of dependencies, but it works. Corrections from readers are welcome.

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <flink.version>1.13.0</flink.version>
    <scala.version>2.11</scala.version>
    <hive.version>3.1.2</hive.version>
    <!-- keep the Hadoop artifacts on their own version property instead of reusing hive.version -->
    <hadoop.version>3.1.2</hadoop.version>
  </properties>
  <repositories>
    <repository>
      <id>cloudera-releases</id>
      <url>https://repository.cloudera.com/artifactory/cloudera-repos</url>
      <releases>
        <enabled>true</enabled>
      </releases>
      <snapshots>
        <enabled>false</enabled>
      </snapshots>
    </repository>
  </repositories>

  <dependencies>
    <!-- https://mvnrepository.com/artifact/com.fasterxml.woodstox/woodstox-core -->
    <dependency>
      <groupId>com.fasterxml.woodstox</groupId>
      <artifactId>woodstox-core</artifactId>
      <version>6.2.4</version>
    </dependency>
    <!-- Hadoop client dependencies:
         hadoop-mapreduce-client-core, hadoop-common,
         hadoop-mapreduce-client-common, hadoop-mapreduce-client-jobclient -->
    <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-core -->
    <!-- without these: "cannot find JobConf class" -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
      <version>${hadoop.version}</version>
    </dependency>


    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-clients_${scala.version}</artifactId>
      <version>${flink.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-table-planner_${scala.version}</artifactId>
      <version>${flink.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-table-planner-blink_${scala.version}</artifactId>
      <version>${flink.version}</version>
    </dependency>


    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-table-common</artifactId>
      <version>${flink.version}</version>
      <!--<scope>provided</scope>-->
    </dependency>
    <!-- Flink Dependency -->
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-connector-hive_${scala.version}</artifactId>
      <version>${flink.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-sql-connector-hive-${hive.version}_${scala.version}</artifactId>
      <version>${flink.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-table-api-scala-bridge_${scala.version}</artifactId>
      <version>${flink.version}</version>
    </dependency>

    <!-- Hive Dependency -->
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-exec</artifactId>
      <version>${hive.version}</version>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
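
The error logs later in this post reference a flink_sql-1.0-jar-with-dependencies.jar, i.e., a fat jar. That jar would come from a build step like the following sketch (not part of the original pom; this assumes the standard maven-assembly-plugin, and maven-shade-plugin would work just as well):

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-assembly-plugin</artifactId>
        <configuration>
          <descriptorRefs>
            <!-- produces the *-jar-with-dependencies.jar referenced in the logs below -->
            <descriptorRef>jar-with-dependencies</descriptorRef>
          </descriptorRefs>
        </configuration>
        <executions>
          <execution>
            <id>make-assembly</id>
            <phase>package</phase>
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>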


Scala code

import org.apache.flink.table.api._
import org.apache.flink.table.catalog.hive.HiveCatalog

object FlinkHive {
  def main(args: Array[String]): Unit = {
    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().build()
    val tableEnv = TableEnvironment.create(settings)

    val name            = "myhive"
    val defaultDatabase = "tmp_prgdb"  // the Hive database to use as the default
    val hiveConfDir     = "D:\\IdeaProject\\test_flink\\src\\main\\resources" // directory containing hive-site.xml; hard-coded for local testing

    val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
    tableEnv.registerCatalog("myhive", hive)

    // set the HiveCatalog as the current catalog of the session
    tableEnv.useCatalog("myhive")

    val table: Table = tableEnv.sqlQuery("select * from student") // this SELECT reads from the Hive table
    table.printSchema()

    // register a temporary view (it shadows the Hive table of the same name)
    tableEnv.createTemporaryView("student", table)

    val result = tableEnv.sqlQuery("select * from student")
    result.execute().print() // trigger execution and print the rows
  }

}

Scala streaming code

import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.api.{EnvironmentSettings, Table}
import org.apache.flink.table.catalog.hive.HiveCatalog
import org.apache.flink.types.Row

/**
  * @Author: Jhon_y
  * @Description: reading from Hive through the streaming Table API
  */
object FlinkStreamHive {

  def main(args: Array[String]): Unit = {
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    val settings: EnvironmentSettings = EnvironmentSettings.newInstance().useBlinkPlanner().build()

    val tableEnv = StreamTableEnvironment.create(streamEnv, settings)
//    val tableEnv = StreamTableEnvironment.create(streamEnv) // also works, with default settings

    val name            = "myhive"
    val defaultDatabase = "tmp_prgdb"

    val hiveConfDir     = "D:\\IdeaProject\\test_flink\\src\\main\\resources" // directory containing hive-site.xml
    val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
    tableEnv.registerCatalog("myhive", hive)

    // set the HiveCatalog as the current catalog of the session
    tableEnv.useCatalog("myhive")

    val table: Table = tableEnv.sqlQuery("select * from student")
    table.printSchema()

    // register a temporary view (it shadows the Hive table of the same name)
    tableEnv.createTemporaryView("student", table)

    val result = tableEnv.sqlQuery("select * from student where id = '2'")

//    tableEnv.toChangelogStream(result).print()

    // convert the result Table into a changelog DataStream and print it
    val value: DataStream[Row] = tableEnv.toChangelogStream(result)
    value.map(row => row.getField("id").toString + "," + row.getField("name")).print()

    streamEnv.execute("test a")
  }
}

Java Code

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.types.Row;

public class FlinkHive {
    public static void main(String[] args) throws Exception {
        //settings
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().build();
        //StreamEnv (use the factory method, not the constructor)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // switch to the Hive dialect for Hive-specific DDL ...
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // ... and back to the default dialect for regular Flink SQL (the last call wins)
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);

        //catalog
        String cName = "hive";
        String database = "test";
        String hiveConfDir = "/"; // hive-site.xml is looked up under this directory

        HiveCatalog hiveCatalog = new HiveCatalog(cName, database, hiveConfDir);
        tableEnv.registerCatalog(cName, hiveCatalog); // the catalog must be registered before it can be used

        tableEnv.useCatalog("hive");

        // executeSql runs the statement immediately and returns a result
//        TableResult tableResult = tableEnv.executeSql("select * from test");

        // sqlQuery, by contrast, is still a lazy transformation
        Table table = tableEnv.sqlQuery("select * from test");

        // Table -> DataStream
        DataStream<Row> rowDataStream = tableEnv.toChangelogStream(table);

        // map each Row to the string you want
        SingleOutputStreamOperator<String> id1 = rowDataStream.map(row -> row.getFieldAs("id").toString());
        id1.print();

        env.execute("test flink hive");
    }
}

Problem log


Problem 1: NumberFormatException: For input string: "30s"

Excerpt from the YARN jobmanager.out log (uploaded Mon Nov 22 16:46:21 +0800 2021, length 10134):

2021-11-22 16:46:18,301 - org.apache.hadoop.yarn.client.RMProxy -0    [flink-akka.actor.default-dispatcher-3] INFO  org.apache.hadoop.yarn.client.RMProxy  - Connecting to ResourceManager at hdpnn002-qa.gdc.com/10.31.4.54:8030
2021-11-22 16:46:18,303 - org.apache.hadoop.yarn.ipc.YarnRPC -2    [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.hadoop.yarn.ipc.YarnRPC  - Creating YarnRPC for org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC
2021-11-22 16:46:18,303 - org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC -2    [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC  - Creating a HadoopYarnProtoRpc proxy for protocol interface org.apache.hadoop.yarn.api.ApplicationMasterProtocol
2021-11-22 16:46:18,416 - org.apache.hadoop.yarn.security.AMRMTokenSelector -115  [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.hadoop.yarn.security.AMRMTokenSelector  - Looking for a token with service 10.31.4.54:8030
2021-11-22 16:46:18,416 - org.apache.hadoop.yarn.security.AMRMTokenSelector -115  [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.hadoop.yarn.security.AMRMTokenSelector  - Token kind is YARN_AM_RM_TOKEN and the token's service name is 10.31.4.54:8030
2021-11-22 16:46:18,540 - org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl -239  [flink-akka.actor.default-dispatcher-3] INFO  org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl  - Upper bound of the thread pool size is 500
2021-11-22 16:46:18,541 - org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy -240  [flink-akka.actor.default-dispatcher-3] INFO  org.apache.hadoop.yarn.client.api.impl.ContainerManagementProtocolProxy  - yarn.client.max-cached-nodemanagers-proxies : 0
2021-11-22 16:46:18,541 - org.apache.hadoop.yarn.ipc.YarnRPC -240  [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.hadoop.yarn.ipc.YarnRPC  - Creating YarnRPC for org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC
2021-11-22 16:46:19,306 - org.apache.htrace.core.Tracer -1005 [flink-akka.actor.default-dispatcher-4] DEBUG org.apache.htrace.core.Tracer  - sampler.classes = ; loaded no samplers
2021-11-22 16:46:19,307 - org.apache.htrace.core.Tracer -1006 [flink-akka.actor.default-dispatcher-4] DEBUG org.apache.htrace.core.Tracer  - span.receiver.classes = ; loaded no span receivers
2021-11-22 16:46:19,508 - org.apache.hadoop.hdfs.BlockReaderLocal -1207 [flink-akka.actor.default-dispatcher-4] DEBUG org.apache.hadoop.hdfs.BlockReaderLocal  - dfs.client.use.legacy.blockreader.local = false
2021-11-22 16:46:19,508 - org.apache.hadoop.hdfs.BlockReaderLocal -1207 [flink-akka.actor.default-dispatcher-4] DEBUG org.apache.hadoop.hdfs.BlockReaderLocal  - dfs.client.read.shortcircuit = false
2021-11-22 16:46:19,508 - org.apache.hadoop.hdfs.BlockReaderLocal -1207 [flink-akka.actor.default-dispatcher-4] DEBUG org.apache.hadoop.hdfs.BlockReaderLocal  - dfs.client.domain.socket.data.traffic = false
2021-11-22 16:46:19,508 - org.apache.hadoop.hdfs.BlockReaderLocal -1207 [flink-akka.actor.default-dispatcher-4] DEBUG org.apache.hadoop.hdfs.BlockReaderLocal  - dfs.domain.socket.path = /var/run/hdfs-sockets/dn
16:46:19.513 [flink-akka.actor.default-dispatcher-4] ERROR org.apache.flink.runtime.entrypoint.ClusterEntrypoint - Fatal error occurred in the cluster entrypoint.
org.apache.flink.util.FlinkException: Application failed unexpectedly.
	at org.apache.flink.client.deployment.application.ApplicationDispatcherBootstrap.lambda$runApplicationAndShutdownClusterAsync$0(ApplicationDispatcherBootstrap.java:170) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at java.util.concurrent.CompletableFuture.uniHandle(CompletableFuture.java:822) ~[?:1.8.0_141]
	at java.util.concurrent.CompletableFuture$UniHandle.tryFire(CompletableFuture.java:797) ~[?:1.8.0_141]
	at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474) ~[?:1.8.0_141]
	at java.util.concurrent.CompletableFuture.completeExceptionally(CompletableFuture.java:1977) ~[?:1.8.0_141]
	at org.apache.flink.client.deployment.application.ApplicationDispatcherBootstrap.runApplicationEntryPoint(ApplicationDispatcherBootstrap.java:257) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.deployment.application.ApplicationDispatcherBootstrap.lambda$runApplicationAsync$1(ApplicationDispatcherBootstrap.java:212) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [?:1.8.0_141]
	at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_141]
	at org.apache.flink.runtime.concurrent.akka.ActorSystemScheduledExecutorAdapter$ScheduledFutureTask.run(ActorSystemScheduledExecutorAdapter.java:159) [flink_sql-1.0-jar-with-dependencies.jar:?]
	at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40) [flink_sql-1.0-jar-with-dependencies.jar:?]
	at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:44) [flink_sql-1.0-jar-with-dependencies.jar:?]
	at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) [flink_sql-1.0-jar-with-dependencies.jar:?]
	at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) [flink_sql-1.0-jar-with-dependencies.jar:?]
	at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) [flink_sql-1.0-jar-with-dependencies.jar:?]
	at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) [flink_sql-1.0-jar-with-dependencies.jar:?]
Caused by: java.util.concurrent.CompletionException: org.apache.flink.client.deployment.application.ApplicationExecutionException: Could not execute application.
	at java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:292) ~[?:1.8.0_141]
	at java.util.concurrent.CompletableFuture.completeThrowable(CompletableFuture.java:308) ~[?:1.8.0_141]
	at java.util.concurrent.CompletableFuture.uniCompose(CompletableFuture.java:943) ~[?:1.8.0_141]
	at java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:926) ~[?:1.8.0_141]
	... 13 more
Caused by: org.apache.flink.client.deployment.application.ApplicationExecutionException: Could not execute application.
	... 11 more
Caused by: org.apache.flink.client.program.ProgramInvocationException: The main method caused an error: For input string: "30s"
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:372) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.deployment.application.ApplicationDispatcherBootstrap.runApplicationEntryPoint(ApplicationDispatcherBootstrap.java:242) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	... 10 more
Caused by: java.lang.NumberFormatException: For input string: "30s"
	at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) ~[?:1.8.0_141]
	at java.lang.Long.parseLong(Long.java:589) ~[?:1.8.0_141]
	at java.lang.Long.parseLong(Long.java:631) ~[?:1.8.0_141]
	at org.apache.hadoop.conf.Configuration.getLong(Configuration.java:1535) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.hdfs.DFSClient$Conf.<init>(DFSClient.java:491) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:620) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:604) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:148) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3354) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3403) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3371) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:477) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.hadoop.fs.Path.getFileSystem(Path.java:361) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.table.catalog.hive.HiveCatalog.createHiveConf(HiveCatalog.java:265) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.table.catalog.hive.HiveCatalog.<init>(HiveCatalog.java:180) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.table.catalog.hive.HiveCatalog.<init>(HiveCatalog.java:171) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.table.catalog.hive.HiveCatalog.<init>(HiveCatalog.java:163) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at com.gwm.core.SmallFileMerge$.main(SmallFileMerge.scala:31) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at com.gwm.core.SmallFileMerge.main(SmallFileMerge.scala) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_141]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_141]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_141]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_141]
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.deployment.application.ApplicationDispatcherBootstrap.runApplicationEntryPoint(ApplicationDispatcherBootstrap.java:242) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	... 10 more

Cause (most likely): "30s" is a Hadoop configuration value with a time-unit suffix, which newer Hadoop releases parse but which the older hadoop-common Configuration.getLong bundled into the fat jar cannot. In other words, the job jar and the cluster were mixing Hadoop client versions.
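
A minimal sketch of one possible fix, assuming the cluster already provides a consistent Hadoop classpath (e.g., exported via HADOOP_CLASSPATH): mark the Hadoop artifacts in the pom above as provided so the fat jar stops bundling its own hadoop-common. Shown for one artifact; the same scope would apply to the other three.

  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>${hadoop.version}</version>
    <!-- provided: resolve Hadoop from the cluster instead of the fat jar -->
    <scope>provided</scope>
  </dependency>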

Problem 2: Caused by: org.apache.flink.sql.parser.impl.ParseException: Encountered "table" at line 1, column 18.

Fix: table is a reserved word in Flink SQL, so quote the identifier with backticks: `table` (see the sketch below; the full stack trace follows it).
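
A one-line illustration (assuming a Hive table literally named table):

  // backquote the reserved word so the Flink SQL parser accepts it
  val t = tableEnv.sqlQuery("select * from `table`")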

Caused by: org.apache.flink.sql.parser.impl.ParseException: Encountered "table" at line 1, column 18.
Was expecting one of:
    <BRACKET_QUOTED_IDENTIFIER> ...
    <QUOTED_IDENTIFIER> ...
    <BACK_QUOTED_IDENTIFIER> ...
    <HYPHENATED_IDENTIFIER> ...
    <IDENTIFIER> ...
    <UNICODE_QUOTED_IDENTIFIER> ...
    
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.generateParseException(FlinkSqlParserImpl.java:39897) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.jj_consume_token(FlinkSqlParserImpl.java:39708) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.IdentifierSegment(FlinkSqlParserImpl.java:24185) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.TableIdentifierSegment(FlinkSqlParserImpl.java:24205) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.CompoundTableIdentifier(FlinkSqlParserImpl.java:24690) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.TableRefWithHintsOpt(FlinkSqlParserImpl.java:7019) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.RichSqlInsert(FlinkSqlParserImpl.java:5434) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlStmt(FlinkSqlParserImpl.java:3364) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlStmtEof(FlinkSqlParserImpl.java:3924) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.parseSqlStmtEof(FlinkSqlParserImpl.java:263) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.calcite.sql.parser.SqlParser.parseQuery(SqlParser.java:153) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.calcite.sql.parser.SqlParser.parseStmt(SqlParser.java:180) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.table.planner.parse.CalciteParser.parse(CalciteParser.java:54) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:98) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(TableEnvironmentImpl.java:724) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at com.gwm.core.SmallFileMerge$.main(SmallFileMerge.scala:52) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at com.gwm.core.SmallFileMerge.main(SmallFileMerge.scala) ~[flink_sql-1.0-jar-with-dependencies.jar:?]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_141]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_141]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_141]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_141]
	at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
	at org.apache.flink.client.deployment.application.ApplicationDispatcherBootstrap.runApplicationEntryPoint(ApplicationDispatcherBootstrap.java:242) ~[flink-dist_2.11-1.13.1.jar:1.13.1]
