Comments (4)
-- Select the userportrait catalog database, then flatten the nested
-- `data` row from the Kafka-backed `log` table into the JDBC sink.
USE userportrait;

INSERT INTO userportrait.sink_table
SELECT
    ts,
    data.id,
    data.case_code,
    data.applicant_phone,
    data.created_at
FROM log;
换成
-- Same pipeline without the database qualifier on the sink table.
INSERT INTO sink_table
SELECT
    ts,
    data.id,
    data.case_code,
    data.applicant_phone,
    data.created_at
FROM log;
from dinky.
执行报错
`org.apache.flink.table.api.TableException: findAndCreateTableSink failed.
at org.apache.flink.table.factories.TableFactoryUtil.findAndCreateTableSink(TableFactoryUtil.java:94)
at org.apache.flink.table.factories.TableFactoryUtil.lambda$findAndCreateTableSink$0(TableFactoryUtil.java:121)
at java.util.Optional.orElseGet(Optional.java:267)
at org.apache.flink.table.factories.TableFactoryUtil.findAndCreateTableSink(TableFactoryUtil.java:121)
at org.apache.flink.table.planner.delegation.PlannerBase.getTableSink(PlannerBase.scala:353)
at org.apache.flink.table.planner.delegation.PlannerBase.translateToRel(PlannerBase.scala:220)
at org.apache.flink.table.planner.delegation.PlannerBase$$anonfun$1.apply(PlannerBase.scala:164)
at org.apache.flink.table.planner.delegation.PlannerBase$$anonfun$1.apply(PlannerBase.scala:164)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:164)
at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1267)
at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:675)
at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeOperation(TableEnvironmentImpl.java:759)
at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(TableEnvironmentImpl.java:665)
at com.dlink.executor.Executor.executeSql(Executor.java:187)
at com.dlink.job.JobManager.executeSql(JobManager.java:309)
at com.dlink.service.impl.StudioServiceImpl.executeFlinkSql(StudioServiceImpl.java:97)
at com.dlink.service.impl.StudioServiceImpl.executeSql(StudioServiceImpl.java:81)
at com.dlink.controller.StudioController.executeSql(StudioController.java:38)
at sun.reflect.GeneratedMethodAccessor214.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:205)
at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:150)
at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:117)
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:895)
at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:808)
at org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:87)
at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1067)
at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:963)
at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006)
at org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:909)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:681)
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:764)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:227)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:162)
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:53)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:189)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:162)
at com.alibaba.druid.support.http.WebStatFilter.doFilter(WebStatFilter.java:124)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:189)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:162)
at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:117)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:189)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:162)
at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:117)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:189)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:162)
at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201)
at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:117)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:189)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:162)
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:197)
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:97)
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:540)
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:135)
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:92)
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:78)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:357)
at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:382)
at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:65)
at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:895)
at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1732)
at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)
at org.apache.tomcat.util.threads.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1191)
at org.apache.tomcat.util.threads.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:659)
at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.flink.table.api.NoMatchingTableFactoryException: Could not find a suitable table factory for 'org.apache.flink.table.factories.TableSinkFactory' in
the classpath.
Reason: No factory supports all properties.
The matching candidates:
org.apache.flink.connector.jdbc.table.JdbcTableSourceSinkFactory
Unsupported property keys:
connector.database
The following properties are requested:
connector.database=userportrait
connector.driver=com.mysql.cj.jdbc.Driver
connector.password=xxx
connector.table=xxx
connector.type=jdbc
connector.url=jdbc:mysql://192.168.200.x:x/x
connector.username=root
connector.write.flush.interval=5s
connector.write.flush.max-rows=5
schema.0.data-type=BIGINT
schema.0.name=ts
schema.1.data-type=VARCHAR(100)
schema.1.name=id
The following factories have been considered:
org.apache.flink.connector.jdbc.table.JdbcTableSourceSinkFactory
org.apache.flink.streaming.connectors.kafka.KafkaTableSourceSinkFactory
org.apache.flink.streaming.connectors.elasticsearch7.Elasticsearch7UpsertTableSinkFactory
org.apache.flink.table.sinks.CsvBatchTableSinkFactory
org.apache.flink.table.sinks.CsvAppendTableSinkFactory
at org.apache.flink.table.factories.TableFactoryService.filterBySupportedProperties(TableFactoryService.java:434)
at org.apache.flink.table.factories.TableFactoryService.filter(TableFactoryService.java:195)
at org.apache.flink.table.factories.TableFactoryService.findSingleInternal(TableFactoryService.java:143)
at org.apache.flink.table.factories.TableFactoryService.find(TableFactoryService.java:96)
at org.apache.flink.table.factories.TableFactoryUtil.findAndCreateTableSink(TableFactoryUtil.java:91)
... 77 more
`
from dinky.
请查阅对应版本的FlinkSQL 连接器的配置参数语法。
from dinky.
-- Kafka source table carrying CDC/binlog-style JSON records with a nested
-- `data` payload row.
CREATE TABLE log (
    -- `database` and `table` are reserved keywords in Flink SQL (Calcite) and
    -- must be escaped with backticks or the DDL fails to parse.
    `database` string,
    `table` string,
    -- `type` quoted defensively as well; it collides with keywords in some
    -- Flink/Calcite versions — confirm against the deployed Flink version.
    `type` string,
    ts bigint,
    -- nested record payload; fields are projected as data.<field> downstream
    data ROW<id bigint,insure_num string,case_code string,applicant_phone string,created_at string>
) WITH (
    -- legacy (pre-Flink-1.11) "connector.*" descriptor property style;
    -- keys must match the Kafka connector jar present on the classpath
    'connector.type' = 'kafka',
    'connector.version' = 'universal',
    'connector.topic' = 'xxx',
    -- read the topic from the beginning on first start
    'connector.startup-mode' = 'earliest-offset',
    'connector.properties.group.id' = 'xxx',
    'connector.properties.bootstrap.servers' = '192.168.x.x:x',
    'update-mode' = 'append',
    'format.type' = 'json',
    -- derive the JSON parsing schema from the column list above
    'format.derive-schema' = 'true'
);
-- MySQL JDBC sink for the flattened case records taken from the `log` source.
CREATE TABLE sink_ps_table (
ts bigint,
id bigint,
case_code varchar(300),
applicant_phone varchar(300),
created_at varchar(300)
) WITH (
-- legacy "connector.*" JDBC descriptor properties. Note there is NO
-- 'connector.database' key in this property set — the target database must be
-- encoded in the JDBC URL. Supplying 'connector.database' is exactly what
-- produced the NoMatchingTableFactoryException ("Unsupported property keys:
-- connector.database") in the stack trace above.
'connector.type' = 'jdbc',
'connector.driver' = 'com.mysql.cj.jdbc.Driver',
'connector.url' = 'jdbc:mysql://x.x.x.x:x/x',
'connector.table' = 'x',
'connector.username' = 'x',
'connector.password' = 'x',
-- flush buffered writes after 5 rows or 5 seconds, whichever comes first
'connector.write.flush.max-rows' = '5',
'connector.write.flush.interval' = '5s'
);
-- describe log;
-- Flatten the nested `data` row and forward only records whose case code
-- and applicant phone are both present.
INSERT INTO sink_ps_table
SELECT
    ts,
    data.id,
    data.case_code,
    data.applicant_phone,
    data.created_at
FROM log
WHERE data.case_code IS NOT NULL
  AND data.applicant_phone IS NOT NULL;
-- INSERT INTO sink_ps_table
-- select ts, cast(data.id as bigint) as id, cast(data.case_code as varchar) as case_code,
--   cast(data.applicant_phone as varchar) as applicant_phone, cast(data.created_at as varchar) as created_at from log;
链接的语法错误导致的
from dinky.
Related Issues (20)
- [Bug] [yarn application] Task submission using yarn application mode failed, and the log reported a null pointer exception. HOT 8
- [Bug]ava.lang.IllegalArgumentException: newLimit > capacity: (88 > 81) HOT 2
- [Bug] Flinksqlbatch is called by CURL every two minutes,The number of threads continues to grow without being released HOT 11
- [Optimization][Catalog] Supports viewing the schema of SQL tables containing complex data types in catalog HOT 1
- [Bug] [Local] Caused by: java.lang.NumberFormatException: For input string: 'abc'. Invalid character found HOT 1
- An error is reported in the log when running flink cdc, and an error is also reported when saving. HOT 3
- [Bug] [admin] Startup failed in version 1.1.0-SNAPSHOT HOT 1
- [Improvement][admin] Optimize page error message display
- mysql catalog script unable to connect HOT 1
- [Bug] [MySQLCDC 整库到 MySQL] The default value for datetime should be `CURRENT_TIMESTAMP` instead of `'CURRENT_TIMESTAMP'`. HOT 4
- [Bug] [Module Name] k8s operator create flink session ha,use ingress config HOT 3
- yarn session manual registration exception HOT 3
- [FAQ] No content to map due to end-of-input HOT 2
- [Bug] [k8s] The set sql statement does not take effect in k8s mode HOT 1
- [Bug] Error when registering cluster k8s Native HOT 4
- [Feature] Release version 1.0.3
- [Feature] Remove the scala version of the profile
- [Document] Document version archive 1.0
- [Bug] [Flink Jar] 1.0.0 upgrade 1.0.2 exec FLinkJar java.lang.NullPointerException HOT 3
- DECIMAL(38,null) of flink sql shows null HOT 4
Recommend Projects
-
React
A declarative, efficient, and flexible JavaScript library for building user interfaces.
-
Vue.js
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
-
Typescript
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
-
TensorFlow
An Open Source Machine Learning Framework for Everyone
-
Django
The Web framework for perfectionists with deadlines.
-
Laravel
A PHP framework for web artisans
-
D3
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
-
Recommend Topics
-
javascript
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
-
web
Some thing interesting about web. New door for the world.
-
server
A server is a program made to process requests and deliver data to clients.
-
Machine learning
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
-
Visualization
Some thing interesting about visualization, use data art
-
Game
Some thing interesting about game, make everyone happy.
Recommend Org
-
Facebook
We are working to build community through open source technology. NB: members must have two-factor auth.
-
Microsoft
Open source projects and samples from Microsoft.
-
Google
Google ❤️ Open Source for everyone.
-
Alibaba
Alibaba Open Source for everyone
-
D3
Data-Driven Documents codes.
-
Tencent
China tencent open source team.
from dinky.