biandou1313 opened this issue 2 years ago
Is this the latest master branch?
17:29:03.069 [main] INFO org.apache.flink.runtime.entrypoint.ClusterEntrypoint - --------------------------------------------------------------------------------
17:29:03.071 [main] INFO org.apache.flink.runtime.entrypoint.ClusterEntrypoint - Starting YarnJobClusterEntrypoint (Version: 1.12.2, Scala: 2.11, Rev:4dedee0, Date:2021-02-26T17:14:28+01:00)
17:29:03.071 [main] INFO org.apache.flink.runtime.entrypoint.ClusterEntrypoint - OS current user: root
17:29:03.615 [main] DEBUG org.apache.hadoop.util.Shell - Failed to detect a valid hadoop home directory
java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset.
at org.apache.hadoop.util.Shell.checkHadoopHomeInner(Shell.java:448)
at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:419)
at org.apache.hadoop.util.Shell.
JdbcOutputFormat [423170601490842048] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
    at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
    at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1928)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.quiesceTimeServiceAndCloseOperator(StreamOperatorWrapper.java:168)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.close(StreamOperatorWrapper.java:131)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.close(StreamOperatorWrapper.java:135)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.closeOperators(OperatorChain.java:439)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.afterInvoke(StreamTask.java:627)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:589)
    at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:755)
    at org.apache.flink.runtime.taskmanager.Task.run(Task.java:570)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.RuntimeException: java.lang.IllegalArgumentException: WritingRecordError: error writing record [2] exceed limit [0] +I(18,xulei,22,wuhan)
com.dtstack.flinkx.throwable.WriteRecordException: JdbcOutputFormat [423170601490842048] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry '18' for key 'PRIMARY'
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.processWriteException(JdbcOutputFormat.java:342)
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.writeSingleRecordInternal(JdbcOutputFormat.java:181)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.writeSingleRecord(BaseRichOutputFormat.java:465)
    at java.util.ArrayList.forEach(ArrayList.java:1259)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.writeRecordInternal(BaseRichOutputFormat.java:485)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.getFormatState(BaseRichOutputFormat.java:576)
    at com.dtstack.flinkx.sink.DtOutputFormatSinkFunction.snapshotState(DtOutputFormatSinkFunction.java:147)
    at org.apache.flink.streaming.util.functions.StreamingFunctionUtils.trySnapshotFunctionState(StreamingFunctionUtils.java:118)
    at org.apache.flink.streaming.util.functions.StreamingFunctionUtils.snapshotFunctionState(StreamingFunctionUtils.java:99)
    at org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.snapshotState(AbstractUdfStreamOperator.java:89)
    at org.apache.flink.streaming.api.operators.StreamOperatorStateHandler.snapshotState(StreamOperatorStateHandler.java:205)
    at org.apache.flink.streaming.api.operators.StreamOperatorStateHandler.snapshotState(StreamOperatorStateHandler.java:162)
    at org.apache.flink.streaming.api.operators.AbstractStreamOperator.snapshotState(AbstractStreamOperator.java:371)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.checkpointStreamOperator(SubtaskCheckpointCoordinatorImpl.java:686)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.buildOperatorSnapshotFutures(SubtaskCheckpointCoordinatorImpl.java:607)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.takeSnapshotSync(SubtaskCheckpointCoordinatorImpl.java:572)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.checkpointState(SubtaskCheckpointCoordinatorImpl.java:298)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$performCheckpoint$9(StreamTask.java:1004)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.performCheckpoint(StreamTask.java:988)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.triggerCheckpoint(StreamTask.java:912)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$triggerCheckpointAsync$8(StreamTask.java:885)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:317)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:189)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:617)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:581)
    at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:755)
    at org.apache.flink.runtime.taskmanager.Task.run(Task.java:570)
    at java.lang.Thread.run(Thread.java:748)
Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry '18' for key 'PRIMARY'
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
    at com.mysql.jdbc.Util.getInstance(Util.java:408)
    at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:936)
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3976)
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3912)
    at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2530)
    at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2683)
    at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2486)
    at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1858)
    at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1197)
    at com.dtstack.flinkx.connector.jdbc.statement.FieldNamedPreparedStatementImpl.execute(FieldNamedPreparedStatementImpl.java:76)
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.writeSingleRecordInternal(JdbcOutputFormat.java:175)
    ... 29 more
JdbcOutputFormat [423170601490842048] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.close(BaseRichOutputFormat.java:332)
    at com.dtstack.flinkx.sink.DtOutputFormatSinkFunction.close(DtOutputFormatSinkFunction.java:127)
    at org.apache.flink.api.common.functions.util.FunctionUtils.closeFunction(FunctionUtils.java:41)
    at org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.close(AbstractUdfStreamOperator.java:109)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.lambda$closeOperator$5(StreamOperatorWrapper.java:213)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.closeOperator(StreamOperatorWrapper.java:210)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.lambda$deferCloseOperatorToMailbox$3(StreamOperatorWrapper.java:185)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxExecutorImpl.tryYield(MailboxExecutorImpl.java:97)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.quiesceTimeServiceAndCloseOperator(StreamOperatorWrapper.java:162)
    ... 8 common frames omitted
Caused by: java.lang.IllegalArgumentException: WritingRecordError: error writing record [2] exceed limit [0] +I(18,xulei,22,wuhan)
com.dtstack.flinkx.throwable.WriteRecordException: JdbcOutputFormat [423170601490842048] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry '18' for key 'PRIMARY'
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.processWriteException(JdbcOutputFormat.java:342)
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.writeSingleRecordInternal(JdbcOutputFormat.java:181)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.writeSingleRecord(BaseRichOutputFormat.java:465)
    at java.util.ArrayList.forEach(ArrayList.java:1259)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.writeRecordInternal(BaseRichOutputFormat.java:485)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.getFormatState(BaseRichOutputFormat.java:576)
    at com.dtstack.flinkx.sink.DtOutputFormatSinkFunction.snapshotState(DtOutputFormatSinkFunction.java:147)
    at org.apache.flink.streaming.util.functions.StreamingFunctionUtils.trySnapshotFunctionState(StreamingFunctionUtils.java:118)
    at org.apache.flink.streaming.util.functions.StreamingFunctionUtils.snapshotFunctionState(StreamingFunctionUtils.java:99)
    at org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.snapshotState(AbstractUdfStreamOperator.java:89)
    at org.apache.flink.streaming.api.operators.StreamOperatorStateHandler.snapshotState(StreamOperatorStateHandler.java:205)
    at org.apache.flink.streaming.api.operators.StreamOperatorStateHandler.snapshotState(StreamOperatorStateHandler.java:162)
    at org.apache.flink.streaming.api.operators.AbstractStreamOperator.snapshotState(AbstractStreamOperator.java:371)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.checkpointStreamOperator(SubtaskCheckpointCoordinatorImpl.java:686)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.buildOperatorSnapshotFutures(SubtaskCheckpointCoordinatorImpl.java:607)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.takeSnapshotSync(SubtaskCheckpointCoordinatorImpl.java:572)
    at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.checkpointState(SubtaskCheckpointCoordinatorImpl.java:298)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$performCheckpoint$9(StreamTask.java:1004)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.performCheckpoint(StreamTask.java:988)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.triggerCheckpoint(StreamTask.java:912)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$triggerCheckpointAsync$8(StreamTask.java:885)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:317)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:189)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:617)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:581)
    at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:755)
    at org.apache.flink.runtime.taskmanager.Task.run(Task.java:570)
    at java.lang.Thread.run(Thread.java:748)
Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry '18' for key 'PRIMARY'
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
    at com.mysql.jdbc.Util.getInstance(Util.java:408)
    at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:936)
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3976)
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3912)
    at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2530)
    at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2683)
    at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2486)
    at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1858)
    at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1197)
    at com.dtstack.flinkx.connector.jdbc.statement.FieldNamedPreparedStatementImpl.execute(FieldNamedPreparedStatementImpl.java:76)
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.writeSingleRecordInternal(JdbcOutputFormat.java:175)
    ... 29 more
JdbcOutputFormat [423170601490842048] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
    at org.apache.flink.util.Preconditions.checkArgument(Preconditions.java:138)
    at com.dtstack.flinkx.sink.ErrorLimiter.checkErrorLimit(ErrorLimiter.java:58)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.close(BaseRichOutputFormat.java:309)
    ... 19 common frames omitted
17:29:35.231 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.scheduler.SharedSlot - Remove logical slot (SlotRequestId{4f81130d00d683b843290f0d674c4d4c}) for execution vertex (id cbc357ccb763df2852fee8c4fc7d55f2_0) from the physical slot (SlotRequestId{7df61e0bcbb9771e49756af542a2b476})
17:29:35.231 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.scheduler.SharedSlot - Release shared slot externally (SlotRequestId{7df61e0bcbb9771e49756af542a2b476})
17:29:35.231 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.jobmaster.slotpool.SlotPoolImpl - Releasing slot [SlotRequestId{7df61e0bcbb9771e49756af542a2b476}] because: Slot is being returned from SlotSharingExecutionSlotAllocator.
17:29:35.231 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.scheduler.SharedSlot - Release shared slot (SlotRequestId{7df61e0bcbb9771e49756af542a2b476})
17:29:35.232 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.jobmaster.slotpool.SlotPoolImpl - Adding slot [263b45de4451d938ca89a9169bb7db47] to available slots
17:29:35.237 [flink-akka.actor.default-dispatcher-3] INFO org.apache.flink.runtime.executiongraph.failover.flip1.RestartPipelinedRegionFailoverStrategy - Calculating tasks to restart to recover the failed task cbc357ccb763df2852fee8c4fc7d55f2_0.
17:29:35.237 [flink-akka.actor.default-dispatcher-3] INFO org.apache.flink.runtime.executiongraph.failover.flip1.RestartPipelinedRegionFailoverStrategy - 1 tasks should be restarted to recover the failed task cbc357ccb763df2852fee8c4fc7d55f2_0.
17:29:35.238 [flink-akka.actor.default-dispatcher-3] INFO org.apache.flink.runtime.executiongraph.ExecutionGraph - Job 423170601490842048 (89793930bca3af859d0b25c8849c38a9) switched from state RUNNING to RESTARTING.
17:29:35.241 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.scheduler.SlotSharingExecutionSlotAllocator - There is no SharedSlot for ExecutionSlotSharingGroup of ExecutionVertexID cbc357ccb763df2852fee8c4fc7d55f2_0
17:29:35.399 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.resourcemanager.active.ActiveResourceManager - Trigger heartbeat request.
17:29:35.400 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.resourcemanager.active.ActiveResourceManager - Trigger heartbeat request.
17:29:35.400 [flink-akka.actor.default-dispatcher-2] DEBUG org.apache.flink.runtime.jobmaster.JobMaster - Received heartbeat request from db6cd8bfc7d92c8b310571bcb10db42f.
17:29:35.400 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.resourcemanager.active.ActiveResourceManager - Received heartbeat from 0c5739e93426e29d465b9ea4aa7d32fb.
17:29:35.403 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.resourcemanager.active.ActiveResourceManager - Received heartbeat from container_e02_1656054038821_0503_01_000002.
17:29:35.403 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.resourcemanager.slotmanager.SlotManagerImpl - Received slot report from instance 14f5b6f45ec9575ea8557bac125a68b8: SlotReport{SlotStatus{slotID=container_e02_1656054038821_0503_01_000002_0, allocationID=263b45de4451d938ca89a9169bb7db47, jobID=89793930bca3af859d0b25c8849c38a9, resourceProfile=ResourceProfile{cpuCores=1.0000000000000000, taskHeapMemory=384.000mb (402653174 bytes), taskOffHeapMemory=0 bytes, managedMemory=512.000mb (536870920 bytes), networkMemory=128.000mb (134217730 bytes)}}}.
17:29:35.403 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.io.network.partition.ResourceManagerPartitionTrackerImpl - Processing cluster partition report from task executor container_e02_1656054038821_0503_01_000002: PartitionReport{entries=[]}.
17:29:35.800 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.jobmaster.JobMaster - Trigger heartbeat request.
17:29:35.805 [flink-akka.actor.default-dispatcher-3] DEBUG org.apache.flink.runtime.jobmaster.JobMaster - Received heartbeat from container_e02_1656054038821_0503_01_000002.
This keeps repeating over and over and never stops.
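For context on the loop: the trace shows the write failing with MySQLIntegrityConstraintViolationException (Duplicate entry '18' for key 'PRIMARY'), after which ErrorLimiter.checkErrorLimit fails because the dirty-record count [2] exceeds the configured limit [0]. The task then fails, RestartPipelinedRegionFailoverStrategy restarts it, the same rows are written again, and the same conflict recurs, which matches the endless RUNNING -> RESTARTING cycle in the log. While the update-mode behaviour is being investigated, allowing a few dirty records in the job's setting block can at least keep a handful of conflicting rows from failing the whole job. A minimal sketch, assuming the errorLimit option described in the FlinkX/ChunJun job-configuration docs (it is not part of the JSON posted in this issue):

    "setting": {
      "errorLimit": {
        "record": 100
      }
    }

This only tolerates the failure for up to 100 dirty records per job; it does not make the conflicting rows land in the target table.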
On version 1.12.6, syncing from binlog-x to mysql-x, I hit the same problem on update: a primary key conflict is reported.
Bug info:
JdbcOutputFormat [Flink_Job] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
    at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
    at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1915)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.quiesceTimeServiceAndCloseOperator(StreamOperatorWrapper.java:168)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.close(StreamOperatorWrapper.java:131)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.close(StreamOperatorWrapper.java:135)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.closeOperators(OperatorChain.java:439)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.afterInvoke(StreamTask.java:627)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:589)
    at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:755)
    at org.apache.flink.runtime.taskmanager.Task.run(Task.java:570)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.RuntimeException: java.lang.IllegalArgumentException: WritingRecordError: error writing record [2] exceed limit [0] +I(18,xulei,22,wuhan)
com.dtstack.flinkx.throwable.WriteRecordException: JdbcOutputFormat [Flink_Job] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry '18' for key 'PRIMARY'
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.processWriteException(JdbcOutputFormat.java:342)
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.writeSingleRecordInternal(JdbcOutputFormat.java:181)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.writeSingleRecord(BaseRichOutputFormat.java:465)
    at java.util.ArrayList.forEach(ArrayList.java:1249)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.writeRecordInternal(BaseRichOutputFormat.java:485)
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.lambda$initTimingSubmitTask$0(BaseRichOutputFormat.java:438)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
    at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
    at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry '18' for key 'PRIMARY'
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
    at com.mysql.jdbc.Util.getInstance(Util.java:408)
    at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:936)
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3976)
    at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3912)
    at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2530)
    at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2683)
    at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2486)
    at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:1858)
    at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1197)
    at com.dtstack.flinkx.connector.jdbc.statement.FieldNamedPreparedStatementImpl.execute(FieldNamedPreparedStatementImpl.java:76)
    at com.dtstack.flinkx.connector.jdbc.sink.JdbcOutputFormat.writeSingleRecordInternal(JdbcOutputFormat.java:175)
    ... 11 more
JdbcOutputFormat [Flink_Job] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
    at com.dtstack.flinkx.sink.format.BaseRichOutputFormat.close(BaseRichOutputFormat.java:332)
    at com.dtstack.flinkx.sink.DtOutputFormatSinkFunction.close(DtOutputFormatSinkFunction.java:127)
    at org.apache.flink.api.common.functions.util.FunctionUtils.closeFunction(FunctionUtils.java:41)
    at org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.close(AbstractUdfStreamOperator.java:109)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.lambda$closeOperator$5(StreamOperatorWrapper.java:213)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.closeOperator(StreamOperatorWrapper.java:210)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.lambda$deferCloseOperatorToMailbox$3(StreamOperatorWrapper.java:185)
    at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:93)
    at org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
    at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxExecutorImpl.tryYield(MailboxExecutorImpl.java:97)
    at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.quiesceTimeServiceAndCloseOperator(StreamOperatorWrapper.java:162)
    ... 8 more
Caused by: java.lang.IllegalArgumentException: WritingRecordError: error writing record [2] exceed limit [0] +I(18,xulei,22,wuhan)
com.dtstack.flinkx.throwable.WriteRecordException: JdbcOutputFormat [Flink_Job] writeRecord error: when converting field[0] in Row(+I(18,xulei,22,wuhan))
com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry '18' for key 'PRIMARY'
Job JSON:
{
  "job": {
    "content": [
      {
        "reader": {
          "parameter": {
            "password": "123456",
            "dataSourceId": 38,
            "column": [
              { "precision": 10, "name": "id", "columnDisplaySize": 10, "type": "INT" },
              { "precision": 20, "name": "name", "columnDisplaySize": 20, "type": "VARCHAR" },
              { "precision": 10, "name": "age", "columnDisplaySize": 10, "type": "INT" },
              { "precision": 20, "name": "address", "columnDisplaySize": 20, "type": "VARCHAR" }
            ],
            "connection": [
              {
                "jdbcUrl": [ "jdbc:mysql://172.18.8.113:3306/test_fjf" ],
                "table": [ "mysqlreader" ]
              }
            ],
            "splitPk": "id",
            "username": "root"
          },
          "name": "mysqlreader"
        },
        "writer": {
          "parameter": {
            "password": "123456",
            "dataSourceId": 38,
            "updateKey": [ "id" ],
            "column": [
              { "precision": 10, "name": "id", "columnDisplaySize": 10, "type": "INT" },
              { "precision": 20, "name": "name", "columnDisplaySize": 20, "type": "VARCHAR" },
              { "precision": 10, "name": "age", "columnDisplaySize": 10, "type": "INT" },
              { "precision": 20, "name": "address", "columnDisplaySize": 20, "type": "VARCHAR" }
            ],
            "connection": [
              {
                "jdbcUrl": "jdbc:mysql://172.18.8.113:3306/test_fjf",
                "table": [ "mysqlwriter" ]
              }
            ],
            "writeMode": "update",
            "username": "root"
          },
          "name": "mysqlwriter"
        }
      }
    ],
    "setting": {
    }
  }
}
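With "writeMode": "update" and "updateKey": ["id"], the mysql writer is expected to upsert on the id key (for MySQL this is typically rendered as INSERT ... ON DUPLICATE KEY UPDATE), so an existing id=18 row should be updated rather than raise Duplicate entry '18' for key 'PRIMARY'; the traces above suggest the plain insert path is being used instead. If simply overwriting existing rows is acceptable, switching the writer to replace mode may work around the conflict until the update path is fixed. A sketch of only the changed writer fields, assuming the mysql writer's "replace" write mode; the rest of the job stays as posted above and this is not verified against this branch:

    "writer": {
      "name": "mysqlwriter",
      "parameter": {
        "writeMode": "replace"
      }
    }

Note that MySQL REPLACE deletes the conflicting row and re-inserts it, so any target column not covered by the writer's column mapping would be reset to its default value.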
Tables: