imx7 commented on issue #21832:
URL: https://github.com/apache/doris/issues/21832#issuecomment-1637311137
@amorynan
-- Recreate the reproduction table for the reported stream-load failure.
drop table if exists ods_iot.test_doris_v1;
-- Doris UNIQUE-KEY table with dynamically managed daily RANGE partitions
-- on p_date (the partition list is empty here; partitions are created by
-- the dynamic_partition properties below).
CREATE TABLE IF NOT EXISTS ods_iot.test_doris_v1
(
p_date datev2 not null comment '天分区' ,
data_code varchar(32) not null comment '产品编码',
sensor_code varchar(128) not null comment '设备编码',
post_time datetime not null comment '日志上报时间',
-- NOTE(review): these two map columns being NOT NULL is what surfaces the
-- reported error "column(test_col_1) values is null while columns is not
-- nullable" during stream load — confirm whether NOT NULL is intended here.
test_col_1 map<datetimev2,int> not null comment '测试1',
test_col_2 map<varchar(32),int> not null comment
'测试2'
)
ENGINE=OLAP
UNIQUE KEY(p_date, data_code,sensor_code,post_time)
PARTITION BY RANGE(p_date)()
DISTRIBUTED BY HASH(data_code,sensor_code) BUCKETS 8
-- NOTE(review): BUCKETS 8 above conflicts with "dynamic_partition.buckets"
-- = "16" below; dynamically created partitions will get 16 buckets — verify
-- which value is intended.
PROPERTIES (
"dynamic_partition.enable" = "true",
"dynamic_partition.create_history_partition" = "true",
"dynamic_partition.time_unit" = "DAY",
-- keep ~1 year of history and pre-create 3 days ahead
"dynamic_partition.start" = "-360",
"dynamic_partition.end" = "3",
"dynamic_partition.prefix" = "p",
"dynamic_partition.buckets" = "16",
"dynamic_partition.replication_num" = "1"
);
-- 该 SQL 执行正常。(The SQL above executes successfully. Note: `--` is the SQL line-comment marker; `//` is not valid.)
-- Insert one sample row exercising both map columns. Uses an explicit
-- column list (good practice) and Doris map literals {key:value, ...}.
-- Fix: the original statement was missing its terminating ';', unlike the
-- surrounding statements.
insert into ods_iot.test_doris_v1(
p_date,
data_code,
sensor_code,
post_time,
test_col_1,
test_col_2)
values(
'2023-07-13',
'test_data_code_1',
'test_sensor_code_1',
'2023-07-13 13:13:13',
-- map<datetimev2,int>: keys are datetime strings
{'2023-07-13 13:13:13':12,'2023-07-13 13:13:15':15},
-- map<varchar(32),int>: keys are 23-char strings, within varchar(32)
{'str_2023-07-13 13:13:13':12,'str_2023-07-13 13:13:15':15}
);
Kafka 发送数据 (the message payload sent via Kafka):
{"data":{"wsLat":30.918466},"dataCode":"FOH8U310XA4EORK","userCode":"user_gis","sensorId":"dq_20220712_1632","postTime":"2023-07-18
02:01:01","commissionCode":"WB510100000001"}
程序执行结果 (resulting program output / stack trace):
2023-07-17 10:51:08
java.lang.Exception: Could not perform checkpoint 5 for operator Source:
KafkaSourceLog -> log-process -> (Sink: Print to Std. Out, log-to-doris: Writer
-> log-to-doris: Committer) (12/16)#0.
at
org.apache.flink.streaming.runtime.tasks.StreamTask.triggerCheckpointAsyncInMailbox(StreamTask.java:1184)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$triggerCheckpointAsync$13(StreamTask.java:1131)
at
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
at
org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMail(MailboxProcessor.java:398)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:367)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:352)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:229)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:839)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:788)
at
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:952)
at
org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:931)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:745)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:562)
at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: org.apache.doris.flink.exception.DorisRuntimeException: stream
load error: [INTERNAL_ERROR]too many filtered rows, see more in
http://10.21.221.18:8040/api/_load_error_log?file=__shard_9/error_log_insert_stmt_b480845759a6b07-a2db1414732dcc91_b480845759a6b07_a2db1414732dcc91
at
org.apache.doris.flink.sink.writer.DorisWriter.prepareCommit(DorisWriter.java:158)
at
org.apache.flink.streaming.api.transformations.SinkV1Adapter$SinkWriterV1Adapter.prepareCommit(SinkV1Adapter.java:151)
at
org.apache.flink.streaming.runtime.operators.sink.SinkWriterOperator.emitCommittables(SinkWriterOperator.java:196)
at
org.apache.flink.streaming.runtime.operators.sink.SinkWriterOperator.prepareSnapshotPreBarrier(SinkWriterOperator.java:166)
at
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.prepareSnapshotPreBarrier(RegularOperatorChain.java:89)
at
org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.checkpointState(SubtaskCheckpointCoordinatorImpl.java:321)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$performCheckpoint$14(StreamTask.java:1299)
at
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.performCheckpoint(StreamTask.java:1287)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.triggerCheckpointAsyncInMailbox(StreamTask.java:1172)
... 14 more
---------------------------------------------------------------------
http://10.21.221.18:8040/api/_load_error_log?file=__shard_9/error_log_insert_stmt_b480845759a6b07-a2db1414732dcc91_b480845759a6b07_a2db1414732dcc91
Reason: column(test_col_1) values is null while columns is not nullable. src
line [2023-07-18 FOH8U310XA4EORK dq_20220712_1632 2023-07-18 02:01:01.0
{\"2023-07-17T18:01:01.000+00:00\":176} {\"1689616861000\":176} 0];
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]