附件为日志,麻烦帮忙分析一下。
2020-05-10 21:22:09,336 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -------------------------------------------------------------------------------- 2020-05-10 21:22:09,337 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Starting YARN TaskExecutor runner (Version: 1.9.2, Rev:c9d2c90, Date:24.01.2020 @ 08:44:30 CST) 2020-05-10 21:22:09,337 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - OS current user: ocdc 2020-05-10 21:22:09,733 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Current Hadoop/Kerberos user: ocdp 2020-05-10 21:22:09,733 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - JVM: Java HotSpot(TM) 64-Bit Server VM - Oracle Corporation - 1.8/25.152-b16 2020-05-10 21:22:09,733 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Maximum heap size: 1988 MiBytes 2020-05-10 21:22:09,733 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - JAVA_HOME: /usr/local/jdk1.8.0_152 2020-05-10 21:22:09,734 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Hadoop version: 2.7.3.2.6.0.3-8 2020-05-10 21:22:09,734 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - JVM Options: 2020-05-10 21:22:09,734 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Xms2073m 2020-05-10 21:22:09,734 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Xmx2073m 2020-05-10 21:22:09,734 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -XX:MaxDirectMemorySize=999m 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Dlog.file=/data07/hadoop/yarn/log/application_1567067657620_0177/container_e07_1567067657620_0177_01_000013/taskmanager.log 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Dlogback.configurationFile=file:./logback.xml 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Dlog4j.configuration=file:./log4j.properties 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Program Arguments: 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner 
- --configDir 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - . 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Dweb.port=0 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Djobmanager.rpc.address=audit-dp03 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Dtaskmanager.memory.size=1522029056b 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Dweb.tmpdir=/tmp/flink-web-6d8636c9-1a14-4bbd-ab9c-ce133440b66f 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Djobmanager.rpc.port=57872 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - -Drest.address=audit-dp03 2020-05-10 21:22:09,735 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Classpath: lib/flink-table-blink_2.12-1.9.2.jar:lib/flink-table_2.12-1.9.2.jar:lib/log4j-1.2.17.jar:lib/slf4j-log4j12-1.7.15.jar:log4j.properties:logback.xml:flink.jar:flink-conf.yaml::/etc/hadoop/conf:/usr/hdp/current/hadoop-client/hadoop-auth-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/hadoop-aws.jar:/usr/hdp/current/hadoop-client/hadoop-azure-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/hadoop-azure.jar:/usr/hdp/current/hadoop-client/hadoop-auth.jar:/usr/hdp/current/hadoop-client/hadoop-aws-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/hadoop-azure-datalake-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/hadoop-common-2.7.3.2.6.0.3-8-tests.jar:/usr/hdp/current/hadoop-client/hadoop-annotations-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/hadoop-nfs.jar:/usr/hdp/current/hadoop-client/hadoop-common.jar:/usr/hdp/current/hadoop-client/hadoop-annotations.jar:/usr/hdp/current/hadoop-client/hadoop-nfs-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/hadoop-common-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/hadoop-azure-datalake.jar:/usr/hdp/current/hadoop-client/hadoop-common-tests.jar:/usr/hdp/current/hadoop-client/azure
-data-lake-store-sdk-2.1.4.jar:/usr/hdp/current/hadoop-client/lib/commons-logging-1.1.3.jar:/usr/hdp/current/hadoop-client/lib/jetty-6.1.26.hwx.jar:/usr/hdp/current/hadoop-client/lib/jsr305-3.0.0.jar:/usr/hdp/current/hadoop-client/lib/api-util-1.0.0-M20.jar:/usr/hdp/current/hadoop-client/lib/commons-io-2.4.jar:/usr/hdp/current/hadoop-client/lib/gson-2.2.4.jar:/usr/hdp/current/hadoop-client/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/usr/hdp/current/hadoop-client/lib/commons-lang-2.6.jar:/usr/hdp/current/hadoop-client/lib/hamcrest-core-1.3.jar:/usr/hdp/current/hadoop-client/lib/jackson-mapper-asl-1.9.13.jar:/usr/hdp/current/hadoop-client/lib/jersey-json-1.9.jar:/usr/hdp/current/hadoop-client/lib/jetty-sslengine-6.1.26.hwx.jar:/usr/hdp/current/hadoop-client/lib/commons-compress-1.4.1.jar:/usr/hdp/current/hadoop-client/lib/azure-storage-4.2.0.jar:/usr/hdp/current/hadoop-client/lib/jetty-util-6.1.26.hwx.jar:/usr/hdp/current/hadoop-client/lib/commons-math3-3.1.1.jar:/usr/hdp/current/hadoop-client/lib/jackson-databind-2.2.3.jar:/usr/hdp/current/hadoop-client/lib/httpcore-4.4.4.jar:/usr/hdp/current/hadoop-client/lib/guava-11.0.2.jar:/usr/hdp/current/hadoop-client/lib/commons-beanutils-core-1.8.0.jar:/usr/hdp/current/hadoop-client/lib/xz-1.0.jar:/usr/hdp/current/hadoop-client/lib/java-xmlbuilder-0.4.jar:/usr/hdp/current/hadoop-client/lib/protobuf-java-2.5.0.jar:/usr/hdp/current/hadoop-client/lib/paranamer-2.3.jar:/usr/hdp/current/hadoop-client/lib/jersey-server-1.9.jar:/usr/hdp/current/hadoop-client/lib/json-smart-1.1.1.jar:/usr/hdp/current/hadoop-client/lib/slf4j-log4j12-1.7.10.jar:/usr/hdp/current/hadoop-client/lib/aws-java-sdk-kms-1.10.6.jar:/usr/hdp/current/hadoop-client/lib/apacheds-i18n-2.0.0-M15.jar:/usr/hdp/current/hadoop-client/lib/jets3t-0.9.0.jar:/usr/hdp/current/hadoop-client/lib/curator-recipes-2.7.1.jar:/usr/hdp/current/hadoop-client/lib/asm-3.2.jar:/usr/hdp/current/hadoop-client/lib/htrace-core-3.1.0-incubating.jar:/usr/hdp/current/hadoop-client/lib/activation
-1.1.jar:/usr/hdp/current/hadoop-client/lib/commons-configuration-1.6.jar:/usr/hdp/current/hadoop-client/lib/log4j-1.2.17.jar:/usr/hdp/current/hadoop-client/lib/jackson-core-asl-1.9.13.jar:/usr/hdp/current/hadoop-client/lib/jackson-jaxrs-1.9.13.jar:/usr/hdp/current/hadoop-client/lib/jackson-core-2.2.3.jar:/usr/hdp/current/hadoop-client/lib/jsp-api-2.1.jar:/usr/hdp/current/hadoop-client/lib/ranger-plugin-classloader-0.7.0.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/lib/jersey-core-1.9.jar:/usr/hdp/current/hadoop-client/lib/jaxb-api-2.2.2.jar:/usr/hdp/current/hadoop-client/lib/jsch-0.1.54.jar:/usr/hdp/current/hadoop-client/lib/avro-1.7.4.jar:/usr/hdp/current/hadoop-client/lib/azure-keyvault-core-0.8.0.jar:/usr/hdp/current/hadoop-client/lib/commons-codec-1.4.jar:/usr/hdp/current/hadoop-client/lib/ojdbc6.jar:/usr/hdp/current/hadoop-client/lib/commons-digester-1.8.jar:/usr/hdp/current/hadoop-client/lib/commons-cli-1.2.jar:/usr/hdp/current/hadoop-client/lib/junit-4.11.jar:/usr/hdp/current/hadoop-client/lib/aws-java-sdk-core-1.10.6.jar:/usr/hdp/current/hadoop-client/lib/curator-client-2.7.1.jar:/usr/hdp/current/hadoop-client/lib/mockito-all-1.8.5.jar:/usr/hdp/current/hadoop-client/lib/api-asn1-api-1.0.0-M20.jar:/usr/hdp/current/hadoop-client/lib/snappy-java-1.0.4.1.jar:/usr/hdp/current/hadoop-client/lib/aws-java-sdk-s3-1.10.6.jar:/usr/hdp/current/hadoop-client/lib/jackson-annotations-2.2.3.jar:/usr/hdp/current/hadoop-client/lib/joda-time-2.9.4.jar:/usr/hdp/current/hadoop-client/lib/commons-lang3-3.4.jar:/usr/hdp/current/hadoop-client/lib/jaxb-impl-2.2.3-1.jar:/usr/hdp/current/hadoop-client/lib/ranger-yarn-plugin-shim-0.7.0.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/lib/slf4j-api-1.7.10.jar:/usr/hdp/current/hadoop-client/lib/commons-net-3.1.jar:/usr/hdp/current/hadoop-client/lib/jettison-1.1.jar:/usr/hdp/current/hadoop-client/lib/xmlenc-0.52.jar:/usr/hdp/current/hadoop-client/lib/jackson-xc-1.9.13.jar:/usr/hdp/current/hadoop-client/lib/commons-beanutils-1.7.0.jar:/usr/hd
p/current/hadoop-client/lib/stax-api-1.0-2.jar:/usr/hdp/current/hadoop-client/lib/curator-framework-2.7.1.jar:/usr/hdp/current/hadoop-client/lib/ranger-hdfs-plugin-shim-0.7.0.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/lib/zookeeper-3.4.6.2.6.0.3-8.jar:/usr/hdp/current/hadoop-client/lib/servlet-api-2.5.jar:/usr/hdp/current/hadoop-client/lib/nimbus-jose-jwt-3.9.jar:/usr/hdp/current/hadoop-client/lib/commons-collections-3.2.2.jar:/usr/hdp/current/hadoop-client/lib/netty-3.6.2.Final.jar:/usr/hdp/current/hadoop-client/lib/jcip-annotations-1.0.jar:/usr/hdp/current/hadoop-client/lib/httpclient-4.5.2.jar:/usr/hdp/current/hadoop-hdfs-client/hadoop-hdfs-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-hdfs-client/hadoop-hdfs-nfs-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-hdfs-client/hadoop-hdfs-tests.jar:/usr/hdp/current/hadoop-hdfs-client/hadoop-hdfs-2.7.3.2.6.0.3-8-tests.jar:/usr/hdp/current/hadoop-hdfs-client/hadoop-hdfs-nfs.jar:/usr/hdp/current/hadoop-hdfs-client/hadoop-hdfs.jar:/usr/hdp/current/hadoop-hdfs-client/lib/commons-logging-1.1.3.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jetty-6.1.26.hwx.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jsr305-3.0.0.jar:/usr/hdp/current/hadoop-hdfs-client/lib/commons-io-2.4.jar:/usr/hdp/current/hadoop-hdfs-client/lib/commons-lang-2.6.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jackson-mapper-asl-1.9.13.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jetty-util-6.1.26.hwx.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jackson-databind-2.2.3.jar:/usr/hdp/current/hadoop-hdfs-client/lib/guava-11.0.2.jar:/usr/hdp/current/hadoop-hdfs-client/lib/protobuf-java-2.5.0.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jersey-server-1.9.jar:/usr/hdp/current/hadoop-hdfs-client/lib/asm-3.2.jar:/usr/hdp/current/hadoop-hdfs-client/lib/htrace-core-3.1.0-incubating.jar:/usr/hdp/current/hadoop-hdfs-client/lib/netty-all-4.0.23.Final.jar:/usr/hdp/current/hadoop-hdfs-client/lib/xercesImpl-2.9.1.jar:/usr/hdp/current/hadoop-hdfs-client/lib/log4j-1.2.17.jar:/usr/hdp/curre
nt/hadoop-hdfs-client/lib/jackson-core-asl-1.9.13.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jackson-core-2.2.3.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jersey-core-1.9.jar:/usr/hdp/current/hadoop-hdfs-client/lib/xml-apis-1.3.04.jar:/usr/hdp/current/hadoop-hdfs-client/lib/commons-codec-1.4.jar:/usr/hdp/current/hadoop-hdfs-client/lib/commons-cli-1.2.jar:/usr/hdp/current/hadoop-hdfs-client/lib/jackson-annotations-2.2.3.jar:/usr/hdp/current/hadoop-hdfs-client/lib/xmlenc-0.52.jar:/usr/hdp/current/hadoop-hdfs-client/lib/okhttp-2.4.0.jar:/usr/hdp/current/hadoop-hdfs-client/lib/commons-daemon-1.0.13.jar:/usr/hdp/current/hadoop-hdfs-client/lib/servlet-api-2.5.jar:/usr/hdp/current/hadoop-hdfs-client/lib/okio-1.4.0.jar:/usr/hdp/current/hadoop-hdfs-client/lib/leveldbjni-all-1.8.jar:/usr/hdp/current/hadoop-hdfs-client/lib/netty-3.6.2.Final.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-api-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-unmanaged-am-launcher.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-distributedshell-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-distributedshell.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-resourcemanager.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-tests-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-applicationhistoryservice-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-applicationhistoryservice.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-web-proxy.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-registry-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-timeline-pluginstorage.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-nodemanager.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-registry.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-web-proxy-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-ya
rn-client/hadoop-yarn-server-common-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-client.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-common.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-client-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-api.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-sharedcachemanager.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-nodemanager-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-timeline-pluginstorage-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-common.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-unmanaged-am-launcher-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-common-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-tests.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-resourcemanager-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-server-sharedcachemanager-2.7.3.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/lib/javassist-3.18.1-GA.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-logging-1.1.3.jar:/usr/hdp/current/hadoop-yarn-client/lib/jetty-6.1.26.hwx.jar:/usr/hdp/current/hadoop-yarn-client/lib/jsr305-3.0.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/metrics-core-3.0.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/api-util-1.0.0-M20.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-io-2.4.jar:/usr/hdp/current/hadoop-yarn-client/lib/gson-2.2.4.jar:/usr/hdp/current/hadoop-yarn-client/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-lang-2.6.jar:/usr/hdp/current/hadoop-yarn-client/lib/javax.inject-1.jar:/usr/hdp/current/hadoop-yarn-client/lib/jackson-mapper-asl-1.9.13.jar:/usr/hdp/current/hadoop-yarn-client/lib/jersey-json-1.9.jar:/usr/hdp/current/hadoop-yarn-client/lib/jetty-sslengine-6.1.26.hwx.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-com
press-1.4.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/azure-storage-4.2.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/jetty-util-6.1.26.hwx.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-math3-3.1.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/jackson-databind-2.2.3.jar:/usr/hdp/current/hadoop-yarn-client/lib/httpcore-4.4.4.jar:/usr/hdp/current/hadoop-yarn-client/lib/guava-11.0.2.jar:/usr/hdp/current/hadoop-yarn-client/lib/guice-servlet-3.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-beanutils-core-1.8.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/xz-1.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/java-xmlbuilder-0.4.jar:/usr/hdp/current/hadoop-yarn-client/lib/protobuf-java-2.5.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/paranamer-2.3.jar:/usr/hdp/current/hadoop-yarn-client/lib/jersey-server-1.9.jar:/usr/hdp/current/hadoop-yarn-client/lib/json-smart-1.1.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/apacheds-i18n-2.0.0-M15.jar:/usr/hdp/current/hadoop-yarn-client/lib/jets3t-0.9.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/curator-recipes-2.7.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/asm-3.2.jar:/usr/hdp/current/hadoop-yarn-client/lib/htrace-core-3.1.0-incubating.jar:/usr/hdp/current/hadoop-yarn-client/lib/activation-1.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-configuration-1.6.jar:/usr/hdp/current/hadoop-yarn-client/lib/log4j-1.2.17.jar:/usr/hdp/current/hadoop-yarn-client/lib/objenesis-2.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/jersey-client-1.9.jar:/usr/hdp/current/hadoop-yarn-client/lib/jackson-core-asl-1.9.13.jar:/usr/hdp/current/hadoop-yarn-client/lib/jackson-jaxrs-1.9.13.jar:/usr/hdp/current/hadoop-yarn-client/lib/jackson-core-2.2.3.jar:/usr/hdp/current/hadoop-yarn-client/lib/fst-2.24.jar:/usr/hdp/current/hadoop-yarn-client/lib/jsp-api-2.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/jersey-core-1.9.jar:/usr/hdp/current/hadoop-yarn-client/lib/jaxb-api-2.2.2.jar:/usr/hdp/current/hadoop-yarn-client/lib/jsch-0.1.54.jar:/usr/hdp/c
urrent/hadoop-yarn-client/lib/zookeeper-3.4.6.2.6.0.3-8-tests.jar:/usr/hdp/current/hadoop-yarn-client/lib/avro-1.7.4.jar:/usr/hdp/current/hadoop-yarn-client/lib/azure-keyvault-core-0.8.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-codec-1.4.jar:/usr/hdp/current/hadoop-yarn-client/lib/guice-3.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-digester-1.8.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-cli-1.2.jar:/usr/hdp/current/hadoop-yarn-client/lib/curator-client-2.7.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/api-asn1-api-1.0.0-M20.jar:/usr/hdp/current/hadoop-yarn-client/lib/snappy-java-1.0.4.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/jackson-annotations-2.2.3.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-lang3-3.4.jar:/usr/hdp/current/hadoop-yarn-client/lib/jaxb-impl-2.2.3-1.jar:/usr/hdp/current/hadoop-yarn-client/lib/aopalliance-1.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-net-3.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/jettison-1.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/xmlenc-0.52.jar:/usr/hdp/current/hadoop-yarn-client/lib/jackson-xc-1.9.13.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-beanutils-1.7.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/stax-api-1.0-2.jar:/usr/hdp/current/hadoop-yarn-client/lib/curator-framework-2.7.1.jar:/usr/hdp/current/hadoop-yarn-client/lib/zookeeper-3.4.6.2.6.0.3-8.jar:/usr/hdp/current/hadoop-yarn-client/lib/jersey-guice-1.9.jar:/usr/hdp/current/hadoop-yarn-client/lib/servlet-api-2.5.jar:/usr/hdp/current/hadoop-yarn-client/lib/nimbus-jose-jwt-3.9.jar:/usr/hdp/current/hadoop-yarn-client/lib/leveldbjni-all-1.8.jar:/usr/hdp/current/hadoop-yarn-client/lib/commons-collections-3.2.2.jar:/usr/hdp/current/hadoop-yarn-client/lib/netty-3.6.2.Final.jar:/usr/hdp/current/hadoop-yarn-client/lib/jcip-annotations-1.0.jar:/usr/hdp/current/hadoop-yarn-client/lib/httpclient-4.5.2.jar 2020-05-10 21:22:09,736 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - 
-------------------------------------------------------------------------------- 2020-05-10 21:22:09,737 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Registered UNIX signal handlers for [TERM, HUP, INT] 2020-05-10 21:22:09,740 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - Current working Directory: /data07/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/container_e07_1567067657620_0177_01_000013 2020-05-10 21:22:09,750 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: state.checkpoints.num, 8 2020-05-10 21:22:09,750 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: internal.cluster.execution-mode, NORMAL 2020-05-10 21:22:09,750 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: parallelism.default, 1 2020-05-10 21:22:09,750 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.execution.failover-strategy, region 2020-05-10 21:22:09,750 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: high-availability.cluster-id, application_1567067657620_0177 2020-05-10 21:22:09,750 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.rpc.address, localhost 2020-05-10 21:22:09,751 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: taskmanager.numberOfTaskSlots, 1 2020-05-10 21:22:09,751 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.rpc.port, 6123 2020-05-10 21:22:09,751 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: taskmanager.heap.size, 3072m 2020-05-10 21:22:09,751 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.heap.size, 2048m 2020-05-10 21:22:09,801 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - 
Current working/local Directory: /data01/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data02/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data03/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data04/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data05/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data06/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data07/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data08/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data09/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data10/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data12/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177 2020-05-10 21:22:09,803 INFO org.apache.flink.runtime.clusterframework.BootstrapTools - Setting directories for temporary files to: 
/data01/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data02/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data03/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data04/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data05/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data06/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data07/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data08/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data09/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data10/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177,/data12/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177 2020-05-10 21:22:09,803 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - TM: remote keytab path obtained null 2020-05-10 21:22:09,803 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - TM: remote keytab principal obtained null 2020-05-10 21:22:09,805 INFO org.apache.flink.yarn.YarnTaskExecutorRunner - YARN daemon is running as: ocdp Yarn client user obtainer: ocdp 2020-05-10 21:22:09,849 INFO org.apache.flink.runtime.security.modules.HadoopModule - Hadoop user set to ocdp (auth:SIMPLE) 2020-05-10 21:22:10,330 WARN org.apache.flink.configuration.Configuration - Config uses deprecated configuration key 'web.port' instead of proper key 'rest.port' 2020-05-10 21:22:10,333 INFO org.apache.flink.runtime.taskexecutor.TaskManagerRunner - Using configured hostname/address for TaskManager: audit-dp05. 
2020-05-10 21:22:10,334 INFO org.apache.flink.runtime.rpc.akka.AkkaRpcServiceUtils - Trying to start actor system at audit-dp05:0 2020-05-10 21:22:10,921 INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started 2020-05-10 21:22:10,949 INFO akka.remote.Remoting - Starting remoting 2020-05-10 21:22:11,091 INFO akka.remote.Remoting - Remoting started; listening on addresses :[akka.tcp://flink@audit-dp05:59865] 2020-05-10 21:22:11,300 INFO org.apache.flink.runtime.rpc.akka.AkkaRpcServiceUtils - Actor system started at akka.tcp://flink@audit-dp05:59865 2020-05-10 21:22:11,314 INFO org.apache.flink.runtime.metrics.MetricRegistryImpl - No metrics reporter configured, no metrics will be exposed/reported. 2020-05-10 21:22:11,316 INFO org.apache.flink.runtime.rpc.akka.AkkaRpcServiceUtils - Trying to start actor system at audit-dp05:0 2020-05-10 21:22:11,337 INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started 2020-05-10 21:22:11,340 INFO akka.remote.Remoting - Starting remoting 2020-05-10 21:22:11,352 INFO akka.remote.Remoting - Remoting started; listening on addresses :[akka.tcp://flink-metrics@audit-dp05:33416] 2020-05-10 21:22:11,394 INFO org.apache.flink.runtime.rpc.akka.AkkaRpcServiceUtils - Actor system started at akka.tcp://flink-metrics@audit-dp05:33416 2020-05-10 21:22:11,400 INFO org.apache.flink.runtime.rpc.akka.AkkaRpcService - Starting RPC endpoint for org.apache.flink.runtime.metrics.dump.MetricQueryService at akka://flink-metrics/user/MetricQueryService_container_e07_1567067657620_0177_01_000013 . 
2020-05-10 21:22:11,412 INFO org.apache.flink.runtime.blob.PermanentBlobCache - Created BLOB cache storage directory /data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/blobStore-7def6bc1-da29-4bc3-a10f-ae04710f69b8 2020-05-10 21:22:11,415 INFO org.apache.flink.runtime.blob.TransientBlobCache - Created BLOB cache storage directory /data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/blobStore-e4d19f65-4b62-4bba-aa3a-6e41a17d6175 2020-05-10 21:22:11,415 INFO org.apache.flink.runtime.taskexecutor.TaskManagerRunner - Starting TaskManager with ResourceID: container_e07_1567067657620_0177_01_000013 2020-05-10 21:22:11,548 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data01/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3455 GB (94.24% usable) 2020-05-10 21:22:11,548 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data02/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3455 GB (94.24% usable) 2020-05-10 21:22:11,549 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data03/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3452 GB (94.16% usable) 2020-05-10 21:22:11,549 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data04/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3455 GB (94.24% usable) 2020-05-10 21:22:11,549 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data05/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3453 GB (94.19% usable) 2020-05-10 21:22:11,549 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory 
'/data06/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3455 GB (94.24% usable) 2020-05-10 21:22:11,549 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data07/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3451 GB (94.14% usable) 2020-05-10 21:22:11,550 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data08/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3452 GB (94.16% usable) 2020-05-10 21:22:11,550 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data09/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3456 GB (94.27% usable) 2020-05-10 21:22:11,550 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data10/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3453 GB (94.19% usable) 2020-05-10 21:22:11,550 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3451 GB (94.14% usable) 2020-05-10 21:22:11,551 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Temporary file directory '/data12/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177': total 3666 GB, usable 3455 GB (94.24% usable) 2020-05-10 21:22:11,553 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data01/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-9e9439b3-c798-4bd5-bb74-6ac9863e8648 for spill files. 
2020-05-10 21:22:11,553 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data02/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-8e5ac8b6-b218-47f5-a5b4-3d746e69d156 for spill files. 2020-05-10 21:22:11,553 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data03/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-db7fab6a-8467-4b66-a262-b1d2eb5ef446 for spill files. 2020-05-10 21:22:11,554 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data04/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-2109e609-8482-4cc9-8086-6c87fcff7e44 for spill files. 2020-05-10 21:22:11,554 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data05/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-62dfa1ea-2bfe-4d75-b058-40a00943f820 for spill files. 2020-05-10 21:22:11,554 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data06/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-5f0f60c3-2d17-46c2-853c-f5b075c367cc for spill files. 2020-05-10 21:22:11,554 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data07/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-68477b0e-6ef7-4519-96df-ac020cf4fc8c for spill files. 2020-05-10 21:22:11,554 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data08/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-7e560c56-2bc4-4cd3-b5af-9a7a6b2b2918 for spill files. 
2020-05-10 21:22:11,555 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data09/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-99c8e4a3-1256-469e-a826-8439f31cb00c for spill files. 2020-05-10 21:22:11,555 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data10/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-0e1aea5a-085a-46ed-9d7d-8808cc709d5a for spill files. 2020-05-10 21:22:11,555 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-ffe7c953-1dd5-42c0-985a-e6ab60086a37 for spill files. 2020-05-10 21:22:11,555 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data12/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-io-87ae5ba0-2dea-4b0b-879d-250d93a02c61 for spill files. 2020-05-10 21:22:11,566 INFO org.apache.flink.runtime.io.network.netty.NettyConfig - NettyConfig [server address: audit-dp05/10.191.31.72, server port: 0, ssl enabled: false, memory segment size (bytes): 32768, transport type: NIO, number of server threads: 1 (manual), number of client threads: 1 (manual), server connect backlog: 0 (use Netty's default), client connect timeout (sec): 120, send/receive buffer size (bytes): 0 (use Netty's default)] 2020-05-10 21:22:11,568 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data01/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-952e5089-a2e7-4d27-8b73-67b56718a67f for spill files. 
2020-05-10 21:22:11,568 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data02/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-0d6b9c85-77f6-4837-9631-abdac168090f for spill files. 2020-05-10 21:22:11,568 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data03/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-517fc8f1-09f4-40f0-87fc-9a58ed2f24c1 for spill files. 2020-05-10 21:22:11,569 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data04/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-c752c4e3-307a-49de-a029-150b1b80fef4 for spill files. 2020-05-10 21:22:11,569 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data05/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-980e8e5a-f532-465a-b93c-1f51d76dc38a for spill files. 2020-05-10 21:22:11,569 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data06/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-418b240b-aa44-482c-856f-c0ee44730188 for spill files. 2020-05-10 21:22:11,569 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data07/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-c75f358e-bc27-4058-8ac9-40d54e79a4b5 for spill files. 2020-05-10 21:22:11,569 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data08/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-eb0b4b2a-c8ab-4774-b107-28deb737e2c0 for spill files. 
2020-05-10 21:22:11,570 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data09/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-18fcc739-f240-4d43-9919-71893aeba65a for spill files. 2020-05-10 21:22:11,570 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data10/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-0b5d380d-e68f-4624-912a-f3f08c9a22b9 for spill files. 2020-05-10 21:22:11,570 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-8a8b6e92-43b1-455c-aa1f-6702752a580a for spill files. 2020-05-10 21:22:11,570 INFO org.apache.flink.runtime.io.disk.FileChannelManagerImpl - FileChannelManager uses directory /data12/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-netty-shuffle-f5c36506-cd05-42bb-9ffd-009593e63dd1 for spill files. 2020-05-10 21:22:11,767 INFO org.apache.flink.runtime.io.network.buffer.NetworkBufferPool - Allocated 220 MB for network buffer pool (number of memory segments: 7068, bytes per segment: 32768). 2020-05-10 21:22:11,774 INFO org.apache.flink.runtime.io.network.NettyShuffleEnvironment - Starting the network environment and its components. 2020-05-10 21:22:11,810 INFO org.apache.flink.runtime.io.network.netty.NettyClient - Successful initialization (took 35 ms). 2020-05-10 21:22:11,850 INFO org.apache.flink.runtime.io.network.netty.NettyServer - Successful initialization (took 38 ms). Listening on SocketAddress /10.191.31.72:48237. 2020-05-10 21:22:11,851 INFO org.apache.flink.runtime.taskexecutor.KvStateService - Starting the kvState service and its components. 
2020-05-10 21:22:11,851 INFO org.apache.flink.runtime.taskexecutor.TaskManagerServices - Limiting managed memory to 1451 MB, memory will be allocated lazily. 2020-05-10 21:22:11,861 INFO org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration - Messages have a max timeout of 10000 ms 2020-05-10 21:22:11,870 INFO org.apache.flink.runtime.rpc.akka.AkkaRpcService - Starting RPC endpoint for org.apache.flink.runtime.taskexecutor.TaskExecutor at akka://flink/user/taskmanager_0 . 2020-05-10 21:22:11,882 INFO org.apache.flink.runtime.taskexecutor.JobLeaderService - Start job leader service. 2020-05-10 21:22:11,883 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data01/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-a21ca632-768d-4a6a-89c2-f499f9b00382 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data02/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-07958b8c-53e5-4626-a3c7-b947b6fa50c5 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data03/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-c91dd6fc-a627-4652-86d3-5cfa8f4afcc1 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data04/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-047458da-e6d6-4633-9126-60020a616033 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data05/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-02389f49-789a-4078-91f4-7c9ca3ddc50a 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory 
/data06/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-1db99f36-a54e-4ab6-8766-603d83faa654 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data07/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-e785df23-eae9-4785-965d-5a1ff264a088 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data08/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-47b44b47-e208-44a3-8d46-80f99df14f1a 2020-05-10 21:22:11,884 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data09/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-02dc0f51-ff84-43af-8e49-89da34f02706 2020-05-10 21:22:11,885 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data10/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-77f30067-64b8-4429-9764-14092aeedf66 2020-05-10 21:22:11,885 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data11/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-04d1b4e4-5430-4683-8344-915c47fdcb7d 2020-05-10 21:22:11,885 INFO org.apache.flink.runtime.filecache.FileCache - User file cache uses directory /data12/hadoop/yarn/local/usercache/ocdp/appcache/application_1567067657620_0177/flink-dist-cache-5b414a89-1af3-473c-9703-d3784f4d8506 2020-05-10 21:22:11,886 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Connecting to ResourceManager akka.tcp://flink@audit-dp03:57872/user/resourcemanager(00000000000000000000000000000000). 
2020-05-10 21:22:12,025 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Resolved ResourceManager address, beginning registration 2020-05-10 21:22:12,025 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Registration at ResourceManager attempt 1 (timeout=100ms) 2020-05-10 21:22:12,052 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Successful registration at resource manager akka.tcp://flink@audit-dp03:57872/user/resourcemanager under registration id e88a78731852f2a1a0b4af1bdde3d7cc. 2020-05-10 21:22:12,063 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Receive slot request ff652fae92feac543d5e8f84527f4177 for job 8183afb213bfbd32b49e3a6bab977f7c from resource manager with leader id 00000000000000000000000000000000. 2020-05-10 21:22:12,064 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Allocated slot for ff652fae92feac543d5e8f84527f4177. 2020-05-10 21:22:12,064 INFO org.apache.flink.runtime.taskexecutor.JobLeaderService - Add job 8183afb213bfbd32b49e3a6bab977f7c for job leader monitoring. 2020-05-10 21:22:12,065 INFO org.apache.flink.runtime.taskexecutor.JobLeaderService - Try to register at job manager akka.tcp://flink@audit-dp03:57872/user/jobmanager_0 with leader id 00000000-0000-0000-0000-000000000000. 2020-05-10 21:22:12,077 INFO org.apache.flink.runtime.taskexecutor.JobLeaderService - Resolved JobManager address, beginning registration 2020-05-10 21:22:12,077 INFO org.apache.flink.runtime.taskexecutor.JobLeaderService - Registration at JobManager attempt 1 (timeout=100ms) 2020-05-10 21:22:12,087 INFO org.apache.flink.runtime.taskexecutor.JobLeaderService - Successful registration at job manager akka.tcp://flink@audit-dp03:57872/user/jobmanager_0 for job 8183afb213bfbd32b49e3a6bab977f7c. 2020-05-10 21:22:12,087 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Establish JobManager connection for job 8183afb213bfbd32b49e3a6bab977f7c. 
2020-05-10 21:22:12,090 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Offer reserved slots to the leader of job 8183afb213bfbd32b49e3a6bab977f7c. 2020-05-10 21:22:12,097 INFO org.apache.flink.runtime.taskexecutor.slot.TaskSlotTable - Activate slot ff652fae92feac543d5e8f84527f4177. 2020-05-10 21:22:12,184 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Received task Source: source -> Filter -> Map (5/12). 2020-05-10 21:22:12,185 INFO org.apache.flink.runtime.taskmanager.Task - Source: source -> Filter -> Map (5/12) (bf153ce5e29fda7fcf4f21f1d62d829c) switched from CREATED to DEPLOYING. 2020-05-10 21:22:12,185 INFO org.apache.flink.runtime.taskmanager.Task - Creating FileSystem stream leak safety net for task Source: source -> Filter -> Map (5/12) (bf153ce5e29fda7fcf4f21f1d62d829c) [DEPLOYING] 2020-05-10 21:22:12,188 INFO org.apache.flink.runtime.taskmanager.Task - Loading JAR files for task Source: source -> Filter -> Map (5/12) (bf153ce5e29fda7fcf4f21f1d62d829c) [DEPLOYING]. 2020-05-10 21:22:12,191 INFO org.apache.flink.runtime.blob.BlobClient - Downloading 8183afb213bfbd32b49e3a6bab977f7c/p-fbe32ce2b1ca95412ec80decaa87f7a4e0ee6049-ba832edada6594960d00c1ce12453584 from audit-dp03/10.191.31.70:53187 2020-05-10 21:22:12,203 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Received task Map -> Filter (3/12). 2020-05-10 21:22:12,204 INFO org.apache.flink.runtime.taskmanager.Task - Map -> Filter (3/12) (f6de1dff7595d9a57ed7d3b189bf0f87) switched from CREATED to DEPLOYING. 2020-05-10 21:22:12,204 INFO org.apache.flink.runtime.taskmanager.Task - Creating FileSystem stream leak safety net for task Map -> Filter (3/12) (f6de1dff7595d9a57ed7d3b189bf0f87) [DEPLOYING] 2020-05-10 21:22:12,204 INFO org.apache.flink.runtime.taskmanager.Task - Loading JAR files for task Map -> Filter (3/12) (f6de1dff7595d9a57ed7d3b189bf0f87) [DEPLOYING]. 
2020-05-10 21:22:12,209 INFO org.apache.flink.runtime.taskexecutor.TaskExecutor - Received task OnlineOrderAddressMatchDataProcess -> Sink: ResultDataSink (3/12). 2020-05-10 21:22:12,209 INFO org.apache.flink.runtime.taskmanager.Task - OnlineOrderAddressMatchDataProcess -> Sink: ResultDataSink (3/12) (802e29a0aa8f318c16cd0f47e03cb22d) switched from CREATED to DEPLOYING. 2020-05-10 21:22:12,210 INFO org.apache.flink.runtime.taskmanager.Task - Creating FileSystem stream leak safety net for task OnlineOrderAddressMatchDataProcess -> Sink: ResultDataSink (3/12) (802e29a0aa8f318c16cd0f47e03cb22d) [DEPLOYING] 2020-05-10 21:22:12,210 INFO org.apache.flink.runtime.taskmanager.Task - Loading JAR files for task OnlineOrderAddressMatchDataProcess -> Sink: ResultDataSink (3/12) (802e29a0aa8f318c16cd0f47e03cb22d) [DEPLOYING]. 2020-05-10 21:22:13,137 INFO org.apache.flink.runtime.taskmanager.Task - Registering task at network: OnlineOrderAddressMatchDataProcess -> Sink: ResultDataSink (3/12) (802e29a0aa8f318c16cd0f47e03cb22d) [DEPLOYING]. 2020-05-10 21:22:13,137 INFO org.apache.flink.runtime.taskmanager.Task - Registering task at network: Source: source -> Filter -> Map (5/12) (bf153ce5e29fda7fcf4f21f1d62d829c) [DEPLOYING]. 2020-05-10 21:22:13,137 INFO org.apache.flink.runtime.taskmanager.Task - Registering task at network: Map -> Filter (3/12) (f6de1dff7595d9a57ed7d3b189bf0f87) [DEPLOYING]. 2020-05-10 21:22:13,215 INFO org.apache.flink.runtime.taskmanager.Task - OnlineOrderAddressMatchDataProcess -> Sink: ResultDataSink (3/12) (802e29a0aa8f318c16cd0f47e03cb22d) switched from DEPLOYING to RUNNING. 2020-05-10 21:22:13,217 INFO org.apache.flink.runtime.taskmanager.Task - Map -> Filter (3/12) (f6de1dff7595d9a57ed7d3b189bf0f87) switched from DEPLOYING to RUNNING. 2020-05-10 21:22:13,217 INFO org.apache.flink.runtime.taskmanager.Task - Source: source -> Filter -> Map (5/12) (bf153ce5e29fda7fcf4f21f1d62d829c) switched from DEPLOYING to RUNNING. 
2020-05-10 21:22:13,230 INFO org.apache.flink.streaming.runtime.tasks.StreamTask - Using application-defined state backend: File State Backend (checkpoints: 'hdfs://audit-dp02:8020/flink/checkpoints/OnlineOrderAddressMatch', savepoints: 'null', asynchronous: UNDEFINED, fileStateThreshold: -1) 2020-05-10 21:22:13,230 INFO org.apache.flink.streaming.runtime.tasks.StreamTask - Using application-defined state backend: File State Backend (checkpoints: 'hdfs://audit-dp02:8020/flink/checkpoints/OnlineOrderAddressMatch', savepoints: 'null', asynchronous: UNDEFINED, fileStateThreshold: -1) 2020-05-10 21:22:13,230 INFO org.apache.flink.streaming.runtime.tasks.StreamTask - Configuring application-defined state backend with job/cluster config 2020-05-10 21:22:13,230 INFO org.apache.flink.streaming.runtime.tasks.StreamTask - Using application-defined state backend: File State Backend (checkpoints: 'hdfs://audit-dp02:8020/flink/checkpoints/OnlineOrderAddressMatch', savepoints: 'null', asynchronous: UNDEFINED, fileStateThreshold: -1) 2020-05-10 21:22:13,231 INFO org.apache.flink.streaming.runtime.tasks.StreamTask - Configuring application-defined state backend with job/cluster config 2020-05-10 21:22:13,230 INFO org.apache.flink.streaming.runtime.tasks.StreamTask - Configuring application-defined state backend with job/cluster config 2020-05-10 21:22:14,454 INFO org.apache.flink.api.java.typeutils.TypeExtractor - class org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition does not contain a setter for field topic 2020-05-10 21:22:14,454 INFO org.apache.flink.api.java.typeutils.TypeExtractor - Class class org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition cannot be used as a POJO type because not all fields are valid POJO fields, and must be processed as GenericType. Please read the Flink documentation on "Data Types & Serialization" for details of the effect on performance. 
2020-05-10 21:22:14,462 INFO org.apache.flink.runtime.state.heap.HeapKeyedStateBackend - Initializing heap keyed state backend with stream factory. 2020-05-10 21:22:14,463 INFO org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase - Consumer subtask 4 has no restore state. 2020-05-10 21:22:14,475 INFO org.apache.flink.streaming.api.functions.sink.TwoPhaseCommitSinkFunction - FlinkKafkaProducer 2/12 - no state to restore 2020-05-10 21:22:14,489 INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [dn49.hadoop.unicom:6667, dn50.hadoop.unicom:6667, dn51.hadoop.unicom:6667, dn54.hadoop.unicom:6667, dn55.hadoop.unicom:6667, dn56.hadoop.unicom:6667] check.crcs = true client.dns.lookup = default client.id = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = OnlineOrderAddressMatch_20200401 heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor 
= 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
2020-05-10 21:22:14,489 INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values: acks = 1 batch.size = 16384 bootstrap.servers = [dn49.hadoop.unicom:6667, dn50.hadoop.unicom:6667, dn51.hadoop.unicom:6667, dn54.hadoop.unicom:6667, dn55.hadoop.unicom:6667, dn56.hadoop.unicom:6667] buffer.memory = 33554432 client.dns.lookup = default client.id = compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null 
ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 3600000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer 2020-05-10 21:22:14,536 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.2.0 2020-05-10 21:22:14,536 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 05fcfde8f69b0349 2020-05-10 21:22:14,539 INFO org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer - Starting FlinkKafkaInternalProducer (3/12) to produce into default topic UA_RESULT_DATA_OF_FLINK 2020-05-10 21:22:14,542 INFO org.apache.flink.runtime.state.heap.HeapKeyedStateBackend - Initializing heap keyed state backend with stream factory. 2020-05-10 21:22:14,550 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.2.0 2020-05-10 21:22:14,550 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 05fcfde8f69b0349 2020-05-10 21:22:14,655 INFO org.apache.kafka.clients.Metadata - Cluster ID: oaeNn86gQ-Wd3dXwYN8aFA 2020-05-10 21:22:14,655 INFO org.apache.kafka.clients.Metadata - Cluster ID: oaeNn86gQ-Wd3dXwYN8aFA 2020-05-10 21:22:14,661 INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [dn49.hadoop.unicom:6667, dn50.hadoop.unicom:6667, dn51.hadoop.unicom:6667, dn54.hadoop.unicom:6667, dn55.hadoop.unicom:6667, dn56.hadoop.unicom:6667] check.crcs = true client.dns.lookup = default client.id = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = OnlineOrderAddressMatch_20200401 heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class 
org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer 2020-05-10 21:22:14,666 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.2.0 2020-05-10 21:22:14,666 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 05fcfde8f69b0349 2020-05-10 21:22:14,674 INFO org.apache.kafka.clients.Metadata - Cluster ID: oaeNn86gQ-Wd3dXwYN8aFA 2020-05-10 
21:22:14,683 INFO org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase - Consumer subtask 4 will start reading the following 1 partitions from timestamp 1588262400000: [KafkaTopicPartition{topic='UA_DATA_FOR_SPARK', partition=5}] 2020-05-10 21:22:14,685 INFO org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase - Consumer subtask 4 creating fetcher with offsets {KafkaTopicPartition{topic='UA_DATA_FOR_SPARK', partition=5}=1261533}. 2020-05-10 21:22:14,693 INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values: auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [dn49.hadoop.unicom:6667, dn50.hadoop.unicom:6667, dn51.hadoop.unicom:6667, dn54.hadoop.unicom:6667, dn55.hadoop.unicom:6667, dn56.hadoop.unicom:6667] check.crcs = true client.dns.lookup = default client.id = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = OnlineOrderAddressMatch_20200401 heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1] ssl.endpoint.identification.algorithm = https ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLS ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer 2020-05-10 21:22:14,697 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 2.2.0 2020-05-10 21:22:14,697 INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: 05fcfde8f69b0349 2020-05-10 21:22:14,700 INFO org.apache.kafka.clients.consumer.KafkaConsumer - [Consumer clientId=consumer-3, groupId=OnlineOrderAddressMatch_20200401] Subscribed to partition(s): UA_DATA_FOR_SPARK-5 2020-05-10 21:22:14,713 INFO org.apache.kafka.clients.Metadata - Cluster ID: oaeNn86gQ-Wd3dXwYN8aFA 2020-05-10 21:23:05,520 INFO org.apache.flink.streaming.api.functions.sink.TwoPhaseCommitSinkFunction - FlinkKafkaProducer 2/12 - checkpoint 1 complete, committing transaction TransactionHolder{handle=KafkaTransactionState [transactionalId=null, producerId=-1, epoch=-1], transactionStartTime=1589116934542} from checkpoint 1 2020-05-10 21:23:05,528 INFO org.apache.kafka.clients.consumer.internals.AbstractCoordinator - [Consumer clientId=consumer-3, groupId=OnlineOrderAddressMatch_20200401] Discovered group coordinator dn51.hadoop.unicom:6667 (id: 2147482638 
rack: null) 2020-05-11 00:43:05,603 WARN org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory - Could not close the state stream for hdfs://audit-dp02:8020/flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-25/d993f40c-a8a1-4582-ad7b-ac47a2b163a0. org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException): No lease on /flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-25/d993f40c-a8a1-4582-ad7b-ac47a2b163a0 (inode 25791993): File does not exist. Holder DFSClient_NONMAPREDUCE_1212990289_95 does not have any open files. at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3659) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFileInternal(FSNamesystem.java:3749) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFile(FSNamesystem.java:3716) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.complete(NameNodeRpcServer.java:911) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.complete(ClientNamenodeProtocolServerSideTranslatorPB.java:547) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1554) at org.apache.hadoop.ipc.Client.call(Client.java:1498) at 
org.apache.hadoop.ipc.Client.call(Client.java:1398) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233) at com.sun.proxy.$Proxy18.complete(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.complete(ClientNamenodeProtocolTranslatorPB.java:503) at sun.reflect.GeneratedMethodAccessor33.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185) at com.sun.proxy.$Proxy19.complete(Unknown Source) at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:2442) at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:2419) at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:2384) at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72) at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:106) at org.apache.flink.runtime.fs.hdfs.HadoopDataOutputStream.close(HadoopDataOutputStream.java:52) at org.apache.flink.core.fs.ClosingFSDataOutputStream.close(ClosingFSDataOutputStream.java:64) at org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory$FsCheckpointStateOutputStream.close(FsCheckpointStreamFactory.java:277) at org.apache.flink.runtime.state.CheckpointStreamWithResultProvider.close(CheckpointStreamWithResultProvider.java:58) at org.apache.flink.util.IOUtils.closeQuietly(IOUtils.java:263) at org.apache.flink.util.IOUtils.closeAllQuietly(IOUtils.java:250) at org.apache.flink.util.AbstractCloseableRegistry.close(AbstractCloseableRegistry.java:122) at 
org.apache.flink.runtime.state.AsyncSnapshotCallable.closeSnapshotIO(AsyncSnapshotCallable.java:167) at org.apache.flink.runtime.state.AsyncSnapshotCallable.call(AsyncSnapshotCallable.java:83) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at org.apache.flink.runtime.concurrent.FutureUtils.runIfNotDoneAndGet(FutureUtils.java:447) at org.apache.flink.streaming.api.operators.OperatorSnapshotFinalizer.<init>(OperatorSnapshotFinalizer.java:47) at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:1031) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 2020-05-11 02:56:27,359 WARN org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory - Could not close the state stream for hdfs://audit-dp02:8020/flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-41/b6cafb66-bdcb-4d73-acc9-74efa52dc332. org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException): No lease on /flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-41/b6cafb66-bdcb-4d73-acc9-74efa52dc332 (inode 25796335): File does not exist. Holder DFSClient_NONMAPREDUCE_1212990289_95 does not have any open files. 
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3659) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFileInternal(FSNamesystem.java:3749) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFile(FSNamesystem.java:3716) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.complete(NameNodeRpcServer.java:911) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.complete(ClientNamenodeProtocolServerSideTranslatorPB.java:547) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1554) at org.apache.hadoop.ipc.Client.call(Client.java:1498) at org.apache.hadoop.ipc.Client.call(Client.java:1398) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233) at com.sun.proxy.$Proxy18.complete(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.complete(ClientNamenodeProtocolTranslatorPB.java:503) at sun.reflect.GeneratedMethodAccessor33.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291) at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185) at com.sun.proxy.$Proxy19.complete(Unknown Source) at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:2442) at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:2419) at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:2384) at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72) at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:106) at org.apache.flink.runtime.fs.hdfs.HadoopDataOutputStream.close(HadoopDataOutputStream.java:52) at org.apache.flink.core.fs.ClosingFSDataOutputStream.close(ClosingFSDataOutputStream.java:64) at org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory$FsCheckpointStateOutputStream.close(FsCheckpointStreamFactory.java:277) at org.apache.flink.runtime.state.CheckpointStreamWithResultProvider.close(CheckpointStreamWithResultProvider.java:58) at org.apache.flink.util.IOUtils.closeQuietly(IOUtils.java:263) at org.apache.flink.util.IOUtils.closeAllQuietly(IOUtils.java:250) at org.apache.flink.util.AbstractCloseableRegistry.close(AbstractCloseableRegistry.java:122) at org.apache.flink.runtime.state.AsyncSnapshotCallable.closeSnapshotIO(AsyncSnapshotCallable.java:167) at org.apache.flink.runtime.state.AsyncSnapshotCallable.call(AsyncSnapshotCallable.java:83) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at org.apache.flink.runtime.concurrent.FutureUtils.runIfNotDoneAndGet(FutureUtils.java:447) at org.apache.flink.streaming.api.operators.OperatorSnapshotFinalizer.<init>(OperatorSnapshotFinalizer.java:47) at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:1031) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 2020-05-11 04:11:26,990 WARN org.apache.hadoop.hdfs.DFSClient - DataStreamer Exception org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException): No lease on /flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-50/7d4fb7d7-583d-4959-9ff0-2730f0a7a6ab (inode 25798866): File does not exist. Holder DFSClient_NONMAPREDUCE_1212990289_95 does not have any open files. at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3659) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.analyzeFileState(FSNamesystem.java:3462) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3300) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3260) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:849) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:503) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1554) at org.apache.hadoop.ipc.Client.call(Client.java:1498) 
at org.apache.hadoop.ipc.Client.call(Client.java:1398) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233) at com.sun.proxy.$Proxy18.addBlock(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:459) at sun.reflect.GeneratedMethodAccessor32.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185) at com.sun.proxy.$Proxy19.addBlock(Unknown Source) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1568) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1363) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:552) 2020-05-11 04:11:26,992 WARN org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory - Could not close the state stream for hdfs://audit-dp02:8020/flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-50/7d4fb7d7-583d-4959-9ff0-2730f0a7a6ab. org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException): No lease on /flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-50/7d4fb7d7-583d-4959-9ff0-2730f0a7a6ab (inode 25798866): File does not exist. Holder DFSClient_NONMAPREDUCE_1212990289_95 does not have any open files. 
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3659) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.analyzeFileState(FSNamesystem.java:3462) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3300) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3260) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:849) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:503) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1554) at org.apache.hadoop.ipc.Client.call(Client.java:1498) at org.apache.hadoop.ipc.Client.call(Client.java:1398) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233) at com.sun.proxy.$Proxy18.addBlock(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:459) at sun.reflect.GeneratedMethodAccessor32.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185) at com.sun.proxy.$Proxy19.addBlock(Unknown Source) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1568) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1363) at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:552) 2020-05-11 05:18:07,005 WARN org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory - Could not close the state stream for hdfs://audit-dp02:8020/flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-58/17050874-b971-4bbe-81e6-ea206480a84b. org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException): No lease on /flink/checkpoints/OnlineOrderAddressMatch/8183afb213bfbd32b49e3a6bab977f7c/chk-58/17050874-b971-4bbe-81e6-ea206480a84b (inode 25801104): File does not exist. Holder DFSClient_NONMAPREDUCE_1212990289_95 does not have any open files. 
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3659) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFileInternal(FSNamesystem.java:3749) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFile(FSNamesystem.java:3716) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.complete(NameNodeRpcServer.java:911) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.complete(ClientNamenodeProtocolServerSideTranslatorPB.java:547) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351) at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1554) at org.apache.hadoop.ipc.Client.call(Client.java:1498) at org.apache.hadoop.ipc.Client.call(Client.java:1398) at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233) at com.sun.proxy.$Proxy18.complete(Unknown Source) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.complete(ClientNamenodeProtocolTranslatorPB.java:503) at sun.reflect.GeneratedMethodAccessor33.invoke(Unknown Source) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291) at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203) at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185) at com.sun.proxy.$Proxy19.complete(Unknown Source) at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:2442) at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:2419) at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:2384) at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72) at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:106) at org.apache.flink.runtime.fs.hdfs.HadoopDataOutputStream.close(HadoopDataOutputStream.java:52) at org.apache.flink.core.fs.ClosingFSDataOutputStream.close(ClosingFSDataOutputStream.java:64) at org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory$FsCheckpointStateOutputStream.close(FsCheckpointStreamFactory.java:277) at org.apache.flink.runtime.state.CheckpointStreamWithResultProvider.close(CheckpointStreamWithResultProvider.java:58) at org.apache.flink.util.IOUtils.closeQuietly(IOUtils.java:263) at org.apache.flink.util.IOUtils.closeAllQuietly(IOUtils.java:250) at org.apache.flink.util.AbstractCloseableRegistry.close(AbstractCloseableRegistry.java:122) at org.apache.flink.runtime.state.AsyncSnapshotCallable.closeSnapshotIO(AsyncSnapshotCallable.java:167) at org.apache.flink.runtime.state.AsyncSnapshotCallable.call(AsyncSnapshotCallable.java:83) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at org.apache.flink.runtime.concurrent.FutureUtils.runIfNotDoneAndGet(FutureUtils.java:447) at org.apache.flink.streaming.api.operators.OperatorSnapshotFinalizer.<init>(OperatorSnapshotFinalizer.java:47) at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:1031) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748)