
Commit d424dea

committed
[fix-34810][core] console dirty plugin: adjust the counter's initial print value so the first dirty-data record is printed
1 parent c1dce35 commit d424dea

4 files changed: +9 −10 lines


core/src/main/java/com/dtstack/flink/sql/dirtyManager/consumer/AbstractDirtyDataConsumer.java (+1 −2)

@@ -83,8 +83,7 @@ public void run() {
                 }
             } catch (Exception e) {
                 LOG.error("consume dirtyData error", e);
-                errorCount.incrementAndGet();
-                if (errorCount.get() > errorLimit) {
+                if (errorCount.getAndIncrement() > errorLimit) {
                     throw new RuntimeException("The task failed due to the number of dirty data consume failed reached the limit " + errorLimit);
                 }
             }
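The core change replaces a two-step increment-then-read with a single atomic getAndIncrement(). Note the semantic difference on java.util.concurrent.atomic.AtomicLong: incrementAndGet() returns the new value, while getAndIncrement() returns the old one, and the read-modify-write now happens in one atomic step instead of two separate calls that another thread could interleave. A minimal sketch (not from the patch; the limit value is illustrative):

import java.util.concurrent.atomic.AtomicLong;

public class AtomicCountDemo {
    public static void main(String[] args) {
        long errorLimit = 3; // illustrative limit, not the plugin's default
        AtomicLong errorCount = new AtomicLong();

        // Old style: bump first, then test the *new* value with a second call.
        errorCount.incrementAndGet();                        // errorCount == 1
        System.out.println(errorCount.get() > errorLimit);   // false

        // New style: test the *old* value and bump in one atomic operation,
        // closing the window between the increment and the read.
        errorCount.set(0);
        System.out.println(errorCount.getAndIncrement() > errorLimit); // false; errorCount == 1
    }
}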

dirtyData/console/src/main/java/com/dtstack/flink/sql/dirty/console/ConsoleDirtyDataConsumer.java (+1 −2)

@@ -43,8 +43,7 @@ public class ConsoleDirtyDataConsumer extends AbstractDirtyDataConsumer {
     @Override
     public void consume() throws InterruptedException {
         DirtyDataEntity dataEntity = queue.take();
-        count.incrementAndGet();
-        if (count.get() % printLimit == 0) {
+        if (count.getAndIncrement() % printLimit == 0) {
             LOG.warn("\nget dirtyData: " + dataEntity.getDirtyData() + "\n"
                     + "cause: " + dataEntity.getCause() + "\n"
                     + "processTime: " + dataEntity.getProcessDate() + "\n"

dirtyData/mysql/src/main/java/com/dtstack/flink/sql/dirty/mysql/MysqlDirtyDataConsumer.java (+1)

@@ -131,6 +131,7 @@ public void consume() throws Exception {
             statement.addBatch();

             if (count.get() % batchSize == 0) {
+                LOG.warn("Get dirty Data: " + entity.getDirtyData());
                 statement.executeBatch();
             }
         }
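The MySQL consumer buffers rows with addBatch() and flushes every batchSize records; the added LOG.warn logs the dirty record that triggered the flush. For context, a minimal, self-contained sketch of that JDBC batching pattern (connection URL, table name, and batch size are illustrative assumptions, not the plugin's actual configuration):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.concurrent.atomic.AtomicLong;

public class JdbcBatchSketch {
    public static void main(String[] args) throws Exception {
        long batchSize = 100;                 // illustrative; the plugin reads this from its config
        AtomicLong count = new AtomicLong(1); // counts buffered rows

        try (Connection conn = DriverManager.getConnection(
                     "jdbc:mysql://localhost:3306/dirty", "user", "pass"); // placeholder URL
             PreparedStatement statement = conn.prepareStatement(
                     "INSERT INTO dirty_data (content) VALUES (?)")) {     // placeholder table
            for (String dirtyData : new String[]{"bad-row-1", "bad-row-2"}) {
                statement.setString(1, dirtyData);
                statement.addBatch();                           // buffer the row client-side
                if (count.getAndIncrement() % batchSize == 0) {
                    statement.executeBatch();                   // one round trip per batch
                }
            }
            statement.executeBatch();                           // flush any remaining rows
        }
    }
}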

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/HbaseOutputFormat.java (+6 −6)

@@ -19,9 +19,8 @@

 package com.dtstack.flink.sql.sink.hbase;

-import com.dtstack.flink.sql.factory.DTThreadFactory;
 import com.dtstack.flink.sql.dirtyManager.manager.DirtyDataManager;
-import com.dtstack.flink.sql.enums.EUpdateMode;
+import com.dtstack.flink.sql.factory.DTThreadFactory;
 import com.dtstack.flink.sql.outputformat.AbstractDtRichOutputFormat;
 import com.google.common.collect.Maps;
 import org.apache.commons.lang3.StringUtils;

@@ -202,7 +201,7 @@ private void openKerberosConn() throws Exception {
     @Override
     public void writeRecord(Tuple2<Boolean, Row> record) {
         if (record.f0) {
-            if (this.batchSize != 0) {
+            if (this.batchSize > 1) {
                 writeBatchRecord(record.f1);
             } else {
                 dealInsert(record.f1);

@@ -231,10 +230,11 @@ protected synchronized void dealBatchOperation(List<Row> records) {
         // check whether each record was inserted successfully
         for (int i = 0; i < results.length; i++) {
             if (results[i] == null) {
-                if (outDirtyRecords.getCount() % DIRTY_PRINT_FREQUENCY == 0 || LOG.isDebugEnabled()) {
-                    LOG.error("record insert failed ..{}", records.get(i).toString());
-                }
                 // record the dirty data
+                dirtyDataManager.collectDirtyData(
+                        records.get(i).toString(),
+                        "Batch Hbase Sink Error"
+                );
                 outDirtyRecords.inc();
             } else {
                 // record the number of emitted results
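Two things change here: writeRecord batches only when batchSize > 1 (a batch of one is just a single insert, so it goes through dealInsert), and failed puts are routed through the shared DirtyDataManager instead of rate-limited LOG.error output, so the HBase sink reports dirty records the same way the other connectors do. Below is a minimal sketch of what such a collect call can look like behind the scenes; this interface is an illustration, not the actual DirtyDataManager API:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class DirtyCollectorSketch {

    /** Hypothetical dirty-data entry: the failed record plus a cause tag. */
    record DirtyEntry(String dirtyData, String cause) {}

    // Hypothetical bounded buffer; the real manager hands entries to pluggable consumers.
    private final BlockingQueue<DirtyEntry> queue = new ArrayBlockingQueue<>(1024);

    /** Buffers one failed record; offer() drops it when full rather than blocking the sink. */
    public void collectDirtyData(String dirtyData, String cause) {
        queue.offer(new DirtyEntry(dirtyData, cause));
    }

    public static void main(String[] args) {
        DirtyCollectorSketch manager = new DirtyCollectorSketch();
        manager.collectDirtyData("rowkey=42, cf:col=null", "Batch Hbase Sink Error");
        System.out.println("buffered entries: " + manager.queue.size());
    }
}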
