package org.apache.flink.cdc.connectors.iceberg.sink;

import org.apache.flink.cdc.common.configuration.ConfigOption;
import org.apache.flink.cdc.common.event.Event;
import org.apache.flink.cdc.common.factories.DataSinkFactory;
import org.apache.flink.cdc.common.factories.FactoryHelper;
import org.apache.flink.cdc.common.pipeline.PipelineOptions;
import org.apache.flink.cdc.common.sink.DataSink;
import org.apache.flink.cdc.common.utils.Preconditions;
import org.apache.flink.cdc.connectors.iceberg.sink.v2.IcebergRecordEventSerializer;
import org.apache.flink.cdc.connectors.iceberg.sink.v2.IcebergRecordSerializer;
import org.apache.flink.table.catalog.Catalog;

import org.apache.iceberg.flink.FlinkCatalogFactory;

import java.time.ZoneId;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import static org.apache.flink.cdc.connectors.iceberg.sink.IcebergDataSinkOptions.PREFIX_CATALOG_PROPERTIES;
import static org.apache.flink.cdc.connectors.iceberg.sink.IcebergDataSinkOptions.PREFIX_TABLE_PROPERTIES;

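/**
 * A {@link DataSinkFactory} for "iceberg" pipeline sinks. It splits the flat factory
 * configuration into catalog and table properties, validates the catalog eagerly, and builds an
 * {@link IcebergDataSink}.
 */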
public class IcebergDataSinkFactory implements DataSinkFactory {

    public static final String IDENTIFIER = "iceberg";

    @Override
    public DataSink createDataSink(Context context) {
        FactoryHelper.createFactoryHelper(this, context)
                .validateExcept(PREFIX_TABLE_PROPERTIES, PREFIX_CATALOG_PROPERTIES);

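        // Split the flat factory configuration: keys carrying the catalog/table property
        // prefixes are forwarded to the Iceberg catalog and table respectively, with the
        // prefix stripped; everything else has already been validated above.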
        Map<String, String> allOptions = context.getFactoryConfiguration().toMap();
        Map<String, String> catalogOptions = new HashMap<>();
        Map<String, String> tableOptions = new HashMap<>();
        allOptions.forEach(
                (key, value) -> {
                    if (key.startsWith(PREFIX_TABLE_PROPERTIES)) {
                        tableOptions.put(key.substring(PREFIX_TABLE_PROPERTIES.length()), value);
                    } else if (key.startsWith(PREFIX_CATALOG_PROPERTIES)) {
                        catalogOptions.put(
                                key.substring(PREFIX_CATALOG_PROPERTIES.length()), value);
                    }
                });
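        // Fail fast: build the catalog once and list its databases so that invalid catalog
        // options surface when the pipeline is composed rather than when the job runs.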
        FlinkCatalogFactory factory = new FlinkCatalogFactory();
        try {
            Catalog catalog =
                    factory.createCatalog(
                            catalogOptions.getOrDefault("default-database", "default"),
                            catalogOptions);
            Preconditions.checkNotNull(
                    catalog.listDatabases(), "Iceberg catalog options are invalid.");
        } catch (Exception e) {
            throw new RuntimeException("Failed to create or use Iceberg catalog", e);
        }
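        // Prefer the pipeline's configured local time zone; fall back to the system default
        // when the option is left at its default value.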
        ZoneId zoneId = ZoneId.systemDefault();
        if (!Objects.equals(
                context.getPipelineConfiguration().get(PipelineOptions.PIPELINE_LOCAL_TIME_ZONE),
                PipelineOptions.PIPELINE_LOCAL_TIME_ZONE.defaultValue())) {
            zoneId =
                    ZoneId.of(
                            context.getPipelineConfiguration()
                                    .get(PipelineOptions.PIPELINE_LOCAL_TIME_ZONE));
        }
        String commitUser =
                context.getFactoryConfiguration().get(IcebergDataSinkOptions.COMMIT_USER);
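        // Serializer that turns pipeline Events into Iceberg write records; zoneId drives the
        // conversion of time-zone-sensitive fields.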
        IcebergRecordSerializer<Event> serializer =
                new IcebergRecordEventSerializer(new HashMap<>(), zoneId);
        String schemaOperatorUid =
                context.getPipelineConfiguration()
                        .get(PipelineOptions.PIPELINE_SCHEMA_OPERATOR_UID);
        return new IcebergDataSink(
                catalogOptions,
                tableOptions,
                commitUser,
                new HashMap<>(),
                serializer,
                zoneId,
                schemaOperatorUid);
    }

    @Override
    public String identifier() {
        return IDENTIFIER;
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(IcebergDataSinkOptions.METASTORE);
        return options;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(IcebergDataSinkOptions.WAREHOUSE);
        options.add(IcebergDataSinkOptions.URI);
        options.add(IcebergDataSinkOptions.COMMIT_USER);
        options.add(IcebergDataSinkOptions.PARTITION_KEY);
        return options;
    }
}
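
Reviewer note: below is a minimal, self-contained sketch of the option-splitting behavior in
createDataSink. It assumes PREFIX_TABLE_PROPERTIES and PREFIX_CATALOG_PROPERTIES resolve to
"table.properties." and "catalog.properties." (their actual values live in
IcebergDataSinkOptions, which is outside this diff); the class name and option values here are
illustrative only, not part of the connector.

import java.util.HashMap;
import java.util.Map;

public class PrefixSplitExample {
    // Assumed prefix values; the real constants are defined in IcebergDataSinkOptions.
    private static final String PREFIX_TABLE_PROPERTIES = "table.properties.";
    private static final String PREFIX_CATALOG_PROPERTIES = "catalog.properties.";

    public static void main(String[] args) {
        // Hypothetical flat factory configuration as it would arrive from the pipeline.
        Map<String, String> allOptions = new HashMap<>();
        allOptions.put("metastore", "hive");
        allOptions.put("catalog.properties.uri", "thrift://metastore:9083");
        allOptions.put("table.properties.write.format.default", "parquet");

        Map<String, String> catalogOptions = new HashMap<>();
        Map<String, String> tableOptions = new HashMap<>();
        // Same splitting logic as createDataSink: strip the prefix, route to the right map.
        allOptions.forEach(
                (key, value) -> {
                    if (key.startsWith(PREFIX_TABLE_PROPERTIES)) {
                        tableOptions.put(key.substring(PREFIX_TABLE_PROPERTIES.length()), value);
                    } else if (key.startsWith(PREFIX_CATALOG_PROPERTIES)) {
                        catalogOptions.put(
                                key.substring(PREFIX_CATALOG_PROPERTIES.length()), value);
                    }
                });

        System.out.println(catalogOptions); // {uri=thrift://metastore:9083}
        System.out.println(tableOptions); // {write.format.default=parquet}
    }
}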