forked from feast-dev/feast
-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: Added support for Confluent Avro Format (#90)
* feat: Added support for Confluent Avro Format * Removed request source from SUPPORTED_KAFKA_BATCH_SOURCES * rename schema to schema_str --------- Co-authored-by: Bhargav Dodla <[email protected]>
- Loading branch information
1 parent
c540117
commit f1a1571
Showing 6 changed files with 325 additions and 66 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -4,6 +4,7 @@ | |
Copyright 2023 Expedia Group | ||
Author: [email protected] | ||
""" | ||
|
||
import sys | ||
from datetime import timedelta | ||
from typing import Dict, List, Literal, Optional, Union | ||
|
@@ -17,6 +18,7 @@ | |
from feast.expediagroup.pydantic_models.stream_format_model import ( | ||
AnyStreamFormat, | ||
AvroFormatModel, | ||
ConfluentAvroFormatModel, | ||
JsonFormatModel, | ||
ProtoFormatModel, | ||
) | ||
|
@@ -230,8 +232,13 @@ def from_data_source( | |
) | ||
|
||
|
||
SUPPORTED_MESSAGE_FORMATS = [AvroFormatModel, JsonFormatModel, ProtoFormatModel] | ||
SUPPORTED_KAFKA_BATCH_SOURCES = [RequestSourceModel, SparkSourceModel] | ||
SUPPORTED_MESSAGE_FORMATS = [ | ||
AvroFormatModel, | ||
JsonFormatModel, | ||
ProtoFormatModel, | ||
ConfluentAvroFormatModel, | ||
] | ||
SUPPORTED_KAFKA_BATCH_SOURCES = [SparkSourceModel] | ||
|
||
|
||
class KafkaSourceModel(DataSourceModel): | ||
|
@@ -271,9 +278,9 @@ def to_data_source(self) -> KafkaSource: | |
description=self.description, | ||
tags=self.tags, | ||
owner=self.owner, | ||
batch_source=self.batch_source.to_data_source() | ||
if self.batch_source | ||
else None, | ||
batch_source=( | ||
self.batch_source.to_data_source() if self.batch_source else None | ||
), | ||
watermark_delay_threshold=self.watermark_delay_threshold, | ||
) | ||
|
||
|
@@ -317,16 +324,20 @@ def from_data_source( | |
name=data_source.name, | ||
timestamp_field=data_source.timestamp_field, | ||
message_format=message_format, | ||
kafka_bootstrap_servers=data_source.kafka_options.kafka_bootstrap_servers | ||
if data_source.kafka_options.kafka_bootstrap_servers | ||
else "", | ||
topic=data_source.kafka_options.topic | ||
if data_source.kafka_options.topic | ||
else "", | ||
kafka_bootstrap_servers=( | ||
data_source.kafka_options.kafka_bootstrap_servers | ||
if data_source.kafka_options.kafka_bootstrap_servers | ||
else "" | ||
), | ||
topic=( | ||
data_source.kafka_options.topic | ||
if data_source.kafka_options.topic | ||
else "" | ||
), | ||
created_timestamp_column=data_source.created_timestamp_column, | ||
field_mapping=data_source.field_mapping | ||
if data_source.field_mapping | ||
else None, | ||
field_mapping=( | ||
data_source.field_mapping if data_source.field_mapping else None | ||
), | ||
description=data_source.description, | ||
tags=data_source.tags if data_source.tags else None, | ||
owner=data_source.owner, | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.