 import com.amazonaws.services.kinesisanalytics.runtime.KinesisAnalyticsRuntime;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
-import org.apache.flink.api.common.serialization.SimpleStringSchema;
+import org.apache.flink.api.common.serialization.DeserializationSchema;
+import org.apache.flink.api.common.serialization.SerializationSchema;
 import org.apache.flink.connector.base.DeliveryGuarantee;
 import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
 import org.apache.flink.connector.kafka.sink.KafkaSink;
 import org.apache.flink.connector.kafka.source.KafkaSource;
 import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
+import org.apache.flink.formats.json.JsonDeserializationSchema;
+import org.apache.flink.formats.json.JsonSerializationSchema;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.LocalStreamEnvironment;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
@@ -53,31 +56,26 @@ private static Map<String, Properties> loadApplicationProperties(StreamExecution
     }


-    private static KafkaSource<String> createKafkaSource(Properties inputProperties) {
+    private static <T> KafkaSource<T> createKafkaSource(Properties inputProperties, final DeserializationSchema<T> valueDeserializationSchema) {
         OffsetsInitializer startingOffsetsInitializer = inputProperties.containsKey("startTimestamp") ? OffsetsInitializer.timestamp(
                 Long.parseLong(inputProperties.getProperty("startTimestamp"))) : DEFAULT_OFFSETS_INITIALIZER;

-        return KafkaSource.<String>builder()
+        return KafkaSource.<T>builder()
                 .setBootstrapServers(inputProperties.getProperty("bootstrap.servers"))
                 .setTopics(inputProperties.getProperty("topic", DEFAULT_SOURCE_TOPIC))
                 .setGroupId(inputProperties.getProperty("group.id", DEFAULT_GROUP_ID))
                 .setStartingOffsets(startingOffsetsInitializer) // Used when the application starts with no state
-                .setValueOnlyDeserializer(new SimpleStringSchema())
+                .setValueOnlyDeserializer(valueDeserializationSchema)
                 .setProperties(inputProperties)
                 .build();
     }


-    private static KafkaSink<String> createKafkaSink(Properties outputProperties) {
-        return KafkaSink.<String>builder()
+    private static <T> KafkaSink<T> createKafkaSink(Properties outputProperties, KafkaRecordSerializationSchema<T> recordSerializationSchema) {
+        return KafkaSink.<T>builder()
                 .setBootstrapServers(outputProperties.getProperty("bootstrap.servers"))
                 .setKafkaProducerConfig(outputProperties)
-                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
-                        .setTopic(outputProperties.getProperty("topic", DEFAULT_SINK_TOPIC))
-                        .setKeySerializationSchema(new SimpleStringSchema())
-                        .setValueSerializationSchema(new SimpleStringSchema())
-                        .build()
-                )
+                .setRecordSerializer(recordSerializationSchema)
                 .setDeliveryGuarantee(DeliveryGuarantee.EXACTLY_ONCE)
                 .build();
     }
@@ -91,6 +89,7 @@ private static Properties mergeProperties(Properties properties, Properties auth
     public static void main(String[] args) throws Exception {
         // Set up the streaming execution environment
         final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+        env.enableCheckpointing(1000);

         // Load the application properties
         final Map<String, Properties> applicationProperties = loadApplicationProperties(env);
@@ -105,11 +104,21 @@ public static void main(String[] args) throws Exception {
         Properties outputProperties = mergeProperties(applicationProperties.get("Output0"), authProperties);

         // Create and add the Source
-        KafkaSource<String> source = createKafkaSource(inputProperties);
-        DataStream<String> input = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka source");
+        KafkaSource<Stock> source = createKafkaSource(inputProperties, new JsonDeserializationSchema<>(Stock.class));
+        DataStream<Stock> input = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka source");
+
+        KafkaRecordSerializationSchema<Stock> recordSerializationSchema = KafkaRecordSerializationSchema.<Stock>builder()
+                .setTopic(outputProperties.getProperty("topic", DEFAULT_SINK_TOPIC))
+                // Use a field as the Kafka record key
+                // (omit the keySerializationSchema to publish records with no key)
+                .setKeySerializationSchema(stock -> stock.getTicker().getBytes())
+                // Serialize the Kafka record value (payload) as JSON
+                .setValueSerializationSchema(new JsonSerializationSchema<>())
+                .build();
+

         // Create and add the Sink
-        KafkaSink<String> sink = createKafkaSink(outputProperties);
+        KafkaSink<Stock> sink = createKafkaSink(outputProperties, recordSerializationSchema);
         input.sinkTo(sink);

         env.execute("Flink Kafka Source and Sink examples");
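
For context: the diff presupposes a Stock POJO with at least a getTicker() accessor, which is not shown in these hunks. A minimal sketch of what such a class could look like, assuming Jackson-style bean conventions; the price field and the setters are illustrative guesses, not taken from the repository:

// Hypothetical Stock POJO (illustrative only; not part of the diff).
// JsonDeserializationSchema/JsonSerializationSchema are Jackson-based, so the
// class needs a no-arg constructor and bean-style getters/setters.
public class Stock {
    private String ticker;   // used as the Kafka record key above
    private double price;    // assumed field, for illustration

    public Stock() {}        // required by Jackson for deserialization

    public String getTicker() { return ticker; }
    public void setTicker(String ticker) { this.ticker = ticker; }

    public double getPrice() { return price; }
    public void setPrice(double price) { this.price = price; }
}

Two smaller observations on the change itself: JsonDeserializationSchema and JsonSerializationSchema ship in the flink-json artifact, so that dependency has to be on the classpath, and the newly added env.enableCheckpointing(1000) matters because the EXACTLY_ONCE delivery guarantee only commits Kafka transactions when checkpoints complete. Also, stock.getTicker().getBytes() falls back to the platform default charset; getBytes(StandardCharsets.UTF_8) would make the key bytes deterministic across environments.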