-
Notifications
You must be signed in to change notification settings - Fork 0
/
BaseToJsonTransform.java
90 lines (67 loc) · 3.05 KB
/
BaseToJsonTransform.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
package com.bevans.kafka.connect.transforms;
import org.apache.kafka.common.cache.Cache;
import org.apache.kafka.common.cache.LRUCache;
import org.apache.kafka.common.cache.SynchronizedCache;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.transforms.Transformation;
import org.apache.kafka.connect.transforms.util.SchemaUtil;
import org.apache.kafka.connect.transforms.util.SimpleConfig;
import java.util.Map;
import static org.apache.kafka.connect.transforms.util.Requirements.requireStruct;
/**
 * Base class for sink-record transformations that replace one struct field with a
 * string (JSON) rendering of its value. Subclasses supply the field name, the value
 * conversion ({@link #makeUpdatedValue}), and their own {@link ConfigDef}.
 *
 * <p>Thread-safety of the schema cache is provided by {@link SynchronizedCache};
 * the rest of the class holds no mutable state after {@link #configure}.
 */
public abstract class BaseToJsonTransform implements Transformation<SinkRecord> {

    // Maps original value schema -> derived schema. Schema derivation is deterministic,
    // so caching by identity/equality of the source schema is safe.
    private Cache<Schema, Schema> schemaUpdateCache;

    @Override
    public SinkRecord apply(SinkRecord record) {
        // NOTE(review): a tombstone (null value) or schemaless record will make
        // requireStruct throw a DataException — confirm such records never reach
        // this transform, or add a pass-through guard upstream.
        return applyWithSchema(record);
    }

    private SinkRecord applyWithSchema(SinkRecord record) {
        var value = requireStruct(record.value(), purpose());
        var updatedSchema = getUpdatedSchema(value.schema());
        var updatedValue = makeUpdatedValue(value, updatedSchema);
        return newRecord(record, updatedSchema, updatedValue);
    }

    /** Short human-readable description used in error messages (see requireStruct). */
    protected abstract String purpose();

    private Schema getUpdatedSchema(Schema originalSchema) {
        var updatedSchema = schemaUpdateCache.get(originalSchema);
        if (updatedSchema == null) {
            updatedSchema = makeUpdatedSchema(originalSchema);
            schemaUpdateCache.put(originalSchema, updatedSchema);
        }
        return updatedSchema;
    }

    /**
     * Builds the output schema: every field of {@code schema} except {@link #fieldName()},
     * plus {@link #fieldName()} re-declared as a string field.
     */
    private Schema makeUpdatedSchema(Schema schema) {
        var builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
        schema.fields().stream()
                .filter(field -> !field.name().equals(fieldName()))
                .forEach(field -> builder.field(field.name(), field.schema()));
        // Preserve the optionality of the field being replaced (resolves the old TODO):
        // if the source field was optional — or absent from the source schema — records
        // carrying a null value must still validate against the derived schema.
        var originalField = schema.field(fieldName());
        var stringSchema = (originalField != null && !originalField.schema().isOptional())
                ? Schema.STRING_SCHEMA
                : Schema.OPTIONAL_STRING_SCHEMA;
        builder.field(fieldName(), stringSchema);
        return builder.build();
    }

    /** Name of the struct field to replace with its string/JSON rendering. */
    protected abstract String fieldName();

    /** Produces the output struct conforming to {@code updatedSchema}. */
    protected abstract Struct makeUpdatedValue(Struct value, Schema updatedSchema);

    private SinkRecord newRecord(SinkRecord oldRecord, Schema updatedSchema, Struct updatedValue) {
        // Key, topic, partition and timestamp pass through unchanged; only the value side is rewritten.
        return oldRecord.newRecord(oldRecord.topic(), oldRecord.kafkaPartition(),
                oldRecord.keySchema(), oldRecord.key(),
                updatedSchema, updatedValue, oldRecord.timestamp());
    }

    @Override
    public ConfigDef config() {
        return configDef();
    }

    /** Subclass-specific configuration definition, also used to parse configs in {@link #configure}. */
    protected abstract ConfigDef configDef();

    @Override
    public void configure(Map<String, ?> configs) {
        final SimpleConfig config = new SimpleConfig(configDef(), configs);
        getConfigValues(config);
        // Small LRU: connectors typically see very few distinct value schemas.
        schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16));
    }

    /** Hook for subclasses to read and store their configuration values. */
    protected abstract void getConfigValues(SimpleConfig config);

    @Override
    public void close() {
        // Drop the cache so schemas are not retained after the transform is closed.
        schemaUpdateCache = null;
    }
}