diff --git a/dagger-common/build.gradle b/dagger-common/build.gradle
index c31356460..bb9585d72 100644
--- a/dagger-common/build.gradle
+++ b/dagger-common/build.gradle
@@ -60,9 +60,11 @@ dependencies {
     compileOnly group: 'org.apache.flink', name: 'flink-connector-kafka_2.11', version: flinkVersion
 
     dependenciesCommonJar 'org.apache.flink:flink-metrics-dropwizard:' + flinkVersion
+    dependenciesCommonJar 'org.apache.flink:flink-json:' + flinkVersion
+    dependenciesCommonJar 'com.jayway.jsonpath:json-path:2.4.0'
     dependenciesCommonJar 'com.gojek:stencil:2.0.15'
 
-    testImplementation 'junit:junit:4.12'
+    testImplementation 'junit:junit:4.13'
     testImplementation 'org.jmockit:jmockit:1.25'
     testImplementation 'org.mockito:mockito-core:2.25.1'
     testImplementation 'io.grpc:grpc-protobuf:1.18.0'
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/DataTypeNotSupportedException.java b/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DataTypeNotSupportedException.java
similarity index 87%
rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/DataTypeNotSupportedException.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DataTypeNotSupportedException.java
index 74cb439b9..4aa3d5afe 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/DataTypeNotSupportedException.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/DataTypeNotSupportedException.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.exception;
+package io.odpf.dagger.common.exceptions.serde;
 
 /**
  * The class Exception for unsupported protobuf data type.
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/EnumFieldNotFoundException.java b/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/EnumFieldNotFoundException.java
similarity index 87%
rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/EnumFieldNotFoundException.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/EnumFieldNotFoundException.java
index 1af96a7ed..48f437e68 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/EnumFieldNotFoundException.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/EnumFieldNotFoundException.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.exception;
+package io.odpf.dagger.common.exceptions.serde;
 
 /**
  * The class Exception if Enum field not found in proto descriptor.
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidDataTypeException.java b/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidDataTypeException.java
similarity index 87%
rename from dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidDataTypeException.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidDataTypeException.java
index 4833cb22f..d47caa42f 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InvalidDataTypeException.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/exceptions/serde/InvalidDataTypeException.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.exception;
+package io.odpf.dagger.common.exceptions.serde;
 
 /**
  * The class Exception if there is an Invalid Data type.
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/EnumProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/EnumProtoHandler.java
similarity index 95%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/EnumProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/EnumProtoHandler.java
index 886c946ac..a36944ea9 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/EnumProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/EnumProtoHandler.java
@@ -1,11 +1,11 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.common.typeinfo.Types;
 
-import io.odpf.dagger.core.exception.EnumFieldNotFoundException;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
+import io.odpf.dagger.common.exceptions.serde.EnumFieldNotFoundException;
 
 /**
  * The type Enum proto handler.
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/MapProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/MapProtoHandler.java
similarity index 99%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/MapProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/MapProtoHandler.java
index c137dbcdd..e6b6fd011 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/MapProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/MapProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/MessageProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/MessageProtoHandler.java
similarity index 98%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/MessageProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/MessageProtoHandler.java
index a3e55bbab..ba9445607 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/MessageProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/MessageProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.formats.json.JsonRowSerializationSchema;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/PrimitiveProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/PrimitiveProtoHandler.java
similarity index 87%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/PrimitiveProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/PrimitiveProtoHandler.java
index c2e248f98..31dbb6d66 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/PrimitiveProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/PrimitiveProtoHandler.java
@@ -1,10 +1,10 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 
-import io.odpf.dagger.core.exception.InvalidDataTypeException;
-import io.odpf.dagger.core.protohandler.typehandler.PrimitiveTypeHandler;
-import io.odpf.dagger.core.protohandler.typehandler.PrimitiveTypeHandlerFactory;
+import io.odpf.dagger.common.exceptions.serde.InvalidDataTypeException;
+import io.odpf.dagger.common.protohandler.typehandler.PrimitiveTypeHandlerFactory;
+import io.odpf.dagger.common.protohandler.typehandler.PrimitiveTypeHandler;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/ProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/ProtoHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/ProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/ProtoHandler.java
index 11dd623d8..3c34a70e3 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/ProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/ProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/ProtoHandlerFactory.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/ProtoHandlerFactory.java
similarity index 97%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/ProtoHandlerFactory.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/ProtoHandlerFactory.java
index cbc972b75..44bbba44e 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/ProtoHandlerFactory.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/ProtoHandlerFactory.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import com.google.protobuf.Descriptors;
 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedEnumProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedEnumProtoHandler.java
similarity index 97%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedEnumProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedEnumProtoHandler.java
index 680db1271..0bf3e83f3 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedEnumProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedEnumProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.common.typeinfo.Types;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedMessageProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedMessageProtoHandler.java
similarity index 99%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedMessageProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedMessageProtoHandler.java
index 99aa4ab4c..cffe1a5e6 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedMessageProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedMessageProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.common.typeinfo.Types;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedPrimitiveProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedPrimitiveProtoHandler.java
similarity index 92%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedPrimitiveProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedPrimitiveProtoHandler.java
index e6750c46e..62a578304 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedPrimitiveProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedPrimitiveProtoHandler.java
@@ -1,9 +1,9 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 
-import io.odpf.dagger.core.protohandler.typehandler.PrimitiveTypeHandler;
-import io.odpf.dagger.core.protohandler.typehandler.PrimitiveTypeHandlerFactory;
+import io.odpf.dagger.common.protohandler.typehandler.PrimitiveTypeHandlerFactory;
+import io.odpf.dagger.common.protohandler.typehandler.PrimitiveTypeHandler;
 import com.google.gson.Gson;
 import com.google.protobuf.Descriptors.FieldDescriptor;
 import com.google.protobuf.DynamicMessage;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedStructMessageProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedStructMessageProtoHandler.java
similarity index 97%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedStructMessageProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedStructMessageProtoHandler.java
index 3bef5190a..48760eb27 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RepeatedStructMessageProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RepeatedStructMessageProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.common.typeinfo.Types;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RowFactory.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RowFactory.java
similarity index 98%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RowFactory.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RowFactory.java
index c9593b0db..9b9a643e8 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/RowFactory.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/RowFactory.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Descriptors.FieldDescriptor;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/StructMessageProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/StructMessageProtoHandler.java
similarity index 97%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/StructMessageProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/StructMessageProtoHandler.java
index 0eadf96fa..0d1386eb4 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/StructMessageProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/StructMessageProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.common.typeinfo.Types;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/TimestampProtoHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/TimestampProtoHandler.java
similarity index 90%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/TimestampProtoHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/TimestampProtoHandler.java
index 2574c52d3..f1dfdc994 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/TimestampProtoHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/TimestampProtoHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.types.Row;
@@ -9,6 +9,8 @@
 import java.text.SimpleDateFormat;
 import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
 import java.time.format.DateTimeParseException;
 import java.util.TimeZone;
 
@@ -44,6 +46,11 @@ public DynamicMessage.Builder transformForKafka(DynamicMessage.Builder builder,
         if (field instanceof java.sql.Timestamp) {
             timestamp = convertSqlTimestamp((java.sql.Timestamp) field);
         }
+
+        if (field instanceof LocalDateTime) {
+            timestamp = convertLocalDateTime((LocalDateTime) field);
+        }
+
         if (field instanceof Row) {
             Row timeField = (Row) field;
             if (timeField.getArity() == 2) {
@@ -70,6 +77,12 @@ public DynamicMessage.Builder transformForKafka(DynamicMessage.Builder builder,
         return builder;
     }
 
+    private Timestamp convertLocalDateTime(LocalDateTime timeField) {
+        return Timestamp.newBuilder()
+                .setSeconds(timeField.toEpochSecond(ZoneOffset.UTC))
+                .build();
+    }
+
     @Override
     public Object transformFromPostProcessor(Object field) {
         return isValid(field) ? field.toString() : null;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/TypeInformationFactory.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/TypeInformationFactory.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/TypeInformationFactory.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/TypeInformationFactory.java
index 264185f8d..14a963370 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/TypeInformationFactory.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/TypeInformationFactory.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import io.odpf.dagger.common.exceptions.DescriptorNotFoundException;
 import com.google.protobuf.Descriptors;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/BooleanPrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/BooleanPrimitiveTypeHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/BooleanPrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/BooleanPrimitiveTypeHandler.java
index b57645288..6953819f6 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/BooleanPrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/BooleanPrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.common.primitives.Booleans;
 import com.google.protobuf.Descriptors;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/ByteStringPrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/ByteStringPrimitiveTypeHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/ByteStringPrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/ByteStringPrimitiveTypeHandler.java
index 649430f48..a142741cd 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/ByteStringPrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/ByteStringPrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.protobuf.ByteString;
 import com.google.protobuf.Descriptors;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/DoublePrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/DoublePrimitiveTypeHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/DoublePrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/DoublePrimitiveTypeHandler.java
index ddeabd8c3..bdde4ceb5 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/DoublePrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/DoublePrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.common.primitives.Doubles;
 import com.google.protobuf.Descriptors;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/FloatPrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/FloatPrimitiveTypeHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/FloatPrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/FloatPrimitiveTypeHandler.java
index 76f9dae82..e8365f5d3 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/FloatPrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/FloatPrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.common.primitives.Floats;
 import com.google.protobuf.Descriptors;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/IntegerPrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/IntegerPrimitiveTypeHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/IntegerPrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/IntegerPrimitiveTypeHandler.java
index c53910447..ad302b931 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/IntegerPrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/IntegerPrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.common.primitives.Ints;
 import com.google.protobuf.Descriptors;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/LongPrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/LongPrimitiveTypeHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/LongPrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/LongPrimitiveTypeHandler.java
index f6560926a..4d899d734 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/LongPrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/LongPrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Descriptors.FieldDescriptor.JavaType;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandler.java
similarity index 95%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandler.java
index c958ea4d9..96fc9a298 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandlerFactory.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandlerFactory.java
similarity index 91%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandlerFactory.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandlerFactory.java
index c57b67794..c398c3ae1 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandlerFactory.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandlerFactory.java
@@ -1,6 +1,6 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
-import io.odpf.dagger.core.exception.DataTypeNotSupportedException;
+import io.odpf.dagger.common.exceptions.serde.DataTypeNotSupportedException;
 import com.google.protobuf.Descriptors;
 
 import java.util.Arrays;
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/StringPrimitiveTypeHandler.java b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/StringPrimitiveTypeHandler.java
similarity index 96%
rename from dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/StringPrimitiveTypeHandler.java
rename to dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/StringPrimitiveTypeHandler.java
index 5fb8c73d6..8aa5fdcec 100644
--- a/dagger-core/src/main/java/io/odpf/dagger/core/protohandler/typehandler/StringPrimitiveTypeHandler.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/protohandler/typehandler/StringPrimitiveTypeHandler.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Descriptors.FieldDescriptor.JavaType;
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/NoWatermark.java b/dagger-common/src/main/java/io/odpf/dagger/common/watermark/NoWatermark.java
new file mode 100644
index 000000000..ecbcfd285
--- /dev/null
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/watermark/NoWatermark.java
@@ -0,0 +1,11 @@
+package io.odpf.dagger.common.watermark;
+
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.types.Row;
+
+public class NoWatermark implements WatermarkStrategyDefinition {
+    @Override
+    public WatermarkStrategy<Row> getWatermarkStrategy(long waterMarkDelayInMs) {
+        return WatermarkStrategy.noWatermarks();
+    }
+}
diff --git a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/StreamWatermarkAssigner.java b/dagger-common/src/main/java/io/odpf/dagger/common/watermark/StreamWatermarkAssigner.java
index fdedebc0b..901807fa3 100644
--- a/dagger-common/src/main/java/io/odpf/dagger/common/watermark/StreamWatermarkAssigner.java
+++ b/dagger-common/src/main/java/io/odpf/dagger/common/watermark/StreamWatermarkAssigner.java
@@ -1,8 +1,6 @@
 package io.odpf.dagger.common.watermark;
 
 import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
 import org.apache.flink.types.Row;
 
 import java.io.Serializable;
@@ -23,9 +21,5 @@ public DataStream<Row> assignTimeStampAndWatermark(DataStream<Row> inputStream,
         return inputStream
                 .assignTimestampsAndWatermarks(watermarkStrategyDefinition.getWatermarkStrategy(watermarkDelayMs));
     }
-
-    public FlinkKafkaConsumerBase consumerAssignTimeStampAndWatermark(FlinkKafkaConsumer flinkKafkaConsumer, long watermarkDelayMs, boolean enablePerPartitionWatermark) {
-        return enablePerPartitionWatermark ? flinkKafkaConsumer
-                .assignTimestampsAndWatermarks(watermarkStrategyDefinition.getWatermarkStrategy(watermarkDelayMs)) : flinkKafkaConsumer;
-    }
 }
+
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/EnumProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/EnumProtoHandlerTest.java
similarity index 96%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/EnumProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/EnumProtoHandlerTest.java
index 659c60ad3..a56dc5555 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/EnumProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/EnumProtoHandlerTest.java
@@ -1,12 +1,14 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestRepeatedEnumMessage;
 import io.odpf.dagger.consumer.TestServiceType;
-import io.odpf.dagger.core.exception.EnumFieldNotFoundException;
+import io.odpf.dagger.common.exceptions.serde.EnumFieldNotFoundException;
 import org.apache.flink.api.common.typeinfo.Types;
+
+import org.junit.Assert;
 import org.junit.Test;
 
 import static org.junit.Assert.*;
@@ -71,7 +73,7 @@ public void shouldThrowExceptionIfFieldNotFoundInGivenEnumFieldTypeDescriptor()
         EnumProtoHandler enumProtoHandler = new EnumProtoHandler(enumFieldDescriptor);
         DynamicMessage.Builder builder = DynamicMessage.newBuilder(enumFieldDescriptor.getContainingType());
 
-        EnumFieldNotFoundException exception = assertThrows(EnumFieldNotFoundException.class, () -> enumProtoHandler.transformForKafka(builder, "test"));
+        EnumFieldNotFoundException exception = Assert.assertThrows(EnumFieldNotFoundException.class, () -> enumProtoHandler.transformForKafka(builder, "test"));
         assertEquals("field: test not found in io.odpf.dagger.consumer.TestBookingLogMessage.service_type", exception.getMessage());
     }
 
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/MapProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/MapProtoHandlerTest.java
similarity index 96%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/MapProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/MapProtoHandlerTest.java
index 130897391..2dfba166d 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/MapProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/MapProtoHandlerTest.java
@@ -1,16 +1,28 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
-import com.google.protobuf.*;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.types.Row;
+
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.MapEntry;
+import com.google.protobuf.WireFormat;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestComplexMap;
 import io.odpf.dagger.consumer.TestMessage;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.types.Row;
+import org.junit.Assert;
 import org.junit.Test;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class MapProtoHandlerTest {
@@ -111,7 +123,7 @@ public void shouldThrowExceptionIfRowsPassedAreNotOfArityTwo() {
         Row inputRow = new Row(3);
         inputRows.add(inputRow);
 
-        IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
+        IllegalArgumentException exception = Assert.assertThrows(IllegalArgumentException.class,
                 () -> mapProtoHandler.transformForKafka(builder, inputRows.toArray()));
         assertEquals("Row: +I[null, null, null] of size: 3 cannot be converted to map", exception.getMessage());
     }
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/MessageProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/MessageProtoHandlerTest.java
similarity index 98%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/MessageProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/MessageProtoHandlerTest.java
index c92ae7b55..57bd6d056 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/MessageProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/MessageProtoHandlerTest.java
@@ -1,18 +1,21 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.types.Row;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
 import com.google.protobuf.InvalidProtocolBufferException;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestPaymentOptionMetadata;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.types.Row;
 import org.junit.Test;
 
 import java.util.HashMap;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class MessageProtoHandlerTest {
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/PrimitiveProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/PrimitiveProtoHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/PrimitiveProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/PrimitiveProtoHandlerTest.java
index dbe3f75c9..c3837b2d9 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/PrimitiveProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/PrimitiveProtoHandlerTest.java
@@ -1,13 +1,16 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.Types;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
+import io.odpf.dagger.common.exceptions.serde.InvalidDataTypeException;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
-import io.odpf.dagger.core.exception.InvalidDataTypeException;
-import org.apache.flink.api.common.typeinfo.Types;
+import org.junit.Assert;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 public class PrimitiveProtoHandlerTest {
@@ -80,7 +83,7 @@ public void shouldThrowInvalidDataTypeExceptionInCaseOfTypeMismatchForPostProces
         Descriptors.FieldDescriptor floatFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("total_customer_discount");
         PrimitiveProtoHandler primitiveProtoHandler = new PrimitiveProtoHandler(floatFieldDescriptor);
 
-        InvalidDataTypeException exception = assertThrows(InvalidDataTypeException.class,
+        InvalidDataTypeException exception = Assert.assertThrows(InvalidDataTypeException.class,
                 () -> primitiveProtoHandler.transformFromPostProcessor("stringValue"));
         assertEquals("type mismatch of field: total_customer_discount, expecting FLOAT type, actual type class java.lang.String", exception.getMessage());
     }
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/ProtoHandlerFactoryTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/ProtoHandlerFactoryTest.java
similarity index 98%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/ProtoHandlerFactoryTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/ProtoHandlerFactoryTest.java
index 93a8db748..476b0fc70 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/ProtoHandlerFactoryTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/ProtoHandlerFactoryTest.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import com.google.protobuf.Descriptors;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
@@ -12,7 +12,7 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 public class ProtoHandlerFactoryTest {
     @Before
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedEnumProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedEnumProtoHandlerTest.java
similarity index 96%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedEnumProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedEnumProtoHandlerTest.java
index bb7d12d59..ae96d31b1 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedEnumProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedEnumProtoHandlerTest.java
@@ -1,4 +1,8 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
@@ -6,15 +10,14 @@
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestEnumMessage;
 import io.odpf.dagger.consumer.TestRepeatedEnumMessage;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Collections;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class RepeatedEnumProtoHandlerTest {
     @Test
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedMessageProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedMessageProtoHandlerTest.java
similarity index 97%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedMessageProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedMessageProtoHandlerTest.java
index 40dfb3c88..9f2c4ceb8 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedMessageProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedMessageProtoHandlerTest.java
@@ -1,4 +1,7 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.types.Row;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
@@ -7,16 +10,18 @@
 import io.odpf.dagger.consumer.TestFeedbackLogMessage;
 import io.odpf.dagger.consumer.TestReason;
 import net.minidev.json.JSONArray;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.types.Row;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
-import static org.apache.flink.api.common.typeinfo.Types.*;
-import static org.junit.Assert.*;
+import static org.apache.flink.api.common.typeinfo.Types.OBJECT_ARRAY;
+import static org.apache.flink.api.common.typeinfo.Types.ROW_NAMED;
+import static org.apache.flink.api.common.typeinfo.Types.STRING;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class RepeatedMessageProtoHandlerTest {
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedPrimitiveProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedPrimitiveProtoHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedPrimitiveProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedPrimitiveProtoHandlerTest.java
index 800967965..4713b2ff7 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedPrimitiveProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedPrimitiveProtoHandlerTest.java
@@ -1,22 +1,27 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
 import com.google.protobuf.InvalidProtocolBufferException;
+import io.odpf.dagger.common.exceptions.serde.DataTypeNotSupportedException;
+import io.odpf.dagger.common.exceptions.serde.InvalidDataTypeException;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestRepeatedEnumMessage;
-import io.odpf.dagger.core.exception.DataTypeNotSupportedException;
-import io.odpf.dagger.core.exception.InvalidDataTypeException;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;
+import org.junit.Assert;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.List;
 
 import static java.util.Arrays.asList;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class RepeatedPrimitiveProtoHandlerTest {
@@ -172,7 +177,7 @@ public void shouldThrowExceptionIfFieldDesciptorTypeNotSupportedForPostProcessor
         RepeatedPrimitiveProtoHandler repeatedPrimitiveProtoHandler = new RepeatedPrimitiveProtoHandler(repeatedFieldDescriptor);
         ArrayList<String> inputValues = new ArrayList<>();
         inputValues.add("test");
-        DataTypeNotSupportedException exception = assertThrows(DataTypeNotSupportedException.class,
+        DataTypeNotSupportedException exception = Assert.assertThrows(DataTypeNotSupportedException.class,
                 () -> repeatedPrimitiveProtoHandler.transformFromPostProcessor(inputValues));
         assertEquals("Data type MESSAGE not supported in primitive type handlers", exception.getMessage());
     }
@@ -183,7 +188,7 @@ public void shouldThrowInvalidDataTypeExceptionInCaseOfTypeMismatchForPostProces
         RepeatedPrimitiveProtoHandler repeatedPrimitiveProtoHandler = new RepeatedPrimitiveProtoHandler(repeatedFloatFieldDescriptor);
         ArrayList<String> inputValues = new ArrayList<>();
         inputValues.add("test");
-        InvalidDataTypeException exception = assertThrows(InvalidDataTypeException.class,
+        InvalidDataTypeException exception = Assert.assertThrows(InvalidDataTypeException.class,
                 () -> repeatedPrimitiveProtoHandler.transformFromPostProcessor(inputValues));
         assertEquals("type mismatch of field: int_array_field, expecting INT32 type, actual type class java.lang.String", exception.getMessage());
     }
@@ -210,7 +215,7 @@ public void shouldReturnAllFieldsInAListOfObjectsIfMultipleFieldsPassedWithSameT
     public void shouldThrowUnsupportedDataTypeExceptionInCaseOfInCaseOfEnumForKafkaTransform() {
         Descriptors.FieldDescriptor fieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("status");
         RepeatedPrimitiveProtoHandler repeatedPrimitiveProtoHandler = new RepeatedPrimitiveProtoHandler(fieldDescriptor);
-        DataTypeNotSupportedException exception = assertThrows(DataTypeNotSupportedException.class,
+        DataTypeNotSupportedException exception = Assert.assertThrows(DataTypeNotSupportedException.class,
                 () -> repeatedPrimitiveProtoHandler.transformFromKafka("CREATED"));
         assertEquals("Data type ENUM not supported in primitive type handlers", exception.getMessage());
     }
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedStructMessageProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedStructMessageProtoHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedStructMessageProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedStructMessageProtoHandlerTest.java
index 61bca78a4..dc46ea059 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RepeatedStructMessageProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RepeatedStructMessageProtoHandlerTest.java
@@ -1,17 +1,21 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.types.Row;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestNestedRepeatedMessage;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.types.Row;
 import org.junit.Test;
 
 import java.util.Collections;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 public class RepeatedStructMessageProtoHandlerTest {
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RowFactoryTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RowFactoryTest.java
similarity index 99%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RowFactoryTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RowFactoryTest.java
index b3cdb3099..af666522a 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/RowFactoryTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/RowFactoryTest.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
 import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/StructMessageProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/StructMessageProtoHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/StructMessageProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/StructMessageProtoHandlerTest.java
index 37bdefb13..a9fad5cf0 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/StructMessageProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/StructMessageProtoHandlerTest.java
@@ -1,15 +1,19 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.types.Row;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestRepeatedEnumMessage;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.types.Row;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 public class StructMessageProtoHandlerTest {
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/TimestampProtoHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/TimestampProtoHandlerTest.java
similarity index 89%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/TimestampProtoHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/TimestampProtoHandlerTest.java
index 6ac0d6eed..12bbe347b 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/TimestampProtoHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/TimestampProtoHandlerTest.java
@@ -1,17 +1,24 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.types.Row;
 
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.DynamicMessage;
 import com.google.protobuf.InvalidProtocolBufferException;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.types.Row;
 import org.junit.Test;
 
 import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 public class TimestampProtoHandlerTest {
     @Test
@@ -69,6 +76,23 @@ public void shouldSetTimestampIfInstanceOfJavaSqlTimestampPassed() throws Invali
         assertEquals(inputTimestamp.getNanos(), bookingLogMessage.getEventTimestamp().getNanos());
     }
 
+    @Test
+    public void shouldSetTimestampIfInstanceOfLocalDateTimePassed() throws InvalidProtocolBufferException {
+        Descriptors.FieldDescriptor timestampFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("event_timestamp");
+        TimestampProtoHandler timestampProtoHandler = new TimestampProtoHandler(timestampFieldDescriptor);
+        DynamicMessage.Builder builder = DynamicMessage.newBuilder(timestampFieldDescriptor.getContainingType());
+
+        long milliSeconds = System.currentTimeMillis();
+
+        Timestamp inputTimestamp = new Timestamp(milliSeconds);
+        LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(milliSeconds), ZoneOffset.UTC);
+
+        DynamicMessage dynamicMessage = timestampProtoHandler.transformForKafka(builder, localDateTime).build();
+
+        TestBookingLogMessage bookingLogMessage = TestBookingLogMessage.parseFrom(dynamicMessage.toByteArray());
+        assertEquals(milliSeconds / 1000, bookingLogMessage.getEventTimestamp().getSeconds());
+    }
+
     @Test
     public void shouldSetTimestampIfRowHavingTimestampIsPassed() throws InvalidProtocolBufferException {
         Descriptors.FieldDescriptor timestampFieldDescriptor = TestBookingLogMessage.getDescriptor().findFieldByName("event_timestamp");
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/TypeInformationFactoryTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/TypeInformationFactoryTest.java
similarity index 73%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/TypeInformationFactoryTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/TypeInformationFactoryTest.java
index 42911d948..8c7e9f00a 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/TypeInformationFactoryTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/TypeInformationFactoryTest.java
@@ -1,16 +1,19 @@
-package io.odpf.dagger.core.protohandler;
+package io.odpf.dagger.common.protohandler;
 
-import io.odpf.dagger.consumer.TestBookingLogKey;
-import io.odpf.dagger.common.exceptions.DescriptorNotFoundException;
-import com.google.protobuf.Descriptors;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.types.Row;
+
+import com.google.protobuf.Descriptors;
+import io.odpf.dagger.common.exceptions.DescriptorNotFoundException;
+import io.odpf.dagger.consumer.TestBookingLogKey;
+import org.junit.Assert;
 import org.junit.Test;
 
-import static org.apache.flink.api.common.typeinfo.Types.*;
+import static org.apache.flink.api.common.typeinfo.Types.INT;
+import static org.apache.flink.api.common.typeinfo.Types.LONG;
+import static org.apache.flink.api.common.typeinfo.Types.ROW_NAMED;
+import static org.apache.flink.api.common.typeinfo.Types.STRING;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThrows;
 
 public class TypeInformationFactoryTest {
@@ -25,7 +28,7 @@ public void shouldReturnTypeInformationForDescriptor() {
 
     @Test
     public void shouldThrowExceptionIfNullPassed() {
-        assertThrows(DescriptorNotFoundException.class,
+        Assert.assertThrows(DescriptorNotFoundException.class,
                 () -> TypeInformationFactory.getRowType(null));
     }
 
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/BooleanPrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/BooleanPrimitiveTypeHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/BooleanPrimitiveTypeHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/BooleanPrimitiveTypeHandlerTest.java
index e8dcd9b6b..aeba5a019 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/BooleanPrimitiveTypeHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/BooleanPrimitiveTypeHandlerTest.java
@@ -1,14 +1,18 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
+
+import org.apache.flink.api.common.typeinfo.Types;
 
 import com.google.protobuf.Descriptors;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
-import org.apache.flink.api.common.typeinfo.Types;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class BooleanPrimitiveTypeHandlerTest {
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/ByteStringPrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/ByteStringPrimitiveTypeHandlerTest.java
similarity index 93%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/ByteStringPrimitiveTypeHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/ByteStringPrimitiveTypeHandlerTest.java
index 4a9209219..49e14a622 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/ByteStringPrimitiveTypeHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/ByteStringPrimitiveTypeHandlerTest.java
@@ -1,16 +1,20 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
+
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
 
 import com.google.protobuf.ByteString;
 import com.google.protobuf.Descriptors;
 import io.odpf.dagger.consumer.TestMessageEnvelope;
-import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeinfo.Types;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class ByteStringPrimitiveTypeHandlerTest {
     @Test
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/DoublePrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/DoublePrimitiveTypeHandlerTest.java
similarity index 98%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/DoublePrimitiveTypeHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/DoublePrimitiveTypeHandlerTest.java
index 1275cc5a2..967a1790c 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/DoublePrimitiveTypeHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/DoublePrimitiveTypeHandlerTest.java
@@ -1,4 +1,4 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
 import com.google.protobuf.Descriptors;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/FloatPrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/FloatPrimitiveTypeHandlerTest.java
similarity index 95%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/FloatPrimitiveTypeHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/FloatPrimitiveTypeHandlerTest.java
index e0213e471..045f16f37 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/FloatPrimitiveTypeHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/FloatPrimitiveTypeHandlerTest.java
@@ -1,14 +1,17 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
+
+import org.apache.flink.api.common.typeinfo.Types;
 
 import com.google.protobuf.Descriptors;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
-import org.apache.flink.api.common.typeinfo.Types;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class FloatPrimitiveTypeHandlerTest {
     @Test
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/IntegerPrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/IntegerPrimitiveTypeHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/IntegerPrimitiveTypeHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/IntegerPrimitiveTypeHandlerTest.java
index 18b63fdc4..460ed0ece 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/IntegerPrimitiveTypeHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/IntegerPrimitiveTypeHandlerTest.java
@@ -1,14 +1,18 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
+
+import org.apache.flink.api.common.typeinfo.Types;
 
 import com.google.protobuf.Descriptors;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
-import org.apache.flink.api.common.typeinfo.Types;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class IntegerPrimitiveTypeHandlerTest {
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/LongPrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/LongPrimitiveTypeHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/LongPrimitiveTypeHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/LongPrimitiveTypeHandlerTest.java
index a76cf1cb0..572be3232 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/LongPrimitiveTypeHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/LongPrimitiveTypeHandlerTest.java
@@ -1,14 +1,18 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
+
+import org.apache.flink.api.common.typeinfo.Types;
 
 import com.google.protobuf.Descriptors;
 import io.odpf.dagger.consumer.TestAggregatedSupplyMessage;
-import org.apache.flink.api.common.typeinfo.Types;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class LongPrimitiveTypeHandlerTest {
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandlerFactoryTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandlerFactoryTest.java
similarity index 91%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandlerFactoryTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandlerFactoryTest.java
index 3055da916..b39fa904c 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/PrimitiveTypeHandlerFactoryTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/PrimitiveTypeHandlerFactoryTest.java
@@ -1,12 +1,12 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
+import io.odpf.dagger.common.exceptions.serde.DataTypeNotSupportedException;
 import io.odpf.dagger.consumer.TestBookingLogMessage;
 import io.odpf.dagger.consumer.TestMessageEnvelope;
-import io.odpf.dagger.core.exception.DataTypeNotSupportedException;
+import org.junit.Assert;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThrows;
 
 public class PrimitiveTypeHandlerFactoryTest {
@@ -61,7 +61,7 @@ public void shouldReturnByteStringTypeHandlerForByteString() {
 
     @Test
     public void shouldThrowExceptionIfTypeNotSupported() {
-        DataTypeNotSupportedException exception = assertThrows(DataTypeNotSupportedException.class,
+        DataTypeNotSupportedException exception = Assert.assertThrows(DataTypeNotSupportedException.class,
                 () -> PrimitiveTypeHandlerFactory.getTypeHandler(TestBookingLogMessage.getDescriptor().findFieldByName("status")));
         assertEquals("Data type ENUM not supported in primitive type handlers", exception.getMessage());
     }
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/StringPrimitiveTypeHandlerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/StringPrimitiveTypeHandlerTest.java
similarity index 94%
rename from dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/StringPrimitiveTypeHandlerTest.java
rename to dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/StringPrimitiveTypeHandlerTest.java
index fc3baae28..395f3ad7f 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/protohandler/typehandler/StringPrimitiveTypeHandlerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/protohandler/typehandler/StringPrimitiveTypeHandlerTest.java
@@ -1,15 +1,19 @@
-package io.odpf.dagger.core.protohandler.typehandler;
+package io.odpf.dagger.common.protohandler.typehandler;
 
-import com.google.protobuf.Descriptors;
-import io.odpf.dagger.consumer.TestBookingLogMessage;
 import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;
+
+import com.google.protobuf.Descriptors;
+import io.odpf.dagger.consumer.TestBookingLogMessage;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class StringPrimitiveTypeHandlerTest {
diff --git a/dagger-common/src/test/java/io/odpf/dagger/common/watermark/StreamWatermarkAssignerTest.java b/dagger-common/src/test/java/io/odpf/dagger/common/watermark/StreamWatermarkAssignerTest.java
index c725d7308..7ffe0c7fd 100644
--- a/dagger-common/src/test/java/io/odpf/dagger/common/watermark/StreamWatermarkAssignerTest.java
+++ b/dagger-common/src/test/java/io/odpf/dagger/common/watermark/StreamWatermarkAssignerTest.java
@@ -37,25 +37,6 @@ public void
shouldAssignTimestampAndWatermarksToInputStreamIfEnablePerPartitionW verify(inputStream, times(1)).assignTimestampsAndWatermarks(any(WatermarkStrategy.class)); } - - @Test - public void shouldAssignTimestampAndWatermarksToKafkaConsumer() { - LastColumnWatermark lastColumnWatermark = new LastColumnWatermark(); - StreamWatermarkAssigner streamWatermarkAssigner = new StreamWatermarkAssigner(lastColumnWatermark); - streamWatermarkAssigner.consumerAssignTimeStampAndWatermark(consumer, 10L, true); - - verify(consumer, times(1)).assignTimestampsAndWatermarks(any(WatermarkStrategy.class)); - } - - @Test - public void shouldNotAssignTimestampAndWatermarksToKafkaConsumerIfPerPartitionWatermarkDisabled() { - LastColumnWatermark lastColumnWatermark = new LastColumnWatermark(); - StreamWatermarkAssigner streamWatermarkAssigner = new StreamWatermarkAssigner(lastColumnWatermark); - streamWatermarkAssigner.consumerAssignTimeStampAndWatermark(consumer, 10L, false); - - verify(consumer, times(0)).assignTimestampsAndWatermarks(any(WatermarkStrategy.class)); - } - @Test public void shouldAssignTimestampAndWatermarksToSource() { LastColumnWatermark lastColumnWatermark = new LastColumnWatermark(); diff --git a/dagger-core/build.gradle b/dagger-core/build.gradle index 6448000f0..0e3dd7bd4 100644 --- a/dagger-core/build.gradle +++ b/dagger-core/build.gradle @@ -72,7 +72,6 @@ dependencies { dependenciesJar project(path: ':dagger-common', configuration: 'dependenciesCommonJar') dependenciesJar project(path: ':dagger-functions', configuration: 'dependenciesFunctionsJar') - dependenciesJar 'org.apache.flink:flink-json:' + flinkVersion dependenciesJar 'org.apache.flink:flink-connector-kafka_2.11:' + flinkVersion dependenciesJar 'com.google.protobuf:protobuf-java:3.1.0' dependenciesJar 'com.google.protobuf:protobuf-java-util:3.1.0' @@ -81,7 +80,6 @@ dependencies { dependenciesJar 'org.elasticsearch.client:elasticsearch-rest-client:6.6.1' dependenciesJar 'com.google.cloud.bigtable:bigtable-hbase-2.x:1.11.0' dependenciesJar 'org.asynchttpclient:async-http-client:2.10.1' - dependenciesJar 'com.jayway.jsonpath:json-path:2.4.0' dependenciesJar 'io.vertx:vertx-pg-client:3.9.0' dependenciesJar 'org.apache.commons:commons-pool2:2.4.3' @@ -101,6 +99,7 @@ dependencies { test { finalizedBy jacocoTestReport + jvmArgs '-Djdk.attach.allowAttachSelf=true' testLogging { events "passed", "skipped", "failed" } @@ -119,7 +118,7 @@ jacocoTestCoverageVerification { violationRules { rule { limit { - minimum = 0.89 + minimum = 0.88 } } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/StreamManager.java b/dagger-core/src/main/java/io/odpf/dagger/core/StreamManager.java index e9f9dc15f..e3a6b975b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/StreamManager.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/StreamManager.java @@ -14,7 +14,9 @@ import io.odpf.dagger.common.core.StreamInfo; import io.odpf.dagger.common.udfs.UdfFactory; import io.odpf.dagger.common.watermark.LastColumnWatermark; +import io.odpf.dagger.common.watermark.NoWatermark; import io.odpf.dagger.common.watermark.StreamWatermarkAssigner; +import io.odpf.dagger.common.watermark.WatermarkStrategyDefinition; import io.odpf.dagger.core.exception.UDFFactoryClassNotDefinedException; import io.odpf.dagger.core.processors.PostProcessorFactory; import io.odpf.dagger.core.processors.PreProcessorConfig; @@ -93,9 +95,10 @@ public StreamManager registerSourceWithPreProcessors() { Streams kafkaStreams = getKafkaStreams(); 
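For context on the StreamManager changes that continue below: the deprecated addSource(FlinkKafkaConsumer) path is replaced by StreamExecutionEnvironment#fromSource, which takes the WatermarkStrategy and a source name up front. The following is a minimal, self-contained sketch of that wiring, not Dagger's actual code: the broker, topic, group id, and table name are placeholders, and it assumes the Flink 1.14-era KafkaSource API with a plain string deserializer in place of ProtoDeserializer.

    import java.time.Duration;

    import org.apache.flink.api.common.eventtime.WatermarkStrategy;
    import org.apache.flink.api.common.serialization.SimpleStringSchema;
    import org.apache.flink.connector.kafka.source.KafkaSource;
    import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class FromSourceSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            KafkaSource<String> source = KafkaSource.<String>builder()
                    .setBootstrapServers("localhost:9092")                  // placeholder broker
                    .setTopics("sample-topic")                              // placeholder topic
                    .setGroupId("sketch-group")
                    .setStartingOffsets(OffsetsInitializer.committedOffsets())
                    .setValueOnlyDeserializer(new SimpleStringSchema())
                    .build();

            // fromSource attaches the watermark strategy at source creation; the third
            // argument is the source name, which the change below reuses as the table name.
            DataStream<String> stream = env.fromSource(
                    source,
                    WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofMillis(10_000L)),
                    "sample-table");

            stream.print();
            env.execute("from-source-sketch");
        }
    }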
kafkaStreams.notifySubscriber(telemetryExporter); PreProcessorConfig preProcessorConfig = PreProcessorFactory.parseConfig(configuration); - kafkaStreams.getStreams().forEach((tableName, kafkaConsumer) -> { - DataStream kafkaStream = executionEnvironment.addSource(kafkaConsumer); - + kafkaStreams.getKafkaSource().forEach((tableName, kafkaSource) -> { + WatermarkStrategyDefinition watermarkStrategyDefinition = getSourceWatermarkDefinition(enablePerPartitionWatermark); + // TODO : Validate why/how the source name should be defined + DataStream kafkaStream = executionEnvironment.fromSource(kafkaSource, watermarkStrategyDefinition.getWatermarkStrategy(watermarkDelay), tableName); StreamWatermarkAssigner streamWatermarkAssigner = new StreamWatermarkAssigner(new LastColumnWatermark()); DataStream rowSingleOutputStreamOperator = streamWatermarkAssigner @@ -112,6 +115,10 @@ public StreamManager registerSourceWithPreProcessors() { return this; } + private WatermarkStrategyDefinition getSourceWatermarkDefinition(Boolean enablePerPartitionWatermark) { + return enablePerPartitionWatermark ? new LastColumnWatermark() : new NoWatermark(); + } + private ApiExpression[] getApiExpressions(StreamInfo streamInfo) { String rowTimeAttributeName = configuration.getString(FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); String[] columnNames = streamInfo.getColumnNames(); @@ -213,13 +220,11 @@ private StreamInfo addPreProcessor(StreamInfo streamInfo, String tableName, PreP private void addSink(StreamInfo streamInfo) { SinkOrchestrator sinkOrchestrator = new SinkOrchestrator(); sinkOrchestrator.addSubscriber(telemetryExporter); - streamInfo.getDataStream().addSink(sinkOrchestrator.getSink(configuration, streamInfo.getColumnNames(), stencilClientOrchestrator)); + streamInfo.getDataStream().sinkTo(sinkOrchestrator.getSink(configuration, streamInfo.getColumnNames(), stencilClientOrchestrator)); } private Streams getKafkaStreams() { String rowTimeAttributeName = configuration.getString(FLINK_ROWTIME_ATTRIBUTE_NAME_KEY, FLINK_ROWTIME_ATTRIBUTE_NAME_DEFAULT); - Boolean enablePerPartitionWatermark = configuration.getBoolean(FLINK_WATERMARK_PER_PARTITION_ENABLE_KEY, FLINK_WATERMARK_PER_PARTITION_ENABLE_DEFAULT); - Long watermarkDelay = configuration.getLong(FLINK_WATERMARK_DELAY_MS_KEY, FLINK_WATERMARK_DELAY_MS_DEFAULT); - return new Streams(configuration, rowTimeAttributeName, stencilClientOrchestrator, enablePerPartitionWatermark, watermarkDelay); + return new Streams(configuration, rowTimeAttributeName, stencilClientOrchestrator); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/Streams.java b/dagger-core/src/main/java/io/odpf/dagger/core/Streams.java index e5feedfe2..eeb0148bf 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/Streams.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/Streams.java @@ -1,13 +1,16 @@ package io.odpf.dagger.core; +import org.apache.flink.connector.kafka.source.KafkaSource; +import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer; +import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema; +import org.apache.flink.types.Row; + import com.google.gson.Gson; import io.odpf.dagger.common.configuration.Configuration; import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.common.watermark.LastColumnWatermark; -import io.odpf.dagger.common.watermark.StreamWatermarkAssigner; import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; -import
io.odpf.dagger.core.source.FlinkKafkaConsumerCustom; import io.odpf.dagger.core.source.ProtoDeserializer; +import org.apache.kafka.clients.consumer.OffsetResetStrategy; import java.util.ArrayList; import java.util.Arrays; @@ -31,12 +34,11 @@ */ public class Streams implements TelemetryPublisher { private static final String KAFKA_PREFIX = "source_kafka_consumer_config_"; - private Map streams = new HashMap<>(); + + private Map kafkaSources = new HashMap<>(); private LinkedHashMap protoClassForTable = new LinkedHashMap<>(); private final Configuration configuration; private StencilClientOrchestrator stencilClientOrchestrator; - private boolean enablePerPartitionWatermark; - private long watermarkDelay; private Map> metrics = new HashMap<>(); private List topics = new ArrayList<>(); private List protoClassNames = new ArrayList<>(); @@ -46,32 +48,28 @@ public class Streams implements TelemetryPublisher { /** * Instantiates a new Streams. * - * @param configuration the configuration - * @param rowTimeAttributeName the row time attribute name - * @param stencilClientOrchestrator the stencil client orchestrator - * @param enablePerPartitionWatermark the enable per partition watermark - * @param watermarkDelay the watermark delay + * @param configuration the configuration + * @param rowTimeAttributeName the row time attribute name + * @param stencilClientOrchestrator the stencil client orchestrator */ - public Streams(Configuration configuration, String rowTimeAttributeName, StencilClientOrchestrator stencilClientOrchestrator, boolean enablePerPartitionWatermark, long watermarkDelay) { + public Streams(Configuration configuration, String rowTimeAttributeName, StencilClientOrchestrator stencilClientOrchestrator) { this.configuration = configuration; this.stencilClientOrchestrator = stencilClientOrchestrator; - this.watermarkDelay = watermarkDelay; - this.enablePerPartitionWatermark = enablePerPartitionWatermark; String jsonArrayString = configuration.getString(INPUT_STREAMS, ""); Map[] streamsConfig = GSON.fromJson(jsonArrayString, Map[].class); for (Map streamConfig : streamsConfig) { String tableName = streamConfig.getOrDefault(STREAM_INPUT_SCHEMA_TABLE, ""); - streams.put(tableName, getKafkaConsumer(rowTimeAttributeName, streamConfig)); + kafkaSources.put(tableName, getKafkaSource(rowTimeAttributeName, streamConfig)); } } /** - * Gets streams. + * Gets kafka sources. * - * @return the streams + * @return the kafka sources */ - public Map getStreams() { - return streams; + public Map getKafkaSource() { + return kafkaSources; } /** @@ -98,8 +96,7 @@ private static String parseVarName(String varName, String kafkaPrefix) { return String.join(".", names); } - // TODO : refactor the watermark related things - private FlinkKafkaConsumerCustom getKafkaConsumer(String rowTimeAttributeName, Map streamConfig) { + private KafkaSource getKafkaSource(String rowTimeAttributeName, Map streamConfig) { String topicsForStream = streamConfig.getOrDefault(STREAM_SOURCE_KAFKA_TOPIC_NAMES_KEY, ""); topics.add(topicsForStream); String protoClassName = streamConfig.getOrDefault(STREAM_INPUT_SCHEMA_PROTO_CLASS, ""); @@ -116,13 +113,22 @@ private FlinkKafkaConsumerCustom getKafkaConsumer(String rowTimeAttributeName, M setAdditionalConfigs(kafkaProps); - FlinkKafkaConsumerCustom fc = new FlinkKafkaConsumerCustom(Pattern.compile(topicsForStream), - new ProtoDeserializer(protoClassName, timestampFieldIndex, rowTimeAttributeName, stencilClientOrchestrator), kafkaProps, configuration); - // https://ci.apache.org/projects/flink/flink-docs-stable/dev/event_timestamps_watermarks.html#timestamps-per-kafka-partition - StreamWatermarkAssigner streamWatermarkAssigner = new StreamWatermarkAssigner(new LastColumnWatermark()); - streamWatermarkAssigner.consumerAssignTimeStampAndWatermark(fc, watermarkDelay, enablePerPartitionWatermark); - return fc; + // TODO : Offset reset strategy can be extended to support time-based offset resets + KafkaSource source = KafkaSource.builder() + .setTopicPattern(Pattern.compile(topicsForStream)) + .setStartingOffsets(OffsetsInitializer.committedOffsets(getOffsetResetStrategy(streamConfig))) + .setProperties(kafkaProps) + .setDeserializer(KafkaRecordDeserializationSchema.of(new ProtoDeserializer(protoClassName, timestampFieldIndex, rowTimeAttributeName, stencilClientOrchestrator))) + .build(); + + return source; + } + + private OffsetResetStrategy getOffsetResetStrategy(Map streamConfig) { + String consumerOffsetResetStrategy = streamConfig.getOrDefault(SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET_KEY, SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET_DEFAULT); + OffsetResetStrategy offsetResetStrategy = OffsetResetStrategy.valueOf(consumerOffsetResetStrategy.toUpperCase()); + return offsetResetStrategy; } private void setAdditionalConfigs(Properties kafkaProps) { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/exception/InfluxWriteException.java b/dagger-core/src/main/java/io/odpf/dagger/core/exception/InfluxWriteException.java new file mode 100644 index 000000000..60d200469 --- /dev/null +++ b/dagger-core/src/main/java/io/odpf/dagger/core/exception/InfluxWriteException.java @@ -0,0 +1,9 @@ +package io.odpf.dagger.core.exception; + +import java.io.IOException; + +public class InfluxWriteException extends IOException { + public InfluxWriteException(Throwable err) { + super(err); + } +}
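The getOffsetResetStrategy helper above maps the consumer's auto-offset-reset config string onto Kafka's OffsetResetStrategy enum and hands it to OffsetsInitializer.committedOffsets, so committed group offsets win and the reset strategy applies only when none exist. A small sketch of the same mapping, assuming a plain config string rather than Dagger's stream-config keys:

    import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
    import org.apache.kafka.clients.consumer.OffsetResetStrategy;

    public class OffsetResetSketch {
        // "earliest", "latest", or "none", exactly as in the Kafka consumer config.
        static OffsetsInitializer fromConfig(String autoOffsetReset) {
            OffsetResetStrategy strategy = OffsetResetStrategy.valueOf(autoOffsetReset.toUpperCase());
            // Start from committed offsets when the group has them; otherwise fall
            // back to the configured reset strategy.
            return OffsetsInitializer.committedOffsets(strategy);
        }

        public static void main(String[] args) {
            System.out.println(OffsetResetStrategy.valueOf("latest".toUpperCase())); // LATEST
            fromConfig("earliest"); // builds an initializer without touching Kafka
        }
    }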
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporter.java b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporter.java index fdd6ffd34..5ce90a3f1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporter.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporter.java @@ -1,5 +1,8 @@ package io.odpf.dagger.core.metrics.reporters; +import org.apache.flink.metrics.Counter; +import org.apache.flink.metrics.MetricGroup; + /** * The interface Error reporter. */ @@ -10,11 +13,15 @@ public interface ErrorReporter { * @param exception the exception */ void reportFatalException(Exception exception); - /** * Report non fatal exception. * * @param exception the exception */ void reportNonFatalException(Exception exception); + + + default Counter addExceptionToCounter(Exception exception, MetricGroup metricGroup, String metricGroupKey) { + return metricGroup.addGroup(metricGroupKey, exception.getClass().getName()).counter("value"); + } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactory.java b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactory.java index 5485c0f01..729a54e17 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactory.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactory.java @@ -1,6 +1,6 @@ package io.odpf.dagger.core.metrics.reporters; -import org.apache.flink.api.common.functions.RuntimeContext; +import org.apache.flink.metrics.MetricGroup; import io.odpf.dagger.common.configuration.Configuration; import io.odpf.dagger.core.utils.Constants; @@ -13,27 +13,27 @@ public class ErrorReporterFactory { /** * Gets error reporter. * - * @param runtimeContext the runtime context - * @param configuration the configuration + * @param metricGroup the metric group + * @param configuration the configuration * @return the error reporter */ - public static ErrorReporter getErrorReporter(RuntimeContext runtimeContext, Configuration configuration) { + public static ErrorReporter getErrorReporter(MetricGroup metricGroup, Configuration configuration) { long shutDownPeriod = configuration.getLong(Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT); boolean telemetryEnabled = configuration.getBoolean(Constants.METRIC_TELEMETRY_ENABLE_KEY, Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT); - return getErrorReporter(runtimeContext, telemetryEnabled, shutDownPeriod); + return getErrorReporter(metricGroup, telemetryEnabled, shutDownPeriod); } /** * Gets error reporter.
* - * @param runtimeContext the runtime context + * @param metricGroup the metric-group * @param telemetryEnable the telemetry enable * @param shutDownPeriod the shut down period * @return the error reporter */ - public static ErrorReporter getErrorReporter(RuntimeContext runtimeContext, Boolean telemetryEnable, long shutDownPeriod) { + public static ErrorReporter getErrorReporter(MetricGroup metricGroup, Boolean telemetryEnable, long shutDownPeriod) { if (telemetryEnable) { - return new ErrorStatsReporter(runtimeContext, shutDownPeriod); + return new ErrorStatsReporter(metricGroup, shutDownPeriod); } else { return new NoOpErrorReporter(); } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporter.java b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporter.java index df0414676..eda80e319 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporter.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporter.java @@ -1,10 +1,10 @@ package io.odpf.dagger.core.metrics.reporters; -import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; -import io.odpf.dagger.core.utils.Constants; -import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.metrics.Counter; +import org.apache.flink.metrics.MetricGroup; +import io.odpf.dagger.core.processors.telemetry.processor.MetricsTelemetryExporter; +import io.odpf.dagger.core.utils.Constants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -12,26 +12,18 @@ * The Error stats reporter. */ public class ErrorStatsReporter implements ErrorReporter { - private RuntimeContext runtimeContext; + private MetricGroup metricGroup; private long shutDownPeriod; private static final Logger LOGGER = LoggerFactory.getLogger(MetricsTelemetryExporter.class.getName()); - - /** - * Instantiates a new Error stats reporter. 
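The default addExceptionToCounter method above is why ErrorReporterFactory can now take a bare MetricGroup instead of a whole RuntimeContext: both reporters build the same per-exception-class counter. A runnable sketch of the pattern; UnregisteredMetricsGroup is Flink's no-op test group standing in for the group a real operator or sink context provides, and the group key is a made-up example:

    import org.apache.flink.metrics.Counter;
    import org.apache.flink.metrics.MetricGroup;
    import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;

    public class ExceptionCounterSketch {
        // Same shape as the default method above: nest a group per exception class
        // under the fatal/non-fatal key, then count occurrences on "value".
        static Counter addExceptionToCounter(Exception exception, MetricGroup metricGroup, String metricGroupKey) {
            return metricGroup.addGroup(metricGroupKey, exception.getClass().getName()).counter("value");
        }

        public static void main(String[] args) {
            MetricGroup group = new UnregisteredMetricsGroup(); // stand-in for a real metric group
            Counter counter = addExceptionToCounter(new IllegalStateException("boom"), group, "fatal.exception");
            counter.inc();
            System.out.println(counter.getCount()); // 1
        }
    }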
- * - * @param runtimeContext the runtime context - * @param shutDownPeriod the shut down period - */ - public ErrorStatsReporter(RuntimeContext runtimeContext, long shutDownPeriod) { - this.runtimeContext = runtimeContext; + public ErrorStatsReporter(MetricGroup metricGroup, long shutDownPeriod) { + this.metricGroup = metricGroup; this.shutDownPeriod = shutDownPeriod; } @Override public void reportFatalException(Exception exception) { - Counter counter = runtimeContext.getMetricGroup() - .addGroup(Constants.FATAL_EXCEPTION_METRIC_GROUP_KEY, exception.getClass().getName()).counter("value"); + Counter counter = addExceptionToCounter(exception, metricGroup, Constants.FATAL_EXCEPTION_METRIC_GROUP_KEY); counter.inc(); try { Thread.sleep(shutDownPeriod); @@ -42,8 +34,7 @@ public void reportFatalException(Exception exception) { @Override public void reportNonFatalException(Exception exception) { - Counter counter = runtimeContext.getMetricGroup() - .addGroup(Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY, exception.getClass().getName()).counter("value"); + Counter counter = addExceptionToCounter(exception, metricGroup, Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY); counter.inc(); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/EndpointHandler.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/EndpointHandler.java index f0573a763..ffd939433 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/EndpointHandler.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/EndpointHandler.java @@ -10,8 +10,8 @@ import io.odpf.dagger.common.metrics.managers.MeterStatsManager; import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.protohandler.ProtoHandler; -import io.odpf.dagger.core.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.common.protohandler.ProtoHandler; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; import com.google.protobuf.Descriptors; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/FetchOutputDecorator.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/FetchOutputDecorator.java index 4452cfe6c..223dcedf0 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/FetchOutputDecorator.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/FetchOutputDecorator.java @@ -3,7 +3,7 @@ import io.odpf.dagger.common.core.StencilClientOrchestrator; import io.odpf.dagger.core.processors.types.MapDecorator; import io.odpf.dagger.core.processors.external.SchemaConfig; -import io.odpf.dagger.core.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; import io.odpf.dagger.core.utils.Constants; import com.google.protobuf.Descriptors; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/ValidRecordsDecorator.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/ValidRecordsDecorator.java index 114eaf89f..33cefb7cb 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/ValidRecordsDecorator.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/common/ValidRecordsDecorator.java @@ -40,7 +40,7 @@ public ValidRecordsDecorator(String tableName, String[] columns, Configuration c @Override public void 
open(org.apache.flink.configuration.Configuration internalFlinkConfig) throws Exception { - errorReporter = ErrorReporterFactory.getErrorReporter(getRuntimeContext(), this.configuration); + errorReporter = ErrorReporterFactory.getErrorReporter(getRuntimeContext().getMetricGroup(), this.configuration); } @Override diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/AsyncConnector.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/AsyncConnector.java index 70587e331..0dba97607 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/AsyncConnector.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/AsyncConnector.java @@ -1,23 +1,23 @@ package io.odpf.dagger.core.processors.external; -import io.odpf.dagger.core.processors.ColumnNameManager; -import io.odpf.dagger.core.processors.common.DescriptorManager; -import io.odpf.dagger.core.processors.common.EndpointHandler; -import io.odpf.dagger.core.processors.types.SourceConfig; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.functions.async.ResultFuture; import org.apache.flink.streaming.api.functions.async.RichAsyncFunction; import org.apache.flink.types.Row; import com.google.protobuf.Descriptors; -import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.core.exception.InvalidConfigurationException; import io.odpf.dagger.common.metrics.managers.MeterStatsManager; +import io.odpf.dagger.core.exception.InvalidConfigurationException; import io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; import io.odpf.dagger.core.metrics.reporters.ErrorReporter; import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; +import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; import io.odpf.dagger.core.metrics.telemetry.TelemetryTypes; +import io.odpf.dagger.core.processors.ColumnNameManager; +import io.odpf.dagger.core.processors.common.DescriptorManager; +import io.odpf.dagger.core.processors.common.EndpointHandler; +import io.odpf.dagger.core.processors.types.SourceConfig; import org.apache.commons.lang3.StringUtils; import java.util.ArrayList; @@ -154,7 +154,7 @@ public void open(Configuration configuration) throws Exception { if (errorReporter == null) { errorReporter = ErrorReporterFactory - .getErrorReporter(getRuntimeContext(), externalMetricConfig.isTelemetryEnabled(), externalMetricConfig.getShutDownPeriod()); + .getErrorReporter(getRuntimeContext().getMetricGroup(), externalMetricConfig.isTelemetryEnabled(), externalMetricConfig.getShutDownPeriod()); } if (meterStatsManager == null) { meterStatsManager = new MeterStatsManager(getRuntimeContext().getMetricGroup(), true); diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsResponseHandler.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsResponseHandler.java index d206e20ae..a92f5588a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsResponseHandler.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/es/EsResponseHandler.java @@ -6,8 +6,8 @@ import io.odpf.dagger.core.processors.ColumnNameManager; import io.odpf.dagger.core.processors.common.PostResponseTelemetry; import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.protohandler.ProtoHandler; -import 
io.odpf.dagger.core.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.common.protohandler.ProtoHandler; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.Descriptor; import com.jayway.jsonpath.JsonPath; @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -import static io.odpf.dagger.core.protohandler.RowFactory.createRow; +import static io.odpf.dagger.common.protohandler.RowFactory.createRow; import static java.util.Collections.singleton; import static org.apache.http.HttpStatus.SC_OK; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandler.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandler.java index 10fafc477..b3d77b87e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandler.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/grpc/GrpcResponseHandler.java @@ -7,8 +7,8 @@ import io.odpf.dagger.core.processors.common.OutputMapping; import io.odpf.dagger.core.processors.common.PostResponseTelemetry; import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.protohandler.ProtoHandler; -import io.odpf.dagger.core.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.common.protohandler.ProtoHandler; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandler.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandler.java index 9e0ac2caf..31e7c1e3e 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandler.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/http/HttpResponseHandler.java @@ -7,8 +7,8 @@ import io.odpf.dagger.core.processors.common.OutputMapping; import io.odpf.dagger.core.processors.common.PostResponseTelemetry; import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.protohandler.ProtoHandler; -import io.odpf.dagger.core.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.common.protohandler.ProtoHandler; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; import com.google.protobuf.Descriptors; import com.jayway.jsonpath.JsonPath; import com.jayway.jsonpath.PathNotFoundException; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandler.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandler.java index de243e500..7d9f13bc9 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandler.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/external/pg/PgResponseHandler.java @@ -6,8 +6,8 @@ import io.odpf.dagger.core.processors.ColumnNameManager; import io.odpf.dagger.core.processors.common.PostResponseTelemetry; import io.odpf.dagger.core.processors.common.RowManager; -import io.odpf.dagger.core.protohandler.ProtoHandler; -import io.odpf.dagger.core.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.common.protohandler.ProtoHandler; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; import com.google.protobuf.Descriptors; import 
io.odpf.dagger.core.metrics.aspects.ExternalSourceAspects; import io.vertx.core.AsyncResult; @@ -24,7 +24,7 @@ import java.util.List; import java.util.Map; -import static io.odpf.dagger.core.protohandler.RowFactory.createRow; +import static io.odpf.dagger.common.protohandler.RowFactory.createRow; /** * The Postgre response handler. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReader.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReader.java index 9345386a6..381abf73a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReader.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowReader.java @@ -102,7 +102,7 @@ public void open(org.apache.flink.configuration.Configuration internalFlinkConfi meterStatsManager = new MeterStatsManager(getRuntimeContext().getMetricGroup(), true); } if (errorReporter == null) { - errorReporter = ErrorReporterFactory.getErrorReporter(getRuntimeContext(), configuration); + errorReporter = ErrorReporterFactory.getErrorReporter(getRuntimeContext().getMetricGroup(), configuration); } meterStatsManager.register("longbow.reader", LongbowReaderAspects.values()); } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriter.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriter.java index 5015baa0e..bd02ac9c9 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriter.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/processor/LongbowWriter.java @@ -106,7 +106,7 @@ public void open(org.apache.flink.configuration.Configuration internalFlinkConfi meterStatsManager.register("longbow.writer", LongbowWriterAspects.values()); if (errorReporter == null) { - errorReporter = ErrorReporterFactory.getErrorReporter(getRuntimeContext(), configuration); + errorReporter = ErrorReporterFactory.getErrorReporter(getRuntimeContext().getMetricGroup(), configuration); } if (!longBowStore.tableExists(tableId)) { diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequest.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequest.java index 11932374a..a6b8bbc80 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequest.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequest.java @@ -1,10 +1,10 @@ package io.odpf.dagger.core.processors.longbow.request; +import org.apache.flink.types.Row; + import io.odpf.dagger.core.processors.longbow.LongbowSchema; import io.odpf.dagger.core.processors.longbow.storage.PutRequest; import io.odpf.dagger.core.sink.ProtoSerializer; -import org.apache.flink.types.Row; - import io.odpf.dagger.core.utils.Constants; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactory.java b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactory.java index d8f1f5176..be7744dd1 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactory.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/processors/longbow/request/PutRequestFactory.java @@ -1,11 +1,11 @@ package 
io.odpf.dagger.core.processors.longbow.request; +import org.apache.flink.types.Row; + import io.odpf.dagger.core.processors.longbow.LongbowSchema; import io.odpf.dagger.core.processors.longbow.storage.PutRequest; import io.odpf.dagger.core.sink.ProtoSerializer; -import org.apache.flink.types.Row; - import java.io.Serializable; /** diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/FlinkKafkaProducerCustom.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/FlinkKafkaProducerCustom.java deleted file mode 100644 index 10094eeec..000000000 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/FlinkKafkaProducerCustom.java +++ /dev/null @@ -1,111 +0,0 @@ -package io.odpf.dagger.core.sink; - -import org.apache.flink.api.common.functions.IterationRuntimeContext; -import org.apache.flink.api.common.functions.RuntimeContext; -import org.apache.flink.api.common.state.CheckpointListener; -import org.apache.flink.runtime.state.FunctionInitializationContext; -import org.apache.flink.runtime.state.FunctionSnapshotContext; -import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; -import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; -import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; -import org.apache.flink.types.Row; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A class responsible for produce the messages to kafka. - */ -public class FlinkKafkaProducerCustom extends RichSinkFunction implements CheckpointedFunction, CheckpointListener { - private static final Logger LOGGER = LoggerFactory.getLogger(FlinkKafkaProducerCustom.class.getName()); - private ErrorReporter errorReporter; - private FlinkKafkaProducer flinkKafkaProducer; - private Configuration configuration; - - /** - * Instantiates a new Flink kafka producer custom. 
- * - * @param flinkKafkaProducer the flink kafka producer - * @param configuration the configuration - */ - public FlinkKafkaProducerCustom(FlinkKafkaProducer flinkKafkaProducer, Configuration configuration) { - this.flinkKafkaProducer = flinkKafkaProducer; - this.configuration = configuration; - } - - @Override - public void open(org.apache.flink.configuration.Configuration internalFlinkConfig) throws Exception { - flinkKafkaProducer.open(internalFlinkConfig); - } - - @Override - public void close() throws Exception { - flinkKafkaProducer.close(); - } - - @Override - public void invoke(Row value, Context context) throws Exception { - try { - invokeBaseProducer(value, context); - LOGGER.info("row to kafka :" + value.toString()); - } catch (Exception exception) { - errorReporter = getErrorReporter(getRuntimeContext()); - errorReporter.reportFatalException(exception); - throw exception; - } - } - - @Override - public void notifyCheckpointComplete(long l) throws Exception { - flinkKafkaProducer.notifyCheckpointComplete(l); - } - - @Override - public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception { - flinkKafkaProducer.snapshotState(functionSnapshotContext); - } - - @Override - public void initializeState(FunctionInitializationContext functionInitializationContext) throws Exception { - flinkKafkaProducer.initializeState(functionInitializationContext); - } - - @Override - public IterationRuntimeContext getIterationRuntimeContext() { - return flinkKafkaProducer.getIterationRuntimeContext(); - } - - @Override - public RuntimeContext getRuntimeContext() { - return flinkKafkaProducer.getRuntimeContext(); - } - - @Override - public void setRuntimeContext(RuntimeContext t) { - flinkKafkaProducer.setRuntimeContext(t); - } - - /** - * Invoke base producer. - * - * @param value the value - * @param context the context - * @throws Exception the exception - */ - protected void invokeBaseProducer(Row value, Context context) throws Exception { - flinkKafkaProducer.invoke(value, context); - } - - /** - * Gets error reporter. 
- * - * @param runtimeContext the runtime context - * @return the error reporter - */ - protected ErrorReporter getErrorReporter(RuntimeContext runtimeContext) { - return ErrorReporterFactory.getErrorReporter(runtimeContext, configuration); - } -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/ProtoSerializer.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/ProtoSerializer.java index 552de68a5..1ef12af02 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/ProtoSerializer.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/ProtoSerializer.java @@ -1,34 +1,34 @@ package io.odpf.dagger.core.sink; +import org.apache.flink.api.common.serialization.SerializationSchema.InitializationContext; +import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema; +import org.apache.flink.types.Row; + +import com.google.protobuf.Descriptors; +import com.google.protobuf.DynamicMessage; import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.exception.DaggerSerializationException; import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; +import io.odpf.dagger.common.protohandler.ProtoHandler; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.core.exception.DaggerSerializationException; import io.odpf.dagger.core.exception.InvalidColumnMappingException; -import io.odpf.dagger.core.protohandler.ProtoHandler; -import io.odpf.dagger.core.protohandler.ProtoHandlerFactory; -import com.google.protobuf.Descriptors; -import com.google.protobuf.DynamicMessage; -import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema; -import org.apache.flink.types.Row; import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; import java.util.Arrays; import java.util.Objects; -/** - * Serialize the proto if sink type is kafka. - */ -public class ProtoSerializer implements KafkaSerializationSchema { +public class ProtoSerializer implements KafkaRecordSerializationSchema { private String[] columnNames; private StencilClientOrchestrator stencilClientOrchestrator; private String keyProtoClassName; private String messageProtoClassName; private String outputTopic; - + private static final Logger LOGGER = LoggerFactory.getLogger("KafkaSink"); /** - * Instantiates a new Proto serializer. + * Instantiates a new Proto serializer with specified output topic name. 
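ProtoSerializer now implements KafkaRecordSerializationSchema, the sink-side half of the new connector API: serialize receives the sink context and timestamp and returns a finished ProducerRecord for a topic the schema itself selects. A minimal sketch of that interface shape, using strings instead of Rows and a placeholder topic name:

    import java.nio.charset.StandardCharsets;

    import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class StringRecordSerializer implements KafkaRecordSerializationSchema<String> {
        private final String topic; // fixed at construction, like outputTopic here

        public StringRecordSerializer(String topic) {
            this.topic = topic;
        }

        @Override
        public ProducerRecord<byte[], byte[]> serialize(String element, KafkaSinkContext context, Long timestamp) {
            byte[] value = element.getBytes(StandardCharsets.UTF_8);
            // Null key: partitioning is left to the producer. ProtoSerializer instead
            // serializes a protobuf key and a protobuf message at this point.
            return new ProducerRecord<>(topic, null, value);
        }
    }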
* * @param keyProtoClassName the key proto class name * @param messageProtoClassName the message proto class name @@ -60,10 +60,16 @@ public ProtoSerializer(String keyProtoClassName, String messageProtoClassName, S } @Override - public ProducerRecord serialize(Row row, @Nullable Long aLong) { + public void open(InitializationContext context, KafkaSinkContext sinkContext) throws Exception { + KafkaRecordSerializationSchema.super.open(context, sinkContext); + } + + @Override + public ProducerRecord serialize(Row row, KafkaSinkContext context, Long timestamp) { if (Objects.isNull(outputTopic) || outputTopic.equals("")) { throw new DaggerSerializationException("outputTopic is required"); } + LOGGER.info("row to kafka: " + row); byte[] key = serializeKey(row); byte[] message = serializeValue(row); return new ProducerRecord<>(outputTopic, key, message); @@ -155,5 +161,4 @@ private Descriptors.Descriptor getDescriptor(String className) { } return dsc; } - } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/SinkOrchestrator.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/SinkOrchestrator.java index 6de4eb974..b530f2b0c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/SinkOrchestrator.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/SinkOrchestrator.java @@ -1,7 +1,8 @@ package io.odpf.dagger.core.sink; -import org.apache.flink.streaming.api.functions.sink.SinkFunction; -import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; +import org.apache.flink.api.connector.sink.Sink; +import org.apache.flink.connector.base.DeliveryGuarantee; +import org.apache.flink.connector.kafka.sink.KafkaSink; import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducerBase; import org.apache.flink.types.Row; @@ -10,7 +11,7 @@ import io.odpf.dagger.core.metrics.telemetry.TelemetryPublisher; import io.odpf.dagger.core.sink.influx.ErrorHandler; import io.odpf.dagger.core.sink.influx.InfluxDBFactoryWrapper; -import io.odpf.dagger.core.sink.influx.InfluxRowSink; +import io.odpf.dagger.core.sink.influx.InfluxDBSink; import io.odpf.dagger.core.sink.log.LogSink; import java.util.ArrayList; @@ -46,29 +47,36 @@ public Map> getTelemetry() { * @columnNames columnNames the column names * @StencilClientOrchestrator stencilClientOrchestrator the stencil client orchestrator */ - public SinkFunction getSink(Configuration configuration, String[] columnNames, StencilClientOrchestrator stencilClientOrchestrator) { + public Sink getSink(Configuration configuration, String[] columnNames, StencilClientOrchestrator stencilClientOrchestrator) { + // TODO : Convert this to enum String sinkType = configuration.getString("SINK_TYPE", "influx"); addMetric(SINK_TYPE.getValue(), sinkType); - SinkFunction sink; + Sink sink; switch (sinkType) { case "kafka": String outputTopic = configuration.getString(SINK_KAFKA_TOPIC_KEY, ""); String outputProtoKey = configuration.getString(SINK_KAFKA_PROTO_KEY, null); String outputProtoMessage = configuration.getString(SINK_KAFKA_PROTO_MESSAGE_KEY, ""); String outputStream = configuration.getString(SINK_KAFKA_STREAM_KEY, ""); + String outputBootStrapServers = configuration.getString(SINK_KAFKA_BROKERS_KEY, ""); addMetric(OUTPUT_TOPIC.getValue(), outputTopic); addMetric(OUTPUT_PROTO.getValue(), outputProtoMessage); addMetric(OUTPUT_STREAM.getValue(), outputStream); - ProtoSerializer protoSerializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator, outputTopic); - FlinkKafkaProducer 
rowFlinkKafkaProducer = new FlinkKafkaProducer<>(outputTopic, protoSerializer, getProducerProperties(configuration), FlinkKafkaProducer.Semantic.AT_LEAST_ONCE); - sink = new FlinkKafkaProducerCustom(rowFlinkKafkaProducer, configuration); + ProtoSerializer recordSerializer = new ProtoSerializer(outputProtoKey, outputProtoMessage, columnNames, stencilClientOrchestrator, outputTopic); + sink = KafkaSink.builder() + .setBootstrapServers(outputBootStrapServers) + .setKafkaProducerConfig(getProducerProperties(configuration)) + .setRecordSerializer(recordSerializer) + .setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE) + .build(); + break; case "log": sink = new LogSink(columnNames); break; default: - sink = new InfluxRowSink(new InfluxDBFactoryWrapper(), columnNames, configuration, new ErrorHandler()); + sink = new InfluxDBSink(new InfluxDBFactoryWrapper(), configuration, columnNames, new ErrorHandler()); } notifySubscriber(); return sink; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/ErrorHandler.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/ErrorHandler.java index 85d494f75..6064328d7 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/ErrorHandler.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/ErrorHandler.java @@ -1,9 +1,9 @@ package io.odpf.dagger.core.sink.influx; -import org.apache.flink.api.common.functions.RuntimeContext; +import org.apache.flink.api.connector.sink.Sink.InitContext; -import io.odpf.dagger.core.sink.influx.errors.LateRecordDropError; import io.odpf.dagger.core.sink.influx.errors.InfluxError; +import io.odpf.dagger.core.sink.influx.errors.LateRecordDropError; import io.odpf.dagger.core.sink.influx.errors.NoError; import io.odpf.dagger.core.sink.influx.errors.ValidError; import io.odpf.dagger.core.sink.influx.errors.ValidException; @@ -26,11 +26,11 @@ public class ErrorHandler implements Serializable { /** * Init runtime context. * - * @param runtimeContext the runtime context + * @param initContext the sink init context */ - public void init(RuntimeContext runtimeContext) { + public void init(InitContext initContext) { List influxErrors = Arrays.asList( - new LateRecordDropError(runtimeContext), + new LateRecordDropError(initContext), new ValidError(), new ValidException()); @@ -42,7 +42,6 @@ public void init(RuntimeContext runtimeContext) { error.handle(points, throwable); }; } - /** * Gets exception handler.
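On the producer side, shown in the SinkOrchestrator hunk above, KafkaSink.builder assembles brokers, producer properties, the record serializer, and a delivery guarantee into a Sink that DataStream#sinkTo accepts, replacing the FlinkKafkaProducer/FlinkKafkaProducerCustom pair. A sketch reusing the toy serializer from the previous example, with placeholder brokers; note the builder method really is spelled setDeliverGuarantee:

    import java.util.Properties;

    import org.apache.flink.connector.base.DeliveryGuarantee;
    import org.apache.flink.connector.kafka.sink.KafkaSink;

    public class KafkaSinkSketch {
        public static void main(String[] args) {
            Properties producerConfig = new Properties(); // extra producer settings go here

            KafkaSink<String> sink = KafkaSink.<String>builder()
                    .setBootstrapServers("localhost:9092") // placeholder brokers
                    .setKafkaProducerConfig(producerConfig)
                    .setRecordSerializer(new StringRecordSerializer("output-topic"))
                    .setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                    .build();

            // In the pipeline this is handed to DataStream#sinkTo(sink), the new-API
            // counterpart of the removed addSink(...) call in StreamManager.
            System.out.println(sink != null); // builds without a running cluster
        }
    }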
* diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBSink.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBSink.java new file mode 100644 index 000000000..3071a187b --- /dev/null +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBSink.java @@ -0,0 +1,79 @@ +package io.odpf.dagger.core.sink.influx; + +import org.apache.flink.api.connector.sink.Committer; +import org.apache.flink.api.connector.sink.GlobalCommitter; +import org.apache.flink.api.connector.sink.Sink; +import org.apache.flink.api.connector.sink.SinkWriter; +import org.apache.flink.core.io.SimpleVersionedSerializer; +import org.apache.flink.types.Row; + +import io.odpf.dagger.common.configuration.Configuration; +import io.odpf.dagger.core.metrics.reporters.ErrorReporter; +import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; +import org.influxdb.InfluxDB; + +import java.io.IOException; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +import static io.odpf.dagger.core.utils.Constants.*; + +public class InfluxDBSink implements Sink { + private InfluxDBFactoryWrapper influxDBFactory; + private Configuration configuration; + private String[] columnNames; + private ErrorHandler errorHandler; + private ErrorReporter errorReporter; + + public InfluxDBSink(InfluxDBFactoryWrapper influxDBFactory, Configuration configuration, String[] columnNames, ErrorHandler errorHandler) { + this.influxDBFactory = influxDBFactory; + this.configuration = configuration; + this.columnNames = columnNames; + this.errorHandler = errorHandler; + } + + @Override + public SinkWriter createWriter(InitContext context, List states) throws IOException { + InfluxDB influxDB = influxDBFactory.connect(configuration.getString(SINK_INFLUX_URL_KEY, SINK_INFLUX_URL_DEFAULT), + configuration.getString(SINK_INFLUX_USERNAME_KEY, SINK_INFLUX_USERNAME_DEFAULT), + configuration.getString(SINK_INFLUX_PASSWORD_KEY, SINK_INFLUX_PASSWORD_DEFAULT)); + errorHandler.init(context); + influxDB.enableBatch(configuration.getInteger(SINK_INFLUX_BATCH_SIZE_KEY, SINK_INFLUX_BATCH_SIZE_DEFAULT), + configuration.getInteger(SINK_INFLUX_FLUSH_DURATION_MS_KEY, SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT), + TimeUnit.MILLISECONDS, Executors.defaultThreadFactory(), errorHandler.getExceptionHandler()); + if (errorReporter == null) { + errorReporter = ErrorReporterFactory.getErrorReporter(context.metricGroup(), configuration); + } + + InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDB, columnNames, errorHandler, errorReporter); + return influxDBWriter; + } + + @Override + public Optional> getWriterStateSerializer() { + return Optional.empty(); + } + + @Override + public Optional> createCommitter() throws IOException { + return Optional.empty(); + } + + @Override + public Optional> createGlobalCommitter() throws IOException { + return Optional.empty(); + } + + @Override + public Optional> getCommittableSerializer() { + return Optional.empty(); + } + + @Override + public Optional> getGlobalCommittableSerializer() { + return Optional.empty(); + } + +} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java new file mode 100644 index 000000000..2691dbe9c --- /dev/null +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java @@ -0,0 +1,113 @@ +package 
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java
new file mode 100644
index 000000000..2691dbe9c
--- /dev/null
+++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxDBWriter.java
@@ -0,0 +1,113 @@
+package io.odpf.dagger.core.sink.influx;
+
+import org.apache.flink.api.connector.sink.SinkWriter;
+import org.apache.flink.types.Row;
+
+import com.google.common.base.Strings;
+import io.odpf.dagger.common.configuration.Configuration;
+import io.odpf.dagger.core.metrics.reporters.ErrorReporter;
+import org.influxdb.InfluxDB;
+import org.influxdb.dto.Point;
+import org.influxdb.dto.Point.Builder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static io.odpf.dagger.core.utils.Constants.*;
+
+public class InfluxDBWriter implements SinkWriter<Row, Void, Void> {
+    private static final Logger LOGGER = LoggerFactory.getLogger(InfluxDBWriter.class.getName());
+    private final String databaseName;
+    private final String retentionPolicy;
+    private final String measurementName;
+    private InfluxDB influxDB;
+    private String[] columnNames;
+    private ErrorHandler errorHandler;
+    private ErrorReporter errorReporter;
+
+    public InfluxDBWriter(Configuration configuration, InfluxDB influxDB, String[] columnNames, ErrorHandler errorHandler, ErrorReporter errorReporter) {
+        databaseName = configuration.getString(SINK_INFLUX_DB_NAME_KEY, SINK_INFLUX_DB_NAME_DEFAULT);
+        retentionPolicy = configuration.getString(SINK_INFLUX_RETENTION_POLICY_KEY, SINK_INFLUX_RETENTION_POLICY_DEFAULT);
+        measurementName = configuration.getString(SINK_INFLUX_MEASUREMENT_NAME_KEY, SINK_INFLUX_MEASUREMENT_NAME_DEFAULT);
+        this.influxDB = influxDB;
+        this.columnNames = columnNames;
+        this.errorHandler = errorHandler;
+        this.errorReporter = errorReporter;
+    }
+
+    @Override
+    public void write(Row row, Context context) throws IOException, InterruptedException {
+        // TODO : check if check-pointing logs can be skipped
+        LOGGER.info("row to influx: " + row);
+
+        Builder pointBuilder = Point.measurement(measurementName);
+        Map<String, Object> fields = new HashMap<>();
+        for (int i = 0; i < columnNames.length; i++) {
+            String columnName = columnNames[i];
+            if (columnName.equals("window_timestamp")) {
+                LocalDateTime timeField = (LocalDateTime) row.getField(i);
+                ZonedDateTime zonedDateTime = timeField.atZone(ZoneOffset.UTC);
+                pointBuilder.time(zonedDateTime.toInstant().toEpochMilli(), TimeUnit.MILLISECONDS);
+            } else if (columnName.startsWith("tag_")) {
+                pointBuilder.tag(columnName, String.valueOf(row.getField(i)));
+            } else if (columnName.startsWith("label_")) {
+                pointBuilder.tag(columnName.substring("label_".length()), ((String) row.getField(i)));
+            } else {
+                if (!(Strings.isNullOrEmpty(columnName) || row.getField(i) == null)) {
+                    fields.put(columnName, row.getField(i));
+                }
+            }
+        }
+        //TODO : How to handle other exceptions
+
+        addErrorMetricsAndThrow();
+
+        try {
+            influxDB.write(databaseName, retentionPolicy, pointBuilder.fields(fields).build());
+        } catch (Exception exception) {
+            errorReporter.reportFatalException(exception);
+            throw exception;
+        }
+    }
+
+    @Override
+    public List<Void> prepareCommit(boolean flush) throws IOException, InterruptedException {
+        return null;
+    }
+
+    @Override
+    public void close() throws Exception {
+        influxDB.close();
+    }
+
+
+    private void addErrorMetricsAndThrow() throws IOException {
+        if (errorHandler.getError().isPresent() && errorHandler.getError().get().hasException()) {
+            IOException currentException = errorHandler.getError().get().getCurrentException();
+            errorReporter.reportFatalException(currentException);
+            throw currentException;
+        }
+    }
+
+    @Override
+    public List<Void> snapshotState(long checkpointId) throws IOException {
+        addErrorMetricsAndThrow();
+        try {
+            influxDB.flush();
+        } catch (Exception exception) {
+            errorReporter.reportFatalException(exception);
+            throw exception;
+        }
+        addErrorMetricsAndThrow();
+        return Collections.emptyList();
+    }
+}
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxRowSink.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxRowSink.java
deleted file mode 100644
index abd6417f4..000000000
--- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/InfluxRowSink.java
+++ /dev/null
@@ -1,156 +0,0 @@
-package io.odpf.dagger.core.sink.influx;
-
-import org.apache.flink.runtime.state.FunctionInitializationContext;
-import org.apache.flink.runtime.state.FunctionSnapshotContext;
-import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
-import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
-import org.apache.flink.types.Row;
-
-import com.google.common.base.Strings;
-import io.odpf.dagger.common.configuration.Configuration;
-import io.odpf.dagger.core.metrics.reporters.ErrorReporter;
-import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory;
-import org.influxdb.InfluxDB;
-import org.influxdb.dto.Point;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.Timestamp;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-import static io.odpf.dagger.core.utils.Constants.*;
-
-/**
- * The Influx row sink.
- */
-public class InfluxRowSink extends RichSinkFunction<Row> implements CheckpointedFunction {
-    private static final Logger LOGGER = LoggerFactory.getLogger(InfluxRowSink.class.getName());
-
-    private InfluxDB influxDB;
-    private InfluxDBFactoryWrapper influxDBFactory;
-    private String[] columnNames;
-    private Configuration configurations;
-    private String databaseName;
-    private String retentionPolicy;
-    private String measurementName;
-    private ErrorHandler errorHandler;
-    private ErrorReporter errorReporter;
-
-    /**
-     * Instantiates a new Influx row sink.
-     *
-     * @param influxDBFactory the influx db factory
-     * @param columnNames     the column names
-     * @param configuration   the configuration
-     * @param errorHandler    the error handler
-     */
-    public InfluxRowSink(InfluxDBFactoryWrapper influxDBFactory, String[] columnNames, Configuration configuration, ErrorHandler errorHandler) {
-        this.influxDBFactory = influxDBFactory;
-        this.columnNames = columnNames;
-        this.configurations = configuration;
-        this.errorHandler = errorHandler;
-        databaseName = configuration.getString(SINK_INFLUX_DB_NAME_KEY, SINK_INFLUX_DB_NAME_DEFAULT);
-        retentionPolicy = configuration.getString(SINK_INFLUX_RETENTION_POLICY_KEY, SINK_INFLUX_RETENTION_POLICY_DEFAULT);
-        measurementName = configuration.getString(SINK_INFLUX_MEASUREMENT_NAME_KEY, SINK_INFLUX_MEASUREMENT_NAME_DEFAULT);
-    }
-
-    /**
-     * Instantiates a new Influx row sink with specified error reporter.
- * - * @param influxDBFactory the influx db factory - * @param columnNames the column names - * @param configuration the configuration - * @param errorHandler the error handler - * @param errorReporter the error reporter - */ - public InfluxRowSink(InfluxDBFactoryWrapper influxDBFactory, String[] columnNames, Configuration configuration, ErrorHandler errorHandler, ErrorReporter errorReporter) { - this.influxDBFactory = influxDBFactory; - this.columnNames = columnNames; - this.configurations = configuration; - this.errorHandler = errorHandler; - this.errorReporter = errorReporter; - databaseName = configuration.getString(SINK_INFLUX_DB_NAME_KEY, SINK_INFLUX_DB_NAME_DEFAULT); - retentionPolicy = configuration.getString(SINK_INFLUX_RETENTION_POLICY_KEY, SINK_INFLUX_RETENTION_POLICY_DEFAULT); - measurementName = configuration.getString(SINK_INFLUX_MEASUREMENT_NAME_KEY, SINK_INFLUX_MEASUREMENT_NAME_DEFAULT); - } - - @Override - public void open(org.apache.flink.configuration.Configuration internalFlinkConfig) throws Exception { - errorHandler.init(getRuntimeContext()); - influxDB = influxDBFactory.connect(configurations.getString(SINK_INFLUX_URL_KEY, SINK_INFLUX_URL_DEFAULT), - configurations.getString(SINK_INFLUX_USERNAME_KEY, SINK_INFLUX_USERNAME_DEFAULT), - configurations.getString(SINK_INFLUX_PASSWORD_KEY, SINK_INFLUX_PASSWORD_DEFAULT) - ); - - influxDB.enableBatch(configurations.getInteger(SINK_INFLUX_BATCH_SIZE_KEY, SINK_INFLUX_BATCH_SIZE_DEFAULT), - configurations.getInteger(SINK_INFLUX_FLUSH_DURATION_MS_KEY, SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT), - TimeUnit.MILLISECONDS, Executors.defaultThreadFactory(), errorHandler.getExceptionHandler() - ); - if (errorReporter == null) { - errorReporter = ErrorReporterFactory.getErrorReporter(getRuntimeContext(), configurations); - } - } - - @Override - public void close() throws Exception { - influxDB.close(); - super.close(); - } - - @Override - public void invoke(Row row, Context context) throws Exception { - LOGGER.info("row to influx: " + row); - Point.Builder pointBuilder = Point.measurement(measurementName); - Map fields = new HashMap<>(); - for (int i = 0; i < columnNames.length; i++) { - String columnName = columnNames[i]; - if (columnName.equals("window_timestamp")) { - Timestamp field = (Timestamp) row.getField(i); - pointBuilder.time(field.getTime(), TimeUnit.MILLISECONDS); - } else if (columnName.startsWith("tag_")) { - pointBuilder.tag(columnName, String.valueOf(row.getField(i))); - } else if (columnName.startsWith("label_")) { - pointBuilder.tag(columnName.substring("label_".length()), ((String) row.getField(i))); - } else { - if (!(Strings.isNullOrEmpty(columnName) || row.getField(i) == null)) { - fields.put(columnName, row.getField(i)); - } - } - } - addErrorMetricsAndThrow(); - try { - influxDB.write(databaseName, retentionPolicy, pointBuilder.fields(fields).build()); - } catch (Exception exception) { - errorReporter.reportFatalException(exception); - throw exception; - } - } - - @Override - public void snapshotState(FunctionSnapshotContext context) throws Exception { - addErrorMetricsAndThrow(); - try { - influxDB.flush(); - } catch (Exception exception) { - errorReporter.reportFatalException(exception); - throw exception; - } - addErrorMetricsAndThrow(); - } - - @Override - public void initializeState(FunctionInitializationContext context) { - // do nothing - } - - private void addErrorMetricsAndThrow() throws Exception { - if (errorHandler.getError().isPresent() && errorHandler.getError().get().hasException()) { - Exception 
currentException = errorHandler.getError().get().getCurrentException(); - errorReporter.reportFatalException(currentException); - throw currentException; - } - } -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/InfluxError.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/InfluxError.java index eb70a4460..298842900 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/InfluxError.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/InfluxError.java @@ -4,6 +4,8 @@ import org.slf4j.Logger; +import java.io.IOException; + /** * The interface Influx error. */ @@ -21,7 +23,7 @@ public interface InfluxError { * * @return the current exception */ - Exception getCurrentException(); + IOException getCurrentException(); /** * Filter the error. diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropError.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropError.java index 71e58c5fe..862076af4 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropError.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropError.java @@ -1,8 +1,9 @@ package io.odpf.dagger.core.sink.influx.errors; -import org.apache.flink.api.common.functions.RuntimeContext; +import org.apache.flink.api.connector.sink.Sink.InitContext; import org.apache.flink.metrics.Counter; +import io.odpf.dagger.core.metrics.reporters.ErrorReporter; import io.odpf.dagger.core.metrics.reporters.ErrorStatsReporter; import io.odpf.dagger.core.utils.Constants; import org.influxdb.InfluxDBException; @@ -10,24 +11,27 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; + /** * The Late record drop error. */ public class LateRecordDropError implements InfluxError { + // TODO : Fix Error Handling part private final Counter counter; private static final Logger LOGGER = LoggerFactory.getLogger(LateRecordDropError.class.getName()); - private ErrorStatsReporter errorStatsReporter; + private ErrorReporter errorStatsReporter; private static final String PREFIX = "{\"error\":\"partial write: points beyond retention policy dropped="; /** * Instantiates a new Late record drop error. 
* - * @param runtimeContext the runtime context + * @param initContext the context available in sink functions */ - public LateRecordDropError(RuntimeContext runtimeContext) { - this.counter = runtimeContext.getMetricGroup() + public LateRecordDropError(InitContext initContext) { + this.counter = initContext.metricGroup() .addGroup(Constants.SINK_INFLUX_LATE_RECORDS_DROPPED_KEY).counter("value"); - this.errorStatsReporter = new ErrorStatsReporter(runtimeContext, + this.errorStatsReporter = new ErrorStatsReporter(initContext.metricGroup(), Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT); } @@ -37,7 +41,7 @@ public boolean hasException() { } @Override - public Exception getCurrentException() { + public IOException getCurrentException() { return null; } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/NoError.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/NoError.java index 1b7780aef..d78a7927c 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/NoError.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/NoError.java @@ -2,6 +2,8 @@ import org.influxdb.dto.Point; +import java.io.IOException; + /** * No error found on Influx sink. */ @@ -12,7 +14,7 @@ public boolean hasException() { } @Override - public Exception getCurrentException() { + public IOException getCurrentException() { return null; } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidError.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidError.java index da1ed27b9..eaa5ef91a 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidError.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidError.java @@ -1,17 +1,20 @@ package io.odpf.dagger.core.sink.influx.errors; -import io.odpf.dagger.core.sink.influx.InfluxRowSink; +import io.odpf.dagger.core.exception.InfluxWriteException; +import io.odpf.dagger.core.sink.influx.InfluxDBSink; import org.influxdb.dto.Point; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; + /** * The Valid error. 
*/ public class ValidError implements InfluxError { - private static final Logger LOGGER = LoggerFactory.getLogger(InfluxRowSink.class.getName()); - private Exception error; + private static final Logger LOGGER = LoggerFactory.getLogger(InfluxDBSink.class.getName()); + private IOException error; @Override public boolean hasException() { @@ -19,7 +22,7 @@ public boolean hasException() { } @Override - public Exception getCurrentException() { + public IOException getCurrentException() { return error; } @@ -30,7 +33,7 @@ public boolean filterError(Throwable throwable) { @Override public void handle(Iterable points, Throwable throwable) { - error = new Exception(throwable); + error = new InfluxWriteException(throwable); logFailedPoints(points, LOGGER); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidException.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidException.java index df1d670e8..db0474c1b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidException.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/influx/errors/ValidException.java @@ -1,15 +1,18 @@ package io.odpf.dagger.core.sink.influx.errors; +import io.odpf.dagger.core.exception.InfluxWriteException; import org.influxdb.dto.Point; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; + /** * The Valid exception. */ public class ValidException implements InfluxError { private static final Logger LOGGER = LoggerFactory.getLogger(ValidException.class.getName()); - private Exception exception; + private IOException exception; @Override public boolean hasException() { @@ -17,7 +20,7 @@ public boolean hasException() { } @Override - public Exception getCurrentException() { + public IOException getCurrentException() { return exception; } @@ -28,7 +31,7 @@ public boolean filterError(Throwable throwable) { @Override public void handle(Iterable points, Throwable throwable) { - exception = (Exception) throwable; + exception = new InfluxWriteException(throwable); logFailedPoints(points, LOGGER); } } diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSink.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSink.java index 43a159e76..d2bb3854d 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSink.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSink.java @@ -1,20 +1,20 @@ package io.odpf.dagger.core.sink.log; -import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; +import org.apache.flink.api.connector.sink.Committer; +import org.apache.flink.api.connector.sink.GlobalCommitter; +import org.apache.flink.api.connector.sink.Sink; +import org.apache.flink.api.connector.sink.SinkWriter; +import org.apache.flink.core.io.SimpleVersionedSerializer; import org.apache.flink.types.Row; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.util.HashMap; -import java.util.Map; +import java.util.List; +import java.util.Optional; /** * The Log sink. */ -public class LogSink extends RichSinkFunction { - private static final Logger LOGGER = LoggerFactory.getLogger(LogSink.class.getName()); - - private String[] columnNames; +public class LogSink implements Sink { + private final String[] columnNames; /** * Instantiates a new Log sink. 
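Stepping out of the log sink diff for a moment: the ValidError and ValidException handlers above stop surfacing failures as a bare Exception and wrap the cause in io.odpf.dagger.core.exception.InfluxWriteException instead, because SinkWriter#write and snapshotState may only throw IOException. That exception class is not shown in this diff; a plausible minimal shape, assuming it does nothing beyond wrapping the cause:

package io.odpf.dagger.core.exception;

import java.io.IOException;

// Hypothetical reconstruction (the class lives outside this diff): an
// IOException so influx write failures can propagate through the unified
// SinkWriter API's checked-exception signatures.
public class InfluxWriteException extends IOException {
    public InfluxWriteException(Throwable cause) {
        super(cause);
    }
}

The rest of the log sink changes continue below.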
@@ -26,15 +26,32 @@ public LogSink(String[] columnNames) {
     }
 
     @Override
-    public void invoke(Row row, Context context) throws Exception {
-
-        Map<String, String> map = new HashMap<>();
-        for (int i = 0; i < columnNames.length; i++) {
-            Object field = row.getField(i);
-            if (field != null) {
-                map.put(columnNames[i], field.toString());
-            }
-        }
-        LOGGER.info(map.toString());
+    public SinkWriter<Row, Void, Void> createWriter(InitContext context, List<Void> states) {
+        return new LogSinkWriter(columnNames);
+    }
+
+    @Override
+    public Optional<SimpleVersionedSerializer<Void>> getWriterStateSerializer() {
+        return Optional.empty();
+    }
+
+    @Override
+    public Optional<Committer<Void>> createCommitter() {
+        return Optional.empty();
+    }
+
+    @Override
+    public Optional<GlobalCommitter<Void, Void>> createGlobalCommitter() {
+        return Optional.empty();
+    }
+
+    @Override
+    public Optional<SimpleVersionedSerializer<Void>> getCommittableSerializer() {
+        return Optional.empty();
+    }
+
+    @Override
+    public Optional<SimpleVersionedSerializer<Void>> getGlobalCommittableSerializer() {
+        return Optional.empty();
     }
 }
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSinkWriter.java b/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSinkWriter.java
new file mode 100644
index 000000000..bd358ca52
--- /dev/null
+++ b/dagger-core/src/main/java/io/odpf/dagger/core/sink/log/LogSinkWriter.java
@@ -0,0 +1,42 @@
+package io.odpf.dagger.core.sink.log;
+
+import org.apache.flink.api.connector.sink.SinkWriter;
+import org.apache.flink.types.Row;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class LogSinkWriter implements SinkWriter<Row, Void, Void> {
+    private static final Logger LOGGER = LoggerFactory.getLogger(LogSinkWriter.class.getName());
+    private final String[] columnNames;
+
+    public LogSinkWriter(String[] columnNames) {
+        this.columnNames = columnNames;
+    }
+
+    @Override
+    public void write(Row row, Context context) {
+        Map<String, String> map = new HashMap<>();
+        for (int i = 0; i < columnNames.length; i++) {
+            Object field = row.getField(i);
+            if (field != null) {
+                map.put(columnNames[i], field.toString());
+            }
+        }
+        LOGGER.info(map.toString());
+    }
+
+    @Override
+    public List<Void> prepareCommit(boolean flush) {
+        return null;
+    }
+
+    @Override
+    public void close() throws Exception {
+
+    }
+}
diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/FlinkKafkaConsumerCustom.java b/dagger-core/src/main/java/io/odpf/dagger/core/source/FlinkKafkaConsumerCustom.java
deleted file mode 100644
index 560a5a4fd..000000000
--- a/dagger-core/src/main/java/io/odpf/dagger/core/source/FlinkKafkaConsumerCustom.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package io.odpf.dagger.core.source;
-
-import org.apache.flink.api.common.functions.RuntimeContext;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
-import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
-import org.apache.flink.streaming.runtime.tasks.ExceptionInChainedOperatorException;
-import org.apache.flink.types.Row;
-
-import io.odpf.dagger.common.configuration.Configuration;
-import io.odpf.dagger.core.metrics.reporters.ErrorReporter;
-import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory;
-
-import java.util.Properties;
-import java.util.regex.Pattern;
-
-/**
- * A class responsible for consuming the messages in kafka.
- * {@link FlinkKafkaConsumer}.
- */
-public class FlinkKafkaConsumerCustom extends FlinkKafkaConsumer<Row> {
-
-    private Configuration configuration;
-
-    /**
-     * Instantiates a new Flink kafka consumer custom.
- * - * @param subscriptionPattern the subscription pattern - * @param deserializer the deserializer - * @param props the props - * @param configuration the configuration - */ - public FlinkKafkaConsumerCustom(Pattern subscriptionPattern, KafkaDeserializationSchema deserializer, - Properties props, Configuration configuration) { - super(subscriptionPattern, deserializer, props); - this.configuration = configuration; - } - - @Override - public void run(SourceContext sourceContext) throws Exception { - try { - runBaseConsumer(sourceContext); - } catch (ExceptionInChainedOperatorException chainedOperatorException) { - throw chainedOperatorException; - } catch (Exception exception) { - ErrorReporter errorReporter = getErrorReporter(getRuntimeContext()); - errorReporter.reportFatalException(exception); - throw exception; - } - } - - /** - * Run base consumer. - * - * @param sourceContext the source context - * @throws Exception the exception - */ - protected void runBaseConsumer(SourceContext sourceContext) throws Exception { - super.run(sourceContext); - } - - /** - * Gets error reporter. - * - * @param runtimeContext the runtime context - * @return the error reporter - */ - protected ErrorReporter getErrorReporter(RuntimeContext runtimeContext) { - return ErrorReporterFactory.getErrorReporter(runtimeContext, configuration); - } -} diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoDeserializer.java b/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoDeserializer.java index d758c1a2b..f5e0d959b 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoDeserializer.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoDeserializer.java @@ -3,7 +3,7 @@ import io.odpf.dagger.common.core.StencilClientOrchestrator; import io.odpf.dagger.core.exception.DaggerDeserializationException; import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.core.protohandler.RowFactory; +import io.odpf.dagger.common.protohandler.RowFactory; import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.google.protobuf.InvalidProtocolBufferException; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoType.java b/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoType.java index 8747122c6..a6e44d507 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoType.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/source/ProtoType.java @@ -2,7 +2,7 @@ import io.odpf.dagger.common.core.StencilClientOrchestrator; import io.odpf.dagger.common.exceptions.DescriptorNotFoundException; -import io.odpf.dagger.core.protohandler.TypeInformationFactory; +import io.odpf.dagger.common.protohandler.TypeInformationFactory; import io.odpf.dagger.core.utils.Constants; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.Descriptor; diff --git a/dagger-core/src/main/java/io/odpf/dagger/core/utils/Constants.java b/dagger-core/src/main/java/io/odpf/dagger/core/utils/Constants.java index ac2960708..1d31f6326 100644 --- a/dagger-core/src/main/java/io/odpf/dagger/core/utils/Constants.java +++ b/dagger-core/src/main/java/io/odpf/dagger/core/utils/Constants.java @@ -94,6 +94,9 @@ public class Constants { public static final String STREAM_SOURCE_KAFKA_TOPIC_NAMES_KEY = "SOURCE_KAFKA_TOPIC_NAMES"; public static final String INPUT_STREAM_NAME_KEY = "SOURCE_KAFKA_NAME"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET_KEY = 
"SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET"; + public static final String SOURCE_KAFKA_CONSUMER_CONFIG_AUTO_OFFSET_RESET_DEFAULT = "latest"; + public static final String METRIC_TELEMETRY_ENABLE_KEY = "METRIC_TELEMETRY_ENABLE"; public static final boolean METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT = true; public static final String METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY = "METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS"; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/StreamManagerTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/StreamManagerTest.java index a5a83b9cd..f39f5670b 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/StreamManagerTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/StreamManagerTest.java @@ -3,6 +3,7 @@ import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.connector.kafka.source.KafkaSource; import org.apache.flink.streaming.api.CheckpointingMode; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.DataStreamSource; @@ -18,7 +19,6 @@ import io.odpf.dagger.common.configuration.Configuration; import io.odpf.dagger.common.core.StreamInfo; -import io.odpf.dagger.core.source.FlinkKafkaConsumerCustom; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -110,7 +110,7 @@ public void setup() { when(env.getConfig()).thenReturn(executionConfig); when(env.getCheckpointConfig()).thenReturn(checkpointConfig); when(tableEnvironment.getConfig()).thenReturn(tableConfig); - when(env.addSource(any(FlinkKafkaConsumerCustom.class))).thenReturn(source); + when(env.fromSource(any(KafkaSource.class), any(WatermarkStrategy.class), any(String.class))).thenReturn(source); when(source.getType()).thenReturn(typeInformation); when(typeInformation.getTypeClass()).thenReturn(Row.class); when(schema.getFieldNames()).thenReturn(new String[0]); @@ -136,7 +136,7 @@ public void shouldRegisterRequiredConfigsOnExecutionEnvironment() { @Test public void shouldRegisterSourceWithPreprocessorsWithWaterMarks() { - when(env.addSource(any(FlinkKafkaConsumerCustom.class))).thenReturn(source); + when(env.fromSource(any(KafkaSource.class), any(WatermarkStrategy.class), any(String.class))).thenReturn(source); when(source.assignTimestampsAndWatermarks(any(WatermarkStrategy.class))).thenReturn(singleOutputStream); StreamManagerStub streamManagerStub = new StreamManagerStub(configuration, env, tableEnvironment, new StreamInfo(dataStream, new String[]{})); @@ -146,6 +146,17 @@ public void shouldRegisterSourceWithPreprocessorsWithWaterMarks() { verify(tableEnvironment, Mockito.times(1)).fromDataStream(any(), new ApiExpression[]{}); } + @Test + public void shouldCreateValidSourceWithWatermarks() { + when(source.assignTimestampsAndWatermarks(any(WatermarkStrategy.class))).thenReturn(singleOutputStream); + + StreamManagerStub streamManagerStub = new StreamManagerStub(configuration, env, tableEnvironment, new StreamInfo(dataStream, new String[]{})); + streamManagerStub.registerConfigs(); + streamManagerStub.registerSourceWithPreProcessors(); + + verify(env, Mockito.times(1)).fromSource(any(KafkaSource.class), any(WatermarkStrategy.class), any(String.class)); + } + @Test public void shouldCreateOutputStream() { StreamManagerStub streamManagerStub = new StreamManagerStub(configuration, env, tableEnvironment, new StreamInfo(dataStream, new String[]{})); diff --git 
a/dagger-core/src/test/java/io/odpf/dagger/core/StreamsTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/StreamsTest.java index 05619db95..b1f8986f5 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/StreamsTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/StreamsTest.java @@ -1,10 +1,11 @@ package io.odpf.dagger.core; +import org.apache.flink.connector.kafka.source.KafkaSource; + import com.gojek.de.stencil.StencilClientFactory; import com.gojek.de.stencil.client.StencilClient; import io.odpf.dagger.common.configuration.Configuration; import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.source.FlinkKafkaConsumerCustom; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; @@ -64,8 +65,8 @@ public void shouldTakeAJSONArrayWithSingleObject() { + "]"; when(configuration.getString("STREAMS", "")).thenReturn(configString); - Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator, false, 0); - Map mapOfStreams = streams.getStreams(); + Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator); + Map mapOfStreams = streams.getKafkaSource(); assertEquals(1, mapOfStreams.size()); assertEquals("data_stream", mapOfStreams.keySet().toArray()[0]); } @@ -99,7 +100,7 @@ public void shouldAddTopicsStreamsAndProtosToMetrics() { System.out.println(metrics); when(configuration.getString("STREAMS", "")).thenReturn(configString); - Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator, false, 0); + Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator); streams.preProcessBeforeNotifyingSubscriber(); Map> telemetry = streams.getTelemetry(); @@ -150,7 +151,7 @@ public void shouldAddTopicsStreamsAndProtosToMetricsInCaseOfJoins() { System.out.println(metrics); when(configuration.getString("STREAMS", "")).thenReturn(configString); - Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator, false, 0); + Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator); streams.preProcessBeforeNotifyingSubscriber(); Map> telemetry = streams.getTelemetry(); @@ -176,7 +177,7 @@ public void shouldReturnProtoClassName() { protoClassForTable.put("data_stream", "io.odpf.dagger.consumer.TestBookingLogMessage"); when(configuration.getString("STREAMS", "")).thenReturn(configString); - Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator, false, 0); + Streams streams = new Streams(configuration, "rowtime", stencilClientOrchestrator); assertEquals(protoClassForTable, streams.getProtos()); } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java index 20eac403a..1b7103405 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorReporterFactoryTest.java @@ -1,6 +1,7 @@ package io.odpf.dagger.core.metrics.reporters; import org.apache.flink.api.common.functions.RuntimeContext; +import org.apache.flink.metrics.MetricGroup; import io.odpf.dagger.common.configuration.Configuration; import io.odpf.dagger.core.utils.Constants; @@ -22,6 +23,9 @@ public class ErrorReporterFactoryTest { @Mock private Configuration configuration; + @Mock + private MetricGroup metricGroup; + @Before public void setup() { 
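// The factory is now resolved against a MetricGroup rather than a RuntimeContext:
// the unified Sink API only hands writers an InitContext, whose metric group is
// the sole telemetry hook, so these tests stub the MetricGroup mock declared above.
// (Factory signatures inferred from the call sites below, not shown in this diff.)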
initMocks(this); @@ -31,19 +35,25 @@ public void setup() { @Test public void shouldReturnErrorTelemetryFormConfigOnly() { - ErrorReporter errorReporter = ErrorReporterFactory.getErrorReporter(runtimeContext, configuration); + ErrorReporter errorReporter = ErrorReporterFactory.getErrorReporter(runtimeContext.getMetricGroup(), configuration); + assertEquals(errorReporter.getClass(), ErrorStatsReporter.class); + } + + @Test + public void shouldReturnErrorTelemetryFormMetricGroup() { + ErrorReporter errorReporter = ErrorReporterFactory.getErrorReporter(metricGroup, configuration); assertEquals(errorReporter.getClass(), ErrorStatsReporter.class); } @Test public void shouldReturnErrorStatsReporterIfTelemetryEnabled() { - ErrorReporter errorReporter = ErrorReporterFactory.getErrorReporter(runtimeContext, true, 0L); + ErrorReporter errorReporter = ErrorReporterFactory.getErrorReporter(runtimeContext.getMetricGroup(), true, 0L); assertEquals(errorReporter.getClass(), ErrorStatsReporter.class); } @Test public void shouldReturnNoOpReporterIfTelemetryDisabled() { - ErrorReporter errorReporter = ErrorReporterFactory.getErrorReporter(runtimeContext, false, 0L); + ErrorReporter errorReporter = ErrorReporterFactory.getErrorReporter(runtimeContext.getMetricGroup(), false, 0L); assertEquals(errorReporter.getClass(), NoOpErrorReporter.class); } } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporterTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporterTest.java index ae7f14718..f15e51c09 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporterTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/metrics/reporters/ErrorStatsReporterTest.java @@ -30,12 +30,12 @@ public class ErrorStatsReporterTest { public void setup() { initMocks(this); long shutDownPeriod = 0L; - errorStatsReporter = new ErrorStatsReporter(runtimeContext, shutDownPeriod); + when(runtimeContext.getMetricGroup()).thenReturn(metricGroup); + errorStatsReporter = new ErrorStatsReporter(runtimeContext.getMetricGroup(), shutDownPeriod); } @Test public void shouldReportError() { - when(runtimeContext.getMetricGroup()).thenReturn(metricGroup); when(metricGroup.addGroup("fatal.exception", "java.lang.RuntimeException")).thenReturn(metricGroup); when(metricGroup.counter("value")).thenReturn(counter); errorStatsReporter.reportFatalException(new RuntimeException()); @@ -45,7 +45,6 @@ public void shouldReportError() { @Test public void shouldReportNonFatalError() { - when(runtimeContext.getMetricGroup()).thenReturn(metricGroup); when(metricGroup.addGroup("non.fatal.exception", "java.lang.RuntimeException")).thenReturn(metricGroup); when(metricGroup.counter("value")).thenReturn(counter); errorStatsReporter.reportNonFatalException(new RuntimeException()); diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsResponseHandlerTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsResponseHandlerTest.java index d8b548c43..05a11bc9e 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsResponseHandlerTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/processors/external/es/EsResponseHandlerTest.java @@ -12,8 +12,8 @@ import io.odpf.dagger.core.processors.common.OutputMapping; import io.odpf.dagger.core.processors.common.PostResponseTelemetry; import io.odpf.dagger.core.processors.common.RowManager; -import 
io.odpf.dagger.core.protohandler.ProtoHandlerFactory; -import io.odpf.dagger.core.protohandler.RowFactory; +import io.odpf.dagger.common.protohandler.ProtoHandlerFactory; +import io.odpf.dagger.common.protohandler.RowFactory; import mockit.Mock; import mockit.MockUp; import org.apache.flink.streaming.api.functions.async.ResultFuture; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java index 4b12ff3ca..9916af446 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/processors/longbow/request/ProtoBytePutRequestTest.java @@ -1,8 +1,8 @@ package io.odpf.dagger.core.processors.longbow.request; -import io.odpf.dagger.core.processors.longbow.LongbowSchema; import org.apache.flink.types.Row; +import io.odpf.dagger.core.processors.longbow.LongbowSchema; import io.odpf.dagger.core.sink.ProtoSerializer; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.util.Bytes; diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/FlinkKafkaProducerCustomTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/sink/FlinkKafkaProducerCustomTest.java deleted file mode 100644 index a3332cfcc..000000000 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/FlinkKafkaProducerCustomTest.java +++ /dev/null @@ -1,177 +0,0 @@ -package io.odpf.dagger.core.sink; - -import org.apache.flink.api.common.functions.RuntimeContext; -import org.apache.flink.runtime.state.FunctionInitializationContext; -import org.apache.flink.runtime.state.FunctionSnapshotContext; -import org.apache.flink.streaming.api.functions.sink.SinkFunction.Context; -import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer; -import org.apache.flink.types.Row; - -import io.odpf.dagger.common.configuration.Configuration; -import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory; -import io.odpf.dagger.core.metrics.reporters.NoOpErrorReporter; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; - -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_KEY; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT; -import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.mockito.MockitoAnnotations.initMocks; - -public class FlinkKafkaProducerCustomTest { - - @Mock - private FlinkKafkaProducer flinkKafkaProducer; - - @Mock - private FunctionSnapshotContext functionSnapshotContext; - - @Mock - private FunctionInitializationContext functionInitializationContext; - - @Mock - private org.apache.flink.configuration.Configuration flinkInternalConfig; - - @Mock - private Configuration configuration; - - @Mock - private Context defaultContext; - - @Mock - private RuntimeContext defaultRuntimeContext; - - @Mock - private ErrorReporter errorStatsReporter; - - @Mock - private NoOpErrorReporter 
noOpErrorReporter; - - private FlinkKafkaProducerCustomStub flinkKafkaProducerCustomStub; - private Row row; - - @Before - public void setUp() { - initMocks(this); - flinkKafkaProducerCustomStub = new FlinkKafkaProducerCustomStub(flinkKafkaProducer, configuration); - row = Row.of("some field"); - } - - @Test - public void shouldCallFlinkProducerOpenMethodOnOpen() throws Exception { - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - flinkKafkaProducerCustom.open(flinkInternalConfig); - - verify(flinkKafkaProducer, times(1)).open(flinkInternalConfig); - } - - @Test - public void shouldCallFlinkProducerCloseMethodOnClose() throws Exception { - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - flinkKafkaProducerCustom.close(); - - verify(flinkKafkaProducer, times(1)).close(); - } - - @Test - public void shouldCallFlinkProducerSnapshotState() throws Exception { - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - flinkKafkaProducerCustom.snapshotState(functionSnapshotContext); - - verify(flinkKafkaProducer, times(1)).snapshotState(functionSnapshotContext); - } - - @Test - public void shouldCallFlinkProducerInitializeState() throws Exception { - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - flinkKafkaProducerCustom.initializeState(functionInitializationContext); - - verify(flinkKafkaProducer, times(1)).initializeState(functionInitializationContext); - } - - @Test - public void shouldCallFlinkProducerGetIterationRuntimeContext() { - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - flinkKafkaProducerCustom.getIterationRuntimeContext(); - - verify(flinkKafkaProducer, times(1)).getIterationRuntimeContext(); - } - - @Test - public void shouldCallFlinkProducerGetRuntimeContext() { - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - flinkKafkaProducerCustom.getRuntimeContext(); - - verify(flinkKafkaProducer, times(1)).getRuntimeContext(); - } - - @Test - public void shouldCallFlinkProducerSetRuntimeContext() { - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - flinkKafkaProducerCustom.setRuntimeContext(defaultRuntimeContext); - - verify(flinkKafkaProducer, times(1)).setRuntimeContext(defaultRuntimeContext); - } - - - @Test - public void shouldReportErrorIfTelemetryEnabled() { - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); - when(configuration.getLong(METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L); - - Exception exception = assertThrows(Exception.class, - () -> flinkKafkaProducerCustomStub.invoke(row, defaultContext)); - assertEquals("test producer exception", exception.getMessage()); - verify(errorStatsReporter, times(1)).reportFatalException(any(RuntimeException.class)); - } - - @Test - public void shouldNotReportIfTelemetryDisabled() { - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(false); - - Exception exception = assertThrows(Exception.class, - () -> flinkKafkaProducerCustomStub.invoke(row, defaultContext)); - assertEquals("test 
producer exception", exception.getMessage()); - verify(noOpErrorReporter, times(1)).reportFatalException(any(RuntimeException.class)); - } - - @Test - public void shouldReturnErrorStatsReporter() { - when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true); - ErrorReporter expectedErrorStatsReporter = ErrorReporterFactory.getErrorReporter(defaultRuntimeContext, configuration); - FlinkKafkaProducerCustom flinkKafkaProducerCustom = new FlinkKafkaProducerCustom(flinkKafkaProducer, configuration); - assertEquals(expectedErrorStatsReporter.getClass(), flinkKafkaProducerCustom.getErrorReporter(defaultRuntimeContext).getClass()); - } - - public class FlinkKafkaProducerCustomStub extends FlinkKafkaProducerCustom { - FlinkKafkaProducerCustomStub(FlinkKafkaProducer flinkKafkaProducer, Configuration configuration) { - super(flinkKafkaProducer, configuration); - } - - @Override - public RuntimeContext getRuntimeContext() { - return defaultRuntimeContext; - } - - protected ErrorReporter getErrorReporter(RuntimeContext runtimeContext) { - if (configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)) { - return errorStatsReporter; - } else { - return noOpErrorReporter; - } - } - - protected void invokeBaseProducer(Row value, Context context) { - throw new RuntimeException("test producer exception"); - } - } -} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/ProtoSerializerTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/sink/ProtoSerializerTest.java index ffb165a77..970b262df 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/ProtoSerializerTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/sink/ProtoSerializerTest.java @@ -64,7 +64,7 @@ public void shouldSerializeKeyForProto() throws InvalidProtocolBufferException { element.setField(3, 3322909458387959808L); element.setField(4, TestServiceType.Enum.GO_RIDE); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, seconds); TestSerDeLogKey actualKey = TestSerDeLogKey.parseFrom(producerRecord.key()); @@ -114,7 +114,7 @@ public void shouldSerializeMessageProto() throws InvalidProtocolBufferException put("key", "value"); }}); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, seconds); TestSerDeLogMessage actualMessage = TestSerDeLogMessage.parseFrom(producerRecord.value()); @@ -147,7 +147,7 @@ public void shouldSerializeDataForOneFieldInNestedProtoWhenMappedFromQuery() thr element.setField(0, "test-id"); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); TestEnrichedBookingLogMessage actualValue = TestEnrichedBookingLogMessage.parseFrom(producerRecord.value()); @@ -167,7 +167,7 @@ public void shouldSerializeDataForMultipleFieldsInSameNestedProtoWhenMappedFromQ element.setField(1, "test_email@go-jek.com"); element.setField(2, true); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); TestEnrichedBookingLogMessage actualValue = TestEnrichedBookingLogMessage.parseFrom(producerRecord.value()); @@ -192,7 +192,7 @@ public void 
shouldSerializeDataForMultipleFieldsInDifferentNestedProtoWhenMapped element.setField(4, "driver_name"); element.setField(5, 876D); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); TestBookingLogMessage actualValue = TestBookingLogMessage.parseFrom(producerRecord.value()); @@ -215,7 +215,7 @@ public void shouldThrowExceptionWhenColumnDoesNotExists() { element.setField(1, 876D); InvalidColumnMappingException exception = assertThrows(InvalidColumnMappingException.class, - () -> protoSerializer.serialize(element, null)); + () -> protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000)); assertEquals("column invalid doesn't exists in the proto of io.odpf.dagger.consumer.TestLocation", exception.getMessage()); @@ -231,7 +231,7 @@ public void shouldMapOtherFieldsWhenOneOfTheFirstFieldIsInvalidForANestedFieldIn element.setField(0, "order_number"); element.setField(1, "customer_email@go-jek.com"); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); assertEquals("customer_email@go-jek.com", TestBookingLogMessage.parseFrom(producerRecord.value()).getCustomerEmail()); } @@ -245,7 +245,7 @@ public void shouldMapEmptyDataWhenFieldIsInvalidInTheQuery() { Row element = new Row(1); element.setField(0, "order_number"); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); assertEquals(0, producerRecord.value().length); } @@ -260,7 +260,7 @@ public void shouldMapOtherFieldsWhenOneOfTheFieldIsInvalidInTheQuery() throws In element.setField(0, "some_data"); element.setField(1, "order_number"); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); assertEquals("order_number", TestBookingLogMessage.parseFrom(producerRecord.value()).getOrderNumber()); } @@ -275,7 +275,7 @@ public void shouldThrowExceptionWhenTypeDoesNotMatch() { element.setField(0, 1234); InvalidColumnMappingException exception = assertThrows(InvalidColumnMappingException.class, - () -> protoSerializer.serialize(element, null)); + () -> protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000)); assertEquals("column invalid: type mismatch of column order_number, expecting STRING type. Actual type class java.lang.Integer", exception.getMessage()); } @@ -291,7 +291,7 @@ public void shouldHandleRepeatedTypeWhenTypeDoesNotMatch() { element.setField(0, 1234); InvalidColumnMappingException exception = assertThrows(InvalidColumnMappingException.class, - () -> protoSerializer.serialize(element, null)); + () -> protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000)); assertEquals("column invalid: type mismatch of column meta_array, expecting REPEATED STRING type. 
Actual type class java.lang.Integer", exception.getMessage()); } @@ -308,7 +308,7 @@ public void shouldSerializeMessageWhenOnlyMessageProtoProvided() throws InvalidP element.setField(0, orderNumber); element.setField(1, "DR-124"); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); TestBookingLogMessage actualMessage = TestBookingLogMessage.parseFrom(producerRecord.value()); assertEquals(orderNumber, actualMessage.getOrderNumber()); @@ -334,7 +334,7 @@ public void shouldReturnNullKeyWhenOnlyMessageProtoProvided() { Row element = new Row(1); element.setField(0, 13); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); assertNull(producerRecord.key()); assertNotNull(producerRecord.value()); @@ -349,7 +349,7 @@ public void shouldReturnNullKeyWhenKeyIsEmptyString() { Row element = new Row(1); element.setField(0, 13); - ProducerRecord producerRecord = protoSerializer.serialize(element, null); + ProducerRecord producerRecord = protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); assertNull(producerRecord.key()); assertNotNull(producerRecord.value()); @@ -365,7 +365,7 @@ public void shouldThrowDescriptorNotFoundException() { Row element = new Row(1); element.setField(0, s2IdLevel); - protoSerializer.serialize(element, null); + protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000); } @Test @@ -377,7 +377,7 @@ public void shouldThrowExceptionWhenOutputTopicIsNullForSerializeMethod() { Row element = new Row(1); element.setField(0, "1234"); DaggerSerializationException exception = assertThrows(DaggerSerializationException.class, - () -> protoSerializer.serialize(element, null)); + () -> protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000)); assertEquals("outputTopic is required", exception.getMessage()); } @@ -391,7 +391,7 @@ public void shouldThrowExceptionWhenOutputTopicIsEmptyForSerializeMethod() { element.setField(0, "1234"); DaggerSerializationException exception = assertThrows(DaggerSerializationException.class, - () -> protoSerializer.serialize(element, null)); + () -> protoSerializer.serialize(element, null, System.currentTimeMillis() / 1000)); assertEquals("outputTopic is required", exception.getMessage()); } } diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/SinkOrchestratorTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/sink/SinkOrchestratorTest.java index 530e9b085..9e7c7fe57 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/SinkOrchestratorTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/sink/SinkOrchestratorTest.java @@ -1,10 +1,11 @@ package io.odpf.dagger.core.sink; -import org.apache.flink.api.common.functions.Function; +import org.apache.flink.api.connector.sink.Sink; +import org.apache.flink.connector.kafka.sink.KafkaSink; import io.odpf.dagger.common.configuration.Configuration; import io.odpf.dagger.common.core.StencilClientOrchestrator; -import io.odpf.dagger.core.sink.influx.InfluxRowSink; +import io.odpf.dagger.core.sink.influx.InfluxDBSink; import io.odpf.dagger.core.sink.log.LogSink; import org.junit.Before; import org.junit.Test; @@ -44,15 +45,15 @@ public void setup() { @Test public void shouldGiveInfluxSinkWhenConfiguredToUseInflux() throws Exception { 
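// getSink(...) now returns implementations of the unified
// org.apache.flink.api.connector.sink.Sink contract (InfluxDBSink, LogSink, or a
// vanilla KafkaSink) instead of SinkFunctions, so each test below drives SINK_TYPE
// and asserts on the concrete Sink subtype.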
when(configuration.getString(eq("SINK_TYPE"), anyString())).thenReturn("influx"); - Function sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); + Sink sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); - assertThat(sinkFunction, instanceOf(InfluxRowSink.class)); + assertThat(sinkFunction, instanceOf(InfluxDBSink.class)); } @Test public void shouldGiveLogSinkWhenConfiguredToUseLog() throws Exception { when(configuration.getString(eq("SINK_TYPE"), anyString())).thenReturn("log"); - Function sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); + Sink sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); assertThat(sinkFunction, instanceOf(LogSink.class)); } @@ -60,9 +61,9 @@ public void shouldGiveLogSinkWhenConfiguredToUseLog() throws Exception { @Test public void shouldGiveInfluxWhenConfiguredToUseNothing() throws Exception { when(configuration.getString(eq("SINK_TYPE"), anyString())).thenReturn(""); - Function sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); + Sink sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); - assertThat(sinkFunction, instanceOf(InfluxRowSink.class)); + assertThat(sinkFunction, instanceOf(InfluxDBSink.class)); } @@ -83,9 +84,9 @@ public void shouldGiveKafkaProducerWhenConfiguredToUseKafkaSink() throws Excepti when(configuration.getString(eq("SINK_KAFKA_BROKERS"), anyString())).thenReturn("output_broker:2667"); when(configuration.getString(eq("SINK_KAFKA_TOPIC"), anyString())).thenReturn("output_topic"); - Function sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); + Sink sinkFunction = sinkOrchestrator.getSink(configuration, new String[]{}, stencilClientOrchestrator); - assertThat(sinkFunction, instanceOf(FlinkKafkaProducerCustom.class)); + assertThat(sinkFunction, instanceOf(KafkaSink.class)); } @Test diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBSinkTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBSinkTest.java new file mode 100644 index 000000000..d2cafe64d --- /dev/null +++ b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBSinkTest.java @@ -0,0 +1,101 @@ +package io.odpf.dagger.core.sink.influx; + +import org.apache.flink.api.connector.sink.Sink.InitContext; +import org.apache.flink.api.connector.sink.SinkWriter; +import org.apache.flink.metrics.Counter; +import org.apache.flink.metrics.groups.SinkWriterMetricGroup; +import org.apache.flink.types.Row; + +import io.odpf.dagger.common.configuration.Configuration; +import io.odpf.dagger.core.utils.Constants; +import org.influxdb.InfluxDB; +import org.influxdb.InfluxDBException; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; + +import static io.odpf.dagger.core.utils.Constants.*; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +public class 
InfluxDBSinkTest { + + private static final int SINK_INFLUX_BATCH_SIZE = 100; + private static final int INFLUX_FLUSH_DURATION = 1000; + + @Mock + private Configuration configuration; + + @Mock + private InitContext context; + + @Mock + private InfluxDBFactoryWrapper influxDBFactory; + + @Mock + private InfluxDB influxDb; + + @Mock + private Counter counter; + + @Mock + private SinkWriterMetricGroup metricGroup; + + private ErrorHandler errorHandler = new ErrorHandler(); + + @Before + public void setUp() throws Exception { + initMocks(this); + when(configuration.getString(SINK_INFLUX_URL_KEY, SINK_INFLUX_URL_DEFAULT)).thenReturn("http://localhost:1111"); + when(configuration.getString(SINK_INFLUX_USERNAME_KEY, SINK_INFLUX_USERNAME_DEFAULT)).thenReturn("usr"); + when(configuration.getString(SINK_INFLUX_PASSWORD_KEY, SINK_INFLUX_PASSWORD_DEFAULT)).thenReturn("pwd"); + + when(configuration.getInteger(SINK_INFLUX_BATCH_SIZE_KEY, SINK_INFLUX_BATCH_SIZE_DEFAULT)).thenReturn(100); + when(configuration.getInteger(SINK_INFLUX_FLUSH_DURATION_MS_KEY, SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT)).thenReturn(1000); + + when(influxDBFactory.connect(any(), any(), any())).thenReturn(influxDb); + when(context.metricGroup()).thenReturn(metricGroup); + when(metricGroup.addGroup(Constants.SINK_INFLUX_LATE_RECORDS_DROPPED_KEY)).thenReturn(metricGroup); + when(metricGroup.addGroup(Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY, + InfluxDBException.class.getName())).thenReturn(metricGroup); + when(metricGroup.counter("value")).thenReturn(counter); + } + + + @Test + public void shouldCallInfluxDbFactoryWhileCreatingWriter() throws Exception { + InfluxDBSink influxDBSink = new InfluxDBSink(influxDBFactory, configuration, new String[]{}, errorHandler); + List state = new ArrayList<>(); + influxDBSink.createWriter(context, state); + + verify(influxDBFactory).connect("http://localhost:1111", "usr", "pwd"); + } + + @Test + public void shouldCreateInfluxWriter() throws IOException { + InfluxDBSink influxDBSink = new InfluxDBSink(influxDBFactory, configuration, new String[]{}, errorHandler); + List state = new ArrayList<>(); + SinkWriter writer = influxDBSink.createWriter(context, state); + + assertEquals(writer.getClass(), InfluxDBWriter.class); + } + + @Test + public void shouldCallBatchModeOnInfluxWhenBatchSettingsExist() throws Exception { + InfluxDBSink influxDBSink = new InfluxDBSink(influxDBFactory, configuration, new String[]{}, errorHandler); + List state = new ArrayList<>(); + influxDBSink.createWriter(context, state); + verify(influxDb).enableBatch(eq(SINK_INFLUX_BATCH_SIZE), eq(INFLUX_FLUSH_DURATION), eq(TimeUnit.MILLISECONDS), any(ThreadFactory.class), any(BiConsumer.class)); + } +} diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/InfluxRowSinkTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBWriterTest.java similarity index 67% rename from dagger-core/src/test/java/io/odpf/dagger/core/sink/InfluxRowSinkTest.java rename to dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBWriterTest.java index b1bb530ad..427752afd 100644 --- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/InfluxRowSinkTest.java +++ b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/InfluxDBWriterTest.java @@ -1,15 +1,14 @@ -package io.odpf.dagger.core.sink; +package io.odpf.dagger.core.sink.influx; -import org.apache.flink.api.common.functions.RuntimeContext; +import org.apache.flink.api.connector.sink.Sink.InitContext; +import 
org.apache.flink.api.connector.sink.SinkWriter.Context; import org.apache.flink.metrics.Counter; -import org.apache.flink.metrics.groups.OperatorMetricGroup; +import org.apache.flink.metrics.groups.SinkWriterMetricGroup; import org.apache.flink.types.Row; import io.odpf.dagger.common.configuration.Configuration; +import io.odpf.dagger.core.exception.InfluxWriteException; import io.odpf.dagger.core.metrics.reporters.ErrorReporter; -import io.odpf.dagger.core.sink.influx.ErrorHandler; -import io.odpf.dagger.core.sink.influx.InfluxDBFactoryWrapper; -import io.odpf.dagger.core.sink.influx.InfluxRowSink; import io.odpf.dagger.core.utils.Constants; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBException; @@ -22,10 +21,10 @@ import java.sql.Timestamp; import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneOffset; import java.util.ArrayList; -import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; -import java.util.function.BiConsumer; import static io.odpf.dagger.core.utils.Constants.*; import static org.junit.Assert.assertEquals; @@ -38,59 +37,48 @@ import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; - -public class InfluxRowSinkTest { - - private static final int SINK_INFLUX_BATCH_SIZE = 100; - private static final int INFLUX_FLUSH_DURATION = 1000; +public class InfluxDBWriterTest { @Mock private Configuration configuration; + @Mock private InfluxDBFactoryWrapper influxDBFactory; + @Mock private InfluxDB influxDb; + @Mock - private InfluxRowSink influxRowSink; - @Mock - private RuntimeContext runtimeContext; - @Mock - private OperatorMetricGroup metricGroup; + private SinkWriterMetricGroup metricGroup; + @Mock private ErrorReporter errorReporter; + @Mock private Counter counter; - private ErrorHandler errorHandler = new ErrorHandler(); + @Mock + private Context context; + + @Mock + private InitContext initContext; + + private ErrorHandler errorHandler = new ErrorHandler(); @Before public void setUp() throws Exception { initMocks(this); - when(configuration.getString(SINK_INFLUX_URL_KEY, SINK_INFLUX_URL_DEFAULT)).thenReturn("http://localhost:1111"); - when(configuration.getString(SINK_INFLUX_USERNAME_KEY, SINK_INFLUX_USERNAME_DEFAULT)).thenReturn("usr"); - when(configuration.getString(SINK_INFLUX_PASSWORD_KEY, SINK_INFLUX_PASSWORD_DEFAULT)).thenReturn("pwd"); - when(configuration.getInteger(SINK_INFLUX_BATCH_SIZE_KEY, SINK_INFLUX_BATCH_SIZE_DEFAULT)).thenReturn(100); - - when(configuration.getInteger(SINK_INFLUX_BATCH_SIZE_KEY, SINK_INFLUX_BATCH_SIZE_DEFAULT)).thenReturn(100); - when(configuration.getInteger(SINK_INFLUX_FLUSH_DURATION_MS_KEY, SINK_INFLUX_FLUSH_DURATION_MS_DEFAULT)).thenReturn(1000); - when(configuration.getString(SINK_INFLUX_DB_NAME_KEY, SINK_INFLUX_DB_NAME_DEFAULT)).thenReturn("dagger_test"); when(configuration.getString(SINK_INFLUX_RETENTION_POLICY_KEY, SINK_INFLUX_RETENTION_POLICY_DEFAULT)).thenReturn("two_day_policy"); when(configuration.getString(SINK_INFLUX_MEASUREMENT_NAME_KEY, SINK_INFLUX_MEASUREMENT_NAME_DEFAULT)).thenReturn("test_table"); - when(influxDBFactory.connect(any(), any(), any())).thenReturn(influxDb); - when(runtimeContext.getMetricGroup()).thenReturn(metricGroup); + when(initContext.metricGroup()).thenReturn(metricGroup); when(metricGroup.addGroup(Constants.SINK_INFLUX_LATE_RECORDS_DROPPED_KEY)).thenReturn(metricGroup); when(metricGroup.addGroup(Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY, InfluxDBException.class.getName())).thenReturn(metricGroup); 
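// addGroup(...) is stubbed to return the same mock so the error handlers'
// chained metricGroup.addGroup(...).counter("value") lookups stay on one mock;
// the counter stub that follows completes the chain.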
         when(metricGroup.counter("value")).thenReturn(counter);
     }
 
-    private void setupStubedInfluxDB(String[] rowColumns) throws Exception {
-        influxRowSink = new InfluxRowSinkStub(influxDBFactory, rowColumns, configuration, errorHandler, errorReporter);
-        influxRowSink.open(null);
-    }
-
-    private Point getPoint() {
+    private Row getRow() {
         final int numberOfRows = 3;
         final int integerTag = 123;
         final int expectedFieldOneValue = 100;
@@ -98,15 +86,11 @@ private Point getPoint() {
         Row simpleFieldsRow = new Row(numberOfRows);
         simpleFieldsRow.setField(0, integerTag);
         simpleFieldsRow.setField(1, expectedFieldOneValue);
-        simpleFieldsRow.setField(2, Timestamp.from(now));
-        String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
-        return Point.measurement("test_table")
-                .tag(rowColumns[0], String.valueOf(integerTag))
-                .addField(rowColumns[1], expectedFieldOneValue)
-                .time(Timestamp.from(now).getTime(), TimeUnit.MILLISECONDS).build();
+        simpleFieldsRow.setField(2, LocalDateTime.ofInstant(now, ZoneOffset.UTC));
+        return simpleFieldsRow;
     }
 
-    private Row getRow() {
+    private Point getPoint() {
         final int numberOfRows = 3;
         final int integerTag = 123;
         final int expectedFieldOneValue = 100;
@@ -114,40 +98,20 @@ private Row getRow() {
         Row simpleFieldsRow = new Row(numberOfRows);
         simpleFieldsRow.setField(0, integerTag);
         simpleFieldsRow.setField(1, expectedFieldOneValue);
-        simpleFieldsRow.setField(2, Timestamp.from(now));
-        return simpleFieldsRow;
-    }
-
-    @Test
-    public void shouldCallInfluxDbFactoryOnOpen() throws Exception {
-        setupStubedInfluxDB(new String[]{});
-
-        verify(influxDBFactory).connect("http://localhost:1111", "usr", "pwd");
-    }
-
-    @Test
-    public void shouldCallBatchModeOnInfluxWhenBatchSettingsExist() throws Exception {
-        setupStubedInfluxDB(new String[]{});
-
-        verify(influxDb).enableBatch(eq(SINK_INFLUX_BATCH_SIZE), eq(INFLUX_FLUSH_DURATION), eq(TimeUnit.MILLISECONDS), any(ThreadFactory.class), any(BiConsumer.class));
-    }
-
-    @Test
-    public void shouldCloseInfluxDBWhenCloseCalled() throws Exception {
-        setupStubedInfluxDB(new String[]{});
-
-        influxRowSink.close();
-
-        verify(influxDb).close();
+        simpleFieldsRow.setField(2, LocalDateTime.ofInstant(now, ZoneOffset.UTC));
+        String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
+        return Point.measurement("test_table")
+                .tag(rowColumns[0], String.valueOf(integerTag))
+                .addField(rowColumns[1], expectedFieldOneValue)
+                .time(now.toEpochMilli(), TimeUnit.MILLISECONDS).build();
     }
 
     @Test
     public void shouldWriteToConfiguredInfluxDatabase() throws Exception {
-        setupStubedInfluxDB(new String[]{"some_field_name"});
-
         Row row = new Row(1);
         row.setField(0, "some field");
-        influxRowSink.invoke(row, null);
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, new String[]{"some_field_name"}, errorHandler, errorReporter);
+        influxDBWriter.write(row, context);
 
         verify(influxDb).write(eq("dagger_test"), eq("two_day_policy"), any());
     }
@@ -161,23 +125,22 @@ public void shouldWriteRowToInfluxAsfields() throws Exception {
         Row simpleFieldsRow = new Row(numberOfRows);
         simpleFieldsRow.setField(0, expectedFieldZeroValue);
         simpleFieldsRow.setField(1, expectedFieldOneValue);
-        simpleFieldsRow.setField(2, Timestamp.from(now));
+        simpleFieldsRow.setField(2, LocalDateTime.ofInstant(now, ZoneOffset.UTC));
         String[] rowColumns = {"field1", "field2", "window_timestamp"};
         Point expectedPoint = Point.measurement("test_table")
                 .addField(rowColumns[0], expectedFieldZeroValue)
                 .addField(rowColumns[1], expectedFieldOneValue)
-                .time(Timestamp.from(now).getTime(), TimeUnit.MILLISECONDS).build();
-
-        setupStubedInfluxDB(rowColumns);
-
-        influxRowSink.invoke(simpleFieldsRow, null);
+                .time(now.toEpochMilli(), TimeUnit.MILLISECONDS).build();
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
+        influxDBWriter.write(simpleFieldsRow, context);
 
         ArgumentCaptor<Point> pointArg = ArgumentCaptor.forClass(Point.class);
         verify(influxDb).write(any(), any(), pointArg.capture());
 
         assertEquals(expectedPoint.lineProtocol(), pointArg.getValue().lineProtocol());
     }
 
+
     @Test
     public void shouldNotWriteNullColumnsInRowToInfluxAsfields() throws Exception {
         final int numberOfRows = 3;
@@ -187,16 +150,14 @@ public void shouldNotWriteNullColumnsInRowToInfluxAsfields() throws Exception {
         Row simpleFieldsRow = new Row(numberOfRows);
         simpleFieldsRow.setField(0, integerValue);
         simpleFieldsRow.setField(1, nullValue);
-        simpleFieldsRow.setField(2, Timestamp.from(now));
+        simpleFieldsRow.setField(2, LocalDateTime.ofInstant(now, ZoneOffset.UTC));
         String[] rowColumns = {"field1", "field2", "window_timestamp"};
         Point expectedPoint = Point.measurement("test_table")
                 .addField(rowColumns[0], integerValue)
                 .time(Timestamp.from(now).getTime(), TimeUnit.MILLISECONDS).build();
-        setupStubedInfluxDB(rowColumns);
-
-        influxRowSink.invoke(simpleFieldsRow, null);
-
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
+        influxDBWriter.write(simpleFieldsRow, context);
         ArgumentCaptor<Point> pointArg = ArgumentCaptor.forClass(Point.class);
         verify(influxDb).write(any(), any(), pointArg.capture());
@@ -212,17 +173,15 @@ public void shouldWriteRowWithTagColumns() throws Exception {
         Row simpleFieldsRow = new Row(numberOfRows);
         simpleFieldsRow.setField(0, expectedFieldZeroValue);
         simpleFieldsRow.setField(1, expectedFieldOneValue);
-        simpleFieldsRow.setField(2, Timestamp.from(now));
+        simpleFieldsRow.setField(2, LocalDateTime.ofInstant(now, ZoneOffset.UTC));
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
         Point expectedPoint = Point.measurement("test_table")
                 .tag(rowColumns[0], expectedFieldZeroValue)
                 .addField(rowColumns[1], expectedFieldOneValue)
                 .time(Timestamp.from(now).getTime(), TimeUnit.MILLISECONDS).build();
-        setupStubedInfluxDB(rowColumns);
-
-        influxRowSink.invoke(simpleFieldsRow, null);
-
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
+        influxDBWriter.write(simpleFieldsRow, context);
         ArgumentCaptor<Point> pointArg = ArgumentCaptor.forClass(Point.class);
         verify(influxDb).write(any(), any(), pointArg.capture());
@@ -238,17 +197,15 @@ public void shouldWriteRowWithTagColumnsOfTypeInteger() throws Exception {
         Row simpleFieldsRow = new Row(numberOfRows);
         simpleFieldsRow.setField(0, integerTag);
         simpleFieldsRow.setField(1, expectedFieldOneValue);
-        simpleFieldsRow.setField(2, Timestamp.from(now));
+        simpleFieldsRow.setField(2, LocalDateTime.ofInstant(now, ZoneOffset.UTC));
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
         Point expectedPoint = Point.measurement("test_table")
                 .tag(rowColumns[0], String.valueOf(integerTag))
                 .addField(rowColumns[1], expectedFieldOneValue)
                 .time(Timestamp.from(now).getTime(), TimeUnit.MILLISECONDS).build();
-        setupStubedInfluxDB(rowColumns);
-
-        influxRowSink.invoke(simpleFieldsRow, null);
-
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
+        influxDBWriter.write(simpleFieldsRow, context);
         ArgumentCaptor<Point> pointArg = ArgumentCaptor.forClass(Point.class);
         verify(influxDb).write(any(), any(), pointArg.capture());
@@ -264,17 +221,15 @@ public void shouldWriteRowWithLabelColumns() throws Exception {
         Row simpleFieldsRow = new Row(numberOfRows);
         simpleFieldsRow.setField(0, expectedFieldZeroValue);
         simpleFieldsRow.setField(1, expectedFieldOneValue);
-        simpleFieldsRow.setField(2, Timestamp.from(now));
+        simpleFieldsRow.setField(2, LocalDateTime.ofInstant(now, ZoneOffset.UTC));
         String[] rowColumns = {"label_field1", "field2", "window_timestamp"};
         Point expectedPoint = Point.measurement("test_table")
                 .tag(rowColumns[0].substring("label_".length()), expectedFieldZeroValue)
                 .addField(rowColumns[1], expectedFieldOneValue)
                 .time(Timestamp.from(now).getTime(), TimeUnit.MILLISECONDS).build();
-        setupStubedInfluxDB(rowColumns);
-
-        influxRowSink.invoke(simpleFieldsRow, null);
-
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
+        influxDBWriter.write(simpleFieldsRow, context);
         ArgumentCaptor<Point> pointArg = ArgumentCaptor.forClass(Point.class);
         verify(influxDb).write(any(), any(), pointArg.capture());
@@ -283,109 +238,108 @@ public void shouldWriteRowWithLabelColumns() throws Exception {
 
     @Test(expected = RuntimeException.class)
     public void shouldThrowIfExceptionInWrite() throws Exception {
-        setupStubedInfluxDB(new String[]{"some_field_name"});
-
+        String[] columns = {"some_field_name"};
         Row row = new Row(1);
         row.setField(0, "some field");
         doThrow(new RuntimeException()).when(influxDb).write(any(), any(), any());
-        influxRowSink.invoke(row, null);
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, columns, errorHandler, errorReporter);
+        influxDBWriter.write(row, context);
     }
 
     @Test
     public void shouldReportIncaseOfFatalError() throws Exception {
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
         Point point = getPoint();
-        setupStubedInfluxDB(rowColumns);
         ArrayList<Point> points = new ArrayList<>();
         points.add(point);
+
+        errorHandler.init(initContext);
         errorHandler.getExceptionHandler().accept(points, new RuntimeException("exception from handler"));
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
         Exception exception = assertThrows(Exception.class,
-                () -> influxRowSink.invoke(getRow(), null));
-        assertEquals("exception from handler", exception.getMessage());
-        verify(errorReporter, times(1)).reportFatalException(any(RuntimeException.class));
+                () -> influxDBWriter.write(getRow(), context));
+        assertEquals("java.lang.RuntimeException: exception from handler", exception.getMessage());
+        verify(errorReporter, times(1)).reportFatalException(any(InfluxWriteException.class));
     }
 
     @Test
     public void shouldReportInCaseOfMaxSeriesExceeded() throws Exception {
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
         Point point = getPoint();
-        setupStubedInfluxDB(rowColumns);
+
         ArrayList<Point> points = new ArrayList<>();
         points.add(point);
+
+        errorHandler.init(initContext);
         errorHandler.getExceptionHandler().accept(points, new InfluxDBException("{\"error\":\"partial write:"
                 + " max-values-per-tag limit exceeded (100453/100000)"));
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
         Exception exception = assertThrows(Exception.class,
-                () -> influxRowSink.invoke(getRow(), null));
-        assertEquals("{\"error\":\"partial write: max-values-per-tag limit exceeded (100453/100000)", exception.getMessage());
-        verify(errorReporter, times(1)).reportFatalException(any(InfluxDBException.class));
+                () -> influxDBWriter.write(getRow(), context));
+        assertEquals("org.influxdb.InfluxDBException: {\"error\":\"partial write: max-values-per-tag limit exceeded (100453/100000)", exception.getMessage());
+        verify(errorReporter, times(1)).reportFatalException(any(InfluxWriteException.class));
     }
 
     @Test
     public void shouldNotReportInCaseOfFailedRecordFatalError() throws Exception {
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
         Point point = getPoint();
-        setupStubedInfluxDB(rowColumns);
+
         ArrayList<Point> points = new ArrayList<>();
         points.add(point);
+
+        errorHandler.init(initContext);
         errorHandler.getExceptionHandler().accept(points, new InfluxDBException("{\"error\":\"partial write: points beyond retention policy dropped=11\"}"));
 
-        influxRowSink.invoke(getRow(), null);
-
-        verify(errorReporter, times(0)).reportFatalException(any());
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
+        influxDBWriter.write(getRow(), context);
+        verify(errorReporter, times(0)).reportFatalException(any(InfluxWriteException.class));
     }
 
     @Test
     public void invokeShouldThrowErrorSetByHandler() throws Exception {
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
         Point point = getPoint();
-        setupStubedInfluxDB(rowColumns);
+
         ArrayList<Point> points = new ArrayList<>();
         points.add(point);
+
+        errorHandler.init(initContext);
         errorHandler.getExceptionHandler().accept(points, new RuntimeException("exception from handler"));
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
 
-        RuntimeException exception = assertThrows(RuntimeException.class,
-                () -> influxRowSink.invoke(getRow(), null));
-        assertEquals("exception from handler", exception.getMessage());
+        InfluxWriteException exception = assertThrows(InfluxWriteException.class,
+                () -> influxDBWriter.write(getRow(), context));
+        assertEquals("java.lang.RuntimeException: exception from handler", exception.getMessage());
     }
 
     @Test
     public void failSnapshotStateOnInfluxError() throws Exception {
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
-        setupStubedInfluxDB(rowColumns);
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
+        errorHandler.init(initContext);
         errorHandler.getExceptionHandler().accept(new ArrayList<>(), new RuntimeException("exception from handler"));
 
-        RuntimeException exception = assertThrows(RuntimeException.class,
-                () -> influxRowSink.snapshotState(null));
-        assertEquals("exception from handler", exception.getMessage());
+        InfluxWriteException exception = assertThrows(InfluxWriteException.class,
+                () -> influxDBWriter.snapshotState(10L));
+        assertEquals("java.lang.RuntimeException: exception from handler", exception.getMessage());
    }
 
     @Test
     public void failSnapshotStateOnFlushFailure() throws Exception {
         String[] rowColumns = {"tag_field1", "field2", "window_timestamp"};
-        setupStubedInfluxDB(rowColumns);
-
+        InfluxDBWriter influxDBWriter = new InfluxDBWriter(configuration, influxDb, rowColumns, errorHandler, errorReporter);
         Mockito.doThrow(new RuntimeException("exception from flush")).when(influxDb).flush();
 
-        RuntimeException exception = assertThrows(RuntimeException.class,
-                () -> influxRowSink.snapshotState(null));
+        Exception exception = assertThrows(Exception.class,
+                () -> influxDBWriter.snapshotState(10L));
         assertEquals("exception from flush", exception.getMessage());
     }
-
-    public class InfluxRowSinkStub extends InfluxRowSink {
-        public InfluxRowSinkStub(InfluxDBFactoryWrapper influxDBFactory, String[] columnNames,
-                                 Configuration configuration, ErrorHandler errorHandler, ErrorReporter errorReporter) {
-            super(influxDBFactory, columnNames, configuration, errorHandler, errorReporter);
-        }
-
-        @Override
-        public RuntimeContext getRuntimeContext() {
-            return runtimeContext;
-        }
-
-    }
 }
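Reviewer note on the writer lifecycle: the tests above no longer open a stubbed sink; they construct InfluxDBWriter directly and drive ErrorHandler's batch-error callback by hand via errorHandler.getExceptionHandler().accept(points, error). In production that BiConsumer is registered with influxdb-java's batching, the same enableBatch overload the deleted shouldCallBatchModeOnInfluxWhenBatchSettingsExist test used to verify. A minimal sketch of that wiring, using only APIs visible in this diff; the helper class and method names are illustrative, not code from this PR:

    import java.util.concurrent.TimeUnit;
    import java.util.function.BiConsumer;

    import org.influxdb.InfluxDB;
    import org.influxdb.dto.Point;

    import io.odpf.dagger.core.sink.influx.ErrorHandler; // package as in the removed test import above

    final class BatchErrorWiring {
        static void enableBatchedWrites(InfluxDB influxDb, ErrorHandler errorHandler,
                                        int batchSize, int flushDurationMs) {
            // The same BiConsumer the tests invoke directly via accept(points, error).
            BiConsumer<Iterable<Point>, Throwable> onBatchError = errorHandler.getExceptionHandler();
            // influxdb-java calls the handler from its flush thread when an async batch
            // write fails; the handler only records the error.
            influxDb.enableBatch(batchSize, flushDurationMs, TimeUnit.MILLISECONDS,
                    Thread::new, onBatchError);
        }
    }

Because the callback fires off the task thread, the recorded failure must be rethrown later, which is exactly what invokeShouldThrowErrorSetByHandler and failSnapshotStateOnInfluxError assert: the next write() or snapshotState(10L) surfaces it as an InfluxWriteException.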
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java
index 29ef59a45..8ab25a6ea 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java
+++ b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/LateRecordDropErrorTest.java
@@ -1,10 +1,10 @@
 package io.odpf.dagger.core.sink.influx.errors;
 
-import org.apache.flink.api.common.functions.RuntimeContext;
+import org.apache.flink.api.connector.sink.Sink.InitContext;
 import org.apache.flink.metrics.Counter;
 import org.apache.flink.metrics.MetricGroup;
 import org.apache.flink.metrics.SimpleCounter;
-import org.apache.flink.metrics.groups.OperatorMetricGroup;
+import org.apache.flink.metrics.groups.SinkWriterMetricGroup;
 
 import io.odpf.dagger.core.utils.Constants;
 import org.influxdb.InfluxDBException;
@@ -26,10 +26,10 @@ public class LateRecordDropErrorTest {
 
     @Mock
-    private RuntimeContext runtimeContext;
+    private InitContext initContext;
 
     @Mock
-    private OperatorMetricGroup metricGroup;
+    private SinkWriterMetricGroup metricGroup;
 
     @Mock
     private MetricGroup metricGroupForLateRecords;
@@ -43,7 +43,7 @@ public class LateRecordDropErrorTest {
     @Before
     public void setUp() {
         initMocks(this);
-        when(runtimeContext.getMetricGroup()).thenReturn(metricGroup);
+        when(initContext.metricGroup()).thenReturn(metricGroup);
         when(metricGroup.addGroup(Constants.SINK_INFLUX_LATE_RECORDS_DROPPED_KEY)).thenReturn(metricGroup);
         when(metricGroup.addGroup(Constants.NONFATAL_EXCEPTION_METRIC_GROUP_KEY, InfluxDBException.class.getName())).thenReturn(metricGroup);
@@ -52,21 +52,21 @@ public void setUp() {
 
     @Test
     public void shouldFilterLateRecordDrops() {
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         assertTrue(lateRecordDropError
                 .filterError(new InfluxDBException("{\"error\":\"partial write: points beyond retention policy dropped=11\"}")));
     }
 
     @Test
     public void shouldNotFilterAnythingElseExceptRecordDrops() {
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         assertFalse(lateRecordDropError
                 .filterError(new InfluxDBException("{\"error\":\"partial write: max-values-per-tag limit exceeded (100453/100000)")));
     }
 
     @Test
     public void shouldParseNumberOfFailedPoints() {
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         lateRecordDropError.handle(points, new InfluxDBException("{\"error\":\"partial write: points beyond retention policy dropped=11\"}"));
@@ -76,7 +76,7 @@ public void shouldParseNumberOfFailedPoints() {
 
     @Test
     public void shouldReportNonFatalExceptionsInHandle() {
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         lateRecordDropError.handle(points, new InfluxDBException("{\"error\":\"partial write: points beyond retention policy dropped=11\"}"));
@@ -85,7 +85,7 @@ public void shouldReportNonFatalExceptionsInHandle() {
 
     @Test
     public void shouldReportCounterWithNum() {
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         lateRecordDropError.handle(points, new InfluxDBException("{\"error\":\"partial write: points beyond retention policy dropped=11\"}"));
@@ -94,13 +94,13 @@ public void shouldReportCounterWithNum() {
 
     @Test
     public void shouldNotReturnAnyError() {
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         assertNull(lateRecordDropError.getCurrentException());
     }
 
     @Test
     public void shouldHaveNoError() {
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         assertFalse(lateRecordDropError.hasException());
     }
@@ -109,7 +109,7 @@ public void shouldIncreaseTheCountersInCaseOfMultipleErrors() {
         SimpleCounter simpleCounter = new SimpleCounter();
         when(metricGroup.addGroup(Constants.SINK_INFLUX_LATE_RECORDS_DROPPED_KEY)).thenReturn(metricGroupForLateRecords);
         when(metricGroupForLateRecords.counter("value")).thenReturn(simpleCounter);
-        LateRecordDropError lateRecordDropError = new LateRecordDropError(runtimeContext);
+        LateRecordDropError lateRecordDropError = new LateRecordDropError(initContext);
         lateRecordDropError.handle(points, new InfluxDBException("{\"error\":\"partial write: points beyond retention policy dropped=11\"}"));
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidExceptionTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidExceptionTest.java
index 53b1e324b..731154eb3 100644
--- a/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidExceptionTest.java
+++ b/dagger-core/src/test/java/io/odpf/dagger/core/sink/influx/errors/ValidExceptionTest.java
@@ -36,6 +36,6 @@ public void shouldWrapErrorsInExceptions() {
         validException.handle(points, new Exception("Test"));
         Exception currentException = validException.getCurrentException();
         assertTrue(currentException instanceof Exception);
-        assertEquals("Test", currentException.getMessage());
+        assertEquals("java.lang.Exception: Test", currentException.getMessage());
     }
 }
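The new expected messages ("java.lang.Exception: Test" here, and the "java.lang.RuntimeException: ..." and "org.influxdb.InfluxDBException: ..." strings in InfluxDBWriterTest above) all follow from one Java fact: constructing a Throwable from a cause without an explicit message sets getMessage() to cause.toString(). A sketch of the pattern, assuming InfluxWriteException (whose body is outside this excerpt) wraps its cause that way:

    // Minimal sketch; the real InfluxWriteException is not part of this diff.
    // Throwable(Throwable cause) sets the detail message to cause.toString(),
    // which produces the "<fully.qualified.Name>: <message>" strings asserted above.
    public class InfluxWriteException extends RuntimeException {
        public InfluxWriteException(Throwable cause) {
            super(cause);
        }
    }

    class WrapDemo {
        public static void main(String[] args) {
            InfluxWriteException e = new InfluxWriteException(new RuntimeException("exception from handler"));
            // Prints: java.lang.RuntimeException: exception from handler
            System.out.println(e.getMessage());
        }
    }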
diff --git a/dagger-core/src/test/java/io/odpf/dagger/core/source/FlinkKafkaConsumerCustomTest.java b/dagger-core/src/test/java/io/odpf/dagger/core/source/FlinkKafkaConsumerCustomTest.java
deleted file mode 100644
index 15c191ff3..000000000
--- a/dagger-core/src/test/java/io/odpf/dagger/core/source/FlinkKafkaConsumerCustomTest.java
+++ /dev/null
@@ -1,128 +0,0 @@
-package io.odpf.dagger.core.source;
-
-import org.apache.flink.api.common.functions.RuntimeContext;
-import org.apache.flink.streaming.api.functions.source.SourceFunction;
-import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
-import org.apache.flink.streaming.runtime.tasks.ExceptionInChainedOperatorException;
-
-import io.odpf.dagger.common.configuration.Configuration;
-import io.odpf.dagger.core.metrics.reporters.ErrorReporter;
-import io.odpf.dagger.core.metrics.reporters.ErrorReporterFactory;
-import io.odpf.dagger.core.metrics.reporters.NoOpErrorReporter;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mock;
-
-import java.util.Properties;
-import java.util.regex.Pattern;
-
-import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_KEY;
-import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT;
-import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT;
-import static io.odpf.dagger.core.utils.Constants.METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-public class FlinkKafkaConsumerCustomTest {
-
-    @Mock
-    private SourceFunction.SourceContext defaultSourceContext;
-
-    @Mock
-    private Configuration configuration;
-
-    @Mock
-    private KafkaDeserializationSchema kafkaDeserializationSchema;
-
-    @Mock
-    private RuntimeContext defaultRuntimeContext;
-
-    @Mock
-    private Properties properties;
-
-    @Mock
-    private ErrorReporter errorReporter;
-
-    @Mock
-    private NoOpErrorReporter noOpErrorReporter;
-
-    private FlinkKafkaConsumerCustomStub flinkKafkaConsumer011Custom;
-
-    @Before
-    public void setup() {
-        initMocks(this);
-        flinkKafkaConsumer011Custom = new FlinkKafkaConsumerCustomStub(Pattern.compile("test_topics"), kafkaDeserializationSchema, properties, configuration, new RuntimeException("test exception"));
-    }
-
-    @Test
-    public void shouldReportIfTelemetryEnabled() {
-        when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true);
-        when(configuration.getLong(METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L);
-
-        Exception exception = Assert.assertThrows(Exception.class,
-                () -> flinkKafkaConsumer011Custom.run(defaultSourceContext));
-        assertEquals("test exception", exception.getMessage());
-        verify(errorReporter, times(1)).reportFatalException(any(RuntimeException.class));
-    }
-
-    @Test
-    public void shouldNotReportIfChainedOperatorException() {
-        when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true);
-        Throwable throwable = new Throwable();
-        flinkKafkaConsumer011Custom = new FlinkKafkaConsumerCustomStub(Pattern.compile("test_topics"), kafkaDeserializationSchema, properties, configuration, new ExceptionInChainedOperatorException("chaining exception", throwable));
-        Exception exception = Assert.assertThrows(Exception.class,
-                () -> flinkKafkaConsumer011Custom.run(defaultSourceContext));
-        assertEquals("chaining exception", exception.getMessage());
-        verify(errorReporter, times(0)).reportFatalException(any(RuntimeException.class));
-    }
-
-    @Test
-    public void shouldNotReportIfTelemetryDisabled() {
-        when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(false);
-        Exception exception = Assert.assertThrows(Exception.class,
-                () -> flinkKafkaConsumer011Custom.run(defaultSourceContext));
-        assertEquals("test exception", exception.getMessage());
-        verify(noOpErrorReporter, times(1)).reportFatalException(any(RuntimeException.class));
-    }
-
-    @Test
-    public void shouldReturnErrorStatsReporter() {
-        when(configuration.getLong(METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_KEY, METRIC_TELEMETRY_SHUTDOWN_PERIOD_MS_DEFAULT)).thenReturn(0L);
-        when(configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)).thenReturn(true);
-        ErrorReporter expectedErrorStatsReporter = ErrorReporterFactory.getErrorReporter(defaultRuntimeContext, configuration);
-        FlinkKafkaConsumerCustom flinkKafkaConsumerCustom = new FlinkKafkaConsumerCustom(Pattern.compile("test_topics"), kafkaDeserializationSchema, properties, configuration);
-        assertEquals(expectedErrorStatsReporter.getClass(), flinkKafkaConsumerCustom.getErrorReporter(defaultRuntimeContext).getClass());
-    }
-
-    public class FlinkKafkaConsumerCustomStub extends FlinkKafkaConsumerCustom {
-        private Exception exception;
-
-        public FlinkKafkaConsumerCustomStub(Pattern subscriptionPattern, KafkaDeserializationSchema deserializer,
-                                            Properties props, Configuration configuration, Exception exception) {
-            super(subscriptionPattern, deserializer, props, configuration);
-            this.exception = exception;
-        }
-
-        @Override
-        public RuntimeContext getRuntimeContext() {
-            return defaultRuntimeContext;
-        }
-
-        protected void runBaseConsumer(SourceContext sourceContext) throws Exception {
-            throw exception;
-        }
-
-        protected ErrorReporter getErrorReporter(RuntimeContext runtimeContext) {
-            if (configuration.getBoolean(METRIC_TELEMETRY_ENABLE_KEY, METRIC_TELEMETRY_ENABLE_VALUE_DEFAULT)) {
-                return errorReporter;
-            } else {
-                return noOpErrorReporter;
-            }
-        }
-    }
-}
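This file is deleted outright: with the legacy SourceFunction-based consumer gone there is no run() to override, so the stub-driven error-reporting tests have nothing to attach to. The replacement source is not part of this excerpt; assuming the usual Flink 1.14 migration target, the same topic-pattern subscription would be expressed with the unified KafkaSource builder. All names below are illustrative, not code from this PR:

    // Sketch only: the PR's actual replacement source is outside this excerpt.
    import java.util.Properties;
    import java.util.regex.Pattern;

    import org.apache.flink.connector.kafka.source.KafkaSource;
    import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
    import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
    import org.apache.flink.types.Row;

    final class KafkaSourceSketch {
        static KafkaSource<Row> build(Properties props, KafkaRecordDeserializationSchema<Row> deserializer) {
            // The unified Source API has no run() hook to intercept, so fatal-error
            // reporting can no longer live in a consumer subclass like the deleted stub.
            return KafkaSource.<Row>builder()
                    .setTopicPattern(Pattern.compile("test_topics"))
                    .setProperties(props)
                    .setStartingOffsets(OffsetsInitializer.earliest())
                    .setDeserializer(deserializer)
                    .build();
        }
    }

Moving that reporting out of the source would be consistent with the sink-side change above, where metric wiring now hangs off Sink.InitContext instead of RuntimeContext.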