From 2c565bd90bb8ce3f2a83c6a2f1174d83d47dbd41 Mon Sep 17 00:00:00 2001 From: David Sloan <33483659+davidsloan@users.noreply.github.com> Date: Wed, 22 May 2024 09:47:01 +0100 Subject: [PATCH] GCP Pub/Sub Source Connector (#1224) * GCP Pub/Sub Source Connector * Reducing verbose logging * Changes following code review * Validate kcql on start * Bounce messages when queue full. * Providing unix epoch seconds and date * Applying formats * Revert double date * Update java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/KcqlSettings.java Co-authored-by: Mati Urban <157909548+GoMati-MU@users.noreply.github.com> Signed-off-by: David Sloan <33483659+davidsloan@users.noreply.github.com> * Review amendments * Use Map.of wherever possible --------- Signed-off-by: David Sloan <33483659+davidsloan@users.noreply.github.com> Co-authored-by: Mati Urban <157909548+GoMati-MU@users.noreply.github.com> --- build.sbt | 3 +- java-connectors/build.gradle | 4 +- .../kafka-connect-common/build.gradle | 3 + .../common/config/base/KcqlSettings.java | 61 ++++++ .../common/config/base/intf/Converter.java | 33 ++++ .../config/base/intf/KcqlConverter.java | 29 +++ .../config/source/ConfigWrapperSource.java | 6 + .../common/util/ListSplitter.java | 60 ++++++ .../streamreactor/common/util/MapUtils.java | 61 ++++++ .../common/util/TasksSplitter.java | 68 +++++++ .../src/main/resources/logback.xml | 2 +- .../common/util/ListSplitterTest.java | 92 +++++++++ .../common/util/MapUtilsTest.java | 149 ++++++++++++++ .../common/util/TasksSplitterTest.java | 83 ++++++++ .../kafka-connect-gcp-common/build.gradle | 8 +- .../auth/GCPServiceBuilderConfigurer.java | 7 +- .../gcp/common/config/GCPSettings.java | 63 +++--- .../gcp/common/auth/mode/TestFileUtil.java | 9 +- .../kafka-connect-gcp-pubsub/build.gradle | 29 +++ .../source/GCPPubSubSourceConnector.java | 91 +++++++++ .../pubsub/source/GCPPubSubSourceTask.java | 106 ++++++++++ 
.../pubsub/source/admin/PubSubService.java | 86 ++++++++ .../pubsub/source/config/PubSubConfig.java | 43 ++++ .../source/config/PubSubSourceConfig.java | 48 +++++ .../source/config/PubSubSubscription.java | 41 ++++ .../configdef/PubSubConfigSettings.java | 76 ++++++++ .../source/configdef/PubSubKcqlConverter.java | 66 +++++++ .../source/configdef/PubSubSettings.java | 88 +++++++++ .../pubsub/source/mapping/MappingConfig.java | 71 +++++++ .../source/mapping/SourceRecordConverter.java | 64 ++++++ .../source/mapping/headers/HeaderMapper.java | 30 +++ ...nimalAndMessageAttributesHeaderMapper.java | 42 ++++ .../mapping/headers/MinimalHeaderMapper.java | 35 ++++ .../mapping/key/CompatibilityKeyMapper.java | 57 ++++++ .../pubsub/source/mapping/key/KeyMapper.java | 29 +++ .../mapping/key/MessageIdKeyMapper.java | 35 ++++ .../value/CompatibilityValueMapper.java | 65 +++++++ .../mapping/value/MessageValueMapper.java | 35 ++++ .../source/mapping/value/ValueMapper.java | 29 +++ .../subscriber/LooselyBoundedQueue.java | 96 +++++++++ .../source/subscriber/PubSubMessageData.java | 39 ++++ .../source/subscriber/PubSubSourceOffset.java | 44 +++++ .../subscriber/PubSubSourcePartition.java | 57 ++++++ .../source/subscriber/PubSubSubscriber.java | 126 ++++++++++++ .../subscriber/PubSubSubscriberManager.java | 80 ++++++++ .../source/subscriber/SubscriberCreator.java | 29 +++ ...pache.kafka.connect.source.SourceConnector | 1 + .../resources/gcp-pubsub-ascii-source.txt | 30 +++ .../source/admin/PubSubServiceTest.java | 83 ++++++++ .../configdef/PubSubConfigSettingsTest.java | 52 +++++ .../configdef/PubSubKcqlConverterTest.java | 92 +++++++++ .../source/configdef/PubSubSettingsTest.java | 61 ++++++ .../mapping/SourceRecordConverterTest.java | 79 ++++++++ ...lAndMessageAttributesHeaderMapperTest.java | 72 +++++++ .../headers/MinimalHeaderMapperTest.java | 66 +++++++ .../mapping/value/MessageValueMapperTest.java | 70 +++++++ .../subscriber/PubSubSourceOffsetTest.java | 41 ++++ 
.../subscriber/PubSubSourcePartitionTest.java | 52 +++++ .../PubSubSubscriberManagerTest.java | 146 ++++++++++++++ .../subscriber/PubSubSubscriberTest.java | 184 ++++++++++++++++++ .../src/main/java/io/lenses/kcql/Kcql.java | 25 ++- .../test/java/io/lenses/kcql/KcqlTest.java | 62 ++++++ .../kcql/KcqlNestedFieldTest.scala | 0 .../kcql/KcqlPropertiesTest.scala | 0 .../kcql/KcqlSelectOnlyTest.scala | 0 .../io/lenses => scala}/kcql/KcqlTest.scala | 0 java-connectors/settings.gradle | 3 +- .../source/S3SourceTaskBucketRootTest.scala | 4 +- .../aws/s3/source/S3SourceTaskTest.scala | 3 +- .../aws/s3/config/S3CommonConfigDef.scala | 3 +- .../aws/s3/config/S3ConfigSettings.scala | 4 - .../aws/s3/config/S3CommonConfigDefTest.scala | 3 + .../source/config/S3SourceConfigTests.scala | 3 + .../datalake/config/AzureConfigSettings.scala | 4 - .../datalake/config/CommonConfigDef.scala | 3 +- .../documentdb/config/DocumentDbConfig.scala | 14 +- .../config/DocumentDbConfigConstants.scala | 11 +- .../storage/utils/GCPProxyContainerTest.scala | 12 +- .../gcp/storage/config/CommonConfigDef.scala | 9 +- .../storage/config/GCPConfigSettings.scala | 4 - .../storage/config/CommonConfigDefTest.scala | 13 +- 81 files changed, 3493 insertions(+), 94 deletions(-) create mode 100644 java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/KcqlSettings.java create mode 100644 java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/Converter.java create mode 100644 java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/KcqlConverter.java create mode 100644 java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/ListSplitter.java create mode 100644 java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/MapUtils.java create mode 100644 
java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/TasksSplitter.java create mode 100644 java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/ListSplitterTest.java create mode 100644 java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/MapUtilsTest.java create mode 100644 java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/TasksSplitterTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/build.gradle create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceConnector.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceTask.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubService.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubConfig.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSourceConfig.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSubscription.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettings.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverter.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettings.java create mode 100644 
java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/MappingConfig.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverter.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/HeaderMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/CompatibilityKeyMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/KeyMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/MessageIdKeyMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/CompatibilityValueMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/ValueMapper.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/LooselyBoundedQueue.java create mode 100644 
java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubMessageData.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffset.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartition.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriber.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManager.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/SubscriberCreator.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/resources/META-INF/services/org.apache.kafka.connect.source.SourceConnector create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/main/resources/gcp-pubsub-ascii-source.txt create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubServiceTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettingsTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverterTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettingsTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverterTest.java create 
mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapperTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapperTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapperTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffsetTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartitionTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManagerTest.java create mode 100644 java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberTest.java create mode 100644 java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlTest.java rename java-connectors/kafka-connect-query-language/src/test/{java/io/lenses => scala}/kcql/KcqlNestedFieldTest.scala (100%) rename java-connectors/kafka-connect-query-language/src/test/{java/io/lenses => scala}/kcql/KcqlPropertiesTest.scala (100%) rename java-connectors/kafka-connect-query-language/src/test/{java/io/lenses => scala}/kcql/KcqlSelectOnlyTest.scala (100%) rename java-connectors/kafka-connect-query-language/src/test/{java/io/lenses => scala}/kcql/KcqlTest.scala (100%) diff --git a/build.sbt b/build.sbt index a135dfd1f..d0f4daf73 100644 --- a/build.sbt +++ b/build.sbt @@ -58,10 +58,11 @@ lazy val `query-language` = (project in file("java-connectors/kafka-connect-quer ), ) 
.configureAssembly(true) - .configureTests(baseTestDeps) + .configureTests(baseTestDeps ++ javaCommonTestDeps) .configureAntlr() lazy val `java-common` = (project in file("java-connectors/kafka-connect-common")) + .dependsOn(`query-language`) .settings( settings ++ Seq( diff --git a/java-connectors/build.gradle b/java-connectors/build.gradle index 2358bb5f9..be31b4b98 100644 --- a/java-connectors/build.gradle +++ b/java-connectors/build.gradle @@ -29,6 +29,7 @@ allprojects { jUnitVersion = '5.9.1' mockitoJupiterVersion = '5.10.0' apacheToConfluentVersionAxis = ["2.8.1": "6.2.2", "3.3.0": "7.3.1"] + caffeineVersion = '3.1.8' //Other Manifest Info mainClassName = '' @@ -62,7 +63,8 @@ allprojects { //tests testImplementation group: 'org.mockito', name: 'mockito-core', version: mockitoJupiterVersion - testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter', version: mockitoJupiterVersion + testImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: mockitoJupiterVersion + testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter', version: jUnitVersion testImplementation group: 'org.assertj', name: 'assertj-core', version: '3.25.3' } diff --git a/java-connectors/kafka-connect-common/build.gradle b/java-connectors/kafka-connect-common/build.gradle index e5fd180cf..2a6a8870b 100644 --- a/java-connectors/kafka-connect-common/build.gradle +++ b/java-connectors/kafka-connect-common/build.gradle @@ -4,6 +4,9 @@ project(":kafka-connect-common") { } dependencies { + + implementation project(":kafka-connect-query-language") + //apache kafka api group: 'org.apache.kafka', name: 'connect-json', version: kafkaVersion api group: 'org.apache.kafka', name: 'kafka-clients', version: kafkaVersion diff --git a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/KcqlSettings.java b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/KcqlSettings.java new file 
mode 100644 index 000000000..55db05a61 --- /dev/null +++ b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/KcqlSettings.java @@ -0,0 +1,61 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.common.config.base; + +import java.util.List; + +import lombok.Getter; +import org.apache.kafka.common.config.ConfigDef; +import org.apache.kafka.common.config.ConfigException; + +import io.lenses.kcql.Kcql; +import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix; +import io.lenses.streamreactor.common.config.source.ConfigSource; +import lombok.val; + +@Getter +public class KcqlSettings implements ConfigSettings> { + + private static final String KCQL_DOC = + "Contains the Kafka Connect Query Language describing data mappings from the source to the target system."; + + private final String kcqlSettingsKey; + + public KcqlSettings( + ConnectorPrefix connectorPrefix) { + kcqlSettingsKey = connectorPrefix.prefixKey("kcql"); + } + + @Override + public ConfigDef withSettings(ConfigDef configDef) { + return configDef.define( + kcqlSettingsKey, + ConfigDef.Type.STRING, + ConfigDef.Importance.HIGH, + KCQL_DOC + ); + } + + @Override + public List parseFromConfig(ConfigSource configSource) { + return Kcql.parseMultiple(getKCQLString(configSource)); + } + + private String getKCQLString(ConfigSource configSource) { + val raw = 
configSource.getString(kcqlSettingsKey); + return raw.orElseThrow(() -> new ConfigException(String.format("Missing [%s]", kcqlSettingsKey))); + } +} diff --git a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/Converter.java b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/Converter.java new file mode 100644 index 000000000..bc6dd1c24 --- /dev/null +++ b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/Converter.java @@ -0,0 +1,33 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.common.config.base.intf; + +import org.apache.kafka.common.config.ConfigException; + +import java.util.List; +import java.util.stream.Collectors; + +/** + * Picks out the settings required from KCQL + */ +public abstract class Converter { + + public List convertAll(List source) throws ConfigException { + return source.stream().map(this::convert).collect(Collectors.toList()); + } + + protected abstract T convert(S source) throws ConfigException; +} diff --git a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/KcqlConverter.java b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/KcqlConverter.java new file mode 100644 index 000000000..7c9519d24 --- /dev/null +++ b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/base/intf/KcqlConverter.java @@ -0,0 +1,29 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.common.config.base.intf; + +import io.lenses.kcql.Kcql; +import org.apache.kafka.common.config.ConfigException; + +import java.util.List; +import java.util.stream.Collectors; + +/** + * Picks out the settings required from KCQL + */ +public abstract class KcqlConverter extends Converter { + +} diff --git a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/source/ConfigWrapperSource.java b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/source/ConfigWrapperSource.java index 2dccc6177..d0792ddcf 100644 --- a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/source/ConfigWrapperSource.java +++ b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/config/source/ConfigWrapperSource.java @@ -15,9 +15,11 @@ */ package io.lenses.streamreactor.common.config.source; +import java.util.Map; import java.util.Optional; import lombok.AllArgsConstructor; import org.apache.kafka.common.config.AbstractConfig; +import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.types.Password; /** @@ -27,6 +29,10 @@ @AllArgsConstructor public class ConfigWrapperSource implements ConfigSource { + public static ConfigWrapperSource fromConfigDef(ConfigDef configDef, Map props) { + return new ConfigWrapperSource(new AbstractConfig(configDef, props)); + } + private final AbstractConfig abstractConfig; @Override diff --git a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/ListSplitter.java b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/ListSplitter.java new file mode 100644 index 000000000..d367f64de --- /dev/null +++ b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/ListSplitter.java @@ -0,0 +1,60 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.common.util; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +/** + * Utility class for List splitting. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class ListSplitter { + + /** + * Splits the given list into {@code maxN} sublists of roughly equal size. + * If the list cannot be divided evenly, the remaining elements are distributed + * among the sublists so that the size difference between any two sublists is at most 1. + * + * @param list the list to be split + * @param maxN the number of sublists to create + * @param the type of elements in the list + * @return a list of sublists, where each sublist contains a portion of the original list + * @throws IllegalArgumentException if {@code maxN} is less than or equal to 0 + */ + public static List> splitList(List list, int maxN) { + if (maxN <= 0) { + throw new IllegalArgumentException("Number of parts must be greater than zero."); + } + + int totalSize = list.size(); + int partSize = totalSize / maxN; + int remainder = totalSize % maxN; + + return IntStream.range(0, maxN) + .mapToObj(i -> { + int start = i * partSize + Math.min(i, remainder); + int end = start + partSize + (i < remainder ? 
1 : 0); + return list.subList(start, Math.min(end, totalSize)); + }) + .filter(sublist -> !sublist.isEmpty()) + .collect(Collectors.toList()); + } +} diff --git a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/MapUtils.java b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/MapUtils.java new file mode 100644 index 000000000..0c5dd47f6 --- /dev/null +++ b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/MapUtils.java @@ -0,0 +1,61 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.common.util; + +import java.util.Map; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +/** + * Utility class for map operations. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class MapUtils { + + /** + * Casts a map to a specified key and value type. 
+ * + * @param map the map to cast + * @param targetKeyType the class of the key type + * @param targetValueType the class of the value type + * @param the target key type + * @param the target value type + * @return the casted map + * @throws IllegalArgumentException if the map contains keys or values of incorrect types + */ + @SuppressWarnings("unchecked") + public static Map castMap(Map map, Class targetKeyType, Class targetValueType) { + map.forEach((key, value) -> { + if (!isAssignable(key, targetKeyType) || !isAssignable(value, targetValueType)) { + throw new IllegalArgumentException("Map contains invalid key or value type"); + } + }); + return (Map) map; + } + + /** + * Checks if an object is assignable to a specified type, allowing for null values. + * + * @param obj the object to check + * @param type the target type + * @param the target type + * @return true if the object is null or assignable to the type, false otherwise + */ + private static boolean isAssignable(Object obj, Class type) { + return obj == null || type.isAssignableFrom(obj.getClass()); + } +} diff --git a/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/TasksSplitter.java b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/TasksSplitter.java new file mode 100644 index 000000000..3571df3e9 --- /dev/null +++ b/java-connectors/kafka-connect-common/src/main/java/io/lenses/streamreactor/common/util/TasksSplitter.java @@ -0,0 +1,68 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.common.util; + +import static io.lenses.kcql.Kcql.KCQL_MULTI_STATEMENT_SEPARATOR; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import io.lenses.streamreactor.common.config.base.KcqlSettings; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.val; + +/** + * Utility class for splitting tasks based on KCQL statements. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class TasksSplitter { + + /** + * Splits tasks based on the KCQL statements provided in the properties map. + * Each resulting map will contain the original properties and a subset of the KCQL statements. 
+ * + * @param maxTasks the maximum number of tasks to split into + * @param props the original properties map containing KCQL settings + * @param kcqlSettings the KCQL settings object that provides the key for KCQL settings in the properties map + * @return a list of maps, each containing the original properties and a subset of the KCQL statements + */ + public static List> splitByKcqlStatements(int maxTasks, Map props, + KcqlSettings kcqlSettings) { + val kcqlSettingsKey = kcqlSettings.getKcqlSettingsKey(); + val kcqls = + Arrays + .stream(props.get(kcqlSettingsKey).split(KCQL_MULTI_STATEMENT_SEPARATOR)) + .collect(Collectors.toList()); + + return ListSplitter + .splitList(kcqls, maxTasks) + .stream() + .map(kcqlsForTask -> Stream.concat( + props.entrySet().stream(), + Stream.of(Map.entry(kcqlSettingsKey, String.join(";", kcqlsForTask))) + ).collect(Collectors.toUnmodifiableMap( + Map.Entry::getKey, + Map.Entry::getValue, + (existing, replacement) -> replacement + ))) + .collect(Collectors.toUnmodifiableList()); + } + +} diff --git a/java-connectors/kafka-connect-common/src/main/resources/logback.xml b/java-connectors/kafka-connect-common/src/main/resources/logback.xml index 920828a04..a7902682c 100644 --- a/java-connectors/kafka-connect-common/src/main/resources/logback.xml +++ b/java-connectors/kafka-connect-common/src/main/resources/logback.xml @@ -1,7 +1,7 @@ - + diff --git a/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/ListSplitterTest.java b/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/ListSplitterTest.java new file mode 100644 index 000000000..4a9954909 --- /dev/null +++ b/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/ListSplitterTest.java @@ -0,0 +1,92 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.common.util; + +import static org.junit.jupiter.api.Assertions.*; + +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.function.Executable; + +class ListSplitterTest { + + private final List list = IntStream.range(1, 11).boxed().collect(Collectors.toList()); + + @Test + void testSplitListIntoEqualParts() { + List> result = ListSplitter.splitList(list, 5); + assertEquals(5, result.size()); + for (List sublist : result) { + assertEquals(2, sublist.size()); + } + } + + @Test + void testSplitListWithRemainder() { + List> result = ListSplitter.splitList(list, 3); + assertEquals(3, result.size()); + assertEquals(4, result.get(0).size()); + assertEquals(3, result.get(1).size()); + assertEquals(3, result.get(2).size()); + } + + @Test + void testSplitListSinglePart() { + List> result = ListSplitter.splitList(list, 1); + assertEquals(1, result.size()); + assertEquals(10, result.get(0).size()); + } + + @Test + void testSplitListMorePartsThanElements() { + List> result = ListSplitter.splitList(list, 12); + assertEquals(12, result.size()); + int nonEmptyLists = (int) result.stream().filter(sublist -> !sublist.isEmpty()).count(); + assertEquals(10, nonEmptyLists); + for (List sublist : result) { + assertTrue(sublist.size() <= 1); + } + } + + @Test + void testSplitEmptyList() { + List emptyList = Collections.emptyList(); + List> result = ListSplitter.splitList(emptyList, 3); 
+    assertEquals(3, result.size());
+    for (List<Integer> sublist : result) {
+      assertTrue(sublist.isEmpty());
+    }
+  }
+
+  @Test
+  void testSplitListInvalidParts() {
+    Executable executable = () -> ListSplitter.splitList(list, 0);
+    assertThrows(IllegalArgumentException.class, executable);
+  }
+
+  @Test
+  void testListSmallerThanMaxNShouldProvideMaxNResults() {
+    List<List<Integer>> result = ListSplitter.splitList(Collections.singletonList(1), 100);
+    assertEquals(1, result.size());
+    for (List<Integer> sublist : result) {
+      assertEquals(1, sublist.size());
+    }
+  }
+}
diff --git a/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/MapUtilsTest.java b/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/MapUtilsTest.java
new file mode 100644
index 000000000..afd53e296
--- /dev/null
+++ b/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/MapUtilsTest.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.common.util;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.jupiter.api.Test;
+
+class MapUtilsTest {
+
+  @Test
+  void testCastMap_ValidStringMap_ReturnsTypedMap() {
+    Map<?, ?> rawMap =
+        Map.of(
+            "key1", "value1",
+            "key2", "value2"
+        );
+
+    Map<String, String> typedMap = MapUtils.castMap(rawMap, String.class, String.class);
+
+    assertEquals("value1", typedMap.get("key1"));
+    assertEquals("value2", typedMap.get("key2"));
+  }
+
+  @Test
+  void testCastMap_NonStringKeyOrValue_ThrowsException() {
+    Map<?, ?> rawMap =
+        Map.of(
+            "key1", "value1",
+            123, "value2" // Non-String key
+        );
+
+    assertThrows(IllegalArgumentException.class, () -> MapUtils.castMap(rawMap, String.class, String.class));
+  }
+
+  @Test
+  void testCastMap_ValidIntegerMap_ReturnsTypedMap() {
+    Map<?, ?> rawMap =
+        Map.of(
+            10, 99,
+            20, 145
+        );
+
+    Map<Integer, Integer> typedMap = MapUtils.castMap(rawMap, Integer.class, Integer.class);
+
+    assertEquals(99, typedMap.get(10));
+    assertEquals(145, typedMap.get(20));
+  }
+
+  @Test
+  void testCastMap_NonIntegerKeyOrValue_ThrowsException() {
+    Map<?, ?> rawMap =
+        Map.of(
+            10, 99,
+            20, "pigeons" // Non-Integer value
+        );
+
+    assertThrows(IllegalArgumentException.class, () -> MapUtils.castMap(rawMap, Integer.class, Integer.class));
+  }
+
+  @Test
+  void testCastMap_WithNullValues_ShouldHandleGracefully() {
+    Map<String, String> rawMap = new HashMap<>();
+    rawMap.put("key1", "value1");
+    rawMap.put("key2", null); // Null value
+
+    Map<String, String> typedMap = MapUtils.castMap(rawMap, String.class, String.class);
+
+    assertEquals("value1", typedMap.get("key1"));
+    assertNull(typedMap.get("key2"));
+  }
+
+  @Test
+  void testCastMap_SuperclassTypeCompatibility_ShouldPass() {
+    Map<?, ?> rawMap =
+        Map.of(
+            "key1", "value1",
+            "key2", "value2"
+        );
+
+    // Cast to Map<CharSequence, CharSequence> since String implements CharSequence
+    Map<CharSequence, CharSequence> typedMap = MapUtils.castMap(rawMap, CharSequence.class, CharSequence.class);
+
+    assertEquals("value1", typedMap.get("key1"));
+    assertEquals("value2", typedMap.get("key2"));
+  }
+
+  @Test
+  void testCastMap_InterfaceTypeCompatibility_ShouldPass() {
+    Map<?, ?> rawMap =
+        Map.of(
+            "key1", "value1",
+            "key2", "value2"
+        );
+
+    // Cast to Map<Object, Object> since String is an Object
+    Map<Object, Object> typedMap = MapUtils.castMap(rawMap, Object.class, Object.class);
+
+    assertEquals("value1", typedMap.get("key1"));
+    assertEquals("value2", typedMap.get("key2"));
+  }
+
+  @Test
+  void testCastMap_StringToObject_ShouldPass() {
+    Map<?, ?> rawMap =
+        Map.of(
+            "key1", "value1",
+            "key2", "value2"
+        );
+
+    // Cast to Map<Object, Object> since String is an Object
+    Map<Object, Object> typedMap = MapUtils.castMap(rawMap, Object.class, Object.class);
+
+    assertEquals("value1", typedMap.get("key1"));
+    assertEquals("value2", typedMap.get("key2"));
+  }
+
+  @Test
+  void testCastMap_StringToCharSequence_ShouldPass() {
+    Map<?, ?> rawMap =
+        Map.of(
+            "key1", "value1",
+            "key2", "value2"
+        );
+
+    // Cast to Map<CharSequence, CharSequence> since String implements CharSequence
+    Map<CharSequence, CharSequence> typedMap = MapUtils.castMap(rawMap, CharSequence.class, CharSequence.class);
+
+    assertEquals("value1", typedMap.get("key1"));
+    assertEquals("value2", typedMap.get("key2"));
+  }
+
+}
diff --git a/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/TasksSplitterTest.java b/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/TasksSplitterTest.java
new file mode 100644
index 000000000..ed9cfc156
--- /dev/null
+++ b/java-connectors/kafka-connect-common/src/test/java/io/lenses/streamreactor/common/util/TasksSplitterTest.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.common.util;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Stream;
+
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import io.lenses.streamreactor.common.config.base.KcqlSettings;
+import lombok.val;
+
+class TasksSplitterTest {
+
+  private static final String KCQL_SETTINGS_KEY = "connect.some.prefix.kcql";
+
+  private static final String OTHER_KEY = "key1";
+
+  private static final String OTHER_VALUE = "value1";
+
+  @ParameterizedTest
+  @MethodSource("testCases")
+  void testSplitTasksByKcqlStatements(String joinedKcqlStatements, int maxTasks, List<String> expectedKcqls) {
+    Map<String, String> props =
+        Map.of(
+            OTHER_KEY, OTHER_VALUE,
+            KCQL_SETTINGS_KEY, joinedKcqlStatements
+        );
+
+    val kcqlSettings = mock(KcqlSettings.class);
+    when(kcqlSettings.getKcqlSettingsKey()).thenReturn(KCQL_SETTINGS_KEY);
+
+    val result = TasksSplitter.splitByKcqlStatements(maxTasks, props, kcqlSettings);
+
+    assertEquals(expectedKcqls.size(), result.size());
+    for (int i = 0; i < expectedKcqls.size(); i++) {
+      val taskProps = result.get(i);
+      assertEquals(OTHER_VALUE, taskProps.get(OTHER_KEY));
+      assertEquals(expectedKcqls.get(i), taskProps.get(KCQL_SETTINGS_KEY));
+    }
+  }
+
+  private static Stream<Arguments> testCases()
{ + return Stream.of( + Arguments.of("INSERT INTO * SELECT * FROM topicA", 1, Collections.singletonList( + "INSERT INTO * SELECT * FROM topicA")), + Arguments.of("INSERT INTO * SELECT * FROM topicA;INSERT INTO * SELECT * FROM topicB", 1, Collections + .singletonList( + "INSERT INTO * SELECT * FROM topicA;INSERT INTO * SELECT * FROM topicB")), + Arguments.of( + "INSERT INTO * SELECT * FROM topicA;INSERT INTO * SELECT * FROM topicB;INSERT INTO * SELECT * FROM topicC", + 2, Arrays.asList( + "INSERT INTO * SELECT * FROM topicA;INSERT INTO * SELECT * FROM topicB", + "INSERT INTO * SELECT * FROM topicC")), + Arguments.of("", 1, Collections.singletonList("")), + Arguments.of("INSERT INTO * SELECT * FROM topicA;INSERT INTO * SELECT * FROM topicB", 3, Arrays.asList( + "INSERT INTO * SELECT * FROM topicA", + "INSERT INTO * SELECT * FROM topicB")) + ); + } +} diff --git a/java-connectors/kafka-connect-gcp-common/build.gradle b/java-connectors/kafka-connect-gcp-common/build.gradle index f6b031abb..48e44280a 100644 --- a/java-connectors/kafka-connect-gcp-common/build.gradle +++ b/java-connectors/kafka-connect-gcp-common/build.gradle @@ -5,7 +5,7 @@ project(":kafka-connect-gcp-common") { } ext { - gcpCloudVersion = "2.37.0" + gcpCloudVersion = "26.38.0" } dependencies { @@ -16,7 +16,9 @@ project(":kafka-connect-gcp-common") { api group: 'org.apache.kafka', name: 'kafka-clients', version: kafkaVersion //gcp - implementation group: 'com.google.cloud', name: 'google-cloud-core', version: gcpCloudVersion - implementation group: 'com.google.cloud', name: 'google-cloud-core-http', version: gcpCloudVersion + implementation platform(group: 'com.google.cloud', name: 'libraries-bom', version: gcpCloudVersion) + + implementation 'com.google.cloud:google-cloud-core' + implementation 'com.google.cloud:google-cloud-core-http' } } diff --git a/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/auth/GCPServiceBuilderConfigurer.java 
b/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/auth/GCPServiceBuilderConfigurer.java index e4732e7f1..90ff88c44 100644 --- a/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/auth/GCPServiceBuilderConfigurer.java +++ b/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/auth/GCPServiceBuilderConfigurer.java @@ -24,6 +24,9 @@ import java.io.IOException; import java.util.Optional; import java.util.function.Supplier; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; import lombok.experimental.UtilityClass; import lombok.val; import org.apache.kafka.common.config.ConfigException; @@ -32,7 +35,7 @@ /** * Utility class for configuring generic GCP service clients using a {@link GCPConnectionConfig}. */ -@UtilityClass +@NoArgsConstructor(access = AccessLevel.PRIVATE) public class GCPServiceBuilderConfigurer { /** @@ -97,7 +100,7 @@ private static RetrySettings createRetrySettings(RetryConfig httpRetryConfig) { .build(); } - private Supplier createConfigException(String message) { + private static Supplier createConfigException(String message) { return () -> new ConfigException(message); } } diff --git a/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/config/GCPSettings.java b/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/config/GCPSettings.java index 3d9eb7ce7..c5a465d22 100644 --- a/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/config/GCPSettings.java +++ b/java-connectors/kafka-connect-gcp-common/src/main/java/io/lenses/streamreactor/connect/gcp/common/config/GCPSettings.java @@ -15,6 +15,8 @@ */ package io.lenses.streamreactor.connect.gcp.common.config; +import org.apache.kafka.common.config.ConfigDef; + import 
io.lenses.streamreactor.common.config.base.ConfigSettings; import io.lenses.streamreactor.common.config.base.RetryConfig; import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix; @@ -23,7 +25,6 @@ import io.lenses.streamreactor.connect.gcp.common.auth.HttpTimeoutConfig; import lombok.Getter; import lombok.val; -import org.apache.kafka.common.config.ConfigDef; /** * Configuration settings for connecting to Google Cloud Platform (GCP) services. @@ -34,13 +35,13 @@ public class GCPSettings implements ConfigSettings { public static final String EMPTY_STRING = ""; - private final String gcpProjectId; - private final String gcpQuotaProjectId; - private final String host; - private final String httpErrorRetryInterval; - private final String httpNbrOfRetries; - private final String httpSocketTimeout; - private final String httpConnectionTimeout; + private final String gcpProjectIdKey; + private final String gcpQuotaProjectIdKey; + private final String hostKey; + private final String httpErrorRetryIntervalKey; + private final String httpNbrOfRetriesKey; + private final String httpSocketTimeoutKey; + private final String httpConnectionTimeoutKey; public static final Long HTTP_ERROR_RETRY_INTERVAL_DEFAULT = 50L; public static final Integer HTTP_NUMBER_OF_RETIRES_DEFAULT = 5; @@ -55,13 +56,13 @@ public class GCPSettings implements ConfigSettings { * @param connectorPrefix the prefix used for configuration keys */ public GCPSettings(ConnectorPrefix connectorPrefix) { - gcpProjectId = connectorPrefix.prefixKey("gcp.project.id"); - gcpQuotaProjectId = connectorPrefix.prefixKey("gcp.quota.project.id"); - host = connectorPrefix.prefixKey("endpoint"); - httpErrorRetryInterval = connectorPrefix.prefixKey("http.retry.interval"); - httpNbrOfRetries = connectorPrefix.prefixKey("http.max.retries"); - httpSocketTimeout = connectorPrefix.prefixKey("http.socket.timeout"); - httpConnectionTimeout = connectorPrefix.prefixKey("http.connection.timeout"); + gcpProjectIdKey = 
connectorPrefix.prefixKey("gcp.project.id"); + gcpQuotaProjectIdKey = connectorPrefix.prefixKey("gcp.quota.project.id"); + hostKey = connectorPrefix.prefixKey("endpoint"); + httpErrorRetryIntervalKey = connectorPrefix.prefixKey("http.retry.interval"); + httpNbrOfRetriesKey = connectorPrefix.prefixKey("http.max.retries"); + httpSocketTimeoutKey = connectorPrefix.prefixKey("http.socket.timeout"); + httpConnectionTimeoutKey = connectorPrefix.prefixKey("http.connection.timeout"); authModeSettings = new AuthModeSettings(connectorPrefix); } @@ -77,20 +78,20 @@ public ConfigDef withSettings(ConfigDef configDef) { val conf = configDef .define( - gcpProjectId, + gcpProjectIdKey, ConfigDef.Type.STRING, EMPTY_STRING, ConfigDef.Importance.HIGH, "GCP Project ID") .define( - gcpQuotaProjectId, + gcpQuotaProjectIdKey, ConfigDef.Type.STRING, EMPTY_STRING, ConfigDef.Importance.HIGH, "GCP Quota Project ID") - .define(host, ConfigDef.Type.STRING, EMPTY_STRING, ConfigDef.Importance.LOW, "GCP Host") + .define(hostKey, ConfigDef.Type.STRING, EMPTY_STRING, ConfigDef.Importance.LOW, "GCP Host") .define( - httpNbrOfRetries, + httpNbrOfRetriesKey, ConfigDef.Type.INT, HTTP_NUMBER_OF_RETIRES_DEFAULT, ConfigDef.Importance.MEDIUM, @@ -99,9 +100,9 @@ public ConfigDef withSettings(ConfigDef configDef) { "Error", 2, ConfigDef.Width.LONG, - httpNbrOfRetries) + httpNbrOfRetriesKey) .define( - httpErrorRetryInterval, + httpErrorRetryIntervalKey, ConfigDef.Type.LONG, HTTP_ERROR_RETRY_INTERVAL_DEFAULT, ConfigDef.Importance.MEDIUM, @@ -110,15 +111,15 @@ public ConfigDef withSettings(ConfigDef configDef) { "Error", 3, ConfigDef.Width.LONG, - httpErrorRetryInterval) + httpErrorRetryIntervalKey) .define( - httpSocketTimeout, + httpSocketTimeoutKey, ConfigDef.Type.LONG, HTTP_SOCKET_TIMEOUT_DEFAULT, ConfigDef.Importance.LOW, "Socket timeout (ms)") .define( - httpConnectionTimeout, + httpConnectionTimeoutKey, ConfigDef.Type.LONG, HTTP_CONNECTION_TIMEOUT_DEFAULT, ConfigDef.Importance.LOW, @@ -130,19 +131,19 @@ 
public ConfigDef withSettings(ConfigDef configDef) { public GCPConnectionConfig parseFromConfig(ConfigSource configSource) { val builder = GCPConnectionConfig.builder().authMode(authModeSettings.parseFromConfig(configSource)); - configSource.getString(gcpProjectId).ifPresent(builder::projectId); - configSource.getString(gcpQuotaProjectId).ifPresent(builder::quotaProjectId); - configSource.getString(host).ifPresent(builder::host); + configSource.getString(gcpProjectIdKey).ifPresent(builder::projectId); + configSource.getString(gcpQuotaProjectIdKey).ifPresent(builder::quotaProjectId); + configSource.getString(hostKey).ifPresent(builder::host); val retryConfig = new RetryConfig( - configSource.getInt(httpNbrOfRetries).orElse(HTTP_NUMBER_OF_RETIRES_DEFAULT), - configSource.getLong(httpErrorRetryInterval).orElse(HTTP_ERROR_RETRY_INTERVAL_DEFAULT)); + configSource.getInt(httpNbrOfRetriesKey).orElse(HTTP_NUMBER_OF_RETIRES_DEFAULT), + configSource.getLong(httpErrorRetryIntervalKey).orElse(HTTP_ERROR_RETRY_INTERVAL_DEFAULT)); val timeoutConfig = new HttpTimeoutConfig( - configSource.getLong(httpSocketTimeout).orElse(null), - configSource.getLong(httpConnectionTimeout).orElse(null)); + configSource.getLong(httpSocketTimeoutKey).orElse(null), + configSource.getLong(httpConnectionTimeoutKey).orElse(null)); builder.httpRetryConfig(retryConfig); builder.timeouts(timeoutConfig); diff --git a/java-connectors/kafka-connect-gcp-common/src/test/java/io/lenses/streamreactor/connect/gcp/common/auth/mode/TestFileUtil.java b/java-connectors/kafka-connect-gcp-common/src/test/java/io/lenses/streamreactor/connect/gcp/common/auth/mode/TestFileUtil.java index 1272445ea..a68e14107 100644 --- a/java-connectors/kafka-connect-gcp-common/src/test/java/io/lenses/streamreactor/connect/gcp/common/auth/mode/TestFileUtil.java +++ b/java-connectors/kafka-connect-gcp-common/src/test/java/io/lenses/streamreactor/connect/gcp/common/auth/mode/TestFileUtil.java @@ -17,14 +17,17 @@ import static 
com.google.common.base.Preconditions.checkNotNull; -import com.google.common.io.ByteStreams; import java.io.File; import java.io.InputStream; import java.net.URL; import java.nio.charset.StandardCharsets; -import lombok.experimental.UtilityClass; -@UtilityClass +import com.google.common.io.ByteStreams; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +@NoArgsConstructor(access = AccessLevel.PRIVATE) public class TestFileUtil { static String streamToString(InputStream inputStream) throws Exception { diff --git a/java-connectors/kafka-connect-gcp-pubsub/build.gradle b/java-connectors/kafka-connect-gcp-pubsub/build.gradle new file mode 100644 index 000000000..e1cde51fe --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/build.gradle @@ -0,0 +1,29 @@ +project(":kafka-connect-gcp-pubsub") { + + test { + maxParallelForks = 1 + } + + dependencies { + implementation project(':kafka-connect-query-language') + implementation project(':kafka-connect-common') + implementation project(':kafka-connect-gcp-common') + + //gcp + implementation platform(group: 'com.google.cloud', name: 'libraries-bom', version: project(':kafka-connect-gcp-common').ext.gcpCloudVersion) + + implementation 'com.google.cloud:google-cloud-pubsub' + implementation 'io.grpc:grpc-core' + implementation 'io.grpc:grpc-netty-shaded' + implementation 'io.grpc:grpc-stub' + implementation 'io.grpc:grpc-protobuf-lite' + + implementation 'com.google.protobuf:protobuf-java' + implementation 'com.google.api.grpc:proto-google-common-protos' + + implementation 'io.perfmark:perfmark-api:0.27.0' + + implementation(group: 'com.github.ben-manes.caffeine', name: 'caffeine', version: caffeineVersion) + + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceConnector.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceConnector.java new file 
mode 100644 index 000000000..377f6f620 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceConnector.java @@ -0,0 +1,91 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source; + +import static io.lenses.streamreactor.common.util.AsciiArtPrinter.printAsciiHeader; + +import java.util.List; +import java.util.Map; + +import org.apache.kafka.common.config.ConfigDef; +import org.apache.kafka.connect.connector.Task; +import org.apache.kafka.connect.errors.ConnectException; +import org.apache.kafka.connect.source.SourceConnector; + +import io.lenses.streamreactor.common.util.JarManifest; +import io.lenses.streamreactor.common.util.TasksSplitter; +import io.lenses.streamreactor.connect.gcp.pubsub.source.configdef.PubSubConfigSettings; +import lombok.val; + +/** + * GCPPubSubSourceConnector is a source connector for Google Cloud Pub/Sub. + * It is responsible for starting the connector, creating tasks, and stopping the connector. 
+ */
+public class GCPPubSubSourceConnector extends SourceConnector {
+
+  private Map<String, String> props;
+
+  private final PubSubConfigSettings pubSubConfigSettings = new PubSubConfigSettings();
+
+  private final JarManifest jarManifest =
+      new JarManifest(getClass().getProtectionDomain().getCodeSource().getLocation());
+
+  @Override
+  public void start(Map<String, String> props) {
+    printAsciiHeader(jarManifest, "/gcp-pubsub-source-ascii.txt");
+    this.props = validateProps(props);
+  }
+
+  private Map<String, String> validateProps(Map<String, String> props) {
+    try {
+      val pubSubConfigDef = new PubSubConfigSettings();
+      val pubSubSourceConfig = pubSubConfigDef.parse(props);
+      pubSubSourceConfig.validateKcql();
+      return props;
+    } catch (Exception e) {
+      throw new ConnectException("Invalid connector properties configuration: " + e.getMessage(), e);
+    }
+  }
+
+  @Override
+  public Class<? extends Task> taskClass() {
+    return GCPPubSubSourceTask.class;
+  }
+
+  @Override
+  public List<Map<String, String>> taskConfigs(int maxTasks) {
+    return TasksSplitter.splitByKcqlStatements(
+        maxTasks,
+        props,
+        PubSubConfigSettings.getKcqlSettings()
+    );
+  }
+
+  @Override
+  public void stop() {
+    // No implementation required!
+  }
+
+  @Override
+  public ConfigDef config() {
+    return pubSubConfigSettings.getConfigDef();
+  }
+
+  @Override
+  public String version() {
+    return jarManifest.getVersion();
+  }
+}
diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceTask.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceTask.java
new file mode 100644
index 000000000..c1c9a8869
--- /dev/null
+++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/GCPPubSubSourceTask.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import io.lenses.streamreactor.common.util.MapUtils; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubConfig; +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.*; +import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.kafka.connect.errors.ConnectException; +import org.apache.kafka.connect.source.SourceRecord; +import org.apache.kafka.connect.source.SourceTask; + +import io.lenses.streamreactor.common.util.JarManifest; +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.configdef.PubSubConfigSettings; +import io.lenses.streamreactor.connect.gcp.pubsub.source.configdef.PubSubKcqlConverter; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.SourceRecordConverter; +import lombok.val; + +/** + * This class represents a source task for Google Cloud Pub/Sub. + * It is responsible for starting the task, polling for records, and stopping the task. + * It also handles record commit operations. 
+ */
+public class GCPPubSubSourceTask extends SourceTask {
+
+  private final JarManifest jarManifest =
+      new JarManifest(getClass().getProtectionDomain().getCodeSource().getLocation());
+
+  private final PubSubConfigSettings pubSubConfigSettings = new PubSubConfigSettings();
+
+  private PubSubSubscriberManager pubSubSubscriberManager;
+
+  private SourceRecordConverter converter;
+
+  @Override
+  public String version() {
+    return jarManifest.getVersion();
+  }
+
+  @Override
+  public void start(Map<String, String> props) {
+    val sourceConfigSettings = pubSubConfigSettings.parse(props);
+    val pubSubService = createPubSubService(sourceConfigSettings.getGcpSettings());
+    val pubSubConfig = sourceConfigSettings.getGcpSettings();
+    val kcqls = sourceConfigSettings.getKcqlSettings();
+    val kcqlConverter = new PubSubKcqlConverter(pubSubService);
+    val subscriptionConfigs = kcqlConverter.convertAll(kcqls);
+    converter = new SourceRecordConverter(pubSubConfig.getMappingConfig());
+    pubSubSubscriberManager =
+        new PubSubSubscriberManager(
+            pubSubService,
+            pubSubConfig.getProjectId(),
+            subscriptionConfigs,
+            PubSubSubscriber::new);
+  }
+
+  private static PubSubService createPubSubService(PubSubConfig pubSubConfig) {
+    try {
+      return new PubSubService(
+          pubSubConfig.getAuthMode().orElseThrow(() -> new ConnectException("No AuthMode specified")),
+          Optional.ofNullable(pubSubConfig.getProjectId()).orElseThrow(() -> new ConnectException(
+              "No ProjectId specified"))
+      );
+    } catch (IOException e) {
+      throw new ConnectException(e);
+    }
+  }
+
+  @Override
+  public List<SourceRecord> poll() {
+    return converter.convertAll(pubSubSubscriberManager.poll());
+  }
+
+  @Override
+  public void stop() {
+    Optional.ofNullable(pubSubSubscriberManager).ifPresent(PubSubSubscriberManager::stop);
+  }
+
+  @Override
+  public void commitRecord(SourceRecord sourceRecord, RecordMetadata metadata) {
+    val sourcePartition =
+        PubSubSourcePartition.fromMap(MapUtils.castMap(sourceRecord.sourcePartition(), String.class, String.class));
+ val sourceOffset = + PubSubSourceOffset.fromMap(MapUtils.castMap(sourceRecord.sourceOffset(), String.class, String.class)); + pubSubSubscriberManager.commitRecord(sourcePartition, sourceOffset); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubService.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubService.java new file mode 100644 index 000000000..8d8e40bf9 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubService.java @@ -0,0 +1,86 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.admin; + +import java.io.IOException; +import java.util.Optional; + +import com.google.cloud.pubsub.v1.MessageReceiver; +import com.google.cloud.pubsub.v1.Subscriber; +import com.google.cloud.pubsub.v1.SubscriptionAdminClient; +import com.google.cloud.pubsub.v1.SubscriptionAdminSettings; +import com.google.pubsub.v1.ProjectSubscriptionName; + +import com.google.pubsub.v1.SubscriptionName; +import io.lenses.streamreactor.connect.gcp.common.auth.mode.AuthMode; +import lombok.extern.slf4j.Slf4j; +import lombok.val; + +/** + * PubSubService manages the remote PubSub resources and provides methods to create a subscriber and look up topic name. + */ +@Slf4j +public class PubSubService { + + private final SubscriptionAdminClient subscriptionAdminClient; + + private final AuthMode authMode; + + private final String projectId; + + public PubSubService(final AuthMode authMode, final String projectId) throws IOException { + this.authMode = authMode; + this.projectId = projectId; + this.subscriptionAdminClient = createSubscriptionAdminClient(authMode); + } + + public PubSubService(final AuthMode authMode, final String projectId, + final SubscriptionAdminClient subscriptionAdminClient) { + this.authMode = authMode; + this.projectId = projectId; + this.subscriptionAdminClient = subscriptionAdminClient; + } + + public Subscriber createSubscriber( + final String subscriptionId, + final MessageReceiver receiver + ) { + val subscriberBuilder = Subscriber.newBuilder(createProjectSubscriptionName(subscriptionId), receiver); + Optional.ofNullable(authMode).ifPresent(e -> subscriberBuilder.setCredentialsProvider(e::getCredentials)); + return subscriberBuilder.build(); + } + + private SubscriptionAdminClient createSubscriptionAdminClient(final AuthMode authMode) throws IOException { + val settingsBuilder = SubscriptionAdminSettings.newBuilder(); + Optional.ofNullable(authMode).ifPresent(e -> 
settingsBuilder.setCredentialsProvider(e::getCredentials)); + return SubscriptionAdminClient.create(settingsBuilder.build()); + } + + public String topicNameFor(final String subscriptionId) { + val subscription = subscriptionAdminClient.getSubscription(createSubscriptionName(subscriptionId)); + log.info("Found topic details {} for subscriptionId {}", subscription.getTopic(), subscriptionId); + return subscription.getTopic(); + } + + private SubscriptionName createSubscriptionName(final String subscriptionId) { + return SubscriptionName.of(projectId, subscriptionId); + } + + private ProjectSubscriptionName createProjectSubscriptionName(final String subscriptionId) { + return ProjectSubscriptionName.of(projectId, subscriptionId); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubConfig.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubConfig.java new file mode 100644 index 000000000..f20f4a1f8 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubConfig.java @@ -0,0 +1,43 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.config; + +import java.util.Optional; + +import io.lenses.streamreactor.connect.gcp.common.auth.mode.AuthMode; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.MappingConfig; +import lombok.AllArgsConstructor; +import lombok.Getter; + +/** + * PubSubConfig holds the configuration for the PubSub connector. + * It contains the projectId, authMode and mappingConfig. + */ +@Getter +@AllArgsConstructor +public class PubSubConfig { + + private final String projectId; + + private final AuthMode authMode; + + private final MappingConfig mappingConfig; + + public Optional getAuthMode() { + return Optional.ofNullable(authMode); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSourceConfig.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSourceConfig.java new file mode 100644 index 000000000..2f1d7e427 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSourceConfig.java @@ -0,0 +1,48 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.config; + +import io.lenses.kcql.Kcql; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.apache.kafka.common.config.ConfigException; + +import java.util.List; + +import static io.lenses.streamreactor.connect.gcp.pubsub.source.configdef.PubSubKcqlConverter.*; + +/** + * PubSubSourceConfig holds the configuration for the PubSub connector. + * It contains the gcpSettings and kcqlSettings. + */ +@Getter +@AllArgsConstructor +public class PubSubSourceConfig { + + private final PubSubConfig gcpSettings; + + private final List kcqlSettings; + + public void validateKcql() { + try { + getKcqlSettings() + .forEach(k -> k.validateKcqlProperties(KCQL_PROP_KEY_BATCH_SIZE, KCQL_PROP_KEY_CACHE_TTL, + KCQL_PROP_KEY_QUEUE_MAX)); + } catch (IllegalArgumentException e) { + throw new ConfigException("Invalid KCQL properties", e); + } + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSubscription.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSubscription.java new file mode 100644 index 000000000..e7660c7c9 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/config/PubSubSubscription.java @@ -0,0 +1,41 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.config; + +import lombok.Builder; +import lombok.Getter; + +/** + * PubSubSubscription holds the configuration for the PubSubSubscription. + * It contains the targetKafkaTopic, sourceTopicId, subscriptionId, batchSize and cacheExpire. + */ +@Getter +@Builder +public class PubSubSubscription { + + private final String targetKafkaTopic; + + private final String sourceTopicId; + + private final String subscriptionId; + + private final Integer batchSize; + + private final Long cacheExpire; + + private final Integer queueMaxEntries; + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettings.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettings.java new file mode 100644 index 000000000..4b29de574 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettings.java @@ -0,0 +1,76 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.configdef; + +import java.util.List; +import java.util.Map; + +import org.apache.kafka.common.config.ConfigDef; + +import io.lenses.streamreactor.common.config.base.ConfigSettings; +import io.lenses.streamreactor.common.config.base.KcqlSettings; +import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix; +import io.lenses.streamreactor.common.config.source.ConfigSource; +import io.lenses.streamreactor.common.config.source.ConfigWrapperSource; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSourceConfig; +import lombok.Getter; + +/** + * PubSubConfigDef is responsible for holding the configuration definition for the PubSub connector. + * It contains the gcpSettings and kcqlSettings. + */ +public class PubSubConfigSettings implements ConfigSettings { + + @Getter + private final ConfigDef configDef; + + private static final ConnectorPrefix connectorPrefix = new ConnectorPrefix("connect.pubsub"); + + @Getter + private static final PubSubSettings gcpSettings = new PubSubSettings(connectorPrefix); + + @Getter + private static final KcqlSettings kcqlSettings = new KcqlSettings(connectorPrefix); + + private static final List> settings = List.of(kcqlSettings, gcpSettings); + + public PubSubConfigSettings() { + configDef = new ConfigDef(); + withSettings(configDef); + } + + public PubSubSourceConfig parse(Map props) { + return parseFromConfig(ConfigWrapperSource.fromConfigDef(getConfigDef(), props)); + } + + @Override + public PubSubSourceConfig parseFromConfig(ConfigSource configSource) { + return new PubSubSourceConfig( + gcpSettings.parseFromConfig(configSource), + kcqlSettings.parseFromConfig(configSource) + ); + } + + @Override + public ConfigDef withSettings(ConfigDef configDef) { + + // side-effects + settings.forEach(s -> s.withSettings(configDef)); + + return configDef; + } + +} diff --git 
a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverter.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverter.java new file mode 100644 index 000000000..408a2d31d --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverter.java @@ -0,0 +1,66 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.configdef; + +import org.apache.kafka.common.config.ConfigException; + +import io.lenses.kcql.Kcql; +import io.lenses.streamreactor.common.config.base.intf.KcqlConverter; +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSubscription; +import lombok.val; + +/** + * PubSubKcqlConverter is responsible for converting Kcql source to PubSubSubscription. + * It uses the builder pattern to create a new instance of PubSubSubscription. 
+ */ +public class PubSubKcqlConverter extends KcqlConverter { + + public static final int DEFAULT_BATCH_SIZE = 1000; + // 1 hour + public static final long DEFAULT_CACHE_TTL_MILLIS = 3600L * 1000L; + public static final int DEFAULT_CACHE_MAX = 10000; + + public static final String KCQL_PROP_KEY_BATCH_SIZE = "batch.size"; + public static final String KCQL_PROP_KEY_CACHE_TTL = "cache.ttl"; + public static final String KCQL_PROP_KEY_QUEUE_MAX = "queue.max"; + + private final PubSubService pubSubService; + + public PubSubKcqlConverter(PubSubService pubSubService) { + this.pubSubService = pubSubService; + } + + public PubSubSubscription convert(Kcql source) throws ConfigException { + try { + source.validateKcqlProperties(KCQL_PROP_KEY_BATCH_SIZE, KCQL_PROP_KEY_CACHE_TTL, KCQL_PROP_KEY_QUEUE_MAX); + } catch (IllegalArgumentException e) { + throw new ConfigException("Invalid KCQL properties", e); + } + val subscriptionId = source.getSource(); + return PubSubSubscription.builder() + .sourceTopicId(pubSubService.topicNameFor(subscriptionId)) + .targetKafkaTopic(source.getTarget()) + .subscriptionId(subscriptionId) + .batchSize(source.extractOptionalProperty(KCQL_PROP_KEY_BATCH_SIZE).map(Integer::parseInt).orElse( + DEFAULT_BATCH_SIZE)) + .cacheExpire(source.extractOptionalProperty(KCQL_PROP_KEY_CACHE_TTL).map(Long::parseLong).orElse( + DEFAULT_CACHE_TTL_MILLIS)) + .queueMaxEntries(source.extractOptionalProperty(KCQL_PROP_KEY_QUEUE_MAX).map(Integer::parseInt).orElse( + DEFAULT_CACHE_MAX)) + .build(); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettings.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettings.java new file mode 100644 index 000000000..1c4b54a27 --- /dev/null +++ 
b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettings.java @@ -0,0 +1,88 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.configdef; + +import org.apache.kafka.common.config.ConfigDef; + +import io.lenses.streamreactor.common.config.base.ConfigSettings; +import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix; +import io.lenses.streamreactor.common.config.source.ConfigSource; +import io.lenses.streamreactor.connect.gcp.common.config.AuthModeSettings; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubConfig; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.MappingConfig; +import lombok.Getter; +import lombok.val; + +import static io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.MappingConfig.OUTPUT_MODE_DEFAULT; + +/** + * PubSubSettings is responsible for configuration settings for connecting to Google Cloud Platform (GCP) services. + * This class provides methods for defining and parsing GCP-specific configuration properties. 
+ */ +@Getter +public class PubSubSettings implements ConfigSettings { + + private static final String EMPTY_STRING = ""; + + private final String gcpProjectIdKey; + private final String outputModeKey; + + private final AuthModeSettings authModeSettings; + + /** + * Constructs a new instance of {@code GCPSettings} with the specified connector prefix. + * + * @param connectorPrefix the prefix used for configuration keys + */ + public PubSubSettings(ConnectorPrefix connectorPrefix) { + gcpProjectIdKey = connectorPrefix.prefixKey("gcp.project.id"); + outputModeKey = connectorPrefix.prefixKey("output.mode"); + authModeSettings = new AuthModeSettings(connectorPrefix); + } + + /** + * Configures the provided {@link ConfigDef} with GCP-specific settings. + * + * @param configDef the base configuration definition to extend + * @return the updated {@link ConfigDef} with GCP-specific settings + */ + @Override + public ConfigDef withSettings(ConfigDef configDef) { + val conf = + configDef + .define( + gcpProjectIdKey, + ConfigDef.Type.STRING, + EMPTY_STRING, + ConfigDef.Importance.HIGH, + "GCP Project ID") + .define( + outputModeKey, + ConfigDef.Type.STRING, + EMPTY_STRING, + ConfigDef.Importance.HIGH, + "Output Mode (options are DEFAULT or COMPATIBILITY)"); + + return authModeSettings.withSettings(conf); + } + + public PubSubConfig parseFromConfig(ConfigSource configSource) { + return new PubSubConfig( + configSource.getString(gcpProjectIdKey).orElse(null), + authModeSettings.parseFromConfig(configSource), + MappingConfig.fromOutputMode(configSource.getString(outputModeKey).orElse(OUTPUT_MODE_DEFAULT))); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/MappingConfig.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/MappingConfig.java new file mode 100644 index 000000000..4a7f13166 --- /dev/null +++ 
b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/MappingConfig.java @@ -0,0 +1,71 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers.HeaderMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers.MinimalAndMessageAttributesHeaderMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers.MinimalHeaderMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.key.CompatibilityKeyMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.key.KeyMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.key.MessageIdKeyMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.value.CompatibilityValueMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.value.MessageValueMapper; +import io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.value.ValueMapper; +import lombok.AllArgsConstructor; +import lombok.Getter; + +/** + * MappingConfig is responsible for holding the mapping configuration for the PubSubMessageData to SourceRecord + * conversion. + * It contains the key, value and header mapping configurations. 
+ */ +@AllArgsConstructor +@Getter +public class MappingConfig { + + public static final String OUTPUT_MODE_DEFAULT = "DEFAULT"; + public static final String OUTPUT_MODE_COMPATIBILITY = "COMPATIBILITY"; + private KeyMapper keyMapper; + + private ValueMapper valueMapper; + + private HeaderMapper headerMapper; + + public static MappingConfig fromOutputMode(String outputMode) { + switch (outputMode.toUpperCase()) { + case OUTPUT_MODE_COMPATIBILITY: + return MappingConfig.COMPATIBILITY_MAPPING_CONFIG; + default: + case OUTPUT_MODE_DEFAULT: + return MappingConfig.DEFAULT_MAPPING_CONFIG; + } + } + + public static final MappingConfig DEFAULT_MAPPING_CONFIG = + new MappingConfig( + new MessageIdKeyMapper(), + new MessageValueMapper(), + new MinimalAndMessageAttributesHeaderMapper() + ); + + public static final MappingConfig COMPATIBILITY_MAPPING_CONFIG = + new MappingConfig( + new CompatibilityKeyMapper(), + new CompatibilityValueMapper(), + new MinimalHeaderMapper() + ); + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverter.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverter.java new file mode 100644 index 000000000..1fe86b71b --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverter.java @@ -0,0 +1,64 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping; + +import org.apache.kafka.common.config.ConfigException; +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.source.SourceRecord; + +import io.lenses.streamreactor.common.config.base.intf.Converter; +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; +import lombok.AllArgsConstructor; + +/** + * SourceRecordConverter is responsible for converting PubSubMessageData to SourceRecord. + * It uses the MappingConfig to generate the key and value for the SourceRecord. 
+ */ +@AllArgsConstructor +public class SourceRecordConverter extends Converter { + + private final MappingConfig mappingConfig; + + @Override + protected SourceRecord convert(final PubSubMessageData source) throws ConfigException { + return new SourceRecord( + source.getSourcePartition().toMap(), + source.getSourceOffset().toMap(), + source.getTargetTopicName(), + getKeySchema(), + getKey(source), + getValueSchema(), + getValue(source) + ); + } + + private Object getValue(final PubSubMessageData source) { + return mappingConfig.getValueMapper().mapValue(source); + } + + private Schema getValueSchema() { + return mappingConfig.getValueMapper().getSchema(); + } + + private Object getKey(final PubSubMessageData source) { + return mappingConfig.getKeyMapper().mapKey(source); + } + + private Schema getKeySchema() { + return mappingConfig.getKeyMapper().getSchema(); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/HeaderMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/HeaderMapper.java new file mode 100644 index 000000000..5c9617b3c --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/HeaderMapper.java @@ -0,0 +1,30 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; + +import java.util.Map; + +/** + * HeaderMapper is an interface for mapping headers in the PubSubMessageData to Kafka Connect headers. + * Implementations of this interface should define how this mapping is done. + */ +public interface HeaderMapper { + + Map mapHeaders(final PubSubMessageData source); + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapper.java new file mode 100644 index 000000000..499acc167 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapper.java @@ -0,0 +1,42 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers; + +import com.google.common.collect.ImmutableMap; +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; +import lombok.val; + +import java.util.Map; + +/** + * MinimalAndMessageAttributesHeaderMapper is responsible for mapping minimal headers and message attributes from + * PubSubMessageData to Kafka Connect headers. + * It extends the functionality of MinimalHeaderMapper by adding message attributes to the headers. + */ +public class MinimalAndMessageAttributesHeaderMapper implements HeaderMapper { + + private final MinimalHeaderMapper minimalHeaderMapping = new MinimalHeaderMapper(); + + @Override + public Map mapHeaders(final PubSubMessageData source) { + val miniMap = minimalHeaderMapping.mapHeaders(source); + val headMap = source.getMessage().getAttributesMap(); + return ImmutableMap.builder() + .putAll(miniMap) + .putAll(headMap) + .build(); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapper.java new file mode 100644 index 000000000..a9018714d --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapper.java @@ -0,0 +1,35 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers; + +import java.util.Map; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; + +/** + * MinimalHeaderMapper is responsible for mapping minimal headers from PubSubMessageData to Kafka Connect headers. + * The minimal headers include only the most essential information from the PubSubMessageData. + */ +public class MinimalHeaderMapper implements HeaderMapper { + + @Override + public Map mapHeaders(final PubSubMessageData source) { + return Map.of( + "PublishTimestamp", String.valueOf(source.getMessage().getPublishTime().getSeconds()) + ); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/CompatibilityKeyMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/CompatibilityKeyMapper.java new file mode 100644 index 000000000..572b9d79c --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/CompatibilityKeyMapper.java @@ -0,0 +1,57 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.key; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.apache.kafka.connect.data.Struct; + +/** + * CompatibilityKeyMapper is responsible for mapping the key from PubSubMessageData to Kafka Connect key. + * The key is composed of the project id, topic id, subscription id and message id. The aim is to maintain compatibility + * with another connector in the market.
+ */ +public class CompatibilityKeyMapper implements KeyMapper { + + private static final String STRUCT_KEY_PROJECT_ID = "ProjectId"; + private static final String STRUCT_KEY_TOPIC_ID = "TopicId"; + private static final String STRUCT_KEY_SUBSCRIPTION_ID = "SubscriptionId"; + private static final String STRUCT_KEY_MESSAGE_ID = "MessageId"; + + private static final Schema SCHEMA = + SchemaBuilder + .struct() + .field(STRUCT_KEY_PROJECT_ID, Schema.STRING_SCHEMA) + .field(STRUCT_KEY_TOPIC_ID, Schema.STRING_SCHEMA) + .field(STRUCT_KEY_SUBSCRIPTION_ID, Schema.STRING_SCHEMA) + .field(STRUCT_KEY_MESSAGE_ID, Schema.STRING_SCHEMA) + .build(); + + @Override + public Object mapKey(final PubSubMessageData source) { + return new Struct(SCHEMA) + .put(STRUCT_KEY_PROJECT_ID, source.getSourcePartition().getProjectId()) + .put(STRUCT_KEY_TOPIC_ID, source.getSourcePartition().getTopicId()) + .put(STRUCT_KEY_SUBSCRIPTION_ID, source.getSourcePartition().getSubscriptionId()) + .put(STRUCT_KEY_MESSAGE_ID, source.getSourceOffset().getMessageId()); + } + + @Override + public Schema getSchema() { + return SCHEMA; + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/KeyMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/KeyMapper.java new file mode 100644 index 000000000..1a3d8d789 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/KeyMapper.java @@ -0,0 +1,29 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.key; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; +import org.apache.kafka.connect.data.Schema; + +/** + * KeyMapper is responsible for mapping the key from PubSubMessageData to Kafka Connect key. + */ +public interface KeyMapper { + + Object mapKey(PubSubMessageData source); + + Schema getSchema(); +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/MessageIdKeyMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/MessageIdKeyMapper.java new file mode 100644 index 000000000..5f3f1ab07 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/key/MessageIdKeyMapper.java @@ -0,0 +1,35 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.key; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; +import org.apache.kafka.connect.data.Schema; + +/** + * MessageIdKeyMapper is responsible for mapping the key from PubSubMessageData to Kafka Connect key. + */ +public class MessageIdKeyMapper implements KeyMapper { + + @Override + public Object mapKey(final PubSubMessageData source) { + return source.getSourceOffset().getMessageId(); + } + + @Override + public Schema getSchema() { + return Schema.STRING_SCHEMA; + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/CompatibilityValueMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/CompatibilityValueMapper.java new file mode 100644 index 000000000..3c0d94464 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/CompatibilityValueMapper.java @@ -0,0 +1,65 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.value; + +import java.util.Map; +import java.util.Optional; + +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.apache.kafka.connect.data.Struct; + +import com.google.gson.Gson; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; + +/** + * CompatibilityValueMapping is responsible for mapping the value from PubSubMessageData to Kafka Connect value. This is + * for a mode that aims to enable compatibility with another connector in the market. + */ +public class CompatibilityValueMapper implements ValueMapper { + + private static final String STRUCT_KEY_MESSAGE_DATA = "MessageData"; + private static final String STRUCT_KEY_ATTRIBUTE_MAP = "AttributeMap"; + private static final Schema SCHEMA = + SchemaBuilder + .struct() + .field(STRUCT_KEY_MESSAGE_DATA, Schema.OPTIONAL_STRING_SCHEMA) + .field(STRUCT_KEY_ATTRIBUTE_MAP, Schema.OPTIONAL_STRING_SCHEMA) + .build(); + + private final Gson gson = new Gson(); + + @Override + public Object mapValue(final PubSubMessageData source) { + + return new Struct(SCHEMA) + .put(STRUCT_KEY_MESSAGE_DATA, new String(source.getMessage().getData().toByteArray())) + .put(STRUCT_KEY_ATTRIBUTE_MAP, attributesMapToString(source.getMessage().getAttributesMap())); + } + + private String attributesMapToString(Map attributesMap) { + return Optional + .ofNullable(attributesMap) + .map(gson::toJson) + .orElse(null); + } + + @Override + public Schema getSchema() { + return SCHEMA; + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapper.java new file mode 100644 index 000000000..72db18512 --- /dev/null +++ 
b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapper.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.value;
+
+import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData;
+import org.apache.kafka.connect.data.Schema;
+
+/**
+ * MessageValueMapper is responsible for mapping the value from PubSubMessageData to Kafka Connect value.
+ */
+public class MessageValueMapper implements ValueMapper {
+
+  @Override
+  public Object mapValue(final PubSubMessageData source) {
+    return source.getMessage().getData().toByteArray();
+  }
+
+  @Override
+  public Schema getSchema() {
+    return Schema.BYTES_SCHEMA;
+  }
+}
diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/ValueMapper.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/ValueMapper.java
new file mode 100644
index 000000000..9b91dc988
--- /dev/null
+++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/ValueMapper.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.value;
+
+import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData;
+import org.apache.kafka.connect.data.Schema;
+
+/**
+ * ValueMapper is responsible for mapping the value from PubSubMessageData to Kafka Connect value.
+ */ +public interface ValueMapper { + + Object mapValue(final PubSubMessageData source); + + Schema getSchema(); +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/LooselyBoundedQueue.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/LooselyBoundedQueue.java new file mode 100644 index 000000000..b6e7dfbc6 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/LooselyBoundedQueue.java @@ -0,0 +1,96 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import java.util.Collection; +import java.util.concurrent.ConcurrentLinkedQueue; + +import lombok.val; +import lombok.extern.slf4j.Slf4j; + +/** + * LooselyBoundedQueue is an extension of {@link ConcurrentLinkedQueue} that introduces a maximum size limit. + * Unlike a strictly bounded queue, it does not fail the {@link #add(Object)} or {@link #addAll(Collection)} operations + * when this limit is exceeded. Instead, it logs a debug message when the queue size exceeds the specified maximum size. + * + *

This class is designed to be used in multi-threaded environments where it is more important to avoid the failure + * of add operations than to strictly enforce a queue size limit. In such scenarios, failing the connector due to queue + * overflow is undesirable as it can lead to disruptions in the data processing pipeline. Hence, this loosely bounded + * approach allows for some flexibility by permitting the queue size to exceed the maximum limit while still providing + * an indication (through logging) that the limit has been breached.

+ * + * @param the type of elements held in this queue + */ +@Slf4j +public class LooselyBoundedQueue extends ConcurrentLinkedQueue { + + private final int maxSize; + + /** + * Constructs a new LooselyBoundedQueue with the specified maximum size. + * + * @param maxSize the maximum size of the queue + */ + public LooselyBoundedQueue(int maxSize) { + this.maxSize = maxSize; + } + + /** + * Checks if there is space available in the queue for the specified number of elements. + * + * @param count the number of elements proposed to be added to the queue + * @return true if there is spare capacity for these elements, false otherwise + */ + public boolean hasSpareCapacity(int count) { + val newSize = getNewSize(count); + return (newSize <= maxSize); + } + + @Override + public boolean add(X x) { + logIfNoSpareCapacity(1); + return super.add(x); + } + + @Override + public boolean addAll(Collection c) { + logIfNoSpareCapacity(c.size()); + return super.addAll(c); + } + + /** + * Logs a debug message if adding the specified number of elements would exceed the queue's maximum size. + * + * @param numberOfElements the number of elements to be added + */ + private void logIfNoSpareCapacity(int numberOfElements) { + final var newSize = getNewSize(numberOfElements); + if (newSize > maxSize) { + log.debug("Queue will be full - {}/{}", newSize, maxSize); + } + } + + /** + * Calculates the new size of the queue if the specified number of elements were added. 
+ * + * @param numberOfElements the number of elements to be added + * @return the new size of the queue + */ + private int getNewSize(int numberOfElements) { + return super.size() + numberOfElements; + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubMessageData.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubMessageData.java new file mode 100644 index 000000000..90629ed1a --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubMessageData.java @@ -0,0 +1,39 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import com.google.pubsub.v1.PubsubMessage; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +/** + * PubSubMessageData holds the data from PubSubMessage and the target topic name for processing before being sent back + * via Kafka Connect. 
+ */ +@AllArgsConstructor +@Getter +public class PubSubMessageData { + + private PubSubSourcePartition sourcePartition; + + private PubSubSourceOffset sourceOffset; + + private PubsubMessage message; + + private String targetTopicName; + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffset.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffset.java new file mode 100644 index 000000000..8173a4f0d --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffset.java @@ -0,0 +1,44 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import java.util.Map; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.ToString; + +/** + * SourceOffset holds the message id from the PubSub message, to allow kafka connect to track the offset of the message, + * for later use in the GCP offset acknowledgement. 
+ */
+@AllArgsConstructor
+@Getter
+@ToString
+public class PubSubSourceOffset {
+
+  private static final String KEY_MESSAGE_ID = "message.id";
+  private String messageId;
+
+  public static PubSubSourceOffset fromMap(Map<String, String> sourceLocation) {
+    return new PubSubSourceOffset(sourceLocation.get(KEY_MESSAGE_ID));
+  }
+
+  public Map<String, String> toMap() {
+    return Map.of(KEY_MESSAGE_ID, messageId);
+  }
+
+}
diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartition.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartition.java
new file mode 100644
index 000000000..7885c1c28
--- /dev/null
+++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartition.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber;
+
+import java.util.Map;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.ToString;
+
+/**
+ * PubSubSourcePartition holds the location from which the message was sourced within GCP PubSub, for use reporting back
+ * partitions to Kafka Connect for later use in the GCP record acknowledgement.
+ */ +@Getter +@AllArgsConstructor +@ToString +public class PubSubSourcePartition { + + private static final String KEY_PROJECT_ID = "project.id"; + private static final String KEY_TOPIC_ID = "topic.id"; + private static final String KEY_SUBSCRIPTION_ID = "subscription.id"; + + private String projectId; + private String topicId; + private String subscriptionId; + + public static PubSubSourcePartition fromMap(Map sourceLocation) { + return new PubSubSourcePartition( + sourceLocation.get(KEY_PROJECT_ID), + sourceLocation.get(KEY_TOPIC_ID), + sourceLocation.get(KEY_SUBSCRIPTION_ID) + ); + } + + public Map toMap() { + return Map.of( + KEY_TOPIC_ID, topicId, + KEY_PROJECT_ID, projectId, + KEY_SUBSCRIPTION_ID, subscriptionId + ); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriber.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriber.java new file mode 100644 index 000000000..e9ab31eed --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriber.java @@ -0,0 +1,126 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.google.cloud.pubsub.v1.AckReplyConsumer; +import com.google.cloud.pubsub.v1.MessageReceiver; +import com.google.pubsub.v1.PubsubMessage; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSubscription; +import lombok.extern.slf4j.Slf4j; +import lombok.val; + +/** + * Subscriber is responsible for receiving messages from GCP PubSub and storing them in a queue for processing. + */ +@Slf4j +public class PubSubSubscriber { + + private final LooselyBoundedQueue messageQueue; + + private final PubSubSourcePartition sourcePartition; + + private final Cache ackCache; + + private final Integer batchSize; + + private final String targetTopicName; + + private final com.google.cloud.pubsub.v1.Subscriber gcpSubscriber; + + public PubSubSubscriber( + PubSubService pubSubService, + String projectId, + PubSubSubscription subscription + ) { + log.info("Starting PubSubSubscriber for subscription {}", subscription.getSubscriptionId()); + targetTopicName = subscription.getTargetKafkaTopic(); + batchSize = subscription.getBatchSize(); + messageQueue = new LooselyBoundedQueue<>(subscription.getQueueMaxEntries()); + ackCache = + Caffeine + .newBuilder() + .expireAfterWrite(subscription.getCacheExpire(), TimeUnit.MILLISECONDS) + .build(); + + val receiver = createMessageReceiver(); + + gcpSubscriber = pubSubService.createSubscriber(subscription.getSubscriptionId(), receiver); + sourcePartition = + new PubSubSourcePartition( + projectId, + subscription.getSourceTopicId(), + subscription.getSubscriptionId() + ); 
+    startAsync();
+  }
+
+  public void startAsync() {
+    gcpSubscriber.startAsync();
+  }
+
+  private MessageReceiver createMessageReceiver() {
+    return (PubsubMessage message, AckReplyConsumer consumer) -> {
+      if (messageQueue.hasSpareCapacity(1)) {
+        log.info("Spare capacity");
+        messageQueue.add(message);
+        ackCache.put(message.getMessageId(), consumer);
+      } else {
+        log.info("Message rejected from GCP as queue is full");
+        // if the queue is full, let Google know we haven't received it and it will be sent again in the near future
+        consumer.nack();
+      }
+    };
+  }
+
+  public List<PubSubMessageData> getMessages() {
+    return IntStream.range(0, batchSize)
+        .mapToObj(i -> messageQueue.poll())
+        .filter(Objects::nonNull)
+        .map(psm -> new PubSubMessageData(
+            sourcePartition,
+            new PubSubSourceOffset(psm.getMessageId()),
+            psm,
+            targetTopicName
+        ))
+        .collect(Collectors.toUnmodifiableList());
+  }
+
+  public void acknowledge(String messageId) {
+    log.trace("Sending acknowledgement for {}", messageId);
+    Optional
+        .ofNullable(ackCache.getIfPresent(messageId))
+        .ifPresent(e -> {
+          e.ack();
+          ackCache.invalidate(messageId);
+        });
+  }
+
+  public void stopAsync() {
+    gcpSubscriber.stopAsync();
+  }
+
+}
diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManager.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManager.java
new file mode 100644
index 000000000..e0abdaeee
--- /dev/null
+++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManager.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSubscription; +import lombok.extern.slf4j.Slf4j; +import lombok.val; + +/** + * SubscriberManager is responsible for managing the subscribers and polling messages from GCP PubSub. + */ +@Slf4j +public class PubSubSubscriberManager { + + /** + * Map of Subscribers. The key is the subscriptionId. 
+ */ + private final Map subscribers; + + public PubSubSubscriberManager( + PubSubService pubSubService, + String projectId, + List subscriptionConfigs, + SubscriberCreator subscriberCreator + ) { + log.info("Starting PubSubSubscriberManager for {} subscriptions", subscriptionConfigs.size()); + subscribers = + subscriptionConfigs + .parallelStream() + .collect(Collectors.toConcurrentMap( + PubSubSubscription::getSubscriptionId, + s -> subscriberCreator.createSubscriber(pubSubService, projectId, s))); + } + + public List poll() { + log.trace("Polling messages from all partitions"); + val subs = + subscribers + .values() + .parallelStream() + .flatMap(pubSubSubscriber -> pubSubSubscriber.getMessages().stream()) + .collect(Collectors.toList()); + log.debug("Polled {} messages from all partitions", subs.size()); + return subs; + } + + public void commitRecord( + PubSubSourcePartition sourcePartition, + PubSubSourceOffset sourceOffset + ) { + log.trace("Committing record for partition {} with offset {}", sourcePartition, sourceOffset); + subscribers + .get(sourcePartition.getSubscriptionId()) + .acknowledge(sourceOffset.getMessageId()); + } + + public void stop() { + log.info("Stopping PubSubSubscriberManager"); + subscribers.values().forEach(PubSubSubscriber::stopAsync); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/SubscriberCreator.java b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/SubscriberCreator.java new file mode 100644 index 000000000..87d1cd39b --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/SubscriberCreator.java @@ -0,0 +1,29 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSubscription; + +@FunctionalInterface +public interface SubscriberCreator { + + PubSubSubscriber createSubscriber( + PubSubService pubSubService, + String projectId, + PubSubSubscription subscription + ); +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/resources/META-INF/services/org.apache.kafka.connect.source.SourceConnector b/java-connectors/kafka-connect-gcp-pubsub/src/main/resources/META-INF/services/org.apache.kafka.connect.source.SourceConnector new file mode 100644 index 000000000..30a8b7b2d --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/resources/META-INF/services/org.apache.kafka.connect.source.SourceConnector @@ -0,0 +1 @@ +io.lenses.streamreactor.connect.gcp.pubsub.source.GCPPubSubSourceConnector \ No newline at end of file diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/main/resources/gcp-pubsub-ascii-source.txt b/java-connectors/kafka-connect-gcp-pubsub/src/main/resources/gcp-pubsub-ascii-source.txt new file mode 100644 index 000000000..96515a976 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/main/resources/gcp-pubsub-ascii-source.txt @@ -0,0 +1,30 @@ + + ████████▀▀▀▀▀███████████████████████████████████████████████████████████████████ + █████▀ ▀████████████████████████████████████████████████████████████████ + ███▀ ▄█████▄ 
▀██████████████████████████████████████████████████████████████ + ███ ▄███████▄ ██████ █████▌ █▌ ████ ███ ▄▄ ██ ███ ▄▄ ███ + ███ █████████ ██████ █████▌ ██████▌ ▀██ ██ ██████ ██████ ███████ + ███ ▀███████▀ ██████ █████▌ ██▌ █▄ █ ███▄▄ ██ ███▄▄ ███ + ████▄ ▄███████ █████▌ ██████▌ ███ ███████ █ ███████████ ██ + █████████ ████████████ ▌ █▌ ████▄ ██▄ ▄██ █▄ ▄███ + █████████ ████████████████████████████████████████████████████████████████████ + █████████ ▄████████████████████████████████████████████████████████████████████ + ████████████████████████████████████████████████████████████████████████████████ + + ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄·▄• ▄▌▄▄▄▄· .▄▄ · ▄• ▄▌▄▄▄▄· + ▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌ ▐█ ▄██▪██▌▐█ ▀█▪▐█ ▀. █▪██▌▐█ ▀█▪ + ▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌ ██▀·█▌▐█▌▐█▀▀█▄▄▀▀▀█▄█▌▐█▌▐█▀▀█▄ + ▐░▌ ▐░▌ ▐░▌ ▐░▌ ▐█▪·•▐█▄█▌██▄▪▐█▐█▄▪▐█▐█▄█▌██▄▪▐█ + ▐░▌ ▄▄▄▄▄▄▄▄ ▐░▌ ▐░█▄▄▄▄▄▄▄█░▌ .▀ ▀▀▀ ·▀▀▀▀ ▀▀▀▀ ▀▀▀ ·▀▀▀▀ + ▐░▌▐░░░░░░░░▌▐░▌ ▐░░░░░░░░░░░▌ .▄▄ · ▄• ▄▌▄▄▄ ▄▄· ▄▄▄ . + ▐░▌ ▀▀▀▀▀▀█░▌▐░▌ ▐░█▀▀▀▀▀▀▀▀▀ ▐█ ▀. ▪ █▪██▌▀▄ █·▐█ ▌▪▀▄.▀· + ▐░▌ ▐░▌▐░▌ ▐░▌ ▄▀▀▀█▄ ▄█▀▄ █▌▐█▌▐▀▀▄ ██ ▄▄▐▀▀▪▄ + ▐░█▄▄▄▄▄▄▄█░▌▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐█▄▪▐█▐█▌.▐▌▐█▄█▌▐█•█▌▐███▌▐█▄▄▌ + ▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▀▀▀▀ ▀█▄▀▪ ▀▀▀ .▀ ▀·▀▀▀ ▀▀▀ + ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ + + + + + + diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubServiceTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubServiceTest.java new file mode 100644 index 000000000..f93cd557d --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/admin/PubSubServiceTest.java @@ -0,0 +1,83 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.admin; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; + +import com.google.pubsub.v1.SubscriptionName; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.google.cloud.pubsub.v1.MessageReceiver; +import com.google.cloud.pubsub.v1.SubscriptionAdminClient; +import com.google.pubsub.v1.ProjectSubscriptionName; +import com.google.pubsub.v1.Subscription; + +import io.lenses.streamreactor.connect.gcp.common.auth.mode.AuthMode; +import lombok.val; + +@ExtendWith(MockitoExtension.class) +class PubSubServiceTest { + + private static final String PROJECT_ID = "test-project"; + private static final String SUBSCRIPTION_ID = "test-subscription"; + private static final String TOPIC_ID = "test-topic"; + + private static final SubscriptionName SUBSCRIPTION_NAME = + SubscriptionName.of(PROJECT_ID, SUBSCRIPTION_ID); + + private static final ProjectSubscriptionName PROJECT_SUBSCRIPTION_NAME = + ProjectSubscriptionName.of(PROJECT_ID, SUBSCRIPTION_ID); + @Mock + private AuthMode authMode; + + @Mock + private SubscriptionAdminClient subscriptionAdminClient; + + @Mock + private MessageReceiver messageReceiver; + + private PubSubService pubSubService; + + @BeforeEach + public void setup() throws IOException { + 
pubSubService = new PubSubService(authMode, PROJECT_ID, subscriptionAdminClient); + } + + @Test + void testCreateSubscriber() { + val subscriber = pubSubService.createSubscriber(SUBSCRIPTION_ID, messageReceiver); + assertEquals(PROJECT_SUBSCRIPTION_NAME.toString(), subscriber.getSubscriptionNameString()); + } + + @Test + void testTopicNameFor() { + val subscription = Subscription.newBuilder().setTopic(TOPIC_ID).build(); + when(subscriptionAdminClient.getSubscription(SUBSCRIPTION_NAME)).thenReturn(subscription); + + val topicName = pubSubService.topicNameFor(SUBSCRIPTION_ID); + + assertEquals(TOPIC_ID, topicName); + verify(subscriptionAdminClient).getSubscription(SUBSCRIPTION_NAME); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettingsTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettingsTest.java new file mode 100644 index 000000000..54bc67080 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubConfigSettingsTest.java @@ -0,0 +1,52 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.configdef; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.apache.kafka.common.config.ConfigDef; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +import lombok.val; + +@ExtendWith(MockitoExtension.class) +class PubSubConfigSettingsTest { + + private static final PubSubConfigSettings PUB_SUB_CONFIG_SETTINGS = new PubSubConfigSettings(); + + @Test + void configDefShouldContainGcpSettings() { + val projectIdConfigSetting = + PUB_SUB_CONFIG_SETTINGS.getConfigDef().configKeys().get("connect.pubsub.gcp.project.id"); + assertEquals("", projectIdConfigSetting.defaultValue); + assertEquals(ConfigDef.Type.STRING, projectIdConfigSetting.type); + assertEquals(ConfigDef.Importance.HIGH, projectIdConfigSetting.importance); + assertEquals("GCP Project ID", projectIdConfigSetting.documentation); + } + + @Test + void configDefShouldContainKcqlSettings() { + val kcqlConfigSetting = PUB_SUB_CONFIG_SETTINGS.getConfigDef().configKeys().get("connect.pubsub.kcql"); + assertEquals(ConfigDef.Type.STRING, kcqlConfigSetting.type); + assertEquals(ConfigDef.Importance.HIGH, kcqlConfigSetting.importance); + assertEquals( + "Contains the Kafka Connect Query Language describing data mappings from the source to the target system.", + kcqlConfigSetting.documentation); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverterTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverterTest.java new file mode 100644 index 000000000..84c137623 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubKcqlConverterTest.java @@ -0,0 +1,92 @@ +/* + * 
Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.configdef; + +import static io.lenses.streamreactor.connect.gcp.pubsub.source.configdef.PubSubKcqlConverter.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; + +import io.lenses.kcql.Kcql; +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSubscription; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.Optional; + +@ExtendWith(MockitoExtension.class) +class PubSubKcqlConverterTest { + + private static final String SUBSCRIPTION_ID = "test-subscription"; + private static final String TOPIC_NAME = "test-topic"; + private static final String TARGET_KAFKA_TOPIC = "test-kafka-topic"; + private static final int BATCH_SIZE = 999; + private static final long TTL = 1599L; + + @Mock + private PubSubService pubSubService; + + @Mock + private Kcql kcql; + + private PubSubKcqlConverter pubSubKcqlConverter; + + @BeforeEach + public void setup() { + pubSubKcqlConverter = new PubSubKcqlConverter(pubSubService); + } + + @Test + void convertShouldMapKcqlPropertiesToPubSubSubscription() { + setUpScenario( 
+ Optional.of(String.valueOf(BATCH_SIZE)), + Optional.of(String.valueOf(TTL)) + ); + + PubSubSubscription result = pubSubKcqlConverter.convert(kcql); + + assertEquals(TOPIC_NAME, result.getSourceTopicId()); + assertEquals(TARGET_KAFKA_TOPIC, result.getTargetKafkaTopic()); + assertEquals(SUBSCRIPTION_ID, result.getSubscriptionId()); + assertEquals(BATCH_SIZE, result.getBatchSize()); + assertEquals(TTL, result.getCacheExpire()); + } + + @Test + void convertShouldProvideDefaultsForBatchSizeAndCacheTtl() { + setUpScenario( + Optional.empty(), + Optional.empty() + ); + + PubSubSubscription result = pubSubKcqlConverter.convert(kcql); + + assertEquals(DEFAULT_BATCH_SIZE, result.getBatchSize()); + assertEquals(DEFAULT_CACHE_TTL_MILLIS, result.getCacheExpire()); + } + + private void setUpScenario(Optional maybeBatchSize, Optional maybeCacheTtl) { + when(kcql.getSource()).thenReturn(SUBSCRIPTION_ID); + when(kcql.getTarget()).thenReturn(TARGET_KAFKA_TOPIC); + when(pubSubService.topicNameFor(SUBSCRIPTION_ID)).thenReturn(TOPIC_NAME); + when(kcql.extractOptionalProperty(KCQL_PROP_KEY_BATCH_SIZE)).thenReturn(maybeBatchSize); + when(kcql.extractOptionalProperty(KCQL_PROP_KEY_CACHE_TTL)).thenReturn(maybeCacheTtl); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettingsTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettingsTest.java new file mode 100644 index 000000000..a71618c09 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/configdef/PubSubSettingsTest.java @@ -0,0 +1,61 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.configdef; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.Map; + +import org.apache.kafka.common.config.ConfigDef; +import org.junit.jupiter.api.Test; + +import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix; +import io.lenses.streamreactor.common.config.source.MapConfigSource; +import lombok.val; + +class PubSubSettingsTest { + + private static final ConnectorPrefix connectorPrefix = new ConnectorPrefix("pubsub.test"); + public static final String TEST_PROJECT_ID = "test-project-id"; + + @Test + void shouldInjectGcpSettingsIntoConfigDef() { + val configDef = new ConfigDef(); + new PubSubSettings(connectorPrefix).withSettings(configDef); + + val projectIdConfigSetting = configDef.configKeys().get("pubsub.test.gcp.project.id"); + assertEquals("", projectIdConfigSetting.defaultValue); + assertEquals(ConfigDef.Type.STRING, projectIdConfigSetting.type); + assertEquals(ConfigDef.Importance.HIGH, projectIdConfigSetting.importance); + assertEquals("GCP Project ID", projectIdConfigSetting.documentation); + } + + @Test + void shouldParsePubSubConfigFromConfigSource() { + val configSource = + new MapConfigSource( + Map.of( + "pubsub.test.gcp.project.id", TEST_PROJECT_ID + + ) + ); + + val pubSubConfig = new PubSubSettings(connectorPrefix).parseFromConfig(configSource); + + assertEquals(TEST_PROJECT_ID, pubSubConfig.getProjectId()); + } + +} diff --git 
a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverterTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverterTest.java new file mode 100644 index 000000000..adb4d53f6 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/SourceRecordConverterTest.java @@ -0,0 +1,79 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping; + +import static io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.MappingConfig.COMPATIBILITY_MAPPING_CONFIG; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.UnsupportedEncodingException; +import java.util.Map; + +import org.apache.kafka.connect.data.Struct; +import org.junit.jupiter.api.Test; + +import com.google.protobuf.ByteString; +import com.google.protobuf.Timestamp; +import com.google.pubsub.v1.PubsubMessage; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubSourceOffset; +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubSourcePartition; +import lombok.val; + +class SourceRecordConverterTest { + + private static final Map ATTRIBUTE_MAP = + Map.of( + "key", "value" + ); + + private static final String ATTRIBUTE_MAP_STRING = "{\"key\":\"value\"}"; + + @Test + void shouldConvertDataForCompatibilityMode() throws UnsupportedEncodingException { + val converted = new SourceRecordConverter(COMPATIBILITY_MAPPING_CONFIG).convert(setUpDataForCompatibilityMode()); + + assertEquals(Map.of("message.id", "messageId1"), converted.sourceOffset()); + assertEquals(Map.of( + "project.id", "projectId1", + "topic.id", "topicId1", + "subscription.id", "subscriptionId1"), + converted.sourcePartition()); + assertEquals("targetTopicName", converted.topic()); + + val connectRecord = (Struct) converted.value(); + assertEquals("My data", connectRecord.get("MessageData")); + assertEquals(ATTRIBUTE_MAP_STRING, connectRecord.get("AttributeMap")); + } + + private static PubSubMessageData setUpDataForCompatibilityMode() throws UnsupportedEncodingException { + return new PubSubMessageData( + new PubSubSourcePartition("projectId1", "topicId1", "subscriptionId1"), + new PubSubSourceOffset("messageId1"), + PubsubMessage.newBuilder() + 
.setMessageId("messageId") + .setData( + ByteString.copyFrom("My data", "UTF-8") + ) + .setPublishTime(Timestamp.newBuilder().build()) + .putAllAttributes( + ATTRIBUTE_MAP + ).build(), + "targetTopicName" + ); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapperTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapperTest.java new file mode 100644 index 000000000..67a13b140 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalAndMessageAttributesHeaderMapperTest.java @@ -0,0 +1,72 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import java.time.Instant; +import java.util.Map; + +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.Timestamp; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.google.pubsub.v1.PubsubMessage; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; + +@ExtendWith(MockitoExtension.class) +class MinimalAndMessageAttributesHeaderMapperTest { + + private static final String PUBLISH_TIME = "1955-11-12T10:04:00Z"; + private static final Instant PUBLISH_TIME_INSTANT = Instant.parse(PUBLISH_TIME); + + private static final Map HEADERS_MAP = Map.of("attr1", "value1", "attr2", "value2"); + + @Mock + private PubSubMessageData pubSubMessageData; + + @Mock + private PubsubMessage pubsubMessage; + + private MinimalAndMessageAttributesHeaderMapper minimalAndMessageAttributesHeaderMapping; + + @BeforeEach + void setup() { + minimalAndMessageAttributesHeaderMapping = new MinimalAndMessageAttributesHeaderMapper(); + } + + @Test + void testGetHeaders() { + when(pubsubMessage.getPublishTime()).thenReturn(Timestamp.newBuilder().setSeconds(PUBLISH_TIME_INSTANT + .getEpochSecond()).build()); + when(pubsubMessage.getAttributesMap()).thenReturn(HEADERS_MAP); + when(pubSubMessageData.getMessage()).thenReturn(pubsubMessage); + + Map result = minimalAndMessageAttributesHeaderMapping.mapHeaders(pubSubMessageData); + + assertEquals( + ImmutableMap.builder() + .put("PublishTimestamp", String.valueOf(PUBLISH_TIME_INSTANT.getEpochSecond())) + .putAll(HEADERS_MAP).build(), + result); + } +} diff --git 
a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapperTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapperTest.java new file mode 100644 index 000000000..e79fa50f2 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/headers/MinimalHeaderMapperTest.java @@ -0,0 +1,66 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.headers; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import java.time.Instant; +import java.util.Map; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.Timestamp; +import com.google.pubsub.v1.PubsubMessage; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; + +@ExtendWith(MockitoExtension.class) +class MinimalHeaderMapperTest { + + private static final String PUBLISH_TIME = "1955-11-12T10:04:00Z"; + private static final Instant PUBLISH_TIME_INSTANT = Instant.parse(PUBLISH_TIME); + + @Mock + private PubSubMessageData pubSubMessageData; + + @Mock + private PubsubMessage pubsubMessage; + + private MinimalHeaderMapper minimalHeaderMapping; + + @BeforeEach + void setup() { + minimalHeaderMapping = new MinimalHeaderMapper(); + } + + @Test + void testGetHeaders() { + when(pubsubMessage.getPublishTime()).thenReturn(Timestamp.newBuilder().setSeconds(PUBLISH_TIME_INSTANT + .getEpochSecond()).build()); + when(pubSubMessageData.getMessage()).thenReturn(pubsubMessage); + + Map result = minimalHeaderMapping.mapHeaders(pubSubMessageData); + + assertEquals(ImmutableMap.builder().put("PublishTimestamp", String.valueOf(PUBLISH_TIME_INSTANT.getEpochSecond())) + .build(), result); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapperTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapperTest.java new file mode 100644 index 000000000..de6b2d35a --- /dev/null +++ 
b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/mapping/value/MessageValueMapperTest.java @@ -0,0 +1,70 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.mapping.value; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.apache.kafka.connect.data.Schema; +import org.junit.jupiter.api.Test; + +import com.google.protobuf.ByteString; +import com.google.pubsub.v1.PubsubMessage; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubMessageData; +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubSourceOffset; +import io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber.PubSubSourcePartition; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class MessageValueMapperTest { + + @Mock + PubSubSourcePartition sourcePartition; + @Mock + PubSubSourceOffset sourceOffset; + + @Test + void testGetValue() { + String testMessageData = "Test message data"; + PubsubMessage message = + PubsubMessage.newBuilder() + .setData(ByteString.copyFromUtf8(testMessageData)) + .build(); + PubSubMessageData pubSubMessageData = + new PubSubMessageData( + 
sourcePartition, + sourceOffset, + message, + "notRelevantToThisTest" + ); + + MessageValueMapper messageValueMapping = new MessageValueMapper(); + byte[] result = (byte[]) messageValueMapping.mapValue(pubSubMessageData); + + assertArrayEquals(testMessageData.getBytes(), result); + } + + @Test + void testGetSchema() { + MessageValueMapper messageValueMapping = new MessageValueMapper(); + Schema result = messageValueMapping.getSchema(); + + assertEquals(Schema.BYTES_SCHEMA, result); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffsetTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffsetTest.java new file mode 100644 index 000000000..96870023d --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourceOffsetTest.java @@ -0,0 +1,41 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.Map; + +import org.junit.jupiter.api.Test; + +class PubSubSourceOffsetTest { + + @Test + void testFromMap() { + Map sourceLocation = Map.of("message.id", "messageId1"); + PubSubSourceOffset sourceOffset = PubSubSourceOffset.fromMap(sourceLocation); + + assertEquals("messageId1", sourceOffset.getMessageId()); + } + + @Test + void testToMap() { + PubSubSourceOffset sourceOffset = new PubSubSourceOffset("messageId1"); + Map result = sourceOffset.toMap(); + + assertEquals(Map.of("message.id", "messageId1"), result); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartitionTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartitionTest.java new file mode 100644 index 000000000..6717c1bc6 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSourcePartitionTest.java @@ -0,0 +1,52 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.Map; + +import org.junit.jupiter.api.Test; + +class PubSubSourcePartitionTest { + + @Test + void testFromMap() { + Map sourceLocation = + Map.of( + "project.id", "projectId1", + "topic.id", "topicId1", + "subscription.id", "subscriptionId1" + ); + PubSubSourcePartition sourcePartition = PubSubSourcePartition.fromMap(sourceLocation); + + assertEquals("projectId1", sourcePartition.getProjectId()); + assertEquals("topicId1", sourcePartition.getTopicId()); + assertEquals("subscriptionId1", sourcePartition.getSubscriptionId()); + } + + @Test + void testToMap() { + PubSubSourcePartition sourcePartition = new PubSubSourcePartition("projectId1", "topicId1", "subscriptionId1"); + Map result = sourcePartition.toMap(); + + assertEquals(Map.of( + "project.id", "projectId1", + "topic.id", "topicId1", + "subscription.id", "subscriptionId1" + ), result); + } +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManagerTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManagerTest.java new file mode 100644 index 000000000..f967faf06 --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberManagerTest.java @@ -0,0 +1,146 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.*; + +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.google.protobuf.ByteString; +import com.google.pubsub.v1.PubsubMessage; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSubscription; +import lombok.val; + +@ExtendWith(MockitoExtension.class) +class PubSubSubscriberManagerTest { + + public static final String MESSAGE_ID_PREFIX = "myMessageId"; + public static final String PROJECT_ID = "myProject"; + public static final String TOPIC_ID = "myTopic"; + public static final String SUBSCRIPTION_ID_PREFIX = "mySubscription"; + public static final String TARGET_KAFKA_TOPIC_PREFIX = "targetKafkaTopic"; + + @Mock + private PubSubService pubSubService; + + @Mock + private PubSubSubscriber pubSubSubscriber1; + @Mock + private PubSubSubscriber pubSubSubscriber2; + + @Mock + private SubscriberCreator subscriberCreator; + + private PubSubSubscriberManager target; + + @BeforeEach + void setUp() { + + final var subscription1 = mockSubscriberCreator(pubSubSubscriber1, "1"); + final var subscription2 = mockSubscriberCreator(pubSubSubscriber2, "2"); + + val subscriptions = List.of(subscription1, 
subscription2); + + target = new PubSubSubscriberManager(pubSubService, PROJECT_ID, subscriptions, subscriberCreator); + } + + @Test + void testSingleRecordPoll() { + + val testMessage = generateTestMessage("1"); + + when(pubSubSubscriber1.getMessages()).thenReturn(List.of(testMessage)); + + val messages = target.poll(); + + assertEquals(1, messages.size()); + assertEquals("test message1", messages.get(0).getMessage().getData().toStringUtf8()); + } + + @Test + void testMultiRecordPoll() { + + val testMessage1 = generateTestMessage("1"); + val testMessage2 = generateTestMessage("2"); + + when(pubSubSubscriber1.getMessages()).thenReturn(List.of(testMessage1, testMessage2)); + + val messages = target.poll(); + + assertEquals(2, messages.size()); + assertEquals("test message1", messages.get(0).getMessage().getData().toStringUtf8()); + assertEquals("test message2", messages.get(1).getMessage().getData().toStringUtf8()); + } + + private PubSubMessageData generateTestMessage(String id) { + return new PubSubMessageData( + sourcePartition(id), + sourceOffset(id), + PubsubMessage.newBuilder().setData(ByteString.copyFrom(("test message" + id).getBytes())).build(), + TARGET_KAFKA_TOPIC_PREFIX + ); + } + + @Test + void testCommitRecord() { + + target.commitRecord(sourcePartition("1"), sourceOffset("1")); + + verify(pubSubSubscriber1).acknowledge(MESSAGE_ID_PREFIX + "1"); + verifyNoInteractions(pubSubSubscriber2); + + } + + @Test + void testStop() { + target.stop(); + + verify(pubSubSubscriber1).stopAsync(); + verify(pubSubSubscriber2).stopAsync(); + } + + private PubSubSubscription mockSubscriberCreator(PubSubSubscriber pubSubSubscriber, String id) { + val subscription = + PubSubSubscription.builder() + .targetKafkaTopic(PubSubSubscriberManagerTest.TARGET_KAFKA_TOPIC_PREFIX + id) + .subscriptionId(PubSubSubscriberManagerTest.SUBSCRIPTION_ID_PREFIX + id) + .cacheExpire(1000L) + .build(); + + when(subscriberCreator.createSubscriber(pubSubService, PROJECT_ID, subscription)) + 
.thenReturn(pubSubSubscriber); + + return subscription; + } + + private PubSubSourcePartition sourcePartition(String id) { + return new PubSubSourcePartition(PROJECT_ID, TOPIC_ID, SUBSCRIPTION_ID_PREFIX + id); + } + + private PubSubSourceOffset sourceOffset(String id) { + return new PubSubSourceOffset(MESSAGE_ID_PREFIX + id); + } + +} diff --git a/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberTest.java b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberTest.java new file mode 100644 index 000000000..7764306fa --- /dev/null +++ b/java-connectors/kafka-connect-gcp-pubsub/src/test/java/io/lenses/streamreactor/connect/gcp/pubsub/source/subscriber/PubSubSubscriberTest.java @@ -0,0 +1,184 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.lenses.streamreactor.connect.gcp.pubsub.source.subscriber; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.*; + +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.google.cloud.pubsub.v1.AckReplyConsumer; +import com.google.cloud.pubsub.v1.MessageReceiver; +import com.google.cloud.pubsub.v1.Subscriber; +import com.google.pubsub.v1.PubsubMessage; + +import io.lenses.streamreactor.connect.gcp.pubsub.source.admin.PubSubService; +import io.lenses.streamreactor.connect.gcp.pubsub.source.config.PubSubSubscription; +import lombok.val; +import lombok.extern.slf4j.Slf4j; + +@ExtendWith(MockitoExtension.class) +@Slf4j +class PubSubSubscriberTest { + + private static final String MESSAGE_ID_PREFIX = "message_id_"; + private static final String TARGET_TOPIC = "target-topic"; + private static final int QUEUE_SIZE = 100; + private static final int BATCH_SIZE = 10; + private static final String SUBSCRIPTION_ID = "subscription-id"; + private static final String SOURCE_TOPIC_ID = "source-topic-id"; + private static final long CACHE_EXPIRE = 1000L; + + @Mock + private PubSubSubscription subscription; + @Mock + private PubSubService pubSubService; + @Mock + private Subscriber gcpSubscriber; + + @BeforeEach + void setUp() { + + when(pubSubService.createSubscriber(anyString(), any(MessageReceiver.class))) + .thenReturn(gcpSubscriber); + + when(subscription.getSubscriptionId()).thenReturn(SUBSCRIPTION_ID); + when(subscription.getTargetKafkaTopic()).thenReturn(TARGET_TOPIC); + when(subscription.getBatchSize()).thenReturn(BATCH_SIZE); + 
when(subscription.getQueueMaxEntries()).thenReturn(QUEUE_SIZE); + when(subscription.getCacheExpire()).thenReturn(CACHE_EXPIRE); + when(subscription.getSourceTopicId()).thenReturn(SOURCE_TOPIC_ID); + + } + + @Test + void testStartAsync() { + createPubSubSubscriber(); + + verify(gcpSubscriber, times(1)).startAsync(); + } + + @Test + void testMessageReceptionAndQueueing() { + + val subscriber = createPubSubSubscriber(); + messageSend(1); + + List messages = subscriber.getMessages(); + + assertEquals(1, messages.size()); + assertEquals(MESSAGE_ID_PREFIX + "0", messages.get(0).getSourceOffset().getMessageId()); + assertEquals(TARGET_TOPIC, messages.get(0).getTargetTopicName()); + } + + @Test + void testNotAcknowledgeMessageOnFullQueue() { + + val subscriber = createPubSubSubscriber(); + // put 200 messages into the queue so it is twice oversaturated + val acks = messageSend(QUEUE_SIZE * 2); + + // first 100 messages are added to the queue (subList toIndex is exclusive) + acks.subList(0, QUEUE_SIZE).forEach(ack -> { + log.info("ACK: {}", ack); + verifyNoInteractions(ack); + }); + // all other messages bounce with a 'nack' + acks.subList(QUEUE_SIZE, QUEUE_SIZE * 2).forEach(nack -> { + log.info("NACK: {}", nack); + verify(nack).nack(); + }); + + val messages = + IntStream.rangeClosed(0, BATCH_SIZE - 1) + .mapToObj(i -> subscriber.getMessages()) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + + assertEquals(QUEUE_SIZE, messages.size()); + + } + + @Test + void testAcknowledgeMessage() { + + val acksIndexes = Set.of(0, 3, 5); + val noAcksIndexes = + IntStream + .rangeClosed(0, QUEUE_SIZE - 1) + .filter(i -> !acksIndexes.contains(i)); + + val subscriber = createPubSubSubscriber(); + val acks = messageSend(QUEUE_SIZE); + + acksIndexes + .forEach(i -> subscriber.acknowledge(MESSAGE_ID_PREFIX + i) + ); + + acksIndexes + .forEach(i -> verify(acks.get(i)).ack() + ); + + noAcksIndexes.forEach( + i -> verifyNoInteractions(acks.get(i)) + ); + } + + @Test + void testStopAsync() { + PubSubSubscriber subscriber =
createPubSubSubscriber(); + reset(gcpSubscriber); + subscriber.stopAsync(); + verify(gcpSubscriber, times(1)).stopAsync(); + } + + private PubSubSubscriber createPubSubSubscriber() { + return new PubSubSubscriber(pubSubService, "project-id", subscription); + } + + private List messageSend(int numMessages) { + + final var receiver = captureMessageReceiver(); + + return IntStream.rangeClosed(0, numMessages - 1).mapToObj(i -> { + val ackReplyConsumer = mock(AckReplyConsumer.class); + val message = PubsubMessage.newBuilder().setMessageId(MESSAGE_ID_PREFIX + i).build(); + + receiver.receiveMessage(message, ackReplyConsumer); + + return ackReplyConsumer; + }).collect(Collectors.toList()); + + } + + private MessageReceiver captureMessageReceiver() { + val receiverCaptor = ArgumentCaptor.forClass(MessageReceiver.class); + verify(pubSubService).createSubscriber(eq(SUBSCRIPTION_ID), receiverCaptor.capture()); + return receiverCaptor.getValue(); + } + +} diff --git a/java-connectors/kafka-connect-query-language/src/main/java/io/lenses/kcql/Kcql.java b/java-connectors/kafka-connect-query-language/src/main/java/io/lenses/kcql/Kcql.java index 88a7711a1..29b2c470f 100644 --- a/java-connectors/kafka-connect-query-language/src/main/java/io/lenses/kcql/Kcql.java +++ b/java-connectors/kafka-connect-query-language/src/main/java/io/lenses/kcql/Kcql.java @@ -32,6 +32,7 @@ public class Kcql { public static final String TIMESTAMP = "sys_time()"; private static final String MSG_ILLEGAL_FIELD_ALIAS = "Illegal fieldAlias."; + public static final String KCQL_MULTI_STATEMENT_SEPARATOR = ";"; private String query; private boolean autoCreate; private boolean autoEvolve; @@ -202,6 +203,27 @@ public Map getProperties() { return properties; } + // TODO: Jira LC-203 improvements + public void validateKcqlProperties(String... 
allowedKeys) { + + Set unexpectedKeys = + properties.keySet().stream().filter(k -> !Arrays.stream(allowedKeys).collect(Collectors.toUnmodifiableSet()) + .contains(k)).collect(Collectors.toUnmodifiableSet()); + if (!unexpectedKeys.isEmpty()) { + throw new IllegalArgumentException( + String.format( + "Unexpected properties found: `%s`. Please check the documentation to find the properties you really need.", + String.join(", ", unexpectedKeys) + ) + ); + } + } + + // TODO: Jira LC-203 improvements + public Optional extractOptionalProperty(String key) { + return Optional.ofNullable(properties.get(key)); + } + public FormatType getFormatType() { return formatType; } @@ -305,7 +327,8 @@ private void setTimestampUnit(TimeUnit timestampUnit) { * @return */ public static List parseMultiple(final String kcqlStatements) { - return Arrays.stream(kcqlStatements.split(";")).map(Kcql::parse).collect(Collectors.toList()); + return Arrays.stream(kcqlStatements.split(KCQL_MULTI_STATEMENT_SEPARATOR)).map(Kcql::parse).collect(Collectors + .toList()); } public static Kcql parse(final String syntax) { diff --git a/java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlTest.java b/java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlTest.java new file mode 100644 index 000000000..4ff3533c4 --- /dev/null +++ b/java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlTest.java @@ -0,0 +1,62 @@ +/* + * Copyright 2017-2024 Lenses.io Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.lenses.kcql; + +import static org.junit.jupiter.api.Assertions.*; + +import java.util.Optional; + +import org.junit.jupiter.api.Test; + +class KcqlTest { + + private final Kcql kcqlWithProperties = + Kcql.parse( + "INSERT INTO table SELECT * FROM topic PK f1,f2 properties(key1=value1, key2='value2', 'key3'='value3')"); + private final Kcql kcqlNoProperties = Kcql.parse("INSERT INTO table SELECT * FROM topic PK f1,f2"); + + @Test + void testValidateKcqlProperties_withAllowedKeys() { + assertDoesNotThrow(() -> kcqlWithProperties.validateKcqlProperties("key1", "key2", "key3")); + } + + @Test + void testValidateKcqlProperties_withUnexpectedKeys() { + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> { + kcqlWithProperties.validateKcqlProperties("key1", "key2"); + }); + assertTrue(exception.getMessage().contains("Unexpected properties found: `key3`")); + } + + @Test + void testExtractOptionalProperty_withExistingKey() { + Optional<String> result = kcqlWithProperties.extractOptionalProperty("key1"); + assertTrue(result.isPresent()); + assertEquals("value1", result.get()); + } + + @Test + void testExtractOptionalProperty_withNonExistingKey() { + Optional<String> result = kcqlWithProperties.extractOptionalProperty("key4"); + assertFalse(result.isPresent()); + } + + @Test + void testValidateKcqlProperties_withNoProperties() { + assertDoesNotThrow(() -> kcqlNoProperties.validateKcqlProperties("key1", "key2", "key3")); + } + +} diff --git a/java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlNestedFieldTest.scala b/java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlNestedFieldTest.scala similarity index 100% rename from java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlNestedFieldTest.scala rename to 
java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlNestedFieldTest.scala diff --git a/java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlPropertiesTest.scala b/java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlPropertiesTest.scala similarity index 100% rename from java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlPropertiesTest.scala rename to java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlPropertiesTest.scala diff --git a/java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlSelectOnlyTest.scala b/java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlSelectOnlyTest.scala similarity index 100% rename from java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlSelectOnlyTest.scala rename to java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlSelectOnlyTest.scala diff --git a/java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlTest.scala b/java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlTest.scala similarity index 100% rename from java-connectors/kafka-connect-query-language/src/test/java/io/lenses/kcql/KcqlTest.scala rename to java-connectors/kafka-connect-query-language/src/test/scala/kcql/KcqlTest.scala diff --git a/java-connectors/settings.gradle b/java-connectors/settings.gradle index 51c2e87e6..93863b1ee 100644 --- a/java-connectors/settings.gradle +++ b/java-connectors/settings.gradle @@ -2,4 +2,5 @@ rootProject.name = 'java-reactor' include 'kafka-connect-common', 'kafka-connect-azure-eventhubs', 'kafka-connect-query-language', -'kafka-connect-gcp-common' +'kafka-connect-gcp-common', +'kafka-connect-gcp-pubsub' diff --git a/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskBucketRootTest.scala 
b/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskBucketRootTest.scala index d8d4287ea..42809626f 100644 --- a/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskBucketRootTest.scala +++ b/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskBucketRootTest.scala @@ -1,7 +1,7 @@ package io.lenses.streamreactor.connect.aws.s3.source import cats.implicits._ -import io.lenses.streamreactor.connect.aws.s3.config.S3ConfigSettings._ +import io.lenses.streamreactor.common.config.base.KcqlSettings import io.lenses.streamreactor.connect.aws.s3.source.S3SourceTaskTest.formats import io.lenses.streamreactor.connect.aws.s3.utils.S3ProxyContainerTest import io.lenses.streamreactor.connect.cloud.common.source.config.CloudSourceSettingsKeys @@ -28,6 +28,8 @@ class S3SourceTaskBucketRootTest SOURCE_PARTITION_SEARCH_INTERVAL_MILLIS -> "1000", ) + val KCQL_CONFIG = new KcqlSettings(javaConnectorPrefix).getKcqlSettingsKey + private val TopicName = "myTopic" override def cleanUp(): Unit = () diff --git a/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskTest.scala b/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskTest.scala index 2b0a2b9b4..a667b6168 100644 --- a/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskTest.scala +++ b/kafka-connect-aws-s3/src/it/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTaskTest.scala @@ -3,7 +3,7 @@ package io.lenses.streamreactor.connect.aws.s3.source import cats.effect.unsafe.implicits.global import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import io.lenses.streamreactor.connect.aws.s3.config.S3ConfigSettings._ +import io.lenses.streamreactor.common.config.base.KcqlSettings import io.lenses.streamreactor.connect.aws.s3.model.location.S3LocationValidator import 
io.lenses.streamreactor.connect.aws.s3.source.S3SourceTaskTest.formats import io.lenses.streamreactor.connect.aws.s3.storage.AwsS3DirectoryLister @@ -56,6 +56,7 @@ class S3SourceTaskTest def bucketSetup: BucketSetup = bucketSetupOpt.getOrElse(throw new IllegalStateException("Not initialised")) override def cleanUp(): Unit = () private val filesLimit = 1000 + val KCQL_CONFIG = new KcqlSettings(javaConnectorPrefix).getKcqlSettingsKey def DefaultProps: Map[String, String] = defaultProps ++ Seq( diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDef.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDef.scala index a249898f4..48a6ad1c4 100644 --- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDef.scala +++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDef.scala @@ -15,6 +15,7 @@ */ package io.lenses.streamreactor.connect.aws.s3.config +import io.lenses.streamreactor.common.config.base.KcqlSettings import io.lenses.streamreactor.common.config.base.traits.ConnectorRetryConfigKeys import io.lenses.streamreactor.common.config.base.traits.ErrorPolicyConfigKey import io.lenses.streamreactor.connect.aws.s3.config.S3ConfigSettings._ @@ -80,7 +81,6 @@ trait S3CommonConfigDef Importance.LOW, "Enable virtual host buckets", ) - .define(KCQL_CONFIG, Type.STRING, Importance.HIGH, KCQL_DOC) .define( HTTP_NBR_OF_RETRIES, Type.INT, @@ -140,5 +140,6 @@ trait S3CommonConfigDef ) withConnectorRetryConfig(config) withErrorPolicyConfig(config) + new KcqlSettings(javaConnectorPrefix).withSettings(config) } } diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3ConfigSettings.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3ConfigSettings.scala index 7861d0b77..4c9228530 100644 --- 
a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3ConfigSettings.scala +++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/S3ConfigSettings.scala @@ -28,10 +28,6 @@ object S3ConfigSettings { val CUSTOM_ENDPOINT: String = s"$CONNECTOR_PREFIX.custom.endpoint" val ENABLE_VIRTUAL_HOST_BUCKETS: String = s"$CONNECTOR_PREFIX.vhost.bucket" - val KCQL_CONFIG = s"$CONNECTOR_PREFIX.$KCQL_PROP_SUFFIX" - val KCQL_DOC = - "Contains the Kafka Connect Query Language describing the flow from Apache Kafka topics to Apache Hive tables." - val HTTP_ERROR_RETRY_INTERVAL = s"$CONNECTOR_PREFIX.http.$RETRY_INTERVAL_PROP_SUFFIX" val HTTP_ERROR_RETRY_INTERVAL_DOC = "If greater than zero, used to determine the delay after which to retry the http request in milliseconds. Based on an exponential backoff algorithm." diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDefTest.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDefTest.scala index 60d43fecc..5dc2b18b8 100644 --- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDefTest.scala +++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3CommonConfigDefTest.scala @@ -15,6 +15,8 @@ */ package io.lenses.streamreactor.connect.aws.s3.config +import io.lenses.streamreactor.common.config.base.KcqlSettings +import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix import io.lenses.streamreactor.connect.aws.s3.config.S3ConfigSettings._ import io.lenses.streamreactor.connect.aws.s3.config.processors.kcql.DeprecationConfigDefProcessor._ import org.scalatest.EitherValues @@ -26,6 +28,7 @@ import scala.jdk.CollectionConverters.MapHasAsScala import scala.util.Try class S3CommonConfigDefTest extends AnyFlatSpec with Matchers with EitherValues { + val KCQL_CONFIG = new 
KcqlSettings(new ConnectorPrefix("connect.s3")).getKcqlSettingsKey private val DeprecatedProps: Map[String, String] = Map( DEP_AWS_ACCESS_KEY -> "DepAccessKey", diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala index 1557c6556..6e49e3571 100644 --- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala +++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala @@ -15,6 +15,7 @@ */ package io.lenses.streamreactor.connect.aws.s3.source.config +import io.lenses.streamreactor.common.config.base.KcqlSettings import io.lenses.streamreactor.connect.aws.s3.config.S3ConfigSettings._ import io.lenses.streamreactor.connect.aws.s3.model.location.S3LocationValidator import io.lenses.streamreactor.connect.cloud.common.config.ConnectorTaskId @@ -33,6 +34,8 @@ class S3SourceConfigTests extends AnyFunSuite with Matchers with TaskIndexKey wi implicit val taskId: ConnectorTaskId = ConnectorTaskId("test", 1, 1) implicit val validator: CloudLocationValidator = S3LocationValidator + val KCQL_CONFIG = new KcqlSettings(javaConnectorPrefix).getKcqlSettingsKey + test("default recursive levels is 0") { S3SourceConfig.fromProps( taskId, diff --git a/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/AzureConfigSettings.scala b/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/AzureConfigSettings.scala index cf0bb5f75..051ebf6f2 100644 --- a/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/AzureConfigSettings.scala +++ b/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/AzureConfigSettings.scala @@ -22,10 +22,6 @@ 
object AzureConfigSettings { val CONNECTOR_PREFIX = "connect.datalake" val ENDPOINT: String = s"$CONNECTOR_PREFIX.endpoint" - val KCQL_CONFIG = s"$CONNECTOR_PREFIX.$KCQL_PROP_SUFFIX" - val KCQL_DOC = - "Contains the Kafka Connect Query Language describing the flow from Apache Kafka topics to Apache Hive tables." - val ERROR_POLICY = s"$CONNECTOR_PREFIX.$ERROR_POLICY_PROP_SUFFIX" val ERROR_POLICY_DOC: String = """ diff --git a/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/CommonConfigDef.scala b/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/CommonConfigDef.scala index 78e78f0c9..6fb38b4ff 100644 --- a/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/CommonConfigDef.scala +++ b/kafka-connect-azure-datalake/src/main/scala/io/lenses/streamreactor/connect/datalake/config/CommonConfigDef.scala @@ -15,6 +15,7 @@ */ package io.lenses.streamreactor.connect.datalake.config +import io.lenses.streamreactor.common.config.base.KcqlSettings import io.lenses.streamreactor.connect.cloud.common.config.CompressionCodecConfigKeys import io.lenses.streamreactor.connect.datalake.config.AzureConfigSettings._ import org.apache.kafka.common.config.ConfigDef @@ -32,7 +33,6 @@ trait CommonConfigDef extends CompressionCodecConfigKeys with AuthModeSettingsCo Importance.LOW, "Azure endpoint", ) - .define(KCQL_CONFIG, Type.STRING, Importance.HIGH, KCQL_DOC) .define( ERROR_POLICY, Type.STRING, @@ -107,5 +107,6 @@ trait CommonConfigDef extends CompressionCodecConfigKeys with AuthModeSettingsCo ) withAuthModeSettings(cD) + new KcqlSettings(javaConnectorPrefix).withSettings(config) } } diff --git a/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala b/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala index 
aef4edce8..b01e77be3 100644 --- a/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala +++ b/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala @@ -17,6 +17,7 @@ package io.lenses.streamreactor.connect.azure.documentdb.config import com.microsoft.azure.documentdb.ConsistencyLevel import io.lenses.streamreactor.common.config.KcqlWithFieldsSettings +import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix import io.lenses.streamreactor.common.config.base.traits._ import org.apache.kafka.common.config.ConfigDef import org.apache.kafka.common.config.ConfigDef.Importance @@ -87,16 +88,6 @@ object DocumentDbConfig { ConfigDef.Width.MEDIUM, DocumentDbConfigConstants.PROXY_HOST_DISPLAY, ) - .define( - DocumentDbConfigConstants.KCQL_CONFIG, - Type.STRING, - Importance.HIGH, - DocumentDbConfigConstants.KCQL_DOC, - "Mappings", - 1, - ConfigDef.Width.LONG, - DocumentDbConfigConstants.KCQL_CONFIG, - ) .define( DocumentDbConfigConstants.ERROR_POLICY_CONFIG, Type.STRING, @@ -141,6 +132,9 @@ object DocumentDbConfig { ConfigDef.Width.MEDIUM, DocumentDbConfigConstants.PROGRESS_COUNTER_ENABLED_DISPLAY, ) + new io.lenses.streamreactor.common.config.base.KcqlSettings(new ConnectorPrefix( + DocumentDbConfigConstants.CONNECTOR_PREFIX, + )).withSettings(config) } case class DocumentDbConfig(props: Map[String, String]) diff --git a/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfigConstants.scala b/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfigConstants.scala index dce783255..aa973c135 100644 --- a/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfigConstants.scala +++ 
b/kafka-connect-azure-documentdb/src/main/scala/io/lenses/streamreactor/connect/azure/documentdb/config/DocumentDbConfigConstants.scala @@ -40,7 +40,7 @@ object DocumentDbConfigConstants { val MASTER_KEY_CONFIG = s"$CONNECTOR_PREFIX.master.key" val MASTER_KEY_DOC = "The connection master key" - val ERROR_POLICY_CONFIG = s"$CONNECTOR_PREFIX.${ERROR_POLICY_PROP_SUFFIX}" + val ERROR_POLICY_CONFIG = s"$CONNECTOR_PREFIX.$ERROR_POLICY_PROP_SUFFIX" val ERROR_POLICY_DOC: String = """Specifies the action to be taken if an error occurs while inserting the data |There are two available options: @@ -50,7 +50,7 @@ object DocumentDbConfigConstants { |The error will be logged automatically""".stripMargin val ERROR_POLICY_DEFAULT = "THROW" - val ERROR_RETRY_INTERVAL_CONFIG = s"$CONNECTOR_PREFIX.${RETRY_INTERVAL_PROP_SUFFIX}" + val ERROR_RETRY_INTERVAL_CONFIG = s"$CONNECTOR_PREFIX.$RETRY_INTERVAL_PROP_SUFFIX" val ERROR_RETRY_INTERVAL_DOC = "The time in milliseconds between retries." val ERROR_RETRY_INTERVAL_DEFAULT = "60000" @@ -58,16 +58,15 @@ object DocumentDbConfigConstants { val NBR_OF_RETRIES_DOC = "The maximum number of times to try the write again." val NBR_OF_RETIRES_DEFAULT = 20 - val KCQL_CONFIG = s"$CONNECTOR_PREFIX.${KCQL_PROP_SUFFIX}" - val KCQL_DOC = "KCQL expression describing field selection and data routing to the target DocumentDb." + val KCQL_CONFIG = s"$CONNECTOR_PREFIX.$KCQL_PROP_SUFFIX" - val CONSISTENCY_CONFIG = s"$CONNECTOR_PREFIX.${CONSISTENCY_LEVEL_PROP_SUFFIX}" + val CONSISTENCY_CONFIG = s"$CONNECTOR_PREFIX.$CONSISTENCY_LEVEL_PROP_SUFFIX" val CONSISTENCY_DOC = "Determines the write visibility. 
There are four possible values: Strong,BoundedStaleness,Session or Eventual" val CONSISTENCY_DISPLAY = "Writes consistency" val CONSISTENCY_DEFAULT = "Session" - val CREATE_DATABASE_CONFIG = s"$CONNECTOR_PREFIX.${DATABASE_PROP_SUFFIX}.create" + val CREATE_DATABASE_CONFIG = s"$CONNECTOR_PREFIX.$DATABASE_PROP_SUFFIX.create" val CREATE_DATABASE_DOC = "If set to true it will create the database if it doesn't exist. If this is set to default(false) an exception will be raised." val CREATE_DATABASE_DISPLAY = "Auto-create database" diff --git a/kafka-connect-gcp-storage/src/it/scala/io/lenses/streamreactor/connect/gcp/storage/utils/GCPProxyContainerTest.scala b/kafka-connect-gcp-storage/src/it/scala/io/lenses/streamreactor/connect/gcp/storage/utils/GCPProxyContainerTest.scala index 3451aa2eb..7333b1d32 100644 --- a/kafka-connect-gcp-storage/src/it/scala/io/lenses/streamreactor/connect/gcp/storage/utils/GCPProxyContainerTest.scala +++ b/kafka-connect-gcp-storage/src/it/scala/io/lenses/streamreactor/connect/gcp/storage/utils/GCPProxyContainerTest.scala @@ -66,12 +66,12 @@ trait GCPProxyContainerTest lazy val defaultProps: Map[String, String] = Map( - gcpSettings.getGcpProjectId -> "projectId", - authModeConfig.getAuthModeKey -> "none", - gcpSettings.getHost -> container.getEndpointUrl(), - "name" -> "gcpSinkTaskTest", - TASK_INDEX -> "1:1", - AVOID_RESUMABLE_UPLOAD -> "true", + gcpSettings.getGcpProjectIdKey -> "projectId", + authModeConfig.getAuthModeKey -> "none", + gcpSettings.getHostKey -> container.getEndpointUrl(), + "name" -> "gcpSinkTaskTest", + TASK_INDEX -> "1:1", + AVOID_RESUMABLE_UPLOAD -> "true", ) val localRoot: File = Files.createTempDirectory("blah").toFile diff --git a/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDef.scala b/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDef.scala index a49ead43d..21438c238 100644 --- 
a/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDef.scala +++ b/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDef.scala @@ -15,6 +15,7 @@ */ package io.lenses.streamreactor.connect.gcp.storage.config +import io.lenses.streamreactor.common.config.base.KcqlSettings import io.lenses.streamreactor.connect.cloud.common.config.CompressionCodecConfigKeys import io.lenses.streamreactor.connect.gcp.common.config.GCPSettings import io.lenses.streamreactor.connect.gcp.storage.config.GCPConfigSettings._ @@ -30,12 +31,6 @@ trait CommonConfigDef extends CompressionCodecConfigKeys { def config: ConfigDef = { val conf = new ConfigDef() - .define( - KCQL_CONFIG, - Type.STRING, - Importance.HIGH, - KCQL_DOC, - ) .define( ERROR_POLICY, Type.STRING, @@ -84,6 +79,6 @@ trait CommonConfigDef extends CompressionCodecConfigKeys { COMPRESSION_LEVEL_DOC, ) withSettings(conf) - + new KcqlSettings(javaConnectorPrefix).withSettings(conf) } } diff --git a/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/GCPConfigSettings.scala b/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/GCPConfigSettings.scala index f1c89ad26..c300161a9 100644 --- a/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/GCPConfigSettings.scala +++ b/kafka-connect-gcp-storage/src/main/scala/io/lenses/streamreactor/connect/gcp/storage/config/GCPConfigSettings.scala @@ -21,10 +21,6 @@ object GCPConfigSettings { val CONNECTOR_PREFIX = "connect.gcpstorage" - val KCQL_CONFIG = s"$CONNECTOR_PREFIX.$KCQL_PROP_SUFFIX" - val KCQL_DOC = - "Contains the Kafka Connect Query Language describing the flow from Apache Kafka topics to Apache Hive tables." 
- val ERROR_POLICY = s"$CONNECTOR_PREFIX.$ERROR_POLICY_PROP_SUFFIX" val ERROR_POLICY_DOC: String = """ diff --git a/kafka-connect-gcp-storage/src/test/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDefTest.scala b/kafka-connect-gcp-storage/src/test/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDefTest.scala index d84bab120..8eafd3e3d 100644 --- a/kafka-connect-gcp-storage/src/test/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDefTest.scala +++ b/kafka-connect-gcp-storage/src/test/scala/io/lenses/streamreactor/connect/gcp/storage/config/CommonConfigDefTest.scala @@ -16,10 +16,11 @@ package io.lenses.streamreactor.connect.gcp.storage.config import cats.implicits.catsSyntaxOptionId +import io.lenses.streamreactor.common.config.base.KcqlSettings +import io.lenses.streamreactor.common.config.base.model.ConnectorPrefix import io.lenses.streamreactor.connect.gcp.common.config.AuthModeSettings import io.lenses.streamreactor.connect.gcp.common.config.GCPSettings import io.lenses.streamreactor.connect.gcp.storage.config.GCPConfigSettings.CONNECTOR_PREFIX -import io.lenses.streamreactor.connect.gcp.storage.config.GCPConfigSettings.KCQL_CONFIG import org.scalatest.EitherValues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -29,6 +30,8 @@ import scala.jdk.CollectionConverters.MapHasAsScala class CommonConfigDefTest extends AnyFlatSpec with Matchers with EitherValues with UploadConfigKeys { + private val KCQL_CONFIG: String = new KcqlSettings(new ConnectorPrefix("connect.gcpstorage")).getKcqlSettingsKey + private val authModeConfig = new AuthModeSettings(javaConnectorPrefix) private val gcpSettings = new GCPSettings(javaConnectorPrefix) @@ -38,10 +41,10 @@ class CommonConfigDefTest extends AnyFlatSpec with Matchers with EitherValues wi private val DefaultProps: Map[String, String] = Map( - gcpSettings.getGcpProjectId -> "projectId", - authModeConfig.getAuthModeKey -> 
"none", - gcpSettings.getHost -> "localhost:9090", - KCQL_CONFIG -> "SELECT * FROM DEFAULT", + gcpSettings.getGcpProjectIdKey -> "projectId", + authModeConfig.getAuthModeKey -> "none", + gcpSettings.getHostKey -> "localhost:9090", + KCQL_CONFIG -> "SELECT * FROM DEFAULT", ) "CommonConfigDef" should "retain original properties after parsing" in {