From 423141f9f951b1f14d263907df9fc043156a7dd8 Mon Sep 17 00:00:00 2001
From: "release-please[bot]"
<55107282+release-please[bot]@users.noreply.github.com>
Date: Fri, 18 Nov 2022 18:38:15 +0000
Subject: [PATCH 01/17] chore(main): release 1.0.1-SNAPSHOT (#200)
:robot: I have created a release *beep* *boop*
---
### Updating meta-information for bleeding-edge SNAPSHOT release.
---
This PR was generated with [Release Please](https://togithub.com/googleapis/release-please). See [documentation](https://togithub.com/googleapis/release-please#release-please).
---
pom.xml | 2 +-
versions.txt | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/pom.xml b/pom.xml
index f75bcbcb..1cafe08e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
4.0.0
pubsub-group-kafka-connector
- 1.0.0
+ 1.0.1-SNAPSHOT
jar
Pub/Sub Group Kafka Connector
https://github.com/googleapis/java-pubsub-group-kafka-connector
diff --git a/versions.txt b/versions.txt
index d2fe15f2..941b5612 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,4 +1,4 @@
# Format:
# module:released-version:current-version
-pubsub-group-kafka-connector:1.0.0:1.0.0
+pubsub-group-kafka-connector:1.0.0:1.0.1-SNAPSHOT
From c499c395cef38f9bb4b52d157bc336bff0644b94 Mon Sep 17 00:00:00 2001
From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com>
Date: Thu, 2 Mar 2023 13:47:47 -0500
Subject: [PATCH 02/17] feat: add pubsublite.ordering.mode to kafka connector
(#228)
* feat: add pubsublite.ordering.mode to kafka connector
This is useful for migration cases using the kafka wire protocol.
Also clean up dependency management.
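For illustration, a minimal sketch of a sink connector configuration that opts into the new mode. The property names come from ConfigDefs in this patch; the project, location, topic values, and the class name are hypothetical placeholders.

```java
import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class OrderingModeConfigExample {
  public static void main(String[] args) {
    Map<String, String> props =
        ImmutableMap.of(
            "pubsublite.project", "my-project", // hypothetical project id
            "pubsublite.location", "us-central1-a", // zone or region
            "pubsublite.topic", "my-topic", // hypothetical topic name
            // KAFKA republishes each record to the same partition index it was
            // read from; DEFAULT keeps standard Pub/Sub Lite routing.
            "pubsublite.ordering.mode", "KAFKA");
    props.forEach((k, v) -> System.out.println(k + "=" + v));
  }
}
```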
---
pom.xml | 106 +++++++-------
.../pubsublite/kafka/sink/ConfigDefs.java | 9 +-
.../pubsublite/kafka/sink/Constants.java | 2 -
.../sink/KafkaPartitionRoutingPolicy.java | 57 ++++++++
.../pubsublite/kafka/sink/OrderingMode.java | 23 +++
.../kafka/sink/PubSubLiteSinkTask.java | 14 +-
.../kafka/sink/PublisherFactoryImpl.java | 29 ++--
.../kafka/sink/PubSubLiteSinkTaskTest.java | 133 +++++++-----------
src/test/java/it/StandaloneIT.java | 55 +++-----
9 files changed, 238 insertions(+), 190 deletions(-)
create mode 100644 src/main/java/com/google/pubsublite/kafka/sink/KafkaPartitionRoutingPolicy.java
create mode 100644 src/main/java/com/google/pubsublite/kafka/sink/OrderingMode.java
diff --git a/pom.xml b/pom.xml
index 1cafe08e..e8a82495 100644
--- a/pom.xml
+++ b/pom.xml
@@ -15,54 +15,73 @@
com.google.cloud
google-cloud-shared-config
- 1.5.4
+ 1.5.5
1.8
1.8
- 3.3.1
- 1.120.25
- 1.8.0
- 1.16.0
- 3.21.9
- 2.19.4
- 2.0.3
+ 3.4.0
+
+
+
+ com.google.cloud
+ google-cloud-shared-dependencies
+ 3.3.0
+ pom
+ import
+
+
+ com.google.cloud
+ libraries-bom
+ 26.8.0
+ pom
+ import
+
+
+
+ com.google.api.grpc
+ grpc-google-cloud-pubsublite-v1
+ 1.11.1
+
+
+
+
+
+ com.google.api.grpc
+ proto-google-cloud-pubsublite-v1
+
+ 1.11.1
+
+
+ com.google.cloud
+ google-cloud-pubsublite
+
+ 1.11.1
+
+
+ com.google.cloud
+ pubsublite-kafka
+ 1.1.2
+
com.google.cloud
google-cloud-pubsub
- ${pubsub.version}
com.google.protobuf
protobuf-java
- ${protobuf-java.vesion}
-
-
- com.google.auth
- google-auth-library-oauth2-http
- 1.12.1
org.apache.kafka
kafka-clients
${kafka.version}
-
- com.google.cloud
- pubsublite-kafka
- 1.0.2
-
-
- com.google.api.grpc
- proto-google-cloud-pubsublite-v1
- ${pubsublite.version}
-
com.google.flogger
google-extensions
@@ -71,62 +90,51 @@
com.google.code.findbugs
jsr305
- 3.0.2
com.google.api
gax
- ${gax.version}
org.slf4j
slf4j-api
- ${slf4j.version}
+ 2.0.5
com.google.api
gax-grpc
- ${gax.version}
com.google.auth
google-auth-library-credentials
- 1.13.0
com.google.api.grpc
proto-google-cloud-pubsub-v1
- 1.102.25
com.google.api
api-common
- 2.2.2
-
-
- com.google.cloud
- google-cloud-pubsublite
- ${pubsublite.version}
com.google.guava
guava
- 31.1-jre
org.threeten
threetenbp
- 1.6.4
com.google.errorprone
error_prone_annotations
- 2.16
com.google.protobuf
protobuf-java-util
- ${protobuf-java.vesion}
+
+
+ com.google.auth
+ google-auth-library-oauth2-http
@@ -141,25 +149,24 @@
junit
junit
- 4.13.2
test
+ 4.13.2
org.mockito
mockito-core
- 4.9.0
test
+ 4.11.0
com.google.truth
truth
- 1.1.3
test
+ 1.1.3
com.google.cloud
google-cloud-core
- 2.8.28
test
@@ -177,13 +184,11 @@
com.google.api.grpc
proto-google-cloud-compute-v1
- ${cloud-compute.version}
test
com.google.cloud
google-cloud-compute
- ${cloud-compute.version}
test
@@ -195,13 +200,6 @@
com.google.cloud
google-cloud-storage
- 2.15.0
- test
-
-
- org.slf4j
- slf4j-log4j12
- ${slf4j.version}
test
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java b/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java
index 82dbd9bd..932c20ec 100644
--- a/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java
+++ b/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java
@@ -25,6 +25,7 @@ private ConfigDefs() {}
static final String PROJECT_FLAG = "pubsublite.project";
static final String LOCATION_FLAG = "pubsublite.location";
static final String TOPIC_NAME_FLAG = "pubsublite.topic";
+ static final String ORDERING_MODE_FLAG = "pubsublite.ordering.mode";
static ConfigDef config() {
return new ConfigDef()
@@ -42,6 +43,12 @@ static ConfigDef config() {
TOPIC_NAME_FLAG,
ConfigDef.Type.STRING,
Importance.HIGH,
- "The name of the topic to which to publish.");
+ "The name of the topic to which to publish.")
+ .define(
+ ORDERING_MODE_FLAG,
+ ConfigDef.Type.STRING,
+ OrderingMode.DEFAULT.name(),
+ Importance.HIGH,
+ "The ordering mode to use for publishing to Pub/Sub Lite. If set to `KAFKA`, messages will be republished to the same partition index they were read from on the source topic. Note that this means the Pub/Sub Lite topic *must* have the same number of partitions as the source Kafka topic.");
}
}
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/Constants.java b/src/main/java/com/google/pubsublite/kafka/sink/Constants.java
index abfed7ae..7dc57b5c 100644
--- a/src/main/java/com/google/pubsublite/kafka/sink/Constants.java
+++ b/src/main/java/com/google/pubsublite/kafka/sink/Constants.java
@@ -24,6 +24,4 @@ private Constants() {}
public static final String KAFKA_OFFSET_HEADER = "x-goog-pubsublite-source-kafka-offset";
public static final String KAFKA_EVENT_TIME_TYPE_HEADER =
"x-goog-pubsublite-source-kafka-event-time-type";
- public static final String PUBSUBLITE_KAFKA_SINK_CONNECTOR_NAME =
- "JAVA_PUBSUBLITE_KAFKA_SINK_CONNECTOR";
}
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/KafkaPartitionRoutingPolicy.java b/src/main/java/com/google/pubsublite/kafka/sink/KafkaPartitionRoutingPolicy.java
new file mode 100644
index 00000000..b4faa3cb
--- /dev/null
+++ b/src/main/java/com/google/pubsublite/kafka/sink/KafkaPartitionRoutingPolicy.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.pubsublite.kafka.sink;
+
+import static com.google.cloud.pubsublite.internal.ExtractStatus.toCanonical;
+
+import com.google.api.gax.rpc.StatusCode.Code;
+import com.google.cloud.pubsublite.Partition;
+import com.google.cloud.pubsublite.internal.CheckedApiException;
+import com.google.cloud.pubsublite.internal.RoutingPolicy;
+import com.google.cloud.pubsublite.proto.PubSubMessage;
+
+/** A routing policy that extracts the original kafka partition and routes to that partition. */
+class KafkaPartitionRoutingPolicy implements RoutingPolicy {
+ private final long numPartitions;
+
+ KafkaPartitionRoutingPolicy(long numPartitions) {
+ this.numPartitions = numPartitions;
+ }
+
+ @Override
+ public Partition route(PubSubMessage message) throws CheckedApiException {
+ Partition partition = getPartition(message);
+ if (partition.value() >= numPartitions) {
+ throw new CheckedApiException(
+ "Kafka topic has more partitions than Pub/Sub Lite topic. OrderingMode.KAFKA cannot be used.",
+ Code.FAILED_PRECONDITION);
+ }
+ return partition;
+ }
+
+ private Partition getPartition(PubSubMessage message) throws CheckedApiException {
+ try {
+ return Partition.of(
+ Long.parseLong(
+ message
+ .getAttributesOrThrow(Constants.KAFKA_PARTITION_HEADER)
+ .getValues(0)
+ .toStringUtf8()));
+ } catch (Throwable t) {
+ throw toCanonical(t);
+ }
+ }
+}
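A hedged usage sketch of the policy above, assuming a Pub/Sub Lite topic with 4 partitions. It lives in the same package because the class and Constants are package-private; the attribute wiring mirrors what PubSubLiteSinkTask stamps on each record.

```java
package com.google.pubsublite.kafka.sink;

import com.google.cloud.pubsublite.Partition;
import com.google.cloud.pubsublite.proto.AttributeValues;
import com.google.cloud.pubsublite.proto.PubSubMessage;
import com.google.protobuf.ByteString;

public class RoutingPolicyExample {
  public static void main(String[] args) throws Exception {
    KafkaPartitionRoutingPolicy policy = new KafkaPartitionRoutingPolicy(/* numPartitions= */ 4);
    // A message stamped with source Kafka partition 2.
    PubSubMessage message =
        PubSubMessage.newBuilder()
            .putAttributes(
                Constants.KAFKA_PARTITION_HEADER,
                AttributeValues.newBuilder().addValues(ByteString.copyFromUtf8("2")).build())
            .build();
    Partition partition = policy.route(message);
    System.out.println(partition.value()); // prints 2
    // A partition attribute of "7" would instead throw FAILED_PRECONDITION,
    // because the destination topic has fewer partitions than the source.
  }
}
```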
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/OrderingMode.java b/src/main/java/com/google/pubsublite/kafka/sink/OrderingMode.java
new file mode 100644
index 00000000..2f24627d
--- /dev/null
+++ b/src/main/java/com/google/pubsublite/kafka/sink/OrderingMode.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.pubsublite.kafka.sink;
+
+public enum OrderingMode {
+ /* Order based on the standard Pub/Sub Lite logic. */
+ DEFAULT,
+ /* Send messages to the same partition index they were from in Kafka. */
+ KAFKA
+}
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTask.java b/src/main/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTask.java
index 328b303a..9dd5ab95 100644
--- a/src/main/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTask.java
+++ b/src/main/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTask.java
@@ -18,9 +18,10 @@
import static com.google.pubsublite.kafka.sink.Schemas.encodeToBytes;
import com.google.api.core.ApiService.State;
-import com.google.cloud.pubsublite.Message;
import com.google.cloud.pubsublite.MessageMetadata;
import com.google.cloud.pubsublite.internal.Publisher;
+import com.google.cloud.pubsublite.proto.AttributeValues;
+import com.google.cloud.pubsublite.proto.PubSubMessage;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableListMultimap;
import com.google.protobuf.ByteString;
@@ -75,7 +76,7 @@ public void put(Collection<SinkRecord> collection) {
}
}
for (SinkRecord record : collection) {
- Message.Builder message = Message.builder();
+ PubSubMessage.Builder message = PubSubMessage.newBuilder();
if (record.key() != null) {
message.setKey(encodeToBytes(record.keySchema(), record.key()));
}
@@ -89,6 +90,7 @@ public void put(Collection<SinkRecord> collection) {
header ->
attributes.put(
header.key(), Schemas.encodeToBytes(header.schema(), header.value())));
+
if (record.topic() != null) {
attributes.put(Constants.KAFKA_TOPIC_HEADER, ByteString.copyFromUtf8(record.topic()));
}
@@ -106,7 +108,13 @@ public void put(Collection<SinkRecord> collection) {
ByteString.copyFromUtf8(record.timestampType().name));
message.setEventTime(Timestamps.fromMillis(record.timestamp()));
}
- message.setAttributes(attributes.build());
+ attributes
+ .build()
+ .asMap()
+ .forEach(
+ (key, values) ->
+ message.putAttributes(
+ key, AttributeValues.newBuilder().addAllValues(values).build()));
publisher.publish(message.build());
}
}
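A standalone sketch of the conversion introduced above: the multimap accumulated from record headers is collapsed into the repeated AttributeValues map that PubSubMessage expects. The header key and value are placeholders.

```java
import com.google.cloud.pubsublite.proto.AttributeValues;
import com.google.cloud.pubsublite.proto.PubSubMessage;
import com.google.common.collect.ImmutableListMultimap;
import com.google.protobuf.ByteString;

public class AttributeConversionExample {
  public static void main(String[] args) {
    ImmutableListMultimap<String, ByteString> attributes =
        ImmutableListMultimap.of("myHeader", ByteString.copyFromUtf8("myValue"));
    PubSubMessage.Builder message = PubSubMessage.newBuilder();
    // Collapse each key's list of values into one AttributeValues entry.
    attributes
        .asMap()
        .forEach(
            (key, values) ->
                message.putAttributes(
                    key, AttributeValues.newBuilder().addAllValues(values).build()));
    System.out.println(message.build());
  }
}
```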
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java b/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java
index b6923287..47b93fab 100644
--- a/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java
+++ b/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java
@@ -18,11 +18,12 @@
import static com.google.cloud.pubsublite.internal.ExtractStatus.toCanonical;
import static com.google.cloud.pubsublite.internal.wire.ServiceClients.addDefaultSettings;
import static com.google.cloud.pubsublite.internal.wire.ServiceClients.getCallContext;
-import static com.google.pubsublite.kafka.sink.Constants.PUBSUBLITE_KAFKA_SINK_CONNECTOR_NAME;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiException;
-import com.google.cloud.pubsublite.CloudZone;
+import com.google.cloud.pubsublite.AdminClient;
+import com.google.cloud.pubsublite.AdminClientSettings;
+import com.google.cloud.pubsublite.CloudRegionOrZone;
import com.google.cloud.pubsublite.MessageMetadata;
import com.google.cloud.pubsublite.Partition;
import com.google.cloud.pubsublite.ProjectPath;
@@ -30,11 +31,11 @@
import com.google.cloud.pubsublite.TopicPath;
import com.google.cloud.pubsublite.cloudpubsub.PublisherSettings;
import com.google.cloud.pubsublite.internal.Publisher;
+import com.google.cloud.pubsublite.internal.wire.PartitionCountWatchingPublisherSettings;
import com.google.cloud.pubsublite.internal.wire.PartitionPublisherFactory;
import com.google.cloud.pubsublite.internal.wire.PubsubContext;
import com.google.cloud.pubsublite.internal.wire.PubsubContext.Framework;
import com.google.cloud.pubsublite.internal.wire.RoutingMetadata;
-import com.google.cloud.pubsublite.internal.wire.RoutingPublisherBuilder;
import com.google.cloud.pubsublite.internal.wire.SinglePartitionPublisherBuilder;
import com.google.cloud.pubsublite.v1.PublisherServiceClient;
import com.google.cloud.pubsublite.v1.PublisherServiceSettings;
@@ -49,8 +50,7 @@ class PublisherFactoryImpl implements PublisherFactory {
private PartitionPublisherFactory getPartitionPublisherFactory(TopicPath topic) {
return new PartitionPublisherFactory() {
-
- protected Optional<PublisherServiceClient> publisherServiceClient = Optional.empty();
+ private Optional<PublisherServiceClient> publisherServiceClient = Optional.empty();
private synchronized PublisherServiceClient getServiceClient() throws ApiException {
if (publisherServiceClient.isPresent()) return publisherServiceClient.get();
@@ -82,8 +82,7 @@ public Publisher<MessageMetadata> newPublisher(Partition partition) throws ApiEx
responseStream -> {
ApiCallContext context =
getCallContext(
- PubsubContext.of(Framework.of(PUBSUBLITE_KAFKA_SINK_CONNECTOR_NAME)),
- RoutingMetadata.of(topic, partition));
+ PubsubContext.of(FRAMEWORK), RoutingMetadata.of(topic, partition));
return client.publishCallable().splitCall(responseStream, context);
});
return singlePartitionBuilder.build();
@@ -97,17 +96,27 @@ public void close() {}
@Override
public Publisher<MessageMetadata> newPublisher(Map<String, String> params) {
Map<String, ConfigValue> config = ConfigDefs.config().validateAll(params);
- RoutingPublisherBuilder.Builder builder = RoutingPublisherBuilder.newBuilder();
+ CloudRegionOrZone location =
+ CloudRegionOrZone.parse(config.get(ConfigDefs.LOCATION_FLAG).value().toString());
+ PartitionCountWatchingPublisherSettings.Builder builder =
+ PartitionCountWatchingPublisherSettings.newBuilder();
TopicPath topic =
TopicPath.newBuilder()
.setProject(
ProjectPath.parse("projects/" + config.get(ConfigDefs.PROJECT_FLAG).value())
.project())
- .setLocation(CloudZone.parse(config.get(ConfigDefs.LOCATION_FLAG).value().toString()))
+ .setLocation(location)
.setName(TopicName.of(config.get(ConfigDefs.TOPIC_NAME_FLAG).value().toString()))
.build();
builder.setTopic(topic);
builder.setPublisherFactory(getPartitionPublisherFactory(topic));
- return builder.build();
+ builder.setAdminClient(
+ AdminClient.create(
+ AdminClientSettings.newBuilder().setRegion(location.extractRegion()).build()));
+ if (OrderingMode.valueOf(config.get(ConfigDefs.ORDERING_MODE_FLAG).value().toString())
+ == OrderingMode.KAFKA) {
+ builder.setRoutingPolicyFactory(KafkaPartitionRoutingPolicy::new);
+ }
+ return builder.build().instantiate();
}
}
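A hedged sketch of the mode selection above: the configured string parses into the OrderingMode enum, and only KAFKA installs the partition-preserving routing policy. The helper and class names are illustrative, not part of the patch.

```java
package com.google.pubsublite.kafka.sink;

public class OrderingModeWiringExample {
  /** True when the partition-preserving routing policy should be installed. */
  static boolean usesKafkaRouting(String configuredMode) {
    // valueOf rejects anything other than DEFAULT or KAFKA with an
    // IllegalArgumentException, surfacing misconfiguration early.
    return OrderingMode.valueOf(configuredMode) == OrderingMode.KAFKA;
  }

  public static void main(String[] args) {
    System.out.println(usesKafkaRouting("KAFKA")); // true
    System.out.println(usesKafkaRouting("DEFAULT")); // false
  }
}
```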
diff --git a/src/test/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTaskTest.java b/src/test/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTaskTest.java
index d64d5582..dc77d179 100644
--- a/src/test/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTaskTest.java
+++ b/src/test/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkTaskTest.java
@@ -23,12 +23,12 @@
import static org.mockito.Mockito.verify;
import static org.mockito.MockitoAnnotations.initMocks;
-import com.google.cloud.pubsublite.Message;
import com.google.cloud.pubsublite.MessageMetadata;
import com.google.cloud.pubsublite.internal.Publisher;
import com.google.cloud.pubsublite.internal.testing.FakeApiService;
+import com.google.cloud.pubsublite.proto.AttributeValues;
+import com.google.cloud.pubsublite.proto.PubSubMessage;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.protobuf.ByteString;
import com.google.protobuf.util.Timestamps;
@@ -74,31 +74,21 @@ public class PubSubLiteSinkTaskTest {
Schema.BYTES_SCHEMA,
KAFKA_MESSAGE2.toByteArray(),
-1);
- private static final Message SAMPLE_MESSAGE_1 =
- Message.builder()
+ private static final PubSubMessage SAMPLE_MESSAGE_1 =
+ PubSubMessage.newBuilder()
.setKey(ByteString.copyFromUtf8(KAFKA_MESSAGE_KEY1))
.setData(KAFKA_MESSAGE1)
- .setAttributes(
- ImmutableListMultimap.<String, ByteString>builder()
- .put(Constants.KAFKA_TOPIC_HEADER, ByteString.copyFromUtf8(KAFKA_TOPIC))
- .put(
- Constants.KAFKA_PARTITION_HEADER,
- ByteString.copyFromUtf8(Integer.toString(0)))
- .put(Constants.KAFKA_OFFSET_HEADER, ByteString.copyFromUtf8(Integer.toString(-1)))
- .build())
+ .putAttributes(Constants.KAFKA_TOPIC_HEADER, single(KAFKA_TOPIC))
+ .putAttributes(Constants.KAFKA_PARTITION_HEADER, single("0"))
+ .putAttributes(Constants.KAFKA_OFFSET_HEADER, single("-1"))
.build();
- private static final Message SAMPLE_MESSAGE_2 =
- Message.builder()
+ private static final PubSubMessage SAMPLE_MESSAGE_2 =
+ PubSubMessage.newBuilder()
.setKey(ByteString.copyFromUtf8(KAFKA_MESSAGE_KEY2))
.setData(KAFKA_MESSAGE2)
- .setAttributes(
- ImmutableListMultimap.<String, ByteString>builder()
- .put(Constants.KAFKA_TOPIC_HEADER, ByteString.copyFromUtf8(KAFKA_TOPIC))
- .put(
- Constants.KAFKA_PARTITION_HEADER,
- ByteString.copyFromUtf8(Integer.toString(0)))
- .put(Constants.KAFKA_OFFSET_HEADER, ByteString.copyFromUtf8(Integer.toString(-1)))
- .build())
+ .putAttributes(Constants.KAFKA_TOPIC_HEADER, single(KAFKA_TOPIC))
+ .putAttributes(Constants.KAFKA_PARTITION_HEADER, single("0"))
+ .putAttributes(Constants.KAFKA_OFFSET_HEADER, single("-1"))
.build();
private PubSubLiteSinkTask task;
@@ -108,6 +98,10 @@ abstract static class FakePublisher extends FakeApiService
private @Spy FakePublisher publisher;
+ private static AttributeValues single(String value) {
+ return AttributeValues.newBuilder().addValues(ByteString.copyFromUtf8(value)).build();
+ }
+
@Before
public void setup() {
initMocks(this);
@@ -174,7 +168,7 @@ public void testPutWithNullValues() {
null,
-1));
task.put(records);
- Message expectedResult = SAMPLE_MESSAGE_1.toBuilder().setData(ByteString.EMPTY).build();
+ PubSubMessage expectedResult = SAMPLE_MESSAGE_1.toBuilder().setData(ByteString.EMPTY).build();
verify(publisher).publish(expectedResult);
}
@@ -185,7 +179,7 @@ public void testPutWithNullMessage() {
records.add(
new SinkRecord(KAFKA_TOPIC, 0, Schema.STRING_SCHEMA, null, Schema.BYTES_SCHEMA, null, -1));
task.put(records);
- Message expectedResult =
+ PubSubMessage expectedResult =
SAMPLE_MESSAGE_1.toBuilder().setKey(ByteString.EMPTY).setData(ByteString.EMPTY).build();
verify(publisher).publish(expectedResult);
}
@@ -259,48 +253,35 @@ public void testKafkaMetadata() {
null,
TimestampType.CREATE_TIME);
task.put(ImmutableList.of(record1, record2, record3));
- ImmutableListMultimap<String, ByteString> attributesBase =
- ImmutableListMultimap.<String, ByteString>builder()
- .put(Constants.KAFKA_TOPIC_HEADER, ByteString.copyFromUtf8(KAFKA_TOPIC))
- .put(Constants.KAFKA_PARTITION_HEADER, ByteString.copyFromUtf8(Integer.toString(4)))
+ ImmutableMap<String, AttributeValues> attributesBase =
+ ImmutableMap.<String, AttributeValues>builder()
+ .put(Constants.KAFKA_TOPIC_HEADER, single(KAFKA_TOPIC))
+ .put(Constants.KAFKA_PARTITION_HEADER, single(Integer.toString(4)))
.build();
- Message message1 =
- Message.builder()
+ PubSubMessage message1 =
+ PubSubMessage.newBuilder()
.setKey(ByteString.copyFromUtf8(KAFKA_MESSAGE_KEY1))
.setData(KAFKA_MESSAGE1)
.setEventTime(Timestamps.fromMillis(50000))
- .setAttributes(
- ImmutableListMultimap.<String, ByteString>builder()
- .putAll(attributesBase)
- .put(Constants.KAFKA_OFFSET_HEADER, ByteString.copyFromUtf8("1000"))
- .put(
- Constants.KAFKA_EVENT_TIME_TYPE_HEADER,
- ByteString.copyFromUtf8("CreateTime"))
- .build())
+ .putAllAttributes(attributesBase)
+ .putAttributes(Constants.KAFKA_OFFSET_HEADER, single("1000"))
+ .putAttributes(Constants.KAFKA_EVENT_TIME_TYPE_HEADER, single("CreateTime"))
.build();
- Message message2 =
- Message.builder()
+ PubSubMessage message2 =
+ PubSubMessage.newBuilder()
.setKey(ByteString.copyFromUtf8(KAFKA_MESSAGE_KEY1))
.setData(KAFKA_MESSAGE1)
.setEventTime(Timestamps.fromMillis(50001))
- .setAttributes(
- ImmutableListMultimap.<String, ByteString>builder()
- .putAll(attributesBase)
- .put(Constants.KAFKA_OFFSET_HEADER, ByteString.copyFromUtf8("1001"))
- .put(
- Constants.KAFKA_EVENT_TIME_TYPE_HEADER,
- ByteString.copyFromUtf8("LogAppendTime"))
- .build())
+ .putAllAttributes(attributesBase)
+ .putAttributes(Constants.KAFKA_OFFSET_HEADER, single("1001"))
+ .putAttributes(Constants.KAFKA_EVENT_TIME_TYPE_HEADER, single("LogAppendTime"))
.build();
- Message message3 =
- Message.builder()
+ PubSubMessage message3 =
+ PubSubMessage.newBuilder()
.setKey(ByteString.copyFromUtf8(KAFKA_MESSAGE_KEY1))
.setData(KAFKA_MESSAGE1)
- .setAttributes(
- ImmutableListMultimap.<String, ByteString>builder()
- .putAll(attributesBase)
- .put(Constants.KAFKA_OFFSET_HEADER, ByteString.copyFromUtf8("1002"))
- .build())
+ .putAllAttributes(attributesBase)
+ .putAttributes(Constants.KAFKA_OFFSET_HEADER, single("1002"))
.build();
InOrder order = inOrder(publisher);
order.verify(publisher).publish(message1);
@@ -336,40 +317,30 @@ public void testKafkaHeaders() {
TimestampType.LOG_APPEND_TIME);
record2.headers().addString("yourHeader", "yourValue");
task.put(ImmutableList.of(record1, record2));
- ImmutableListMultimap<String, ByteString> attributesBase =
- ImmutableListMultimap.<String, ByteString>builder()
- .put(Constants.KAFKA_TOPIC_HEADER, ByteString.copyFromUtf8(KAFKA_TOPIC))
- .put(Constants.KAFKA_PARTITION_HEADER, ByteString.copyFromUtf8(Integer.toString(4)))
+ ImmutableMap<String, AttributeValues> attributesBase =
+ ImmutableMap.<String, AttributeValues>builder()
+ .put(Constants.KAFKA_TOPIC_HEADER, single(KAFKA_TOPIC))
+ .put(Constants.KAFKA_PARTITION_HEADER, single(Integer.toString(4)))
.build();
- Message message1 =
- Message.builder()
+ PubSubMessage message1 =
+ PubSubMessage.newBuilder()
.setKey(ByteString.copyFromUtf8(KAFKA_MESSAGE_KEY1))
.setData(KAFKA_MESSAGE1)
.setEventTime(Timestamps.fromMillis(50000))
- .setAttributes(
- ImmutableListMultimap.<String, ByteString>builder()
- .putAll(attributesBase)
- .put(Constants.KAFKA_OFFSET_HEADER, ByteString.copyFromUtf8("1000"))
- .put(
- Constants.KAFKA_EVENT_TIME_TYPE_HEADER,
- ByteString.copyFromUtf8("CreateTime"))
- .put("myHeader", ByteString.copyFromUtf8("myValue"))
- .build())
+ .putAllAttributes(attributesBase)
+ .putAttributes(Constants.KAFKA_OFFSET_HEADER, single("1000"))
+ .putAttributes(Constants.KAFKA_EVENT_TIME_TYPE_HEADER, single("CreateTime"))
+ .putAttributes("myHeader", single("myValue"))
.build();
- Message message2 =
- Message.builder()
+ PubSubMessage message2 =
+ PubSubMessage.newBuilder()
.setKey(ByteString.copyFromUtf8(KAFKA_MESSAGE_KEY1))
.setData(KAFKA_MESSAGE1)
.setEventTime(Timestamps.fromMillis(50001))
- .setAttributes(
- ImmutableListMultimap.<String, ByteString>builder()
- .putAll(attributesBase)
- .put(Constants.KAFKA_OFFSET_HEADER, ByteString.copyFromUtf8("1001"))
- .put(
- Constants.KAFKA_EVENT_TIME_TYPE_HEADER,
- ByteString.copyFromUtf8("LogAppendTime"))
- .put("yourHeader", ByteString.copyFromUtf8("yourValue"))
- .build())
+ .putAllAttributes(attributesBase)
+ .putAttributes(Constants.KAFKA_OFFSET_HEADER, single("1001"))
+ .putAttributes(Constants.KAFKA_EVENT_TIME_TYPE_HEADER, single("LogAppendTime"))
+ .putAttributes("yourHeader", single("yourValue"))
.build();
InOrder order = inOrder(publisher);
order.verify(publisher).publish(message1);
diff --git a/src/test/java/it/StandaloneIT.java b/src/test/java/it/StandaloneIT.java
index 1c054a04..6de9723f 100644
--- a/src/test/java/it/StandaloneIT.java
+++ b/src/test/java/it/StandaloneIT.java
@@ -17,6 +17,7 @@
package it;
import static com.google.common.truth.Truth.assertThat;
+import static java.util.concurrent.TimeUnit.MINUTES;
import static junit.framework.TestCase.assertNotNull;
import com.google.api.core.ApiFuture;
@@ -159,9 +160,8 @@ public class StandaloneIT extends Base {
private static final String instanceName = "kafka-it-" + runId;
private static final String instanceTemplateName = "kafka-it-template-" + runId;
- private static AtomicBoolean initialized = new AtomicBoolean(false);
- private static Boolean cpsMessageReceived = false;
- private static Boolean pslMessageReceived = false;
+ private static AtomicBoolean cpsMessageReceived = new AtomicBoolean(false);
+ private static AtomicBoolean pslMessageReceived = new AtomicBoolean(false);
private static Instance gceKafkaInstance;
private static String kafkaInstanceIpAddress;
@@ -283,7 +283,7 @@ protected static void setupPslResources() throws Exception {
.setTopic(pslSinkTopicPath.toString())
.build();
pslSinkSubscription = pslAdminClient.createSubscription(pslSinkSubscription).get();
- log.atInfo().log("Created PSL sink subscription: " + pslSinkSubscriptionPath.toString());
+ log.atInfo().log("Created PSL sink subscription: " + pslSinkSubscriptionPath);
Topic.Builder sourceTopicBuilder =
Topic.newBuilder()
@@ -318,7 +318,7 @@ protected static void setupPslResources() throws Exception {
.setTopic(pslSourceTopicPath.toString())
.build();
pslSourceSubscription = pslAdminClient.createSubscription(pslSourceSubscription).get();
- log.atInfo().log("Created PSL source subscription: " + pslSinkSubscriptionPath.toString());
+ log.atInfo().log("Created PSL source subscription: " + pslSinkSubscriptionPath);
}
}
@@ -404,12 +404,12 @@ public Void apply(Runnable runnable) {
}
try (InstancesClient instancesClient = InstancesClient.create()) {
- instancesClient.deleteAsync(projectId, location, instanceName).get(3, TimeUnit.MINUTES);
+ instancesClient.deleteAsync(projectId, location, instanceName).get(3, MINUTES);
}
log.atInfo().log("Deleted Compute Engine instance.");
try (InstanceTemplatesClient instanceTemplatesClient = InstanceTemplatesClient.create()) {
- instanceTemplatesClient.deleteAsync(projectId, instanceTemplateName).get(3, TimeUnit.MINUTES);
+ instanceTemplatesClient.deleteAsync(projectId, instanceTemplateName).get(3, MINUTES);
}
log.atInfo().log("Deleted Compute Engine instance template.");
}
@@ -434,7 +434,7 @@ public void testCpsSinkConnector() throws Exception {
.forEach(
(metricName, metric) -> {
if (metricName.name() == "record-send-total") {
- log.atInfo().log("record-send-total: " + metric.metricValue().toString());
+ log.atInfo().log("record-send-total: " + metric.metricValue());
}
});
kafkaProducer.close();
@@ -452,7 +452,7 @@ public void testCpsSinkConnector() throws Exception {
assertThat(message.getData().toStringUtf8()).isEqualTo("value0");
assertThat(message.getAttributesMap().get(ConnectorUtils.CPS_MESSAGE_KEY_ATTRIBUTE))
.isEqualTo("key0");
- this.cpsMessageReceived = true;
+ this.cpsMessageReceived.set(true);
consumer.ack();
};
@@ -465,7 +465,7 @@ public void testCpsSinkConnector() throws Exception {
// Shut down the subscriber after 30s. Stop receiving messages.
subscriber.stopAsync();
}
- assertThat(this.cpsMessageReceived).isTrue();
+ assertThat(this.cpsMessageReceived.get()).isTrue();
}
@Test(timeout = 5 * 60 * 1000L)
@@ -573,7 +573,7 @@ public void testPslSinkConnector() throws Exception {
.forEach(
(metricName, metric) -> {
if (metricName.name() == "record-send-total") {
- log.atInfo().log("record-send-total: " + metric.metricValue().toString());
+ log.atInfo().log("record-send-total: " + metric.metricValue());
}
});
kafkaProducer.close();
@@ -588,8 +588,7 @@ public void testPslSinkConnector() throws Exception {
log.atInfo().log("Received message: " + message);
assertThat(message.getData().toStringUtf8()).isEqualTo("value0");
assertThat(message.getOrderingKey()).isEqualTo("key0");
- this.pslMessageReceived = true;
- log.atInfo().log("this.pslMessageReceived: " + this.pslMessageReceived);
+ this.pslMessageReceived.set(true);
consumer.ack();
};
@@ -606,12 +605,12 @@ public void testPslSinkConnector() throws Exception {
.build());
try {
subscriber.startAsync().awaitRunning();
- subscriber.awaitTerminated(3, TimeUnit.MINUTES);
+ subscriber.awaitTerminated(3, MINUTES);
} catch (TimeoutException timeoutException) {
// Shut down the subscriber after 3 minutes. Stop receiving messages.
subscriber.stopAsync();
}
- assertThat(this.pslMessageReceived).isTrue();
+ assertThat(this.pslMessageReceived.get()).isTrue();
}
@Test(timeout = 5 * 60 * 1000L)
@@ -634,30 +633,8 @@ public void testPslSourceConnector() throws Exception {
PubsubMessage msg0 =
PubsubMessage.newBuilder().setData(ByteString.copyFromUtf8("msg0")).build();
ApiFuture<String> publishFuture = publisher.publish(msg0);
- ApiFutures.addCallback(
- publishFuture,
- new ApiFutureCallback<String>() {
-
- @Override
- public void onFailure(Throwable throwable) {
- if (throwable instanceof ApiException) {
- ApiException apiException = ((ApiException) throwable);
- // details on the API exception
- log.atInfo().log(apiException.fillInStackTrace().toString());
- }
- Assert.fail("Error publishing message : " + msg0);
- }
-
- @Override
- public void onSuccess(String messageId) {
- // Once published, returns server-assigned message ids (unique within the topic)
- log.atInfo().log("Published message ID: " + messageId);
- }
- },
- MoreExecutors.directExecutor());
-
- // Sleep for 1min.
- Thread.sleep(60 * 1000);
+ log.atInfo().log("Published message ID: " + publishFuture.get(1, MINUTES));
+ publisher.stopAsync().awaitTerminated();
// Consume from Kafka connect.
Properties consumer_props = new Properties();
From f10f9a6546eb6ea65b61fbbe4538edae81b524ab Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Fri, 3 Mar 2023 16:25:27 +0000
Subject: [PATCH 03/17] deps: update dependency
com.google.cloud:google-cloud-shared-dependencies to v3.4.0 (#232)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index e8a82495..55bd7932 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,7 +30,7 @@
com.google.cloud
google-cloud-shared-dependencies
- 3.3.0
+ 3.4.0
pom
import
From 0432d6e68d338137290adc70ac021572b8a05944 Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Fri, 3 Mar 2023 16:25:47 +0000
Subject: [PATCH 04/17] chore(deps): update dependency
com.google.cloud:libraries-bom to v26.9.0 (#231)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 55bd7932..bffc9296 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,7 +37,7 @@
com.google.cloud
libraries-bom
- 26.8.0
+ 26.9.0
pom
import
From 9dec71b8df9acdd738b4738ee060b13ff602e86b Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Fri, 3 Mar 2023 16:26:07 +0000
Subject: [PATCH 05/17] deps: update dependency org.slf4j:slf4j-api to v2.0.6
(#230)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index bffc9296..3d0525ec 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,7 +98,7 @@
org.slf4j
slf4j-api
- 2.0.5
+ 2.0.6
com.google.api
From 334d0d425de353569db23114d0f1712e46359958 Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Fri, 3 Mar 2023 16:44:03 +0000
Subject: [PATCH 06/17] build(deps): update dependency
org.apache.maven.plugins:maven-dependency-plugin to v3.5.0 (#204)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 3d0525ec..382e2ff8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -218,7 +218,7 @@
org.apache.maven.plugins
maven-dependency-plugin
- 3.3.0
+ 3.5.0
From 810bc1d08683cc70c65526a8f3fd186be0663ac2 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 3 Mar 2023 11:50:46 -0500
Subject: [PATCH 07/17] chore: Upgrade to later version of gcp-uploader and
protobuf (#1741) (#221)
* chore: Upgrade to later version of gcp-uploader and protobuf
* chore: Add secretstorage version
* chore: Use python 3.9.13
* chore: Unpin all python versions
* chore: Rerun pipcompile tool
* chore: Rerun pipcompile tool with --allow-unsafe
* chore: Add --require-hashes
* chore: Update requirements file
* chore: Remove --require-hashes
* chore: Print python version
* chore: Add new generated hashes for requirements.txt
* chore: Remove python version check in cloud build
* chore: Allow synthtool to update python dependencies
* chore: Add typing-extensions into requirements
* chore: Unpin docker requirements.in file
* chore: Add java synthtool validation
* chore: Add check to import java synthtool python package
Source-Link: https://github.com/googleapis/synthtool/commit/91904bfb986c51516389fb591c67053cdf4de104
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-java:latest@sha256:4566cf7fcd0eece77705330267b2b61a70165711c7829d049b60bc2f1bbcd74e
Co-authored-by: Owl Bot
---
.github/.OwlBot.lock.yaml | 4 +-
.kokoro/requirements.in | 40 ++------
.kokoro/requirements.txt | 192 +++++++++++++++-----------------------
3 files changed, 81 insertions(+), 155 deletions(-)
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 0f0647b0..2b48c81e 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,4 +1,4 @@
-# Copyright 2022 Google LLC
+# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,4 +13,4 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-java:latest
- digest: sha256:e76136cc48f90aa19ba29cdfbd4002111467e44a1c9d905867d98dafafbd03bb
+ digest: sha256:4566cf7fcd0eece77705330267b2b61a70165711c7829d049b60bc2f1bbcd74e
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
index 924f94ae..b19a8dbf 100644
--- a/.kokoro/requirements.in
+++ b/.kokoro/requirements.in
@@ -1,34 +1,6 @@
-gcp-docuploader==0.6.3
-google-crc32c==1.3.0
-googleapis-common-protos==1.56.3
-gcp-releasetool==1.9.1
-cryptography==38.0.3
-cachetools==4.2.4
-cffi==1.15.1
-jeepney==0.7.1
-jinja2==3.0.3
-markupsafe==2.0.1
-keyring==23.4.1
-packaging==21.3
-protobuf==3.19.5
-pyjwt==2.4.0
-pyparsing==3.0.9
-pycparser==2.21
-pyperclip==1.8.2
-python-dateutil==2.8.2
-requests==2.27.1
-certifi==2022.9.24
-importlib-metadata==4.8.3
-zipp==3.6.0
-google_api_core==2.8.2
-google-cloud-storage==2.0.0
-google-resumable-media==2.3.3
-google-cloud-core==2.3.1
-typing-extensions==4.1.1
-urllib3==1.26.12
-zipp==3.6.0
-rsa==4.9
-six==1.16.0
-attrs==22.1.0
-google-auth==2.14.1
-idna==3.4
\ No newline at end of file
+gcp-docuploader
+gcp-releasetool
+wheel
+setuptools
+typing-extensions
+click<8.1.0
\ No newline at end of file
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 71fcafc7..1dfe6304 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -1,27 +1,21 @@
#
-# This file is autogenerated by pip-compile with python 3.10
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
#
-# pip-compile --generate-hashes requirements.in
+# pip-compile --allow-unsafe --generate-hashes requirements.in
#
attrs==22.1.0 \
--hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \
--hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c
- # via
- # -r requirements.in
- # gcp-releasetool
+ # via gcp-releasetool
cachetools==4.2.4 \
--hash=sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693 \
--hash=sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1
- # via
- # -r requirements.in
- # google-auth
-certifi==2022.9.24 \
- --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \
- --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382
- # via
- # -r requirements.in
- # requests
+ # via google-auth
+certifi==2022.12.7 \
+ --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
+ --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+ # via requests
cffi==1.15.1 \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
@@ -87,9 +81,7 @@ cffi==1.15.1 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
- # via
- # -r requirements.in
- # cryptography
+ # via cryptography
charset-normalizer==2.0.12 \
--hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
--hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
@@ -98,6 +90,7 @@ click==8.0.4 \
--hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \
--hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb
# via
+ # -r requirements.in
# gcp-docuploader
# gcp-releasetool
colorlog==6.7.0 \
@@ -132,12 +125,11 @@ cryptography==38.0.3 \
--hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \
--hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722
# via
- # -r requirements.in
# gcp-releasetool
# secretstorage
-gcp-docuploader==0.6.3 \
- --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \
- --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b
+gcp-docuploader==0.6.4 \
+ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \
+ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf
# via -r requirements.in
gcp-releasetool==1.9.1 \
--hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \
@@ -147,13 +139,12 @@ google-api-core==2.8.2 \
--hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \
--hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50
# via
- # -r requirements.in
# google-cloud-core
# google-cloud-storage
google-auth==2.14.1 \
+ --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \
--hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016
# via
- # -r requirements.in
# gcp-releasetool
# google-api-core
# google-cloud-core
@@ -161,15 +152,11 @@ google-auth==2.14.1 \
google-cloud-core==2.3.1 \
--hash=sha256:113ba4f492467d5bd442c8d724c1a25ad7384045c3178369038840ecdd19346c \
--hash=sha256:34334359cb04187bdc80ddcf613e462dfd7a3aabbc3fe4d118517ab4b9303d53
- # via
- # -r requirements.in
- # google-cloud-storage
+ # via google-cloud-storage
google-cloud-storage==2.0.0 \
--hash=sha256:a57a15aead0f9dfbd4381f1bfdbe8bf89818a4bd75bab846cafcefb2db846c47 \
--hash=sha256:ec4be60bb223a3a960f0d01697d849b86d91cad815a84915a32ed3635e93a5e7
- # via
- # -r requirements.in
- # gcp-docuploader
+ # via gcp-docuploader
google-crc32c==1.3.0 \
--hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \
--hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \
@@ -214,52 +201,37 @@ google-crc32c==1.3.0 \
--hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \
--hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \
--hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3
- # via
- # -r requirements.in
- # google-resumable-media
+ # via google-resumable-media
google-resumable-media==2.3.3 \
--hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \
--hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5
- # via
- # -r requirements.in
- # google-cloud-storage
+ # via google-cloud-storage
googleapis-common-protos==1.56.3 \
--hash=sha256:6f1369b58ed6cf3a4b7054a44ebe8d03b29c309257583a2bbdc064cd1e4a1442 \
--hash=sha256:87955d7b3a73e6e803f2572a33179de23989ebba725e05ea42f24838b792e461
- # via
- # -r requirements.in
- # google-api-core
+ # via google-api-core
idna==3.4 \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
- # via
- # -r requirements.in
- # requests
+ # via requests
importlib-metadata==4.8.3 \
--hash=sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e \
--hash=sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668
+ # via keyring
+jeepney==0.8.0 \
+ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
+ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755
# via
- # -r requirements.in
- # keyring
-jeepney==0.7.1 \
- --hash=sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac \
- --hash=sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f
- # via
- # -r requirements.in
# keyring
# secretstorage
jinja2==3.0.3 \
--hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \
--hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7
- # via
- # -r requirements.in
- # gcp-releasetool
+ # via gcp-releasetool
keyring==23.4.1 \
--hash=sha256:17e49fb0d6883c2b4445359434dba95aad84aabb29bbff044ad0ed7100232eca \
--hash=sha256:89cbd74d4683ed164c8082fb38619341097741323b3786905c6dac04d6915a55
- # via
- # -r requirements.in
- # gcp-releasetool
+ # via gcp-releasetool
markupsafe==2.0.1 \
--hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \
--hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \
@@ -330,49 +302,41 @@ markupsafe==2.0.1 \
--hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \
--hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \
--hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872
- # via
- # -r requirements.in
- # jinja2
+ # via jinja2
packaging==21.3 \
--hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
--hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
+ # via gcp-releasetool
+protobuf==3.20.1 \
+ --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \
+ --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \
+ --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \
+ --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \
+ --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \
+ --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \
+ --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \
+ --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \
+ --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \
+ --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \
+ --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \
+ --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \
+ --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \
+ --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \
+ --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \
+ --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \
+ --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \
+ --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \
+ --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \
+ --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \
+ --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \
+ --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \
+ --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \
+ --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3
# via
- # -r requirements.in
- # gcp-releasetool
-protobuf==3.19.5 \
- --hash=sha256:1867f93b06a183f87696871bb8d1e99ee71dbb69d468ce1f0cc8bf3d30f982f3 \
- --hash=sha256:3c4160b601220627f7e91154e572baf5e161a9c3f445a8242d536ee3d0b7b17c \
- --hash=sha256:4ee2af7051d3b10c8a4fe6fd1a2c69f201fea36aeee7086cf202a692e1b99ee1 \
- --hash=sha256:5266c36cc0af3bb3dbf44f199d225b33da66a9a5c3bdc2b14865ad10eddf0e37 \
- --hash=sha256:5470f892961af464ae6eaf0f3099e2c1190ae8c7f36f174b89491281341f79ca \
- --hash=sha256:66d14b5b90090353efe75c9fb1bf65ef7267383034688d255b500822e37d5c2f \
- --hash=sha256:67efb5d20618020aa9596e17bfc37ca068c28ec0c1507d9507f73c93d46c9855 \
- --hash=sha256:696e6cfab94cc15a14946f2bf72719dced087d437adbd994fff34f38986628bc \
- --hash=sha256:6a02172b9650f819d01fb8e224fc69b0706458fc1ab4f1c669281243c71c1a5e \
- --hash=sha256:6eca9ae238ba615d702387a2ddea635d535d769994a9968c09a4ca920c487ab9 \
- --hash=sha256:950abd6c00e7b51f87ae8b18a0ce4d69fea217f62f171426e77de5061f6d9850 \
- --hash=sha256:9e1d74032f56ff25f417cfe84c8147047732e5059137ca42efad20cbbd25f5e0 \
- --hash=sha256:9e42b1cf2ecd8a1bd161239e693f22035ba99905ae6d7efeac8a0546c7ec1a27 \
- --hash=sha256:9f957ef53e872d58a0afd3bf6d80d48535d28c99b40e75e6634cbc33ea42fd54 \
- --hash=sha256:a89aa0c042e61e11ade320b802d6db4ee5391d8d973e46d3a48172c1597789f8 \
- --hash=sha256:c0f80876a8ff0ae7064084ed094eb86497bd5a3812e6fc96a05318b92301674e \
- --hash=sha256:c44e3282cff74ad18c7e8a0375f407f69ee50c2116364b44492a196293e08b21 \
- --hash=sha256:d249519ba5ecf5dd6b18150c9b6bcde510b273714b696f3923ff8308fc11ae49 \
- --hash=sha256:d3973a2d58aefc7d1230725c2447ce7f86a71cbc094b86a77c6ee1505ac7cdb1 \
- --hash=sha256:dca2284378a5f2a86ffed35c6ac147d14c48b525eefcd1083e5a9ce28dfa8657 \
- --hash=sha256:e63b0b3c42e51c94add62b010366cd4979cb6d5f06158bcae8faac4c294f91e1 \
- --hash=sha256:f2b599a21c9a32e171ec29a2ac54e03297736c578698e11b099d031f79da114b \
- --hash=sha256:f2bde37667b18c2b5280df83bc799204394a5d2d774e4deaf9de0eb741df6833 \
- --hash=sha256:f4f909f4dde413dec435a44b0894956d55bb928ded7d6e3c726556ca4c796e84 \
- --hash=sha256:f976234e20ab2785f54224bcdafa027674e23663b132fa3ca0caa291a6cfbde7 \
- --hash=sha256:f9cebda093c2f6bfed88f1c17cdade09d4d96096421b344026feee236532d4de
- # via
- # -r requirements.in
# gcp-docuploader
# gcp-releasetool
# google-api-core
# google-cloud-storage
- # googleapis-common-protos
pyasn1==0.4.8 \
--hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
@@ -386,71 +350,61 @@ pyasn1-modules==0.2.8 \
pycparser==2.21 \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
- # via
- # -r requirements.in
- # cffi
+ # via cffi
pyjwt==2.4.0 \
--hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \
--hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba
- # via
- # -r requirements.in
- # gcp-releasetool
+ # via gcp-releasetool
pyparsing==3.0.9 \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
- # via
- # -r requirements.in
- # packaging
+ # via packaging
pyperclip==1.8.2 \
--hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57
- # via
- # -r requirements.in
- # gcp-releasetool
+ # via gcp-releasetool
python-dateutil==2.8.2 \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
- # via
- # -r requirements.in
- # gcp-releasetool
+ # via gcp-releasetool
requests==2.27.1 \
--hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \
--hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d
# via
- # -r requirements.in
# gcp-releasetool
# google-api-core
# google-cloud-storage
rsa==4.9 \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
- # via
- # -r requirements.in
- # google-auth
+ # via google-auth
secretstorage==3.3.3 \
--hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
--hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
# via keyring
+setuptools==65.6.3 \
+ --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \
+ --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75
+ # via -r requirements.in
six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
# via
- # -r requirements.in
# gcp-docuploader
# google-auth
# python-dateutil
-typing-extensions==4.1.1 \
- --hash=sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42 \
- --hash=sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2
+typing-extensions==4.4.0 \
+ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
+ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
# via -r requirements.in
urllib3==1.26.12 \
--hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
--hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
- # via
- # -r requirements.in
- # requests
+ # via requests
+wheel==0.38.4 \
+ --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \
+ --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8
+ # via -r requirements.in
zipp==3.6.0 \
--hash=sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832 \
--hash=sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc
- # via
- # -r requirements.in
- # importlib-metadata
+ # via importlib-metadata
From e9a162f579bd97b51cf51b9c9c43b9fe9b1df7b9 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 3 Mar 2023 17:12:12 +0000
Subject: [PATCH 08/17] build(deps): bump cryptography from 38.0.3 to 39.0.1 in
/.kokoro (#225)
Bumps [cryptography](https://togithub.com/pyca/cryptography) from 38.0.3 to 39.0.1.
Changelog
Sourced from cryptography's changelog.
39.0.1 - 2023-02-07
* **SECURITY ISSUE** - Fixed a bug where `Cipher.update_into` accepted Python buffer protocol objects, but allowed immutable buffers. **CVE-2023-23931**
* Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.0.8.
39.0.0 - 2023-01-01
- BACKWARDS INCOMPATIBLE: Support for OpenSSL 1.1.0 has been removed. Users on older version of OpenSSL will need to upgrade.
- BACKWARDS INCOMPATIBLE: Dropped support for LibreSSL < 3.5. The new minimum LibreSSL version is 3.5.0. Going forward our policy is to support versions of LibreSSL that are available in versions of OpenBSD that are still receiving security support.
- BACKWARDS INCOMPATIBLE: Removed the `encode_point` and `from_encoded_point` methods on `EllipticCurvePublicNumbers`, which had been deprecated for several years. `EllipticCurvePublicKey.public_bytes` and `EllipticCurvePublicKey.from_encoded_point` should be used instead.
- BACKWARDS INCOMPATIBLE: Support for using MD5 or SHA1 in `CertificateBuilder`, other X.509 builders, and PKCS7 has been removed.
- BACKWARDS INCOMPATIBLE: Dropped support for macOS 10.10 and 10.11; macOS users must upgrade to 10.12 or newer.
- ANNOUNCEMENT: The next version of cryptography (40.0) will change the way we link OpenSSL. This will only impact users who build cryptography from source (i.e., not from a wheel) and specify their own version of OpenSSL. For those users, the `CFLAGS`, `LDFLAGS`, `INCLUDE`, `LIB`, and `CRYPTOGRAPHY_SUPPRESS_LINK_FLAGS` environment variables will no longer be respected. Instead, users will need to configure their builds as documented.
- Added support for disabling the legacy provider in OpenSSL 3.0.x.
- Added support for disabling RSA key validation checks when loading RSA keys via `load_pem_private_key`, `load_der_private_key`, and `RSAPrivateNumbers.private_key`. This speeds up key loading but is unsafe if you are loading potentially attacker-supplied keys.
- Significantly improved performance for `ChaCha20Poly1305`
... (truncated)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
- `@dependabot use these labels` will set the current labels as the default for future PRs for this repo and language
- `@dependabot use these reviewers` will set the current reviewers as the default for future PRs for this repo and language
- `@dependabot use these assignees` will set the current assignees as the default for future PRs for this repo and language
- `@dependabot use this milestone` will set the current milestone as the default for future PRs for this repo and language
You can disable automated security fix PRs for this repo from the [Security Alerts page](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/network/alerts).
From c23a8d60b422ab190a6796ef3b521e924e353667 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Thu, 16 Mar 2023 17:42:06 -0400
Subject: [PATCH 09/17] chore: skip google-iam-policy rather than java-iam
(#1779) (#241)
Source-Link: https://github.com/googleapis/synthtool/commit/3c19c3314ccdff81c1d02b7665cb48de46ae5928
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-java:latest@sha256:5df8b62e8da534f7604daef347698f6701e34b3f61713712a3384ac88fc32088
Co-authored-by: Owl Bot
---
.github/.OwlBot.lock.yaml | 2 +-
.github/dependabot.yml | 14 +++++
.kokoro/requirements.in | 2 +-
.kokoro/requirements.txt | 111 ++++++++++++++++++--------------------
4 files changed, 68 insertions(+), 61 deletions(-)
create mode 100644 .github/dependabot.yml
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 2b48c81e..0c1ba969 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,4 +13,4 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-java:latest
- digest: sha256:4566cf7fcd0eece77705330267b2b61a70165711c7829d049b60bc2f1bbcd74e
+ digest: sha256:5df8b62e8da534f7604daef347698f6701e34b3f61713712a3384ac88fc32088
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..c8f413b0
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,14 @@
+version: 2
+updates:
+ - package-ecosystem: "maven"
+ directory: "/"
+ schedule:
+ interval: "daily"
+ # Disable version updates for Maven dependencies
+ open-pull-requests-limit: 0
+ - package-ecosystem: "pip"
+ directory: "/"
+ schedule:
+ interval: "daily"
+ # Disable version updates for pip dependencies
+ open-pull-requests-limit: 0
\ No newline at end of file
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
index b19a8dbf..2092cc74 100644
--- a/.kokoro/requirements.in
+++ b/.kokoro/requirements.in
@@ -1,5 +1,5 @@
gcp-docuploader
-gcp-releasetool
+gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x
wheel
setuptools
typing-extensions
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 1dfe6304..c80f0a87 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -1,5 +1,5 @@
#
-# This file is autogenerated by pip-compile with Python 3.9
+# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --allow-unsafe --generate-hashes requirements.in
@@ -97,33 +97,28 @@ colorlog==6.7.0 \
--hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \
--hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5
# via gcp-docuploader
-cryptography==38.0.3 \
- --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \
- --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \
- --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \
- --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \
- --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \
- --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \
- --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \
- --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \
- --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \
- --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \
- --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \
- --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \
- --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \
- --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \
- --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \
- --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \
- --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \
- --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \
- --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \
- --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \
- --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \
- --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \
- --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \
- --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \
- --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \
- --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722
+cryptography==39.0.1 \
+ --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \
+ --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \
+ --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \
+ --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \
+ --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \
+ --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \
+ --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \
+ --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \
+ --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \
+ --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \
+ --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \
+ --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \
+ --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \
+ --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \
+ --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \
+ --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \
+ --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \
+ --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \
+ --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \
+ --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \
+ --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8
# via
# gcp-releasetool
# secretstorage
@@ -131,9 +126,9 @@ gcp-docuploader==0.6.4 \
--hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \
--hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf
# via -r requirements.in
-gcp-releasetool==1.9.1 \
- --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \
- --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b
+gcp-releasetool==1.10.5 \
+ --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \
+ --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9
# via -r requirements.in
google-api-core==2.8.2 \
--hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \
@@ -307,31 +302,29 @@ packaging==21.3 \
--hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
--hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
# via gcp-releasetool
-protobuf==3.20.1 \
- --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \
- --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \
- --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \
- --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \
- --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \
- --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \
- --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \
- --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \
- --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \
- --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \
- --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \
- --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \
- --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \
- --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \
- --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \
- --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \
- --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \
- --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \
- --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \
- --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \
- --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \
- --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \
- --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \
- --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3
+protobuf==3.20.2 \
+ --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \
+ --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \
+ --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \
+ --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \
+ --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \
+ --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \
+ --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \
+ --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \
+ --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \
+ --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \
+ --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \
+ --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \
+ --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \
+ --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \
+ --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \
+ --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \
+ --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \
+ --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \
+ --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \
+ --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \
+ --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \
+ --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0
# via
# gcp-docuploader
# gcp-releasetool
@@ -381,9 +374,9 @@ secretstorage==3.3.3 \
--hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
--hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
# via keyring
-setuptools==65.6.3 \
- --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \
- --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75
+setuptools==67.3.2 \
+ --hash=sha256:95f00380ef2ffa41d9bba85d95b27689d923c93dfbafed4aecd7cf988a25e012 \
+ --hash=sha256:bb6d8e508de562768f2027902929f8523932fcd1fb784e6d573d2cafac995a48
# via -r requirements.in
six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
From 2ac422ded68e8068be7b835e9d5907e2cacf8be8 Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Fri, 17 Mar 2023 17:10:54 +0000
Subject: [PATCH 10/17] chore(deps): update dependency
com.google.cloud:libraries-bom to v26.10.0 (#238)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 382e2ff8..0d53af69 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,7 +37,7 @@
<groupId>com.google.cloud</groupId>
<artifactId>libraries-bom</artifactId>
- <version>26.9.0</version>
+ <version>26.10.0</version>
<type>pom</type>
<scope>import</scope>
From d881eafef9bf5ece2391a75bc3d2cb6208a20ba9 Mon Sep 17 00:00:00 2001
From: samarthsingal
Date: Fri, 17 Mar 2023 14:00:19 -0400
Subject: [PATCH 11/17] fix(main): Typo in README.md for
PubSubLiteSourceConnector (#242)
s/PubSubLiteSinkConnector/PubSubLiteSourceConnector/g where needed in README.md
---
README.md | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/README.md b/README.md
index 16e5c64f..9dbdf075 100644
--- a/README.md
+++ b/README.md
@@ -61,7 +61,7 @@ locally in standalone mode (single process).
- `CloudPubSubSinkConnector` and `CloudPubSubSourceConnector`
- Create a pair of Pub/Sub [topic](https://cloud.google.com/pubsub/docs/admin#create_a_topic)
and [subscription](https://cloud.google.com/pubsub/docs/create-subscription#pull_subscription)
- - `PubSubLiteSinkConnector` and `PubSubLiteSinkConnector`
+ - `PubSubLiteSinkConnector` and `PubSubLiteSourceConnector`
- Create a pair of Pub/Sub Lite [topic](https://cloud.google.com/pubsub/lite/docs/topics#create_a_lite_topic)
and [subscription](https://cloud.google.com/pubsub/lite/docs/subscriptions#create_a_lite_subscription).
@@ -82,7 +82,7 @@ locally in standalone mode (single process).
1. Open [`pubsub-lite-sink-connector.properties`](/config/pubsub-lite-sink-connector.properties).
2. Update `topics`, `pubsublite.project`, `pubsublite.location` and `pubsublite.topic`.
- - `PubSubLiteSinkConnector`
+ - `PubSubLiteSourceConnector`
1. Open [`pubsub-lite-source-connector.properties`](/config/pubsub-lite-source-connector.properties).
2. Update `kafka.topic`, `pubsublite.project`, `pubsublite.location` and `pubsublite.subscription`.
@@ -108,18 +108,18 @@ locally in standalone mode (single process).
2. Follow the instructions in the [Kafka quickstart](https://kafka.apache.org/quickstart)
to read the message from your Kafka topic.
- - `PubSubLiteSinkConnector`
- 1. Follow the instructions in the [Kafka quickstart](https://kafka.apache.org/quickstart)
- to publish a message to the Kafka topic.
- 2. [Pull](https://cloud.google.com/pubsub/docs/publish-receive-messages-console#pull_the_message_from_the_subscription)
- the message from your Pub/Sub Lite subscription.
-
- `PubSubLiteSinkConnector`
1. [Publish](https://cloud.google.com/pubsub/docs/publish-receive-messages-console#publish_a_message_to_the_topic)
a message to your Pub/Sub Lite topic.
2. Follow the instructions in the [Kafka quickstart](https://kafka.apache.org/quickstart)
to read the message from your Kafka topic.
+ - `PubSubLiteSourceConnector`
+ 1. Follow the instructions in the [Kafka quickstart](https://kafka.apache.org/quickstart)
+ to publish a message to the Kafka topic.
+ 2. [Pull](https://cloud.google.com/pubsub/docs/publish-receive-messages-console#pull_the_message_from_the_subscription)
+ the message from your Pub/Sub Lite subscription.
+
### Acquire the connector
The connector is available from [Maven Central repository](https://search.maven.org/artifact/com.google.cloud/pubsub-group-kafka-connector).
From 1e1c33666ee1e0ef3fd45684054dcf25216004f8 Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Fri, 17 Mar 2023 21:22:50 +0000
Subject: [PATCH 12/17] deps: update dependency org.slf4j:slf4j-api to v2.0.7
(#243)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 0d53af69..a5b273eb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,7 +98,7 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
- <version>2.0.6</version>
+ <version>2.0.7</version>
<groupId>com.google.api</groupId>
From 435cd98e3ae81f3d698e174782ac81199bdf756f Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Mon, 20 Mar 2023 14:36:26 +0000
Subject: [PATCH 13/17] deps: update dependency
com.google.cloud:google-cloud-shared-dependencies to v3.5.0 (#244)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This PR contains the following updates:
| Package | Change |
|---|---|
| [com.google.cloud:google-cloud-shared-dependencies](https://togithub.com/googleapis/google-cloud-java) | `3.4.0` -> `3.5.0` |
---
### Configuration
📅 **Schedule**: Branch creation - At any time (no schedule defined), Automerge - At any time (no schedule defined).
🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied.
♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox.
🔕 **Ignore**: Close this PR and you won't be reminded about this update again.
---
- [ ] If you want to rebase/retry this PR, check this box
---
This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/java-pubsub-group-kafka-connector).
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index a5b273eb..024186ad 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,7 +30,7 @@
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-shared-dependencies</artifactId>
- <version>3.4.0</version>
+ <version>3.5.0</version>
<type>pom</type>
<scope>import</scope>
From 888d3d95ea01ae1535e6aba47fa71c6cb0f0e7e1 Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Wed, 29 Mar 2023 03:53:18 +0100
Subject: [PATCH 14/17] deps: update dependency
com.google.cloud:google-cloud-shared-dependencies to v3.6.0 (#250)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 024186ad..6281df06 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,7 +30,7 @@
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-shared-dependencies</artifactId>
- <version>3.5.0</version>
+ <version>3.6.0</version>
<type>pom</type>
<scope>import</scope>
From c9c3beb70e8fa504a37216f18fe500e43b75936b Mon Sep 17 00:00:00 2001
From: Mend Renovate
Date: Wed, 29 Mar 2023 03:54:09 +0100
Subject: [PATCH 15/17] chore(deps): update dependency
com.google.cloud:libraries-bom to v26.11.0 (#248)
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 6281df06..6ea9b464 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,7 +37,7 @@
<groupId>com.google.cloud</groupId>
<artifactId>libraries-bom</artifactId>
- <version>26.10.0</version>
+ <version>26.11.0</version>
<type>pom</type>
<scope>import</scope>
From 7290786e82db354f1d310c599f84150a7ec0ef8b Mon Sep 17 00:00:00 2001
From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com>
Date: Fri, 31 Mar 2023 14:56:14 -0400
Subject: [PATCH 16/17] feat: add pubsublite sink support for credentials
settings (#251)
PSL source support requires more work.
Also unifies credentialsProvider creation.
---
.../common/ConnectorCredentialsProvider.java | 55 ++++++++++++++-----
.../kafka/sink/CloudPubSubSinkConnector.java | 4 +-
.../kafka/sink/CloudPubSubSinkTask.java | 20 +------
.../source/CloudPubSubSourceConnector.java | 29 +++-------
.../kafka/source/CloudPubSubSourceTask.java | 22 +-------
.../pubsublite/kafka/sink/ConfigDefs.java | 15 ++++-
.../kafka/sink/PublisherFactoryImpl.java | 45 ++++++++++-----
.../pubsublite/kafka/source/ConfigDefs.java | 15 ++++-
8 files changed, 113 insertions(+), 92 deletions(-)
diff --git a/src/main/java/com/google/pubsub/kafka/common/ConnectorCredentialsProvider.java b/src/main/java/com/google/pubsub/kafka/common/ConnectorCredentialsProvider.java
index 4c36a2ad..6fccf7fe 100644
--- a/src/main/java/com/google/pubsub/kafka/common/ConnectorCredentialsProvider.java
+++ b/src/main/java/com/google/pubsub/kafka/common/ConnectorCredentialsProvider.java
@@ -23,29 +23,58 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
+import java.util.Map;
public class ConnectorCredentialsProvider implements CredentialsProvider {
+ private static final List<String> GCP_SCOPE =
+ Arrays.asList("https://www.googleapis.com/auth/cloud-platform");
- private static final List<String> CPS_SCOPE =
- Arrays.asList("https://www.googleapis.com/auth/pubsub");
+ CredentialsProvider impl;
- GoogleCredentials credentials;
+ private ConnectorCredentialsProvider(CredentialsProvider impl) {
+ this.impl = impl;
+ }
+
+ public static ConnectorCredentialsProvider fromConfig(Map<String, Object> config) {
+ String credentialsPath = config.get(ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG).toString();
+ String credentialsJson = config.get(ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG).toString();
+ if (!credentialsPath.isEmpty()) {
+ if (!credentialsJson.isEmpty()) {
+ throw new IllegalArgumentException(
+ "May not set both "
+ + ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG
+ + " and "
+ + ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG);
+ }
+ return ConnectorCredentialsProvider.fromFile(credentialsPath);
+ } else if (!credentialsJson.isEmpty()) {
+ return ConnectorCredentialsProvider.fromJson(credentialsJson);
+ } else {
+ return ConnectorCredentialsProvider.fromDefault();
+ }
+ }
- public void loadFromFile(String credentialPath) throws IOException {
- this.credentials = GoogleCredentials.fromStream(new FileInputStream(credentialPath));
+ public static ConnectorCredentialsProvider fromFile(String credentialPath) {
+ return new ConnectorCredentialsProvider(
+ () ->
+ GoogleCredentials.fromStream(new FileInputStream(credentialPath))
+ .createScoped(GCP_SCOPE));
}
- public void loadJson(String credentialsJson) throws IOException {
- ByteArrayInputStream bs = new ByteArrayInputStream(credentialsJson.getBytes());
- this.credentials = credentials = GoogleCredentials.fromStream(bs);
+ public static ConnectorCredentialsProvider fromJson(String credentialsJson) {
+ return new ConnectorCredentialsProvider(
+ () ->
+ GoogleCredentials.fromStream(new ByteArrayInputStream(credentialsJson.getBytes()))
+ .createScoped(GCP_SCOPE));
+ }
+
+ public static ConnectorCredentialsProvider fromDefault() {
+ return new ConnectorCredentialsProvider(
+ () -> GoogleCredentials.getApplicationDefault().createScoped(GCP_SCOPE));
}
@Override
public Credentials getCredentials() throws IOException {
- if (this.credentials == null) {
- return GoogleCredentials.getApplicationDefault().createScoped(this.CPS_SCOPE);
- } else {
- return this.credentials.createScoped(this.CPS_SCOPE);
- }
+ return impl.getCredentials();
}
}
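For orientation, here is a minimal usage sketch of the factory API introduced above. The literal property keys are assumptions for illustration (the real constants live in `ConnectorUtils`, which this excerpt does not show); the precedence is the one `fromConfig` implements: explicit file path first, then inline JSON, then Application Default Credentials, with both set at once rejected.

```java
import com.google.pubsub.kafka.common.ConnectorCredentialsProvider;
import java.util.HashMap;
import java.util.Map;

public class CredentialsSketch {
  public static void main(String[] args) throws Exception {
    Map<String, Object> config = new HashMap<>();
    // Assumed key names for illustration only; see ConnectorUtils for the real constants.
    config.put("gcp.credentials.file.path", "/etc/gcp/service-account.json");
    config.put("gcp.credentials.json", ""); // empty, so the file path wins
    ConnectorCredentialsProvider provider = ConnectorCredentialsProvider.fromConfig(config);
    // getCredentials() lazily reads the file and applies the cloud-platform scope.
    System.out.println(provider.getCredentials());
  }
}
```

With both keys empty the provider falls back to Application Default Credentials, and setting both to non-empty values now throws `IllegalArgumentException` instead of silently preferring the file path, as the old mutable `loadFromFile`/`loadJson` setters did.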
diff --git a/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkConnector.java b/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkConnector.java
index 7b083d44..eb7d681f 100644
--- a/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkConnector.java
+++ b/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkConnector.java
@@ -233,13 +233,13 @@ public ConfigDef config() {
.define(
ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG,
Type.STRING,
- null,
+ "",
Importance.HIGH,
"The path to the GCP credentials file")
.define(
ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG,
Type.STRING,
- null,
+ "",
Importance.HIGH,
"GCP JSON credentials")
.define(
diff --git a/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkTask.java b/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkTask.java
index 5c31a9d8..9faa7910 100644
--- a/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkTask.java
+++ b/src/main/java/com/google/pubsub/kafka/sink/CloudPubSubSinkTask.java
@@ -33,7 +33,6 @@
import com.google.pubsub.kafka.sink.CloudPubSubSinkConnector.OrderingKeySource;
import com.google.pubsub.v1.ProjectTopicName;
import com.google.pubsub.v1.PubsubMessage;
-import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
@@ -136,24 +135,7 @@ public void start(Map props) {
orderingKeySource =
OrderingKeySource.getEnum(
(String) validatedProps.get(CloudPubSubSinkConnector.ORDERING_KEY_SOURCE));
- gcpCredentialsProvider = new ConnectorCredentialsProvider();
- String credentialsPath =
- (String) validatedProps.get(ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG);
- String credentialsJson =
- (String) validatedProps.get(ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG);
- if (credentialsPath != null) {
- try {
- gcpCredentialsProvider.loadFromFile(credentialsPath);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- } else if (credentialsJson != null) {
- try {
- gcpCredentialsProvider.loadJson(credentialsJson);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
+ gcpCredentialsProvider = ConnectorCredentialsProvider.fromConfig(validatedProps);
if (publisher == null) {
// Only do this if we did not use the constructor.
createPublisher();
diff --git a/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceConnector.java b/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceConnector.java
index 4fd649c2..3f507d2f 100644
--- a/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceConnector.java
+++ b/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceConnector.java
@@ -22,7 +22,6 @@
import com.google.pubsub.kafka.common.ConnectorCredentialsProvider;
import com.google.pubsub.kafka.common.ConnectorUtils;
import com.google.pubsub.v1.GetSubscriptionRequest;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -138,25 +137,11 @@ public String version() {
public void start(Map<String, String> props) {
// Do a validation of configs here too so that we do not pass null objects to
// verifySubscription().
- config().parse(props);
- String cpsProject = props.get(ConnectorUtils.CPS_PROJECT_CONFIG);
- String cpsSubscription = props.get(CPS_SUBSCRIPTION_CONFIG);
- String credentialsPath = props.get(ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG);
- String credentialsJson = props.get(ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG);
- ConnectorCredentialsProvider credentialsProvider = new ConnectorCredentialsProvider();
- if (credentialsPath != null) {
- try {
- credentialsProvider.loadFromFile(credentialsPath);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- } else if (credentialsJson != null) {
- try {
- credentialsProvider.loadJson(credentialsJson);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
+ Map<String, Object> validated = config().parse(props);
+ String cpsProject = validated.get(ConnectorUtils.CPS_PROJECT_CONFIG).toString();
+ String cpsSubscription = validated.get(CPS_SUBSCRIPTION_CONFIG).toString();
+ ConnectorCredentialsProvider credentialsProvider =
+ ConnectorCredentialsProvider.fromConfig(validated);
verifySubscription(cpsProject, cpsSubscription, credentialsProvider);
this.props = props;
@@ -271,13 +256,13 @@ public ConfigDef config() {
.define(
ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG,
Type.STRING,
- null,
+ "",
Importance.HIGH,
"The path to the GCP credentials file")
.define(
ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG,
Type.STRING,
- null,
+ "",
Importance.HIGH,
"GCP JSON credentials")
.define(
diff --git a/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceTask.java b/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceTask.java
index 7319cea1..39a8f2ab 100644
--- a/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceTask.java
+++ b/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSourceTask.java
@@ -34,7 +34,6 @@
import com.google.pubsub.v1.ProjectSubscriptionName;
import com.google.pubsub.v1.PubsubMessage;
import com.google.pubsub.v1.ReceivedMessage;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
@@ -114,11 +113,6 @@ public void start(Map props) {
useKafkaHeaders = (Boolean) validatedProps.get(CloudPubSubSourceConnector.USE_KAFKA_HEADERS);
makeOrderingKeyAttribute =
(Boolean) validatedProps.get(CloudPubSubSourceConnector.CPS_MAKE_ORDERING_KEY_ATTRIBUTE);
- ConnectorCredentialsProvider gcpCredentialsProvider = new ConnectorCredentialsProvider();
- String gcpCredentialsFilePath =
- (String) validatedProps.get(ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG);
- String credentialsJson =
- (String) validatedProps.get(ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG);
boolean useStreamingPull =
(Boolean) validatedProps.get(CloudPubSubSourceConnector.CPS_STREAMING_PULL_ENABLED);
long streamingPullBytes =
@@ -136,19 +130,9 @@ public void start(Map props) {
(Long)
validatedProps.get(
CloudPubSubSourceConnector.CPS_STREAMING_PULL_MAX_MS_PER_ACK_EXTENSION);
- if (gcpCredentialsFilePath != null) {
- try {
- gcpCredentialsProvider.loadFromFile(gcpCredentialsFilePath);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- } else if (credentialsJson != null) {
- try {
- gcpCredentialsProvider.loadJson(credentialsJson);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
+ ConnectorCredentialsProvider gcpCredentialsProvider =
+ ConnectorCredentialsProvider.fromConfig(validatedProps);
+
// Only do this if we did not set it through the constructor.
if (subscriber == null) {
if (useStreamingPull) {
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java b/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java
index 932c20ec..12d59eb9 100644
--- a/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java
+++ b/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java
@@ -15,6 +15,7 @@
*/
package com.google.pubsublite.kafka.sink;
+import com.google.pubsub.kafka.common.ConnectorUtils;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
@@ -49,6 +50,18 @@ static ConfigDef config() {
ConfigDef.Type.STRING,
OrderingMode.DEFAULT.name(),
Importance.HIGH,
- "The ordering mode to use for publishing to Pub/Sub Lite. If set to `KAFKA`, messages will be republished to the same partition index they were read from on the source topic. Note that this means the Pub/Sub Lite topic *must* have the same number of partitions as the source Kafka topic.");
+ "The ordering mode to use for publishing to Pub/Sub Lite. If set to `KAFKA`, messages will be republished to the same partition index they were read from on the source topic. Note that this means the Pub/Sub Lite topic *must* have the same number of partitions as the source Kafka topic.")
+ .define(
+ ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG,
+ ConfigDef.Type.STRING,
+ "",
+ Importance.HIGH,
+ "The path to the GCP credentials file")
+ .define(
+ ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG,
+ ConfigDef.Type.STRING,
+ "",
+ Importance.HIGH,
+ "GCP JSON credentials");
}
}
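The `pubsublite.ordering.mode` description above carries a hard constraint: in `KAFKA` mode the Pub/Sub Lite topic must have exactly as many partitions as the source Kafka topic. A hypothetical pre-flight check, not part of this patch, makes the failure mode explicit:

```java
final class OrderingModePreflight {
  /**
   * Hypothetical sanity check (illustration only, not in this patch series):
   * with pubsublite.ordering.mode=KAFKA each record is published to the Lite
   * partition matching its source Kafka partition index, so the counts must match.
   */
  static void checkPartitionParity(int kafkaPartitions, int litePartitions) {
    if (kafkaPartitions != litePartitions) {
      throw new IllegalArgumentException(
          "pubsublite.ordering.mode=KAFKA requires matching partition counts: kafka="
              + kafkaPartitions + ", pubsublite=" + litePartitions);
    }
  }
}
```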
diff --git a/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java b/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java
index 47b93fab..09c62b21 100644
--- a/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java
+++ b/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java
@@ -37,17 +37,21 @@
import com.google.cloud.pubsublite.internal.wire.PubsubContext.Framework;
import com.google.cloud.pubsublite.internal.wire.RoutingMetadata;
import com.google.cloud.pubsublite.internal.wire.SinglePartitionPublisherBuilder;
+import com.google.cloud.pubsublite.v1.AdminServiceClient;
+import com.google.cloud.pubsublite.v1.AdminServiceSettings;
import com.google.cloud.pubsublite.v1.PublisherServiceClient;
import com.google.cloud.pubsublite.v1.PublisherServiceSettings;
+import com.google.pubsub.kafka.common.ConnectorCredentialsProvider;
+import java.io.IOException;
import java.util.Map;
import java.util.Optional;
-import org.apache.kafka.common.config.ConfigValue;
class PublisherFactoryImpl implements PublisherFactory {
private static final Framework FRAMEWORK = Framework.of("KAFKA_CONNECT");
- private PartitionPublisherFactory getPartitionPublisherFactory(TopicPath topic) {
+ private PartitionPublisherFactory getPartitionPublisherFactory(
+ TopicPath topic, ConnectorCredentialsProvider credentialsProvider) {
return new PartitionPublisherFactory() {
private Optional<PublisherServiceClient> publisherServiceClient = Optional.empty();
@@ -61,9 +65,7 @@ private synchronized PublisherServiceClient getServiceClient() throws ApiExcepti
addDefaultSettings(
topic.location().extractRegion(),
PublisherServiceSettings.newBuilder()
- .setCredentialsProvider(
- PublisherServiceSettings.defaultCredentialsProviderBuilder()
- .build()))));
+ .setCredentialsProvider(credentialsProvider))));
return publisherServiceClient.get();
} catch (Throwable t) {
throw toCanonical(t).underlying;
@@ -95,25 +97,38 @@ public void close() {}
@Override
public Publisher<MessageMetadata> newPublisher(Map<String, String> params) {
- Map<String, ConfigValue> config = ConfigDefs.config().validateAll(params);
+ Map<String, Object> config = ConfigDefs.config().parse(params);
+ ConnectorCredentialsProvider credentialsProvider =
+ ConnectorCredentialsProvider.fromConfig(config);
CloudRegionOrZone location =
- CloudRegionOrZone.parse(config.get(ConfigDefs.LOCATION_FLAG).value().toString());
+ CloudRegionOrZone.parse(config.get(ConfigDefs.LOCATION_FLAG).toString());
PartitionCountWatchingPublisherSettings.Builder builder =
PartitionCountWatchingPublisherSettings.newBuilder();
TopicPath topic =
TopicPath.newBuilder()
.setProject(
- ProjectPath.parse("projects/" + config.get(ConfigDefs.PROJECT_FLAG).value())
- .project())
+ ProjectPath.parse("projects/" + config.get(ConfigDefs.PROJECT_FLAG)).project())
.setLocation(location)
- .setName(TopicName.of(config.get(ConfigDefs.TOPIC_NAME_FLAG).value().toString()))
+ .setName(TopicName.of(config.get(ConfigDefs.TOPIC_NAME_FLAG).toString()))
.build();
builder.setTopic(topic);
- builder.setPublisherFactory(getPartitionPublisherFactory(topic));
- builder.setAdminClient(
- AdminClient.create(
- AdminClientSettings.newBuilder().setRegion(location.extractRegion()).build()));
- if (OrderingMode.valueOf(config.get(ConfigDefs.ORDERING_MODE_FLAG).value().toString())
+ builder.setPublisherFactory(getPartitionPublisherFactory(topic, credentialsProvider));
+ try {
+ builder.setAdminClient(
+ AdminClient.create(
+ AdminClientSettings.newBuilder()
+ .setRegion(location.extractRegion())
+ .setServiceClient(
+ AdminServiceClient.create(
+ addDefaultSettings(
+ location.extractRegion(),
+ AdminServiceSettings.newBuilder()
+ .setCredentialsProvider(credentialsProvider))))
+ .build()));
+ } catch (IOException e) {
+ throw new IllegalStateException(e);
+ }
+ if (OrderingMode.valueOf(config.get(ConfigDefs.ORDERING_MODE_FLAG).toString())
== OrderingMode.KAFKA) {
builder.setRoutingPolicyFactory(KafkaPartitionRoutingPolicy::new);
}
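For readers without the full tree: `OrderingMode` itself (added as `src/main/java/com/google/pubsublite/kafka/sink/OrderingMode.java` earlier in this series) is not shown in this excerpt. The sketch below is an assumption about its shape; only the two constant names are confirmed by the surrounding diffs (`OrderingMode.DEFAULT.name()` in `ConfigDefs` and `OrderingMode.KAFKA` here).

```java
// Assumed shape, for illustration; only the constants DEFAULT and KAFKA are
// confirmed by the diffs in this patch series.
public enum OrderingMode {
  /** Route by ordering key: Pub/Sub Lite's standard publishing behavior. */
  DEFAULT,
  /** Pin each record to the Lite partition matching its source Kafka partition index. */
  KAFKA
}
```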
diff --git a/src/main/java/com/google/pubsublite/kafka/source/ConfigDefs.java b/src/main/java/com/google/pubsublite/kafka/source/ConfigDefs.java
index 2dbf1d3a..36942662 100644
--- a/src/main/java/com/google/pubsublite/kafka/source/ConfigDefs.java
+++ b/src/main/java/com/google/pubsublite/kafka/source/ConfigDefs.java
@@ -15,6 +15,7 @@
*/
package com.google.pubsublite.kafka.source;
+import com.google.pubsub.kafka.common.ConnectorUtils;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
@@ -63,6 +64,18 @@ static ConfigDef config() {
ConfigDef.Type.LONG,
20_000_000,
Importance.MEDIUM,
- "The number of outstanding bytes per-partition allowed. Set to 20MB by default.");
+ "The number of outstanding bytes per-partition allowed. Set to 20MB by default.")
+ .define(
+ ConnectorUtils.GCP_CREDENTIALS_FILE_PATH_CONFIG,
+ ConfigDef.Type.STRING,
+ "",
+ Importance.HIGH,
+ "The path to the GCP credentials file")
+ .define(
+ ConnectorUtils.GCP_CREDENTIALS_JSON_CONFIG,
+ ConfigDef.Type.STRING,
+ "",
+ Importance.HIGH,
+ "GCP JSON credentials");
}
}
From 4b97d9e6114772ebf01cddd03f4075f31db2082c Mon Sep 17 00:00:00 2001
From: "release-please[bot]"
<55107282+release-please[bot]@users.noreply.github.com>
Date: Fri, 31 Mar 2023 19:24:15 +0000
Subject: [PATCH 17/17] chore(main): release 1.1.0 (#233)
:robot: I have created a release *beep* *boop*
---
## [1.1.0](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/compare/v1.0.0...v1.1.0) (2023-03-31)
### Features
* Add pubsublite sink support for credentials settings ([#251](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/251)) ([7290786](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/7290786e82db354f1d310c599f84150a7ec0ef8b))
* Add pubsublite.ordering.mode to kafka connector ([#228](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/228)) ([c499c39](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/c499c395cef38f9bb4b52d157bc336bff0644b94))
### Bug Fixes
* **main:** Typo in README.md for PubSubLiteSourceConnector ([#242](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/242)) ([d881eaf](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/d881eafef9bf5ece2391a75bc3d2cb6208a20ba9))
### Dependencies
* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.4.0 ([#232](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/232)) ([f10f9a6](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/f10f9a6546eb6ea65b61fbbe4538edae81b524ab))
* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.5.0 ([#244](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/244)) ([435cd98](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/435cd98e3ae81f3d698e174782ac81199bdf756f))
* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.6.0 ([#250](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/250)) ([888d3d9](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/888d3d95ea01ae1535e6aba47fa71c6cb0f0e7e1))
* Update dependency org.slf4j:slf4j-api to v2.0.6 ([#230](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/230)) ([9dec71b](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/9dec71b8df9acdd738b4738ee060b13ff602e86b))
* Update dependency org.slf4j:slf4j-api to v2.0.7 ([#243](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/issues/243)) ([1e1c336](https://togithub.com/googleapis/java-pubsub-group-kafka-connector/commit/1e1c33666ee1e0ef3fd45684054dcf25216004f8))
---
This PR was generated with [Release Please](https://togithub.com/googleapis/release-please). See [documentation](https://togithub.com/googleapis/release-please#release-please).
---
CHANGELOG.md | 22 ++++++++++++++++++++++
pom.xml | 2 +-
versions.txt | 2 +-
3 files changed, 24 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3e78e3ec..23e8fa24 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,27 @@
# Changelog
+## [1.1.0](https://github.com/googleapis/java-pubsub-group-kafka-connector/compare/v1.0.0...v1.1.0) (2023-03-31)
+
+
+### Features
+
+* Add pubsublite sink support for credentials settings ([#251](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/251)) ([7290786](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/7290786e82db354f1d310c599f84150a7ec0ef8b))
+* Add pubsublite.ordering.mode to kafka connector ([#228](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/228)) ([c499c39](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/c499c395cef38f9bb4b52d157bc336bff0644b94))
+
+
+### Bug Fixes
+
+* **main:** Typo in README.md for PubSubLiteSourceConnector ([#242](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/242)) ([d881eaf](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/d881eafef9bf5ece2391a75bc3d2cb6208a20ba9))
+
+
+### Dependencies
+
+* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.4.0 ([#232](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/232)) ([f10f9a6](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/f10f9a6546eb6ea65b61fbbe4538edae81b524ab))
+* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.5.0 ([#244](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/244)) ([435cd98](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/435cd98e3ae81f3d698e174782ac81199bdf756f))
+* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.6.0 ([#250](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/250)) ([888d3d9](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/888d3d95ea01ae1535e6aba47fa71c6cb0f0e7e1))
+* Update dependency org.slf4j:slf4j-api to v2.0.6 ([#230](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/230)) ([9dec71b](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/9dec71b8df9acdd738b4738ee060b13ff602e86b))
+* Update dependency org.slf4j:slf4j-api to v2.0.7 ([#243](https://github.com/googleapis/java-pubsub-group-kafka-connector/issues/243)) ([1e1c336](https://github.com/googleapis/java-pubsub-group-kafka-connector/commit/1e1c33666ee1e0ef3fd45684054dcf25216004f8))
+
## [1.0.0](https://github.com/googleapis/java-pubsub-group-kafka-connector/compare/v0.1.5...v1.0.0) (2022-11-18)
diff --git a/pom.xml b/pom.xml
index 6ea9b464..d17aa38f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>pubsub-group-kafka-connector</artifactId>
- <version>1.0.1-SNAPSHOT</version>
+ <version>1.1.0</version>
<packaging>jar</packaging>
<name>Pub/Sub Group Kafka Connector</name>
<url>https://github.com/googleapis/java-pubsub-group-kafka-connector</url>
diff --git a/versions.txt b/versions.txt
index 941b5612..af274e56 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,4 +1,4 @@
# Format:
# module:released-version:current-version
-pubsub-group-kafka-connector:1.0.0:1.0.1-SNAPSHOT
+pubsub-group-kafka-connector:1.1.0:1.1.0