Commit 032c9dd
[FLINK-38289] Update to Flink 2.1
1 parent ac98345 commit 032c9dd

8 files changed (+197 -12 lines)

.github/workflows/push_pr.yml

Lines changed: 2 additions & 2 deletions

@@ -28,7 +28,7 @@ jobs:
   compile_and_test:
     strategy:
       matrix:
-        flink: [ 2.0.0 ]
+        flink: [ 2.1.0 ]
         jdk: [ '11, 17, 21' ]
     uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils
     with:
@@ -37,7 +37,7 @@ jobs:
   python_test:
     strategy:
       matrix:
-        flink: [ 2.0.0 ]
+        flink: [ 2.1.0 ]
         jdk: [ '11, 17, 21' ]
     uses: apache/flink-connector-shared-utils/.github/workflows/python_ci.yml@ci_utils
     with:

flink-connector-kafka/archunit-violations/c0d94764-76a0-4c50-b617-70b1754c4612

Lines changed: 2 additions & 4 deletions

@@ -23,8 +23,6 @@
 Method <org.apache.flink.connector.kafka.dynamic.source.reader.DynamicKafkaSourceReader.syncAvailabilityHelperWithReaders()> calls method <org.apache.flink.streaming.runtime.io.MultipleFuturesAvailabilityHelper.anyOf(int, java.util.concurrent.CompletableFuture)> in (DynamicKafkaSourceReader.java:500)
 Method <org.apache.flink.connector.kafka.sink.ExactlyOnceKafkaWriter.getProducerPool()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (ExactlyOnceKafkaWriter.java:0)
 Method <org.apache.flink.connector.kafka.sink.ExactlyOnceKafkaWriter.getTransactionalIdPrefix()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (ExactlyOnceKafkaWriter.java:0)
-Method <org.apache.flink.connector.kafka.sink.internal.KafkaCommitter.getBackchannel()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaCommitter.java:0)
-Method <org.apache.flink.connector.kafka.sink.internal.KafkaCommitter.getCommittingProducer()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaCommitter.java:0)
 Method <org.apache.flink.connector.kafka.sink.KafkaSink.addPostCommitTopology(org.apache.flink.streaming.api.datastream.DataStream)> calls method <org.apache.flink.api.dag.Transformation.getCoLocationGroupKey()> in (KafkaSink.java:178)
 Method <org.apache.flink.connector.kafka.sink.KafkaSink.addPostCommitTopology(org.apache.flink.streaming.api.datastream.DataStream)> calls method <org.apache.flink.api.dag.Transformation.getInputs()> in (KafkaSink.java:181)
 Method <org.apache.flink.connector.kafka.sink.KafkaSink.addPostCommitTopology(org.apache.flink.streaming.api.datastream.DataStream)> calls method <org.apache.flink.api.dag.Transformation.getOutputType()> in (KafkaSink.java:177)
@@ -34,6 +32,8 @@
 Method <org.apache.flink.connector.kafka.sink.KafkaSink.getKafkaProducerConfig()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaSink.java:0)
 Method <org.apache.flink.connector.kafka.sink.KafkaSinkBuilder.setRecordSerializer(org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema)> calls method <org.apache.flink.api.java.ClosureCleaner.clean(java.lang.Object, org.apache.flink.api.common.ExecutionConfig$ClosureCleanerLevel, boolean)> in (KafkaSinkBuilder.java:154)
 Method <org.apache.flink.connector.kafka.sink.KafkaWriter.getCurrentProducer()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaWriter.java:0)
+Method <org.apache.flink.connector.kafka.sink.internal.KafkaCommitter.getBackchannel()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaCommitter.java:0)
+Method <org.apache.flink.connector.kafka.sink.internal.KafkaCommitter.getCommittingProducer()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaCommitter.java:0)
 Method <org.apache.flink.connector.kafka.sink.internal.ProducerPoolImpl.getProducers()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (ProducerPoolImpl.java:0)
 Method <org.apache.flink.connector.kafka.source.KafkaSource.createReader(org.apache.flink.api.connector.source.SourceReaderContext, java.util.function.Consumer)> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaSource.java:0)
 Method <org.apache.flink.connector.kafka.source.KafkaSource.getConfiguration()> is annotated with <org.apache.flink.annotation.VisibleForTesting> in (KafkaSource.java:0)
@@ -50,6 +50,4 @@
 Method <org.apache.flink.streaming.connectors.kafka.table.DynamicKafkaRecordSerializationSchema.createProjectedRow(org.apache.flink.table.data.RowData, org.apache.flink.types.RowKind, [Lorg.apache.flink.table.data.RowData$FieldGetter;)> has parameter of type <[Lorg.apache.flink.table.data.RowData$FieldGetter;> in (DynamicKafkaRecordSerializationSchema.java:0)
 Method <org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.createKeyFormatProjection(org.apache.flink.configuration.ReadableConfig, org.apache.flink.table.types.DataType)> calls method <org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldNames(org.apache.flink.table.types.logical.LogicalType)> in (KafkaConnectorOptionsUtil.java:520)
 Method <org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.createValueFormatProjection(org.apache.flink.configuration.ReadableConfig, org.apache.flink.table.types.DataType)> calls method <org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldCount(org.apache.flink.table.types.logical.LogicalType)> in (KafkaConnectorOptionsUtil.java:564)
-Method <org.apache.flink.streaming.connectors.kafka.table.KafkaDynamicSink.createSerialization(org.apache.flink.table.connector.sink.DynamicTableSink$Context, org.apache.flink.table.connector.format.EncodingFormat, [I, java.lang.String)> calls method <org.apache.flink.table.types.utils.DataTypeUtils.stripRowPrefix(org.apache.flink.table.types.DataType, java.lang.String)> in (KafkaDynamicSink.java:408)
 Method <org.apache.flink.streaming.connectors.kafka.table.KafkaDynamicSink.getFieldGetters(java.util.List, [I)> has return type <[Lorg.apache.flink.table.data.RowData$FieldGetter;> in (KafkaDynamicSink.java:0)
-Method <org.apache.flink.streaming.connectors.kafka.table.KafkaDynamicSource.createDeserialization(org.apache.flink.table.connector.source.DynamicTableSource$Context, org.apache.flink.table.connector.format.DecodingFormat, [I, java.lang.String)> calls method <org.apache.flink.table.types.utils.DataTypeUtils.stripRowPrefix(org.apache.flink.table.types.DataType, java.lang.String)> in (KafkaDynamicSource.java:574)

flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/KafkaDynamicSink.java

Lines changed: 2 additions & 2 deletions

@@ -44,7 +44,6 @@
 import org.apache.flink.table.data.RowData;
 import org.apache.flink.table.types.DataType;
 import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.flink.table.types.utils.DataTypeUtils;
 
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.header.Header;
@@ -405,7 +404,8 @@ private RowData.FieldGetter[] getFieldGetters(
         }
         DataType physicalFormatDataType = Projection.of(projection).project(this.physicalDataType);
         if (prefix != null) {
-            physicalFormatDataType = DataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);
+            physicalFormatDataType =
+                    TableDataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);
         }
         return format.createRuntimeEncoder(context, physicalFormatDataType);
     }
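
Aside: this two-line swap (mirrored in KafkaDynamicSource below) replaces the DataTypeUtils.stripRowPrefix call, removed upstream in Flink 2.1, with the vendored TableDataTypeUtils copy added later in this commit; runtime behavior is unchanged. As a minimal sketch of what the call does, presumably in support of the connector's key.fields-prefix option (the example class and field names are illustrative, not part of the commit):

import org.apache.flink.streaming.connectors.kafka.table.TableDataTypeUtils;
import org.apache.flink.table.types.DataType;

import static org.apache.flink.table.api.DataTypes.FIELD;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.ROW;
import static org.apache.flink.table.api.DataTypes.STRING;

public class StripRowPrefixExample {
    public static void main(String[] args) {
        // A projected key row whose field names carry the configured prefix.
        DataType keyType = ROW(FIELD("key_id", INT()), FIELD("key_name", STRING()));

        // Strip "key_" so the key format sees the plain field names; fields
        // without the prefix pass through unchanged.
        DataType stripped = TableDataTypeUtils.stripRowPrefix(keyType, "key_");
        System.out.println(stripped); // e.g. ROW<`id` INT, `name` STRING>
    }
}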

flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/KafkaDynamicSource.java

Lines changed: 2 additions & 2 deletions

@@ -49,7 +49,6 @@
 import org.apache.flink.table.data.StringData;
 import org.apache.flink.table.data.TimestampData;
 import org.apache.flink.table.types.DataType;
-import org.apache.flink.table.types.utils.DataTypeUtils;
 import org.apache.flink.util.Preconditions;
 
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -571,7 +570,8 @@ private KafkaRecordDeserializationSchema<RowData> createKafkaDeserializationSchema(
         }
         DataType physicalFormatDataType = Projection.of(projection).project(this.physicalDataType);
         if (prefix != null) {
-            physicalFormatDataType = DataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);
+            physicalFormatDataType =
+                    TableDataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);
         }
         return format.createRuntimeDecoder(context, physicalFormatDataType);
     }
flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtils.java

Lines changed: 83 additions & 0 deletions

@@ -0,0 +1,83 @@
+package org.apache.flink.streaming.connectors.kafka.table;
+
+import org.apache.flink.table.types.DataType;
+import org.apache.flink.table.types.FieldsDataType;
+import org.apache.flink.table.types.logical.LogicalType;
+import org.apache.flink.table.types.logical.RowType;
+
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import static org.apache.flink.table.types.logical.LogicalTypeRoot.ROW;
+
+/**
+ * Utility class for manipulating {@link DataType} objects, particularly for table schemas. This
+ * class provides methods that were removed from the Flink API in version 2.1.0. See <a
+ * href="https://github.com/apache/flink/pull/26784">Flink PR 26784</a>.
+ */
+public class TableDataTypeUtils {
+
+    protected static final String STRIP_ROW_NO_ROW_ERROR_MSG = "Row data type expected.";
+    protected static final String RENAME_ROW_LENGTH_MISMATCH_ERROR_MSG =
+            "Row length and new names must match.";
+
+    /**
+     * Removes a string prefix from the fields of the given row data type.
+     *
+     * @param dataType The row data type to modify.
+     * @param prefix The prefix to remove from the field names.
+     * @return A new DataType with the modified field names.
+     * @throws IllegalArgumentException if the provided dataType is not of ROW type.
+     */
+    public static DataType stripRowPrefix(DataType dataType, String prefix) {
+
+        if (!dataType.getLogicalType().is(ROW)) {
+            throw new IllegalArgumentException(STRIP_ROW_NO_ROW_ERROR_MSG);
+        }
+
+        final RowType rowType = (RowType) dataType.getLogicalType();
+        final List<String> newFieldNames =
+                rowType.getFieldNames().stream()
+                        .map(
+                                s -> {
+                                    if (s.startsWith(prefix)) {
+                                        return s.substring(prefix.length());
+                                    }
+                                    return s;
+                                })
+                        .collect(Collectors.toList());
+        final LogicalType newRowType = renameRowFields(rowType, newFieldNames);
+        return new FieldsDataType(
+                newRowType, dataType.getConversionClass(), dataType.getChildren());
+    }
+
+    /**
+     * Renames the fields of the given {@link RowType}.
+     *
+     * @param rowType The original RowType.
+     * @param newFieldNames The new field names to apply.
+     * @return A new RowType with the updated field names.
+     * @throws IllegalArgumentException if the number of new field names does not match the number
+     *     of fields in the original RowType.
+     */
+    public static RowType renameRowFields(RowType rowType, List<String> newFieldNames) {
+
+        if (!(rowType.getFieldCount() == newFieldNames.size())) {
+            throw new IllegalArgumentException(RENAME_ROW_LENGTH_MISMATCH_ERROR_MSG);
+        }
+
+        final List<RowType.RowField> newFields =
+                IntStream.range(0, rowType.getFieldCount())
+                        .mapToObj(
+                                pos -> {
+                                    final RowType.RowField oldField = rowType.getFields().get(pos);
+                                    return new RowType.RowField(
+                                            newFieldNames.get(pos),
+                                            oldField.getType(),
+                                            oldField.getDescription().orElse(null));
+                                })
+                        .collect(Collectors.toList());
+        return new RowType(rowType.isNullable(), newFields);
+    }
+}
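
For orientation, a small hypothetical usage sketch of the second helper, renameRowFields (class and field names are illustrative; RowType and the logical types come from flink-table-common):

import org.apache.flink.streaming.connectors.kafka.table.TableDataTypeUtils;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

import java.util.Arrays;

public class RenameRowFieldsExample {
    public static void main(String[] args) {
        // Build a NOT NULL ROW<user VARCHAR, age INT> directly from logical types.
        RowType rowType =
                new RowType(
                        false,
                        Arrays.asList(
                                new RowType.RowField("user", new VarCharType()),
                                new RowType.RowField("age", new IntType())));

        // Positionally rename both fields.
        RowType renamed =
                TableDataTypeUtils.renameRowFields(rowType, Arrays.asList("name", "years"));
        System.out.println(renamed); // e.g. ROW<`name` VARCHAR(1), `years` INT> NOT NULL
    }
}

Renaming is positional and preserves each field's type and description as well as the row's nullability, which is exactly what stripRowPrefix relies on.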
flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtilsTest.java

Lines changed: 104 additions & 0 deletions

@@ -0,0 +1,104 @@
+package org.apache.flink.streaming.connectors.kafka.table;
+
+import org.apache.flink.table.types.DataType;
+import org.apache.flink.table.types.logical.IntType;
+import org.apache.flink.table.types.logical.RowType;
+import org.apache.flink.table.types.logical.VarCharType;
+import org.apache.flink.util.TestLogger;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import static org.apache.flink.table.api.DataTypes.FIELD;
+import static org.apache.flink.table.api.DataTypes.INT;
+import static org.apache.flink.table.api.DataTypes.ROW;
+import static org.apache.flink.table.api.DataTypes.STRING;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+/** Tests for {@link TableDataTypeUtils}. */
+public class TableDataTypeUtilsTest extends TestLogger {
+
+    @Test
+    public void testStripRowPrefix() {
+        DataType rowDataType =
+                ROW(
+                        FIELD("prefix_name", STRING()),
+                        FIELD("prefix_age", INT()),
+                        FIELD("address", STRING()));
+
+        DataType result = TableDataTypeUtils.stripRowPrefix(rowDataType, "prefix_");
+
+        RowType rowType = (RowType) result.getLogicalType();
+        List<String> fieldNames = rowType.getFieldNames();
+
+        assertThat(fieldNames).containsExactly("name", "age", "address");
+    }
+
+    @Test
+    public void testStripRowPrefixWithNoMatch() {
+        // Create a test row data type with no matching prefixes
+        DataType rowDataType =
+                ROW(FIELD("name", STRING()), FIELD("age", INT()), FIELD("address", STRING()));
+
+        DataType result = TableDataTypeUtils.stripRowPrefix(rowDataType, "nonexistent_");
+
+        // Field names should remain unchanged
+        RowType rowType = (RowType) result.getLogicalType();
+        List<String> fieldNames = rowType.getFieldNames();
+
+        assertThat(fieldNames).containsExactly("name", "age", "address");
+    }
+
+    @Test
+    public void testStripRowPrefixInvalidType() {
+        // Create a non-row data type
+        DataType nonRowType = STRING();
+
+        // Attempt to strip prefix should throw an exception
+        assertThatThrownBy(() -> TableDataTypeUtils.stripRowPrefix(nonRowType, "prefix_"))
+                .isInstanceOf(IllegalArgumentException.class)
+                .hasMessageContaining(TableDataTypeUtils.STRIP_ROW_NO_ROW_ERROR_MSG);
+    }
+
+    @Test
+    public void testRenameRowFields() {
+        List<RowType.RowField> fields =
+                Arrays.asList(
+                        new RowType.RowField("oldName1", new VarCharType(), null),
+                        new RowType.RowField("oldName2", new IntType(), "description"));
+        RowType rowType = new RowType(false, fields);
+
+        List<String> newFieldNames = Arrays.asList("newName1", "newName2");
+
+        RowType renamedType = TableDataTypeUtils.renameRowFields(rowType, newFieldNames);
+
+        List<String> resultFieldNames = renamedType.getFieldNames();
+        assertThat(resultFieldNames).containsExactly("newName1", "newName2");
+
+        assertThat(renamedType.getFields().get(0).getType()).isInstanceOf(VarCharType.class);
+        assertThat(renamedType.getFields().get(1).getType()).isInstanceOf(IntType.class);
+        assertThat(renamedType.getFields().get(1).getDescription().orElse(null))
+                .isEqualTo("description");
+    }
+
+    @Test
+    public void testRenameRowFieldsInvalidLength() {
+        List<RowType.RowField> fields =
+                Arrays.asList(
+                        new RowType.RowField("oldName1", new VarCharType(), null),
+                        new RowType.RowField("oldName2", new IntType(), null));
+        RowType rowType = new RowType(false, fields);
+
+        // Incorrect number of new field names
+        List<String> newFieldNames = Collections.singletonList("newName1");
+
+        // Rename with incorrect number of fields should throw an exception
+        assertThatThrownBy(() -> TableDataTypeUtils.renameRowFields(rowType, newFieldNames))
+                .isInstanceOf(IllegalArgumentException.class)
+                .hasMessageContaining(TableDataTypeUtils.RENAME_ROW_LENGTH_MISMATCH_ERROR_MSG);
+    }
+}

flink-python/tox.ini

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@
 # in multiple virtualenvs. This configuration file will run the
 # test suite on all supported python versions.
 # new environments will be excluded by default unless explicitly added to envlist.
-envlist = {py38, py39, py310}-cython
+envlist = {py39, py310, py311}-cython
 
 [testenv]
 whitelist_externals = /bin/bash

pom.xml

Lines changed: 1 addition & 1 deletion

@@ -53,7 +53,7 @@ under the License.
 
         <!-- Main Dependencies -->
        <confluent.version>7.9.2</confluent.version>
-        <flink.version>2.0.0</flink.version>
+        <flink.version>2.1.0</flink.version>
         <kafka.version>4.0.0</kafka.version>
 
         <!-- Other Dependencies -->
