Commit 75e1998

fixup! aa
1 parent bfef897 commit 75e1998

File tree

12 files changed, +15 −117 lines


flink-core/src/main/java/org/apache/flink/api/common/functions/AbstractRichFunction.java

Lines changed: 0 additions & 4 deletions
@@ -19,7 +19,6 @@
 package org.apache.flink.api.common.functions;
 
 import org.apache.flink.annotation.Public;
-import org.apache.flink.configuration.Configuration;
 
 import java.io.Serializable;
 
@@ -68,9 +67,6 @@ public IterationRuntimeContext getIterationRuntimeContext() {
     // Default life cycle methods
     // --------------------------------------------------------------------------------------------
 
-    @Override
-    public void open(Configuration parameters) throws Exception {}
-
     @Override
     public void open(OpenContext openContext) throws Exception {}
 

flink-core/src/main/java/org/apache/flink/api/common/functions/RichFunction.java

Lines changed: 0 additions & 46 deletions
@@ -20,7 +20,6 @@
 
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
-import org.apache.flink.configuration.Configuration;
 
 /**
  * An base interface for all rich user-defined functions. This class defines methods for the life
@@ -30,51 +29,6 @@
 @Public
 public interface RichFunction extends Function {
 
-    /**
-     * Initialization method for the function. It is called before the actual working methods (like
-     * <i>map</i> or <i>join</i>) and thus suitable for one time setup work. For functions that are
-     * part of an iteration, this method will be invoked at the beginning of each iteration
-     * superstep.
-     *
-     * <p>The configuration object passed to the function can be used for configuration and
-     * initialization. The configuration contains all parameters that were configured on the
-     * function in the program composition.
-     *
-     * <pre>{@code
-     * public class MyFilter extends RichFilterFunction<String> {
-     *
-     *     private String searchString;
-     *
-     *     public void open(Configuration parameters) {
-     *         this.searchString = parameters.getString("foo");
-     *     }
-     *
-     *     public boolean filter(String value) {
-     *         return value.equals(searchString);
-     *     }
-     * }
-     * }</pre>
-     *
-     * <p>By default, this method does nothing.
-     *
-     * @param parameters The configuration containing the parameters attached to the contract.
-     * @throws Exception Implementations may forward exceptions, which are caught by the runtime.
-     *     When the runtime catches an exception, it aborts the task and lets the fail-over logic
-     *     decide whether to retry the task execution.
-     * @see org.apache.flink.configuration.Configuration
-     * @deprecated This method is deprecated since Flink 1.19. The users are recommended to
-     *     implement {@code open(OpenContext openContext)} and implement {@code open(Configuration
-     *     parameters)} with an empty body instead. 1. If you implement {@code open(OpenContext
-     *     openContext)}, the {@code open(OpenContext openContext)} will be invoked and the {@code
-     *     open(Configuration parameters)} won't be invoked. 2. If you don't implement {@code
-     *     open(OpenContext openContext)}, the {@code open(Configuration parameters)} will be
-     *     invoked in the default implementation of the {@code open(OpenContext openContext)}.
-     * @see <a href="https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=263425231">
-     *     FLIP-344: Remove parameter in RichFunction#open </a>
-     */
-    @Deprecated
-    void open(Configuration parameters) throws Exception;
-
     /**
      * Initialization method for the function. It is called before the actual working methods (like
      * <i>map</i> or <i>join</i>) and thus suitable for one time setup work. For functions that are
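
Note on migration (not part of the diff): the deleted javadoc's MyFilter example was written against the removed open(Configuration) signature. A minimal sketch of the same filter on the FLIP-344 signature might look as follows; passing the search string through the constructor is an assumption made for illustration, since OpenContext does not carry the per-function Configuration.

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.functions.RichFilterFunction;

// Sketch only: mirrors the MyFilter example removed from the javadoc above,
// migrated to the FLIP-344 signature. The search string now arrives through
// the constructor instead of open(Configuration).
public class MyFilter extends RichFilterFunction<String> {

    private final String searchString;
    private transient String normalized;

    public MyFilter(String searchString) {
        this.searchString = searchString;
    }

    @Override
    public void open(OpenContext openContext) throws Exception {
        // one-time setup work, invoked once per task (and per iteration superstep)
        this.normalized = searchString.trim();
    }

    @Override
    public boolean filter(String value) {
        return value.equals(normalized);
    }
}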

flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/functions/KeyedStateReaderFunction.java

Lines changed: 3 additions & 13 deletions
@@ -18,21 +18,20 @@
 
 package org.apache.flink.state.api.functions;
 
+import java.util.Set;
+
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.functions.AbstractRichFunction;
 import org.apache.flink.api.common.functions.OpenContext;
-import org.apache.flink.configuration.Configuration;
 import org.apache.flink.util.Collector;
 
-import java.util.Set;
-
 /**
  * A function that processes keys from a restored operator
  *
  * <p>For every key {@link #readKey(Object, Context, Collector)} is invoked. This can produce zero
  * or more elements as output.
  *
- * <p><b>NOTE:</b> State descriptors must be eagerly registered in {@code open(Configuration)}. Any
+ * <p><b>NOTE:</b> State descriptors must be eagerly registered in {@code open(OpenContext)}. Any
  * attempt to dynamically register states inside of {@code readKey} will result in a {@code
  * RuntimeException}.
 *
@@ -51,15 +50,6 @@ public abstract class KeyedStateReaderFunction<K, OUT> extends AbstractRichFunct
 
     private static final long serialVersionUID = 3873843034140417407L;
 
-    /**
-     * Initialization method for the function. It is called before {@link #readKey(Object, Context,
-     * Collector)} and thus suitable for one time setup work.
-     *
-     * <p>This is the only method that my register state descriptors within a {@code
-     * KeyedStateReaderFunction}.
-     */
-    public abstract void open(Configuration parameters) throws Exception;
-
     /**
      * Process one key from the restored state backend.
      *
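
Illustrative sketch (not part of the diff): with the abstract open(Configuration) gone, a reader eagerly registers its descriptors in open(OpenContext), mirroring the test changes later in this commit. The state name "count" and the Integer key/output types are hypothetical.

import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.state.api.functions.KeyedStateReaderFunction;
import org.apache.flink.util.Collector;

// Sketch only: the descriptor is registered eagerly in open(OpenContext);
// registering it lazily inside readKey would fail at runtime.
public class CountReader extends KeyedStateReaderFunction<Integer, Integer> {

    private transient ValueState<Integer> countState;

    @Override
    public void open(OpenContext openContext) {
        ValueStateDescriptor<Integer> descriptor =
                new ValueStateDescriptor<>("count", Types.INT); // hypothetical state name
        countState = getRuntimeContext().getState(descriptor);
    }

    @Override
    public void readKey(Integer key, Context ctx, Collector<Integer> out) throws Exception {
        Integer count = countState.value();
        out.collect(count == null ? 0 : count);
    }
}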

flink-libraries/flink-state-processing-api/src/test/java/org/apache/flink/state/api/SavepointDeepCopyTest.java

Lines changed: 0 additions & 7 deletions
@@ -23,7 +23,6 @@
 import org.apache.flink.api.common.state.ValueStateDescriptor;
 import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.configuration.Configuration;
 import org.apache.flink.configuration.MemorySize;
 import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
 import org.apache.flink.runtime.state.StateBackend;
@@ -110,12 +109,6 @@ public void open(OpenContext openContext) {
             state = getRuntimeContext().getState(stateDescriptor);
         }
 
-        @Override
-        public void open(Configuration parameters) throws Exception {
-            throw new UnsupportedOperationException(
-                    "This method is deprecated and shouldn't be invoked. Please use open(OpenContext) instead.");
-        }
-
         @Override
         public void readKey(String key, Context ctx, Collector<Tuple2<String, String>> out)
                 throws Exception {

flink-libraries/flink-state-processing-api/src/test/java/org/apache/flink/state/api/SavepointReaderKeyedStateITCase.java

Lines changed: 0 additions & 6 deletions
@@ -112,12 +112,6 @@ public void open(OpenContext openContext) {
             state = getRuntimeContext().getState(valueState);
         }
 
-        @Override
-        public void open(Configuration parameters) {
-            throw new UnsupportedOperationException(
-                    "This method is deprecated and shouldn't be invoked. Please use open(OpenContext) instead.");
-        }
-
         @Override
         public void readKey(Integer key, Context ctx, Collector<Pojo> out) throws Exception {
             Pojo pojo = new Pojo();

flink-libraries/flink-state-processing-api/src/test/java/org/apache/flink/state/api/input/KeyedStateInputFormatTest.java

Lines changed: 0 additions & 24 deletions
@@ -262,12 +262,6 @@ public void open(OpenContext openContext) {
             state = getRuntimeContext().getState(stateDescriptor);
         }
 
-        @Override
-        public void open(Configuration parameters) throws Exception {
-            throw new UnsupportedOperationException(
-                    "This method is deprecated and shouldn't be invoked. Please use open(OpenContext) instead.");
-        }
-
         @Override
         public void readKey(
                 Integer key, KeyedStateReaderFunction.Context ctx, Collector<Integer> out)
@@ -284,12 +278,6 @@ public void open(OpenContext openContext) {
             state = getRuntimeContext().getState(stateDescriptor);
         }
 
-        @Override
-        public void open(Configuration parameters) throws Exception {
-            throw new UnsupportedOperationException(
-                    "This method is deprecated and shouldn't be invoked. Please use open(OpenContext) instead.");
-        }
-
         @Override
         public void readKey(
                 Integer key, KeyedStateReaderFunction.Context ctx, Collector<Integer> out)
@@ -306,12 +294,6 @@ public void open(OpenContext openContext) {
             getRuntimeContext().getState(stateDescriptor);
         }
 
-        @Override
-        public void open(Configuration parameters) throws Exception {
-            throw new UnsupportedOperationException(
-                    "This method is deprecated and shouldn't be invoked. Please use open(OpenContext) instead.");
-        }
-
         @Override
         public void readKey(
                 Integer key, KeyedStateReaderFunction.Context ctx, Collector<Integer> out)
@@ -360,12 +342,6 @@ public void open(OpenContext openContext) {
             state = getRuntimeContext().getState(stateDescriptor);
         }
 
-        @Override
-        public void open(Configuration parameters) throws Exception {
-            throw new UnsupportedOperationException(
-                    "This method is deprecated and shouldn't be invoked. Please use open(OpenContext) instead.");
-        }
-
         @Override
         public void readKey(
                 Integer key, KeyedStateReaderFunction.Context ctx, Collector<Integer> out)

flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/CollectorCodeGenerator.scala

Lines changed: 2 additions & 1 deletion
@@ -17,6 +17,7 @@
  */
 package org.apache.flink.table.planner.codegen
 
+import org.apache.flink.api.common.functions.DefaultOpenContext
 import org.apache.flink.configuration.Configuration
 import org.apache.flink.table.planner.codegen.CodeGenUtils._
 import org.apache.flink.table.planner.codegen.Indenter.toISC
@@ -180,7 +181,7 @@ object CollectorCodeGenerator {
       s"""
         |$collectorTerm = new ${generatedCollector.getClassName}();
        |$collectorTerm.setRuntimeContext(getRuntimeContext());
-        |$collectorTerm.open(new ${className[Configuration]}());
+        |$collectorTerm.open(new ${className[DefaultOpenContext]}());
         |""".stripMargin
     ctx.addReusableOpenStatement(openCollector)

flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/ExpressionReducer.scala

Lines changed: 2 additions & 6 deletions
@@ -17,7 +17,7 @@
  */
 package org.apache.flink.table.planner.codegen
 
-import org.apache.flink.api.common.functions.{MapFunction, OpenContext, RichMapFunction}
+import org.apache.flink.api.common.functions.{DefaultOpenContext, MapFunction, OpenContext, RichMapFunction}
 import org.apache.flink.configuration.{Configuration, PipelineOptions, ReadableConfig}
 import org.apache.flink.table.api.{TableConfig, TableException}
 import org.apache.flink.table.data.{DecimalData, GenericRowData, TimestampData}
@@ -33,7 +33,6 @@ import org.apache.flink.table.planner.utils.Logging
 import org.apache.flink.table.planner.utils.TimestampStringUtils.fromLocalDateTime
 import org.apache.flink.table.types.DataType
 import org.apache.flink.table.types.logical.RowType
-
 import org.apache.calcite.avatica.util.ByteString
 import org.apache.calcite.rex._
 import org.apache.calcite.sql.`type`.SqlTypeName
@@ -102,12 +101,9 @@ class ExpressionReducer(
       throw new TableException("RichMapFunction[GenericRowData, GenericRowData] required here")
     }
 
-    val parameters = toScala(tableConfig.getOptional(PipelineOptions.GLOBAL_JOB_PARAMETERS))
-      .map(Configuration.fromMap)
-      .getOrElse(new Configuration)
     val reduced =
       try {
-        richMapFunction.open(parameters)
+        richMapFunction.open(DefaultOpenContext.INSTANCE)
         // execute
         richMapFunction.map(EMPTY_ROW)
       } catch {
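
Illustrative sketch (not part of the diff): call sites that previously handed a fresh Configuration to open() now pass DefaultOpenContext.INSTANCE, as the change above does. Driving a rich function by hand follows the same pattern; the anonymous mapper below is a stand-in used only for illustration.

import org.apache.flink.api.common.functions.DefaultOpenContext;
import org.apache.flink.api.common.functions.RichMapFunction;

public class ManualOpenExample {
    public static void main(String[] args) throws Exception {
        RichMapFunction<String, Integer> mapper =
                new RichMapFunction<String, Integer>() {
                    @Override
                    public Integer map(String value) {
                        return value.length();
                    }
                };

        // FLIP-344 style: open with the shared DefaultOpenContext instead of new Configuration()
        mapper.open(DefaultOpenContext.INSTANCE);
        try {
            System.out.println(mapper.map("hello")); // prints 5
        } finally {
            mapper.close();
        }
    }
}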

flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/FunctionCodeGenerator.scala

Lines changed: 1 addition & 1 deletion
@@ -154,7 +154,7 @@ object FunctionCodeGenerator {
       ${ctx.reuseConstructorCode(funcName)}
 
       @Override
-      public void open(${classOf[Configuration].getCanonicalName} parameters) throws Exception {
+      public void open(${classOf[OpenContext].getCanonicalName} context) throws Exception {
         ${ctx.reuseOpenCode()}
       }
 

flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/LongHashJoinGenerator.scala

Lines changed: 3 additions & 2 deletions
@@ -17,12 +17,13 @@
  */
 package org.apache.flink.table.planner.codegen
 
+import org.apache.flink.api.common.functions.DefaultOpenContext
 import org.apache.flink.configuration.{Configuration, ReadableConfig}
 import org.apache.flink.metrics.Gauge
 import org.apache.flink.table.data.{RowData, TimestampData}
 import org.apache.flink.table.data.utils.JoinedRowData
 import org.apache.flink.table.planner.codegen.CodeGenUtils._
-import org.apache.flink.table.planner.codegen.OperatorCodeGenerator.{generateCollect, INPUT_SELECTION}
+import org.apache.flink.table.planner.codegen.OperatorCodeGenerator.{INPUT_SELECTION, generateCollect}
 import org.apache.flink.table.runtime.generated.{GeneratedJoinCondition, GeneratedProjection}
 import org.apache.flink.table.runtime.hashtable.{LongHashPartition, LongHybridHashTable, ProbeIterator}
 import org.apache.flink.table.runtime.operators.CodeGenOperatorFactory
@@ -157,7 +158,7 @@ object LongHashJoinGenerator {
     val condRefs = ctx.addReusableObject(condFunc.getReferences, "condRefs")
     ctx.addReusableInitStatement(s"condFunc = new ${condFunc.getClassName}($condRefs);")
     ctx.addReusableOpenStatement(s"condFunc.setRuntimeContext(getRuntimeContext());")
-    ctx.addReusableOpenStatement(s"condFunc.open(new ${className[Configuration]}());")
+    ctx.addReusableOpenStatement(s"condFunc.open(new ${className[DefaultOpenContext]}());")
     ctx.addReusableCloseStatement(s"condFunc.close();")
 
     val leftIsBuildTerm = newName(ctx, "leftIsBuild")

flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/expressions/utils/ExpressionTestBase.scala

Lines changed: 2 additions & 5 deletions
@@ -18,7 +18,7 @@
 package org.apache.flink.table.planner.expressions.utils
 
 import org.apache.flink.api.common.{TaskInfo, TaskInfoImpl}
-import org.apache.flink.api.common.functions.{MapFunction, RichFunction, RichMapFunction}
+import org.apache.flink.api.common.functions.{DefaultOpenContext, MapFunction, OpenContext, RichFunction, RichMapFunction}
 import org.apache.flink.api.common.functions.util.RuntimeUDFContext
 import org.apache.flink.api.java.typeutils.RowTypeInfo
 import org.apache.flink.configuration.Configuration
@@ -45,7 +45,6 @@ import org.apache.flink.table.types.{AbstractDataType, DataType}
 import org.apache.flink.table.types.logical.{RowType, VarCharType}
 import org.apache.flink.table.types.utils.TypeConversions
 import org.apache.flink.types.Row
-
 import org.apache.calcite.plan.hep.{HepPlanner, HepProgramBuilder}
 import org.apache.calcite.rel.RelNode
 import org.apache.calcite.rel.logical.LogicalCalc
@@ -58,9 +57,7 @@ import org.junit.jupiter.api.{AfterEach, BeforeEach}
 import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue}
 
 import javax.annotation.Nullable
-
 import java.util.Collections
-
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 
@@ -241,7 +238,7 @@ abstract class ExpressionTestBase(isStreaming: Boolean = true) {
         Collections.emptyMap(),
         null)
       richMapper.setRuntimeContext(t)
-      richMapper.open(new Configuration())
+      richMapper.open(DefaultOpenContext.INSTANCE)
     }
 
     val testRow = if (containsLegacyTypes) {

flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/join/LookupJoinHarnessTest.java

Lines changed: 2 additions & 2 deletions
@@ -20,8 +20,8 @@
 
 import org.apache.flink.api.common.functions.AbstractRichFunction;
 import org.apache.flink.api.common.functions.FlatMapFunction;
+import org.apache.flink.api.common.functions.OpenContext;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.configuration.Configuration;
 import org.apache.flink.streaming.api.functions.ProcessFunction;
 import org.apache.flink.streaming.api.operators.ProcessOperator;
 import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
@@ -332,7 +332,7 @@ public static final class TestingPreFilterCondition extends AbstractRichFunction
         private static final long serialVersionUID = 1L;
 
         @Override
-        public void open(Configuration parameters) throws Exception {
+        public void open(OpenContext context) throws Exception {
             // do nothing
         }
 
