[FLINK-34516] Use new CheckpointingMode in flink-core in doc
Zakelly committed Mar 13, 2024
1 parent 7b04077 commit fd7fac3
Showing 2 changed files with 8 additions and 8 deletions.
8 changes: 4 additions & 4 deletions docs/content.zh/docs/dev/table/data_stream_api.md
@@ -523,7 +523,7 @@ We recommend setting all configuration options in DataStream API early before sw
{{< tab "Java" >}}
```java
import java.time.ZoneId;
-import org.apache.flink.streaming.api.CheckpointingMode;
+import org.apache.flink.core.execution.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

@@ -537,7 +537,7 @@ env.setMaxParallelism(256);

env.getConfig().addDefaultKryoSerializer(MyCustomType.class, CustomKryoSerializer.class);

-env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
+env.getCheckpointConfig().setCheckpointingConsistencyMode(CheckpointingMode.EXACTLY_ONCE);

// then switch to Java Table API

@@ -553,9 +553,9 @@ tableEnv.getConfig().setLocalTimeZone(ZoneId.of("Europe/Berlin"));
{{< tab "Scala" >}}
```scala
import java.time.ZoneId
+import org.apache.flink.core.execution.CheckpointingMode
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
-import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.table.api.bridge.scala._

// create Scala DataStream API
@@ -568,7 +568,7 @@ env.setMaxParallelism(256)

env.getConfig.addDefaultKryoSerializer(classOf[MyCustomType], classOf[CustomKryoSerializer])

-env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
+env.getCheckpointConfig.setCheckpointingConsistencyMode(CheckpointingMode.EXACTLY_ONCE)

// then switch to Scala Table API

8 changes: 4 additions & 4 deletions docs/content/docs/dev/table/data_stream_api.md
@@ -521,7 +521,7 @@ We recommend setting all configuration options in DataStream API early before sw
{{< tab "Java" >}}
```java
import java.time.ZoneId;
-import org.apache.flink.streaming.api.CheckpointingMode;
+import org.apache.flink.core.execution.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

@@ -535,7 +535,7 @@ env.setMaxParallelism(256);

env.getConfig().addDefaultKryoSerializer(MyCustomType.class, CustomKryoSerializer.class);

-env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
+env.getCheckpointConfig().setCheckpointingConsistencyMode(CheckpointingMode.EXACTLY_ONCE);

// then switch to Java Table API

@@ -551,9 +551,9 @@ tableEnv.getConfig().setLocalTimeZone(ZoneId.of("Europe/Berlin"));
{{< tab "Scala" >}}
```scala
import java.time.ZoneId
+import org.apache.flink.core.execution.CheckpointingMode
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
-import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.table.api.bridge.scala._

// create Scala DataStream API
@@ -566,7 +566,7 @@ env.setMaxParallelism(256)

env.getConfig.addDefaultKryoSerializer(classOf[MyCustomType], classOf[CustomKryoSerializer])

-env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
+env.getCheckpointConfig.setCheckpointingConsistencyMode(CheckpointingMode.EXACTLY_ONCE)

// then switch to Scala Table API

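Both files make the same two substitutions, so the change reads as one small migration. The sketch below is a minimal, self-contained illustration of that migration, not part of the commit itself: it assumes a Flink version (1.20 or later) where org.apache.flink.core.execution.CheckpointingMode exists and where CheckpointConfig exposes setCheckpointingConsistencyMode as the counterpart of the deprecated setCheckpointingMode; the class name CheckpointingModeMigration is an illustrative placeholder.

```java
// Minimal sketch of the migration this commit documents (assumptions noted above).

import org.apache.flink.core.execution.CheckpointingMode; // new enum from flink-core
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class CheckpointingModeMigration {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Enable periodic checkpoints (every 10 seconds).
        env.enableCheckpointing(10_000);

        // Before: deprecated enum and setter from flink-streaming
        // env.getCheckpointConfig().setCheckpointingMode(
        //         org.apache.flink.streaming.api.CheckpointingMode.EXACTLY_ONCE);

        // After: the flink-core enum with the renamed setter (name assumed, see note above).
        env.getCheckpointConfig().setCheckpointingConsistencyMode(CheckpointingMode.EXACTLY_ONCE);
    }
}
```

The enum constants (EXACTLY_ONCE, AT_LEAST_ONCE) keep their names, so the diff only touches the import and the setter call; no behavioral change is intended.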
