diff --git a/Cargo.lock b/Cargo.lock
index 519b76c68813b..663875ba23db9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -45,7 +45,7 @@ dependencies = [
"getrandom",
"once_cell",
"version_check",
- "zerocopy 0.7.31",
+ "zerocopy",
]
[[package]]
@@ -170,40 +170,40 @@ dependencies = [
[[package]]
name = "apache-avro"
-version = "0.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c0fdddc3fdac97394ffcc5c89c634faa9c1c166ced54189af34e407c97b6ee7"
+version = "0.16.0"
+source = "git+https://github.com/risingwavelabs/avro?rev=d0846a16ce813a225af04ade35b3b8117b137a29#d0846a16ce813a225af04ade35b3b8117b137a29"
dependencies = [
- "apache-avro-derive",
- "byteorder",
+ "bzip2",
+ "crc32fast",
"digest",
"lazy_static",
- "libflate 1.4.0",
+ "libflate",
"log",
"num-bigint",
"quad-rand",
"rand",
- "regex",
+ "regex-lite",
"serde",
"serde_json",
+ "snap",
"strum",
"strum_macros",
"thiserror",
- "typed-builder 0.14.0",
+ "typed-builder 0.16.2",
"uuid",
- "zerocopy 0.6.6",
+ "xz2",
+ "zstd 0.12.4",
]
[[package]]
name = "apache-avro"
-version = "0.16.0"
-source = "git+https://github.com/risingwavelabs/avro?rev=d0846a16ce813a225af04ade35b3b8117b137a29#d0846a16ce813a225af04ade35b3b8117b137a29"
+version = "0.17.0"
+source = "git+https://github.com/icelake-io/avro.git?branch=icelake-dev#4b828e9283e7248fd3ca42f5b590c2160b201785"
dependencies = [
- "bzip2",
- "crc32fast",
+ "apache-avro-derive",
+ "bigdecimal 0.4.2",
"digest",
- "lazy_static",
- "libflate 2.0.0",
+ "libflate",
"log",
"num-bigint",
"quad-rand",
@@ -211,27 +211,23 @@ dependencies = [
"regex-lite",
"serde",
"serde_json",
- "snap",
"strum",
"strum_macros",
"thiserror",
- "typed-builder 0.16.2",
+ "typed-builder 0.18.0",
"uuid",
- "xz2",
- "zstd 0.12.4",
]
[[package]]
name = "apache-avro-derive"
-version = "0.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6686cd705badba064ec2322b9c3d72f5c70db8394e486bbb56e84fbdb3fa158c"
+version = "0.17.0"
+source = "git+https://github.com/icelake-io/avro.git?branch=icelake-dev#4b828e9283e7248fd3ca42f5b590c2160b201785"
dependencies = [
"darling 0.20.3",
"proc-macro2",
"quote",
"serde_json",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -806,7 +802,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -855,7 +851,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -872,7 +868,7 @@ checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -905,7 +901,7 @@ dependencies = [
"derive_utils",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -1689,7 +1685,7 @@ dependencies = [
"proc-macro-crate 2.0.0",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
"syn_derive",
]
@@ -1978,7 +1974,7 @@ checksum = "bc7cb2538d4ecc42b6c3b57a83094d8c69894e74468d18cd045a09fdea807358"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2095,7 +2091,7 @@ dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2530,7 +2526,7 @@ dependencies = [
"proc-macro-error 1.0.4",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2744,7 +2740,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30d2b3721e861707777e3195b0158f950ae6dc4a27e4d02ff9f67e3eb3de199e"
dependencies = [
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2772,7 +2768,7 @@ checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2799,7 +2795,7 @@ dependencies = [
"proc-macro2",
"quote",
"scratch",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2816,7 +2812,7 @@ checksum = "2fa16a70dd58129e4dfffdff535fb1bce66673f7bbeec4a5a1765a504e1ccd84"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2888,7 +2884,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -2921,7 +2917,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
dependencies = [
"darling_core 0.20.3",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3216,6 +3212,7 @@ dependencies = [
name = "delta_btree_map"
version = "1.7.0-alpha"
dependencies = [
+ "educe 0.5.7",
"enum-as-inner",
]
@@ -3361,7 +3358,7 @@ checksum = "9abcad25e9720609ccb3dcdb795d845e37d8ce34183330a9f48b03a1a71c8e21"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3590,7 +3587,7 @@ dependencies = [
"enum-ordinalize 4.3.0",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3667,7 +3664,7 @@ dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3692,22 +3689,22 @@ dependencies = [
[[package]]
name = "enum-iterator"
-version = "1.4.1"
+version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7add3873b5dd076766ee79c8e406ad1a472c385476b9e38849f8eec24f1be689"
+checksum = "9fd242f399be1da0a5354aa462d57b4ab2b4ee0683cc552f7c007d2d12d36e94"
dependencies = [
"enum-iterator-derive",
]
[[package]]
name = "enum-iterator-derive"
-version = "1.2.1"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eecf8589574ce9b895052fa12d69af7a233f99e6107f5cb8dd1044f2a17bfdcb"
+checksum = "03cdc46ec28bd728e67540c528013c6a10eb69a02eb31078a1bda695438cbfb8"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3720,7 +3717,7 @@ dependencies = [
"num-traits",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3740,7 +3737,7 @@ checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3760,7 +3757,7 @@ checksum = "f95e2801cd355d4a1a3e3953ce6ee5ae9603a5c833455343a8bfe3f44d418246"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -3801,6 +3798,15 @@ dependencies = [
"version_check",
]
+[[package]]
+name = "escape8259"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba4f4911e3666fcd7826997b4745c8224295a6f3072f1418c3067b97a67557ee"
+dependencies = [
+ "rustversion",
+]
+
[[package]]
name = "etcd-client"
version = "0.12.1"
@@ -4238,7 +4244,7 @@ checksum = "b0fa992f1656e1707946bbba340ad244f0814009ef8c0118eb7b658395f19a2e"
dependencies = [
"frunk_proc_macro_helpers",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -4250,7 +4256,7 @@ dependencies = [
"frunk_core",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -4262,7 +4268,7 @@ dependencies = [
"frunk_core",
"frunk_proc_macro_helpers",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -4350,7 +4356,7 @@ checksum = "5df2c13d48c8cb8a3ec093ede6f0f4482f327d7bb781120c5fb483ef0f17e758"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -4420,7 +4426,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -5055,10 +5061,10 @@ dependencies = [
[[package]]
name = "icelake"
version = "0.0.10"
-source = "git+https://github.com/icelake-io/icelake?rev=3f7b53ba5b563524212c25810345d1314678e7fc#3f7b53ba5b563524212c25810345d1314678e7fc"
+source = "git+https://github.com/icelake-io/icelake?rev=32c0bbf242f5c47b1e743f10577012fe7436c770#32c0bbf242f5c47b1e743f10577012fe7436c770"
dependencies = [
"anyhow",
- "apache-avro 0.15.0",
+ "apache-avro 0.17.0",
"arrow-arith 49.0.0",
"arrow-array 49.0.0",
"arrow-buffer 49.0.0",
@@ -5077,6 +5083,7 @@ dependencies = [
"faster-hex",
"futures",
"itertools 0.11.0",
+ "lazy_static",
"log",
"murmur3",
"once_cell",
@@ -5193,7 +5200,7 @@ checksum = "ce243b1bfa62ffc028f1cc3b6034ec63d649f3031bc8a4fbbb004e1ac17d1f68"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -5550,17 +5557,6 @@ version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
-[[package]]
-name = "libflate"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ff4ae71b685bbad2f2f391fe74f6b7659a34871c08b210fdc039e43bee07d18"
-dependencies = [
- "adler32",
- "crc32fast",
- "libflate_lz77 1.2.0",
-]
-
[[package]]
name = "libflate"
version = "2.0.0"
@@ -5571,16 +5567,7 @@ dependencies = [
"core2",
"crc32fast",
"dary_heap",
- "libflate_lz77 2.0.0",
-]
-
-[[package]]
-name = "libflate_lz77"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a52d3a8bfc85f250440e4424db7d857e241a3aebbbe301f3eb606ab15c39acbf"
-dependencies = [
- "rle-decode-fast",
+ "libflate_lz77",
]
[[package]]
@@ -5643,6 +5630,18 @@ dependencies = [
"threadpool",
]
+[[package]]
+name = "libtest-mimic"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f0f4c6f44ecfd52e8b443f2ad18f2b996540135771561283c2352ce56a1c70b"
+dependencies = [
+ "clap",
+ "escape8259",
+ "termcolor",
+ "threadpool",
+]
+
[[package]]
name = "libz-sys"
version = "1.1.12"
@@ -5956,7 +5955,7 @@ dependencies = [
"proc-macro2",
"prost-build 0.12.1",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
"tonic-build",
]
@@ -6167,9 +6166,9 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
[[package]]
name = "multimap"
-version = "0.9.0"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70db9248a93dc36a36d9a47898caa007a32755c7ad140ec64eeeb50d5a730631"
+checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03"
dependencies = [
"serde",
]
@@ -6193,7 +6192,7 @@ dependencies = [
"proc-macro-error 1.0.4",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
"termcolor",
"thiserror",
]
@@ -6722,7 +6721,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -6921,7 +6920,7 @@ dependencies = [
"proc-macro-error 1.0.4",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -7130,7 +7129,7 @@ dependencies = [
"regex",
"regex-syntax 0.7.5",
"structmeta",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -7330,7 +7329,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -7461,7 +7460,7 @@ dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -7609,7 +7608,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d"
dependencies = [
"proc-macro2",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -7698,9 +7697,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
[[package]]
name = "proc-macro2"
-version = "1.0.67"
+version = "1.0.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328"
+checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"
dependencies = [
"unicode-ident",
]
@@ -7831,7 +7830,7 @@ dependencies = [
"prost 0.12.1",
"prost-types 0.12.1",
"regex",
- "syn 2.0.37",
+ "syn 2.0.48",
"tempfile",
"which",
]
@@ -7859,7 +7858,7 @@ dependencies = [
"itertools 0.11.0",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -7868,7 +7867,7 @@ version = "0.1.0"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -8063,9 +8062,9 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.33"
+version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
+checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
@@ -8245,7 +8244,7 @@ checksum = "7f7473c2cfcf90008193dd0e3e16599455cb601a9fce322b5bb55de799664925"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -8519,7 +8518,7 @@ dependencies = [
"prettyplease 0.2.15",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -8576,6 +8575,7 @@ dependencies = [
"risingwave_rpc_client",
"risingwave_source",
"risingwave_storage",
+ "rw_futures_util",
"scopeguard",
"serde_json",
"task_stats_alloc",
@@ -8604,6 +8604,7 @@ dependencies = [
"bytesize",
"clap",
"futures",
+ "futures-async-stream",
"hdrhistogram",
"itertools 0.12.0",
"libc",
@@ -8614,9 +8615,13 @@ dependencies = [
"prometheus",
"rand",
"risingwave_common",
+ "risingwave_connector",
+ "risingwave_pb",
"risingwave_rt",
"risingwave_storage",
+ "risingwave_stream",
"serde",
+ "serde_yaml",
"tokio-stream",
"toml 0.8.2",
"tracing",
@@ -8754,6 +8759,7 @@ dependencies = [
"risingwave_pb",
"rust_decimal",
"rusty-fork",
+ "rw_futures_util",
"ryu",
"serde",
"serde_bytes",
@@ -8998,6 +9004,7 @@ dependencies = [
"risingwave_pb",
"risingwave_rpc_client",
"rust_decimal",
+ "rw_futures_util",
"serde",
"serde_derive",
"serde_json",
@@ -9137,6 +9144,7 @@ version = "1.7.0-alpha"
dependencies = [
"aho-corasick",
"anyhow",
+ "arrow-schema 49.0.0",
"async-trait",
"auto_enums",
"chrono",
@@ -9146,6 +9154,7 @@ dependencies = [
"futures-async-stream",
"futures-util",
"hex",
+ "icelake",
"itertools 0.12.0",
"jsonbb",
"madsim-tokio",
@@ -9175,7 +9184,7 @@ dependencies = [
"itertools 0.12.0",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -9238,6 +9247,7 @@ dependencies = [
"risingwave_storage",
"risingwave_udf",
"risingwave_variables",
+ "rw_futures_util",
"serde",
"serde_json",
"sha2",
@@ -9356,6 +9366,7 @@ dependencies = [
"risingwave_object_store",
"risingwave_pb",
"risingwave_storage",
+ "rw_futures_util",
"serde",
"serde_json",
"thiserror",
@@ -9433,6 +9444,7 @@ dependencies = [
"risingwave_rpc_client",
"risingwave_sqlparser",
"risingwave_test_runner",
+ "rw_futures_util",
"scopeguard",
"sea-orm",
"serde",
@@ -9464,6 +9476,7 @@ dependencies = [
name = "risingwave_meta_model_v2"
version = "1.7.0-alpha"
dependencies = [
+ "risingwave_common",
"risingwave_hummock_sdk",
"risingwave_pb",
"sea-orm",
@@ -9588,7 +9601,7 @@ dependencies = [
"anyhow",
"expect-test",
"itertools 0.12.0",
- "libtest-mimic",
+ "libtest-mimic 0.7.0",
"madsim-tokio",
"paste",
"risingwave_expr_impl",
@@ -9638,6 +9651,7 @@ dependencies = [
"risingwave_error",
"risingwave_hummock_sdk",
"risingwave_pb",
+ "rw_futures_util",
"static_assertions",
"thiserror",
"thiserror-ext",
@@ -9745,6 +9759,7 @@ dependencies = [
"risingwave_common",
"risingwave_connector",
"risingwave_pb",
+ "rw_futures_util",
"tempfile",
"tracing",
"workspace-hack",
@@ -9786,7 +9801,7 @@ dependencies = [
"clap",
"expect-test",
"itertools 0.12.0",
- "libtest-mimic",
+ "libtest-mimic 0.7.0",
"madsim-tokio",
"rand",
"rand_chacha",
@@ -9920,7 +9935,7 @@ dependencies = [
"madsim-tonic",
"maplit",
"memcomparable",
- "multimap 0.9.0",
+ "multimap 0.10.0",
"parking_lot 0.12.1",
"pin-project",
"prometheus",
@@ -9936,6 +9951,7 @@ dependencies = [
"risingwave_rpc_client",
"risingwave_source",
"risingwave_storage",
+ "rw_futures_util",
"serde",
"serde_json",
"serde_yaml",
@@ -10085,7 +10101,7 @@ dependencies = [
"proc-macro2",
"quote",
"rust-embed-utils",
- "syn 2.0.37",
+ "syn 2.0.48",
"walkdir",
]
@@ -10252,6 +10268,15 @@ dependencies = [
"wait-timeout",
]
+[[package]]
+name = "rw_futures_util"
+version = "0.0.0"
+dependencies = [
+ "futures",
+ "pin-project-lite",
+ "tokio",
+]
+
[[package]]
name = "ryu"
version = "1.0.15"
@@ -10344,7 +10369,7 @@ dependencies = [
"proc-macro-error 1.0.4",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -10402,7 +10427,7 @@ dependencies = [
"proc-macro2",
"quote",
"sea-bae",
- "syn 2.0.37",
+ "syn 2.0.48",
"unicode-ident",
]
@@ -10585,9 +10610,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4"
[[package]]
name = "serde"
-version = "1.0.188"
+version = "1.0.195"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e"
+checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02"
dependencies = [
"serde_derive",
]
@@ -10634,13 +10659,13 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.188"
+version = "1.0.195"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
+checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -10656,9 +10681,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.107"
+version = "1.0.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65"
+checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4"
dependencies = [
"itoa",
"ryu",
@@ -10701,7 +10726,7 @@ checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -10751,7 +10776,7 @@ dependencies = [
"darling 0.20.3",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -10789,7 +10814,7 @@ checksum = "b93fb4adc70021ac1b47f7d45e8cc4169baaa7ea58483bc5b721d19a26202212"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -11031,9 +11056,9 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.11.1"
+version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a"
+checksum = "2593d31f82ead8df961d8bd23a64c2ccf2eb5dd34b0a34bfb4dd54011c72009e"
dependencies = [
"serde",
]
@@ -11174,7 +11199,7 @@ dependencies = [
"glob",
"humantime",
"itertools 0.11.0",
- "libtest-mimic",
+ "libtest-mimic 0.6.1",
"md-5",
"owo-colors",
"regex",
@@ -11465,7 +11490,7 @@ dependencies = [
"proc-macro2",
"quote",
"structmeta-derive",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -11476,7 +11501,7 @@ checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -11490,15 +11515,15 @@ dependencies = [
[[package]]
name = "strum_macros"
-version = "0.25.2"
+version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059"
+checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0"
dependencies = [
"heck 0.4.1",
"proc-macro2",
"quote",
"rustversion",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -11563,9 +11588,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.37"
+version = "2.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8"
+checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
dependencies = [
"proc-macro2",
"quote",
@@ -11592,7 +11617,7 @@ dependencies = [
"proc-macro-error 1.0.4",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -11697,9 +11722,9 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"
[[package]]
name = "thiserror"
-version = "1.0.48"
+version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7"
+checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
dependencies = [
"thiserror-impl",
]
@@ -11723,18 +11748,18 @@ dependencies = [
"either",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.48"
+version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35"
+checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -11909,7 +11934,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -12153,7 +12178,7 @@ dependencies = [
"proc-macro2",
"prost-build 0.12.1",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -12234,7 +12259,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -12373,22 +12398,20 @@ dependencies = [
[[package]]
name = "typed-builder"
-version = "0.14.0"
+version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64cba322cb9b7bc6ca048de49e83918223f35e7a86311267013afff257004870"
+checksum = "34085c17941e36627a879208083e25d357243812c30e7d7387c3b954f30ade16"
dependencies = [
- "proc-macro2",
- "quote",
- "syn 1.0.109",
+ "typed-builder-macro 0.16.2",
]
[[package]]
name = "typed-builder"
-version = "0.16.2"
+version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34085c17941e36627a879208083e25d357243812c30e7d7387c3b954f30ade16"
+checksum = "e47c0496149861b7c95198088cbf36645016b1a0734cf350c50e2a38e070f38a"
dependencies = [
- "typed-builder-macro",
+ "typed-builder-macro 0.18.0",
]
[[package]]
@@ -12399,7 +12422,18 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
+]
+
+[[package]]
+name = "typed-builder-macro"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "982ee4197351b5c9782847ef5ec1fdcaf50503fb19d68f9771adae314e72b492"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.48",
]
[[package]]
@@ -12556,11 +12590,12 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "vergen"
-version = "8.2.5"
+version = "8.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85e7dc29b3c54a2ea67ef4f953d5ec0c4085035c0ae2d325be1c0d2144bd9f16"
+checksum = "ec0d895592fa7710eba03fe072e614e3dc6a61ab76ae7ae10d2eb4a7ed5b00ca"
dependencies = [
"anyhow",
+ "cfg-if",
"rustversion",
"time",
]
@@ -12687,7 +12722,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
"wasm-bindgen-shared",
]
@@ -12721,7 +12756,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -12851,7 +12886,7 @@ dependencies = [
"anyhow",
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
"wasmtime-component-util",
"wasmtime-wit-bindgen",
"wit-parser",
@@ -13043,7 +13078,7 @@ checksum = "f55c5565959287c21dd0f4277ae3518dd2ae62679f655ee2dbc4396e19d210db"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -13210,7 +13245,7 @@ dependencies = [
"proc-macro2",
"quote",
"shellexpand",
- "syn 2.0.37",
+ "syn 2.0.48",
"witx",
]
@@ -13222,7 +13257,7 @@ checksum = "31ae1ec11a17ea481539ee9a5719a278c9790d974060fbf71db4b2c05378780b"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
"wiggle-generate",
]
@@ -13551,7 +13586,7 @@ version = "1.7.0-alpha"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
@@ -13594,6 +13629,7 @@ dependencies = [
"aws-smithy-types",
"axum",
"base64 0.21.4",
+ "bigdecimal 0.4.2",
"bit-vec",
"bitflags 2.4.0",
"byteorder",
@@ -13616,6 +13652,7 @@ dependencies = [
"futures",
"futures-channel",
"futures-core",
+ "futures-executor",
"futures-io",
"futures-sink",
"futures-task",
@@ -13694,7 +13731,7 @@ dependencies = [
"strum",
"subtle",
"syn 1.0.109",
- "syn 2.0.37",
+ "syn 2.0.48",
"time",
"time-macros",
"tinyvec",
@@ -13715,6 +13752,8 @@ dependencies = [
"uuid",
"whoami",
"zeroize",
+ "zstd 0.13.0",
+ "zstd-safe 7.0.0",
"zstd-sys",
]
@@ -13806,34 +13845,13 @@ version = "3.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a599daf1b507819c1121f0bf87fa37eb19daac6aff3aefefd4e6e2e0f2020fc"
-[[package]]
-name = "zerocopy"
-version = "0.6.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "854e949ac82d619ee9a14c66a1b674ac730422372ccb759ce0c39cabcf2bf8e6"
-dependencies = [
- "byteorder",
- "zerocopy-derive 0.6.6",
-]
-
[[package]]
name = "zerocopy"
version = "0.7.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d"
dependencies = [
- "zerocopy-derive 0.7.31",
-]
-
-[[package]]
-name = "zerocopy-derive"
-version = "0.6.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "125139de3f6b9d625c39e2efdd73d41bdac468ccd556556440e322be0e1bbd91"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.37",
+ "zerocopy-derive",
]
[[package]]
@@ -13844,7 +13862,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.37",
+ "syn 2.0.48",
]
[[package]]
diff --git a/Cargo.toml b/Cargo.toml
index 945091f384371..7bd67bc583745 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -51,6 +51,7 @@ members = [
"src/tests/sqlsmith",
"src/tests/state_cleaning_test",
"src/utils/delta_btree_map",
+ "src/utils/futures_util",
"src/utils/local_stats_alloc",
"src/utils/pgwire",
"src/utils/runtime",
@@ -120,7 +121,7 @@ tonic = { package = "madsim-tonic", version = "0.4.1" }
tonic-build = { package = "madsim-tonic-build", version = "0.4.2" }
otlp-embedded = { git = "https://github.com/risingwavelabs/otlp-embedded", rev = "58c1f003484449d7c6dd693b348bf19dd44889cb" }
prost = { version = "0.12" }
-icelake = { git = "https://github.com/icelake-io/icelake", rev = "3f7b53ba5b563524212c25810345d1314678e7fc", features = [
+icelake = { git = "https://github.com/icelake-io/icelake", rev = "32c0bbf242f5c47b1e743f10577012fe7436c770", features = [
"prometheus",
] }
arrow-array = "49"
@@ -187,6 +188,7 @@ risingwave_udf = { path = "./src/expr/udf" }
risingwave_variables = { path = "./src/utils/variables" }
risingwave_java_binding = { path = "./src/java_binding" }
risingwave_jni_core = { path = "src/jni_core" }
+rw_futures_util = { path = "src/utils/futures_util" }
tokio-util = "0.7"
[workspace.lints.rust]
diff --git a/ci/scripts/e2e-source-test.sh b/ci/scripts/e2e-source-test.sh
index 64144d051ad58..ec04a1d6863cf 100755
--- a/ci/scripts/e2e-source-test.sh
+++ b/ci/scripts/e2e-source-test.sh
@@ -5,6 +5,15 @@ set -euo pipefail
source ci/scripts/common.sh
+# Arguments:
+# $1: subject name
+# $2: schema file path
+function register_schema_registry() {
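+  # Wrap the schema file in the Schema Registry JSON payload ({"schemaType": "PROTOBUF", "schema": ...}) and POST it under the subject given by $1.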
+ curl -X POST http://message_queue:8081/subjects/$1/versions \
+  -H 'Content-Type: application/vnd.schemaregistry.v1+json' \
+  --data-binary @<(jq -n --arg schema "$(cat $2)" '{schemaType: "PROTOBUF", schema: $schema}')
+}
+
# prepare environment
export CONNECTOR_LIBS_PATH="./connector-node/libs"
@@ -115,12 +124,13 @@ export RISINGWAVE_CI=true
RUST_LOG="info,risingwave_stream=info,risingwave_batch=info,risingwave_storage=info" \
cargo make ci-start ci-1cn-1fe
python3 -m pip install requests protobuf confluent-kafka
-python3 e2e_test/schema_registry/pb.py "message_queue:29092" "http://message_queue:8081" "sr_pb_test" 20
+python3 e2e_test/schema_registry/pb.py "message_queue:29092" "http://message_queue:8081" "sr_pb_test" 20 user
echo "make sure google/protobuf/source_context.proto is NOT in schema registry"
curl --silent 'http://message_queue:8081/subjects'; echo
# curl --silent --head -X GET 'http://message_queue:8081/subjects/google%2Fprotobuf%2Fsource_context.proto/versions' | grep 404
curl --silent 'http://message_queue:8081/subjects' | grep -v 'google/protobuf/source_context.proto'
sqllogictest -p 4566 -d dev './e2e_test/schema_registry/pb.slt'
+sqllogictest -p 4566 -d dev './e2e_test/schema_registry/alter_sr.slt'
echo "--- Kill cluster"
cargo make ci-kill
diff --git a/ci/scripts/gen-integration-test-yaml.py b/ci/scripts/gen-integration-test-yaml.py
index f2cf770be8ece..8451290a93c93 100644
--- a/ci/scripts/gen-integration-test-yaml.py
+++ b/ci/scripts/gen-integration-test-yaml.py
@@ -37,8 +37,8 @@
'starrocks-sink': ['json'],
'deltalake-sink': ['json'],
'pinot-sink': ['json'],
+ 'presto-trino': ['json'],
'client-library': ['none'],
-
}
def gen_pipeline_steps():
diff --git a/ci/scripts/run-backfill-tests.sh b/ci/scripts/run-backfill-tests.sh
index 46bad790d21f7..f26a983d67050 100755
--- a/ci/scripts/run-backfill-tests.sh
+++ b/ci/scripts/run-backfill-tests.sh
@@ -200,12 +200,82 @@ test_sink_backfill_recovery() {
wait
}
+test_no_shuffle_backfill_runtime() {
+ echo "--- e2e, test_no_shuffle_backfill_runtime"
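+  # Measures backfill runtime while upstream inserts are still in flight, so backfill must merge snapshot reads with concurrent updates.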
+ cargo make ci-start $CLUSTER_PROFILE
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_table.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt'
+
+ # Provide updates ...
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt' 2>&1 1>out.log &
+
+ # ... and concurrently create mv.
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_no_shuffle_mv.slt'
+
+ wait
+
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/validate_rows.slt'
+
+ cargo make kill
+ cargo make wait-processes-exit
+}
+
+test_arrangement_backfill_runtime() {
+ echo "--- e2e, test_arrangement_backfill_runtime"
+ cargo make ci-start $CLUSTER_PROFILE
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_table.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt'
+
+ # Provide updates ...
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt' 2>&1 1>out.log &
+
+ # ... and concurrently create mv.
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_arrangement_backfill_mv.slt'
+
+ wait
+
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/validate_rows.slt'
+
+ cargo make kill
+ cargo make wait-processes-exit
+}
+
+test_no_shuffle_backfill_snapshot_only_runtime() {
+ echo "--- e2e, test_no_shuffle_backfill_snapshot_only_runtime"
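+  # Snapshot-only variant: all inserts finish before the MV is created, so backfill reads a static snapshot with no concurrent updates.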
+ cargo make ci-start $CLUSTER_PROFILE
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_table.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_no_shuffle_mv.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/validate_rows.slt'
+
+ cargo make kill
+ cargo make wait-processes-exit
+}
+
+test_arrangement_backfill_snapshot_only_runtime() {
+ echo "--- e2e, test_arrangement_backfill_snapshot_only_runtime"
+ cargo make ci-start $CLUSTER_PROFILE
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_table.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/insert.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/create_arrangement_backfill_mv.slt'
+ sqllogictest -p 4566 -d dev 'e2e_test/backfill/runtime/validate_rows.slt'
+
+ cargo make kill
+ cargo make wait-processes-exit
+}
+
main() {
set -euo pipefail
test_snapshot_and_upstream_read
test_backfill_tombstone
test_replication_with_column_pruning
test_sink_backfill_recovery
+ test_no_shuffle_backfill_runtime
+ test_arrangement_backfill_runtime
+ test_no_shuffle_backfill_snapshot_only_runtime
+ test_arrangement_backfill_snapshot_only_runtime
}
main
diff --git a/ci/scripts/run-unit-test.sh b/ci/scripts/run-unit-test.sh
index 6f2093060f370..8fe4b150bf866 100755
--- a/ci/scripts/run-unit-test.sh
+++ b/ci/scripts/run-unit-test.sh
@@ -10,10 +10,6 @@ cd ${REPO_ROOT}/src/expr/udf/python
python3 -m pytest
cd ${REPO_ROOT}
-echo "+++ Run unit tests with coverage"
+echo "+++ Run unit tests"
# use tee to disable progress bar
-NEXTEST_PROFILE=ci cargo llvm-cov nextest --lcov --output-path lcov.info --features failpoints,sync_point --workspace --exclude risingwave_simulation
-
-echo "--- Codecov upload coverage reports"
-curl -Os https://uploader.codecov.io/latest/linux/codecov && chmod +x codecov
-./codecov -t "$CODECOV_TOKEN" -s . -F rust
+NEXTEST_PROFILE=ci cargo nextest run --features failpoints,sync_point --workspace --exclude risingwave_simulation
diff --git a/dashboard/components/CatalogModal.tsx b/dashboard/components/CatalogModal.tsx
new file mode 100644
index 0000000000000..cf6a2f8cc9e0d
--- /dev/null
+++ b/dashboard/components/CatalogModal.tsx
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2024 RisingWave Labs
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+import {
+ Button,
+ Modal,
+ ModalBody,
+ ModalCloseButton,
+ ModalContent,
+ ModalFooter,
+ ModalHeader,
+ ModalOverlay,
+} from "@chakra-ui/react"
+
+import Link from "next/link"
+import { parseAsInteger, useQueryState } from "nuqs"
+import {
+ Relation,
+ relationIsStreamingJob,
+ relationTypeTitleCase,
+} from "../pages/api/streaming"
+import { ReactJson } from "./Relations"
+
+export function useCatalogModal(relationList: Relation[] | undefined) {
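+  // Keep the selected catalog id in the "modalId" query param so the modal state survives reloads and can be shared via URL.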
+ const [modalId, setModalId] = useQueryState("modalId", parseAsInteger)
+ const modalData = relationList?.find((r) => r.id === modalId)
+
+ return [modalData, setModalId] as const
+}
+
+export function CatalogModal({
+ modalData,
+ onClose,
+}: {
+ modalData: Relation | undefined
+ onClose: () => void
+}) {
+ return (
+
+
+
+
+ Catalog of {modalData && relationTypeTitleCase(modalData)}{" "}
+ {modalData?.id} - {modalData?.name}
+
+
+
+ {modalData && (
+
+ )}
+
+
+
+ {modalData && relationIsStreamingJob(modalData) && (
+
+ )}
+
+
+
+
+ )
+}
diff --git a/dashboard/components/FragmentDependencyGraph.tsx b/dashboard/components/FragmentDependencyGraph.tsx
index 553c40ec53f92..7b3417507efcf 100644
--- a/dashboard/components/FragmentDependencyGraph.tsx
+++ b/dashboard/components/FragmentDependencyGraph.tsx
@@ -3,18 +3,18 @@ import * as d3 from "d3"
import { Dag, DagLink, DagNode, zherebko } from "d3-dag"
import { cloneDeep } from "lodash"
import { useCallback, useEffect, useRef, useState } from "react"
-import { Position } from "../lib/layout"
+import { Enter, FragmentBox, Position } from "../lib/layout"
const nodeRadius = 5
const edgeRadius = 12
export default function FragmentDependencyGraph({
- mvDependency,
+ fragmentDependency,
svgWidth,
selectedId,
onSelectedIdChange,
}: {
- mvDependency: Dag
+ fragmentDependency: Dag
svgWidth: number
selectedId: string | undefined
onSelectedIdChange: (id: string) => void | undefined
@@ -24,21 +24,21 @@ export default function FragmentDependencyGraph({
const MARGIN_X = 10
const MARGIN_Y = 2
- const mvDependencyDagCallback = useCallback(() => {
+ const fragmentDependencyDagCallback = useCallback(() => {
const layout = zherebko().nodeSize([
nodeRadius * 2,
(nodeRadius + edgeRadius) * 2,
nodeRadius,
])
- const dag = cloneDeep(mvDependency)
+ const dag = cloneDeep(fragmentDependency)
const { width, height } = layout(dag)
return { width, height, dag }
- }, [mvDependency])
+ }, [fragmentDependency])
- const mvDependencyDag = mvDependencyDagCallback()
+ const fragmentDependencyDag = fragmentDependencyDagCallback()
useEffect(() => {
- const { width, height, dag } = mvDependencyDag
+ const { width, height, dag } = fragmentDependencyDag
// This code only handles rendering
@@ -53,25 +53,27 @@ export default function FragmentDependencyGraph({
.x(({ x }) => x + MARGIN_X)
.y(({ y }) => y)
- const isSelected = (d: any) => d.data.id === selectedId
+ const isSelected = (d: DagNode) => d.data.id === selectedId
const edgeSelection = svgSelection
.select(".edges")
- .selectAll(".edge")
+ .selectAll(".edge")
.data(dag.links())
- const applyEdge = (sel: any) =>
+ type EdgeSelection = typeof edgeSelection
+
+ const applyEdge = (sel: EdgeSelection) =>
sel
.attr("d", ({ points }: DagLink) => line(points))
.attr("fill", "none")
- .attr("stroke-width", (d: any) =>
+ .attr("stroke-width", (d) =>
isSelected(d.source) || isSelected(d.target) ? 2 : 1
)
- .attr("stroke", (d: any) =>
+ .attr("stroke", (d) =>
isSelected(d.source) || isSelected(d.target)
? theme.colors.blue["500"]
: theme.colors.gray["300"]
)
- const createEdge = (sel: any) =>
+ const createEdge = (sel: Enter) =>
sel.append("path").attr("class", "edge").call(applyEdge)
edgeSelection.exit().remove()
edgeSelection.enter().call(createEdge)
@@ -80,19 +82,18 @@ export default function FragmentDependencyGraph({
// Select nodes
const nodeSelection = svgSelection
.select(".nodes")
- .selectAll(".node")
+ .selectAll(".node")
.data(dag.descendants())
- const applyNode = (sel: any) =>
+ type NodeSelection = typeof nodeSelection
+
+ const applyNode = (sel: NodeSelection) =>
sel
- .attr(
- "transform",
- ({ x, y }: Position) => `translate(${x + MARGIN_X}, ${y})`
- )
- .attr("fill", (d: any) =>
+ .attr("transform", (d) => `translate(${d.x! + MARGIN_X}, ${d.y})`)
+ .attr("fill", (d) =>
isSelected(d) ? theme.colors.blue["500"] : theme.colors.gray["500"]
)
- const createNode = (sel: any) =>
+ const createNode = (sel: Enter) =>
sel
.append("circle")
.attr("class", "node")
@@ -105,22 +106,23 @@ export default function FragmentDependencyGraph({
// Add text to nodes
const labelSelection = svgSelection
.select(".labels")
- .selectAll(".label")
+ .selectAll(".label")
.data(dag.descendants())
+ type LabelSelection = typeof labelSelection
- const applyLabel = (sel: any) =>
+ const applyLabel = (sel: LabelSelection) =>
sel
- .text((d: any) => d.data.name)
+ .text((d) => d.data.name)
.attr("x", svgWidth - MARGIN_X)
.attr("font-family", "inherit")
.attr("text-anchor", "end")
.attr("alignment-baseline", "middle")
- .attr("y", (d: any) => d.y)
- .attr("fill", (d: any) =>
+ .attr("y", (d) => d.y!)
+ .attr("fill", (d) =>
isSelected(d) ? theme.colors.black["500"] : theme.colors.gray["500"]
)
.attr("font-weight", "600")
- const createLabel = (sel: any) =>
+ const createLabel = (sel: Enter) =>
sel.append("text").attr("class", "label").call(applyLabel)
labelSelection.exit().remove()
labelSelection.enter().call(createLabel)
@@ -129,11 +131,12 @@ export default function FragmentDependencyGraph({
// Add overlays
const overlaySelection = svgSelection
.select(".overlays")
- .selectAll(".overlay")
+ .selectAll(".overlay")
.data(dag.descendants())
+ type OverlaySelection = typeof overlaySelection
const STROKE_WIDTH = 3
- const applyOverlay = (sel: any) =>
+ const applyOverlay = (sel: OverlaySelection) =>
sel
.attr("x", STROKE_WIDTH)
.attr(
@@ -143,20 +146,13 @@ export default function FragmentDependencyGraph({
.attr("width", svgWidth - STROKE_WIDTH * 2)
.attr(
"y",
- (d: any) => d.y - nodeRadius - edgeRadius + MARGIN_Y + STROKE_WIDTH
+ (d) => d.y! - nodeRadius - edgeRadius + MARGIN_Y + STROKE_WIDTH
)
.attr("rx", 5)
.attr("fill", theme.colors.gray["500"])
.attr("opacity", 0)
.style("cursor", "pointer")
- const createOverlay = (
- sel: d3.Selection<
- d3.EnterElement,
- DagNode,
- d3.BaseType,
- unknown
- >
- ) =>
+ const createOverlay = (sel: Enter) =>
sel
.append("rect")
.attr("class", "overlay")
@@ -187,7 +183,7 @@ export default function FragmentDependencyGraph({
})
.on("click", function (d, i) {
if (onSelectedIdChange) {
- onSelectedIdChange((i.data as any).id)
+ onSelectedIdChange(i.data.id)
}
})
@@ -196,7 +192,13 @@ export default function FragmentDependencyGraph({
overlaySelection.call(applyOverlay)
setSvgHeight(`${height}px`)
- }, [mvDependency, selectedId, svgWidth, onSelectedIdChange, mvDependencyDag])
+ }, [
+ fragmentDependency,
+ selectedId,
+ svgWidth,
+ onSelectedIdChange,
+ fragmentDependencyDag,
+ ])
return (
+ setModalId(null)} />
>
)
}
diff --git a/dashboard/components/Relations.tsx b/dashboard/components/Relations.tsx
index c16a70e8c6fa2..0422eaa2531fa 100644
--- a/dashboard/components/Relations.tsx
+++ b/dashboard/components/Relations.tsx
@@ -18,13 +18,6 @@
import {
Box,
Button,
- Modal,
- ModalBody,
- ModalCloseButton,
- ModalContent,
- ModalFooter,
- ModalHeader,
- ModalOverlay,
Table,
TableContainer,
Tbody,
@@ -37,7 +30,6 @@ import loadable from "@loadable/component"
import Head from "next/head"
import Link from "next/link"
-import { parseAsInteger, useQueryState } from "nuqs"
import { Fragment } from "react"
import Title from "../components/Title"
import extractColumnInfo from "../lib/extractInfo"
@@ -48,8 +40,9 @@ import {
Source as RwSource,
Table as RwTable,
} from "../proto/gen/catalog"
+import { CatalogModal, useCatalogModal } from "./CatalogModal"
-const ReactJson = loadable(() => import("react-json-view"))
+export const ReactJson = loadable(() => import("react-json-view"))
export type Column = {
name: string
@@ -122,40 +115,10 @@ export function Relations(
extraColumns: Column[]
) {
const { response: relationList } = useFetch(getRelations)
+ const [modalData, setModalId] = useCatalogModal(relationList)
- const [modalId, setModalId] = useQueryState("id", parseAsInteger)
- const modalData = relationList?.find((r) => r.id === modalId)
-
- const catalogModal = (
- setModalId(null)}
- size="3xl"
- >
-
-
-
- Catalog of {modalData?.id} - {modalData?.name}
-
-
-
- {modalData && (
-
- )}
-
-
-
-
-
-
-
+ const modal = (
+ setModalId(null)} />
)
const table = (
@@ -214,7 +177,7 @@ export function Relations(
{title}
- {catalogModal}
+ {modal}
{table}
)
diff --git a/dashboard/lib/layout.ts b/dashboard/lib/layout.ts
index 1182976dfe8cb..924374341daa8 100644
--- a/dashboard/lib/layout.ts
+++ b/dashboard/lib/layout.ts
@@ -15,10 +15,20 @@
*
*/
-import { cloneDeep, max } from "lodash"
+import { max } from "lodash"
+import { Relation } from "../pages/api/streaming"
import { TableFragments_Fragment } from "../proto/gen/meta"
import { GraphNode } from "./algo"
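+// Given a d3 selection type, derive the matching enter-selection type (same datum and group types, with d3.EnterElement as the element).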
+export type Enter<Type> = Type extends d3.Selection<
+  any,
+  infer B,
+  infer C,
+  infer D
+>
+  ? d3.Selection<d3.EnterElement, B, C, D>
+  : never
+
interface DagNode {
node: GraphNode
temp: boolean
@@ -210,16 +220,16 @@ function dagLayout(nodes: GraphNode[]) {
}
/**
- * @param fragments
- * @returns Layer and row of the fragment
+ * @param items
+ * @returns Layer and row of the item
*/
-function gridLayout(
- fragments: Array
-): Map {
- // turn FragmentBox to GraphNode
- let idToBox = new Map()
- for (let fragment of fragments) {
- idToBox.set(fragment.id, fragment)
+function gridLayout(
+ items: Array
+): Map {
+ // turn item to GraphNode
+ let idToItem = new Map()
+ for (let item of items) {
+ idToItem.set(item.id, item)
}
let nodeToId = new Map()
@@ -232,23 +242,23 @@ function gridLayout(
let newNode = {
nextNodes: new Array(),
}
- let ab = idToBox.get(id)
- if (ab === undefined) {
+ let item = idToItem.get(id)
+ if (item === undefined) {
throw Error(`no such id ${id}`)
}
- for (let id of ab.parentIds) {
+ for (let id of item.parentIds) {
getNode(id).nextNodes.push(newNode)
}
idToNode.set(id, newNode)
nodeToId.set(newNode, id)
return newNode
}
- for (let fragment of fragments) {
- getNode(fragment.id)
+ for (let item of items) {
+ getNode(item.id)
}
// run daglayout on GraphNode
- let rtn = new Map()
+ let rtn = new Map()
let allNodes = new Array()
for (let _n of nodeToId.keys()) {
allNodes.push(_n)
@@ -257,33 +267,34 @@ function gridLayout(
for (let item of resultMap) {
let id = nodeToId.get(item[0])
if (!id) {
- throw Error(`no corresponding fragment id of node ${item[0]}`)
+ throw Error(`no corresponding item of node ${item[0]}`)
}
- let fb = idToBox.get(id)
+ let fb = idToItem.get(id)
if (!fb) {
- throw Error(`fragment id ${id} is not present in idToBox`)
+ throw Error(`item id ${id} is not present in idToBox`)
}
rtn.set(fb, item[1])
}
return rtn
}
-export interface FragmentBox {
+export interface LayoutItemBase {
id: string
- name: string
- order: number // preference order, fragment box with larger order will be placed at right
+ order: number // preference order, item with larger order will be placed at right or down
width: number
height: number
parentIds: string[]
+}
+
+export type FragmentBox = LayoutItemBase & {
+ name: string
externalParentIds: string[]
fragment?: TableFragments_Fragment
}
-export interface FragmentPoint {
- id: string
+export type RelationPoint = LayoutItemBase & {
name: string
- order: number // preference order, fragment box with larger order will be placed at right
- parentIds: string[]
+ relation: Relation
}
export interface Position {
@@ -292,7 +303,7 @@ export interface Position {
}
export type FragmentBoxPosition = FragmentBox & Position
-export type FragmentPointPosition = FragmentPoint & Position
+export type RelationPointPosition = RelationPoint & Position
export interface Edge {
points: Array
@@ -301,15 +312,15 @@ export interface Edge {
}
/**
- * @param fragments
+ * @param items
* @returns the coordination of the top-left corner of the fragment box
*/
-export function layout(
- fragments: Array,
+export function layoutItem(
+ items: Array,
layerMargin: number,
rowMargin: number
-): FragmentBoxPosition[] {
- let layoutMap = gridLayout(fragments)
+): (I & Position)[] {
+ let layoutMap = gridLayout(items)
let layerRequiredWidth = new Map()
let rowRequiredHeight = new Map()
let maxLayer = 0,
@@ -373,7 +384,7 @@ export function layout(
getCumulativeMargin(i, rowMargin, rowCumulativeHeight, rowRequiredHeight)
}
-  let rtn: Array<FragmentBoxPosition> = []
+  let rtn: Array<I & Position> = []
for (let [data, [layer, row]] of layoutMap) {
let x = layerCumulativeWidth.get(layer)
@@ -391,39 +402,13 @@ export function layout(
return rtn
}
-export function flipLayout(
- fragments: Array,
- layerMargin: number,
- rowMargin: number
-): FragmentBoxPosition[] {
- const fragments_ = cloneDeep(fragments)
- for (let fragment of fragments_) {
- ;[fragment.width, fragment.height] = [fragment.height, fragment.width]
- }
- const fragmentPosition = layout(fragments_, rowMargin, layerMargin)
- return fragmentPosition.map(({ x, y, ...data }) => ({
- x: y,
- y: x,
- ...data,
- }))
-}
-
-export function layoutPoint(
- fragments: Array,
+function layoutRelation(
+ relations: Array,
layerMargin: number,
rowMargin: number,
nodeRadius: number
-): FragmentPointPosition[] {
- const fragmentBoxes: Array = []
- for (let { ...others } of fragments) {
- fragmentBoxes.push({
- width: nodeRadius * 2,
- height: nodeRadius * 2,
- externalParentIds: [], // we don't care about external parent for point layout
- ...others,
- })
- }
- const result = layout(fragmentBoxes, layerMargin, rowMargin)
+): RelationPointPosition[] {
+ const result = layoutItem(relations, layerMargin, rowMargin)
return result.map(({ x, y, ...data }) => ({
x: x + nodeRadius,
y: y + nodeRadius,
@@ -431,14 +416,14 @@ export function layoutPoint(
}))
}
-export function flipLayoutPoint(
- fragments: Array,
+export function flipLayoutRelation(
+ relations: Array,
layerMargin: number,
rowMargin: number,
nodeRadius: number
-): FragmentPointPosition[] {
- const fragmentPosition = layoutPoint(
- fragments,
+): RelationPointPosition[] {
+ const fragmentPosition = layoutRelation(
+ relations,
rowMargin,
layerMargin,
nodeRadius
@@ -450,21 +435,23 @@ export function flipLayoutPoint(
}))
}
-export function generatePointEdges(layoutMap: FragmentPointPosition[]): Edge[] {
+export function generateRelationEdges(
+ layoutMap: RelationPointPosition[]
+): Edge[] {
const links = []
- const fragmentMap = new Map()
+ const relationMap = new Map()
for (const x of layoutMap) {
- fragmentMap.set(x.id, x)
+ relationMap.set(x.id, x)
}
- for (const fragment of layoutMap) {
- for (const parentId of fragment.parentIds) {
- const parentFragment = fragmentMap.get(parentId)!
+ for (const relation of layoutMap) {
+ for (const parentId of relation.parentIds) {
+ const parentRelation = relationMap.get(parentId)!
links.push({
points: [
- { x: fragment.x, y: fragment.y },
- { x: parentFragment.x, y: parentFragment.y },
+ { x: relation.x, y: relation.y },
+ { x: parentRelation.x, y: parentRelation.y },
],
- source: fragment.id,
+ source: relation.id,
target: parentId,
})
}
@@ -472,7 +459,9 @@ export function generatePointEdges(layoutMap: FragmentPointPosition[]): Edge[] {
return links
}
-export function generateBoxEdges(layoutMap: FragmentBoxPosition[]): Edge[] {
+export function generateFragmentEdges(
+ layoutMap: FragmentBoxPosition[]
+): Edge[] {
const links = []
const fragmentMap = new Map()
for (const x of layoutMap) {
diff --git a/dashboard/pages/api/streaming.ts b/dashboard/pages/api/streaming.ts
index a77a165357b9f..13fa8716f821a 100644
--- a/dashboard/pages/api/streaming.ts
+++ b/dashboard/pages/api/streaming.ts
@@ -45,8 +45,26 @@ export interface StreamingJob extends Relation {
dependentRelations: number[]
}
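+// Infer the relation kind from which catalog-specific fields are present on the object.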
+export function relationType(x: Relation) {
+ if ((x as Table).tableType !== undefined) {
+ return (x as Table).tableType
+ } else if ((x as Sink).sinkFromName !== undefined) {
+ return "SINK"
+ } else if ((x as Source).info !== undefined) {
+ return "SOURCE"
+ } else {
+ return "UNKNOWN"
+ }
+}
+export type RelationType = ReturnType<typeof relationType>
+
+export function relationTypeTitleCase(x: Relation) {
+ return _.startCase(_.toLower(relationType(x)))
+}
+
export function relationIsStreamingJob(x: Relation): x is StreamingJob {
- return (x as StreamingJob).dependentRelations !== undefined
+ const type = relationType(x)
+ return type !== "UNKNOWN" && type !== "SOURCE" && type !== "INTERNAL"
}
export async function getStreamingJobs() {
diff --git a/dashboard/pages/dependency_graph.tsx b/dashboard/pages/dependency_graph.tsx
index fb29f57b11bb5..a4c13a94df169 100644
--- a/dashboard/pages/dependency_graph.tsx
+++ b/dashboard/pages/dependency_graph.tsx
@@ -20,15 +20,17 @@ import { reverse, sortBy } from "lodash"
import Head from "next/head"
import { parseAsInteger, useQueryState } from "nuqs"
import { Fragment, useCallback } from "react"
-import RelationDependencyGraph from "../components/RelationDependencyGraph"
+import RelationDependencyGraph, {
+ nodeRadius,
+} from "../components/RelationDependencyGraph"
import Title from "../components/Title"
-import { FragmentPoint } from "../lib/layout"
+import { RelationPoint } from "../lib/layout"
import useFetch from "./api/fetch"
import { Relation, getRelations, relationIsStreamingJob } from "./api/streaming"
const SIDEBAR_WIDTH = "200px"
-function buildDependencyAsEdges(list: Relation[]): FragmentPoint[] {
+function buildDependencyAsEdges(list: Relation[]): RelationPoint[] {
const edges = []
const relationSet = new Set(list.map((r) => r.id))
for (const r of reverse(sortBy(list, "id"))) {
@@ -41,24 +43,27 @@ function buildDependencyAsEdges(list: Relation[]): FragmentPoint[] {
.map((r) => r.toString())
: [],
order: r.id,
+ width: nodeRadius * 2,
+ height: nodeRadius * 2,
+ relation: r,
})
}
return edges
}
export default function StreamingGraph() {
- const { response: streamingJobList } = useFetch(getRelations)
+ const { response: relationList } = useFetch(getRelations)
const [selectedId, setSelectedId] = useQueryState("id", parseAsInteger)
- const mvDependencyCallback = useCallback(() => {
- if (streamingJobList) {
- return buildDependencyAsEdges(streamingJobList)
+ const relationDependencyCallback = useCallback(() => {
+ if (relationList) {
+ return buildDependencyAsEdges(relationList)
} else {
return undefined
}
- }, [streamingJobList])
+ }, [relationList])
- const mvDependency = mvDependencyCallback()
+ const relationDependency = relationDependencyCallback()
const retVal = (
@@ -77,7 +82,7 @@ export default function StreamingGraph() {
- {streamingJobList?.map((r) => {
+ {relationList?.map((r) => {
const match = selectedId === r.id
return (
diff --git a/dashboard/pages/fragment_graph.tsx b/dashboard/pages/fragment_graph.tsx
index f85d37fa36e75..244b78a540a0a 100644
--- a/dashboard/pages/fragment_graph.tsx
+++ b/dashboard/pages/fragment_graph.tsx
@@ -64,7 +64,7 @@ function buildPlanNodeDependency(
const hierarchyActorNode = (node: StreamNode): PlanNodeDatum => {
return {
- name: node.nodeBody?.$case.toString() || "unknown",
+ name: node.nodeBody?.$case?.toString() || "unknown",
children: (node.input || []).map(hierarchyActorNode),
operatorId: node.operatorId,
node,
@@ -406,7 +406,7 @@ export default function Streaming() {
setSelectedFragmentId(parseInt(id))
}
diff --git a/dashboard/pages/heap_profiling.tsx b/dashboard/pages/heap_profiling.tsx
index a457d84824593..11098a19ab705 100644
--- a/dashboard/pages/heap_profiling.tsx
+++ b/dashboard/pages/heap_profiling.tsx
@@ -80,7 +80,7 @@ export default function HeapProfiling() {
setProfileList(list)
} catch (e: any) {
console.error(e)
- let result = `Getting Profiling File List\n$Error: ${e.message}]`
+ let result = `Getting Profiling File List\n\nError: ${e.message}\n${e.cause}`
setDisplayInfo(result)
}
}
@@ -119,8 +119,12 @@ export default function HeapProfiling() {
}, [selectedProfileList])
async function dumpProfile() {
- api.get(`/monitor/dump_heap_profile/${computeNodeId}`)
- getProfileList(computeNodes, computeNodeId)
+ try {
+ await api.get(`/monitor/dump_heap_profile/${computeNodeId}`)
+ getProfileList(computeNodes, computeNodeId)
+ } catch (e: any) {
+ setDisplayInfo(`Dumping heap profile.\n\nError: ${e.message}\n${e.cause}`)
+ }
}
async function analyzeHeapFile() {
diff --git a/e2e_test/backfill/runtime/create_arrangement_backfill_mv.slt b/e2e_test/backfill/runtime/create_arrangement_backfill_mv.slt
new file mode 100644
index 0000000000000..abc7fe9f3392e
--- /dev/null
+++ b/e2e_test/backfill/runtime/create_arrangement_backfill_mv.slt
@@ -0,0 +1,5 @@
+statement ok
+SET STREAMING_ENABLE_ARRANGEMENT_BACKFILL=true;
+
+statement ok
+CREATE MATERIALIZED VIEW m1 AS SELECT * FROM t;
\ No newline at end of file
diff --git a/e2e_test/backfill/runtime/create_no_shuffle_mv.slt b/e2e_test/backfill/runtime/create_no_shuffle_mv.slt
new file mode 100644
index 0000000000000..89806810649df
--- /dev/null
+++ b/e2e_test/backfill/runtime/create_no_shuffle_mv.slt
@@ -0,0 +1,5 @@
+statement ok
+SET STREAMING_ENABLE_ARRANGEMENT_BACKFILL=false;
+
+statement ok
+CREATE MATERIALIZED VIEW m1 AS SELECT * FROM t;
\ No newline at end of file
diff --git a/e2e_test/backfill/runtime/create_table.slt b/e2e_test/backfill/runtime/create_table.slt
new file mode 100644
index 0000000000000..fd71dffa36fb1
--- /dev/null
+++ b/e2e_test/backfill/runtime/create_table.slt
@@ -0,0 +1,2 @@
+statement ok
+CREATE TABLE t (v1 int, v2 varchar, v3 bigint);
\ No newline at end of file
diff --git a/e2e_test/backfill/runtime/insert.slt b/e2e_test/backfill/runtime/insert.slt
new file mode 100644
index 0000000000000..357ed56a61430
--- /dev/null
+++ b/e2e_test/backfill/runtime/insert.slt
@@ -0,0 +1,5 @@
+statement ok
+INSERT INTO t select 1, 'jakbj2khbe2', 22222222222 from generate_series(1, 500000);
+
+statement ok
+flush;
\ No newline at end of file
diff --git a/e2e_test/backfill/runtime/validate_rows.slt b/e2e_test/backfill/runtime/validate_rows.slt
new file mode 100644
index 0000000000000..ed3c9e1080458
--- /dev/null
+++ b/e2e_test/backfill/runtime/validate_rows.slt
@@ -0,0 +1,4 @@
+query I
+select count(*) from m1;
+----
+1000000
\ No newline at end of file
diff --git a/e2e_test/schema_registry/alter_sr.slt b/e2e_test/schema_registry/alter_sr.slt
new file mode 100644
index 0000000000000..dc05f81fc1362
--- /dev/null
+++ b/e2e_test/schema_registry/alter_sr.slt
@@ -0,0 +1,74 @@
+# Before running this test, seed data into kafka:
+# python3 e2e_test/schema_registry/pb.py
+
+statement ok
+CREATE SOURCE src_user WITH (
+ connector = 'kafka',
+ topic = 'sr_pb_test',
+ properties.bootstrap.server = 'message_queue:29092',
+ scan.startup.mode = 'earliest'
+)
+FORMAT PLAIN ENCODE PROTOBUF(
+ schema.registry = 'http://message_queue:8081',
+ message = 'test.User'
+);
+
+statement ok
+CREATE MATERIALIZED VIEW mv_user AS SELECT * FROM src_user;
+
+# Changing type is not allowed
+statement error Feature is not yet implemented: this altering statement will drop columns, which is not supported yet: \(city: character varying\)
+ALTER SOURCE src_user FORMAT PLAIN ENCODE PROTOBUF(
+ schema.registry = 'http://message_queue:8081',
+ message = 'test.UserWithNewType'
+);
+
+# Changing format/encode is not allowed
+statement error Feature is not yet implemented: the original definition is FORMAT Plain ENCODE Protobuf, and altering them is not supported yet
+ALTER SOURCE src_user FORMAT NATIVE ENCODE PROTOBUF(
+ schema.registry = 'http://message_queue:8081',
+ message = 'test.User'
+);
+
+statement ok
+ALTER SOURCE src_user FORMAT PLAIN ENCODE PROTOBUF(
+ schema.registry = 'http://message_queue:8081',
+ message = 'test.UserWithMoreFields'
+);
+
+# Dropping columns is not allowed
+statement error Feature is not yet implemented: this altering statement will drop columns, which is not supported yet: \(age: integer\)
+ALTER SOURCE src_user FORMAT PLAIN ENCODE PROTOBUF(
+ schema.registry = 'http://message_queue:8081',
+ message = 'test.User'
+);
+
+statement ok
+CREATE MATERIALIZED VIEW mv_more_fields AS SELECT * FROM src_user;
+
+system ok
+python3 e2e_test/schema_registry/pb.py "message_queue:29092" "http://message_queue:8081" "sr_pb_test" 5 user_with_more_fields
+
+sleep 10s
+
+query I
+SELECT COUNT(*) FROM mv_user;
+----
+25
+
+statement error
+SELECT SUM(age) FROM mv_user;
+
+query III
+SELECT COUNT(*), MAX(age), MIN(age) FROM mv_more_fields;
+----
+25 4 0
+
+statement ok
+DROP MATERIALIZED VIEW mv_user;
+
+statement ok
+DROP MATERIALIZED VIEW mv_more_fields;
+
+statement ok
+DROP SOURCE src_user;
diff --git a/e2e_test/schema_registry/pb.py b/e2e_test/schema_registry/pb.py
index e83fd1d36f8a5..7ca15222e149d 100644
--- a/e2e_test/schema_registry/pb.py
+++ b/e2e_test/schema_registry/pb.py
@@ -25,18 +25,38 @@ def get_user(i):
sc=SourceContext(file_name="source/context_{:03}.proto".format(i)),
)
+def get_user_with_more_fields(i):
+ return user_pb2.UserWithMoreFields(
+ id=i,
+ name="User_{}".format(i),
+ address="Address_{}".format(i),
+ city="City_{}".format(i),
+ gender=user_pb2.MALE if i % 2 == 0 else user_pb2.FEMALE,
+ sc=SourceContext(file_name="source/context_{:03}.proto".format(i)),
+ age=i,
+ )
+
+def get_user_with_new_type(i):
+ return user_pb2.UserWithNewType(
+ id=i,
+ name="User_{}".format(i),
+ address="Address_{}".format(i),
+ city=i,
+ gender=user_pb2.MALE if i % 2 == 0 else user_pb2.FEMALE,
+ sc=SourceContext(file_name="source/context_{:03}.proto".format(i)),
+ )
-def send_to_kafka(producer_conf, schema_registry_conf, topic, num_records):
+def send_to_kafka(producer_conf, schema_registry_conf, topic, num_records, get_user_fn, pb_message):
schema_registry_client = SchemaRegistryClient(schema_registry_conf)
serializer = ProtobufSerializer(
- user_pb2.User,
+ pb_message,
schema_registry_client,
{"use.deprecated.format": False, 'skip.known.types': True},
)
producer = Producer(producer_conf)
for i in range(num_records):
- user = get_user(i)
+ user = get_user_fn(i)
producer.produce(
topic=topic,
@@ -49,20 +69,29 @@ def send_to_kafka(producer_conf, schema_registry_conf, topic, num_records):
if __name__ == "__main__":
- if len(sys.argv) < 4:
- print("pb.py ")
+ if len(sys.argv) < 5:
+ print("pb.py ")
exit(1)
broker_list = sys.argv[1]
schema_registry_url = sys.argv[2]
topic = sys.argv[3]
num_records = int(sys.argv[4])
+ pb_message = sys.argv[5]
+
+ all_pb_messages = {
+ 'user': (get_user, user_pb2.User),
+ 'user_with_more_fields': (get_user_with_more_fields, user_pb2.UserWithMoreFields),
+ 'user_with_new_type': (get_user_with_new_type, user_pb2.UserWithNewType),
+ }
+
+ assert pb_message in all_pb_messages, f'pb_message must be one of {list(all_pb_messages.keys())}'
schema_registry_conf = {"url": schema_registry_url}
producer_conf = {"bootstrap.servers": broker_list}
try:
- send_to_kafka(producer_conf, schema_registry_conf, topic, num_records)
+ send_to_kafka(producer_conf, schema_registry_conf, topic, num_records, *all_pb_messages[pb_message])
except Exception as e:
print("Send Protobuf data to schema registry and kafka failed {}", e)
exit(1)
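Note: `pb.py` now requires a fifth `pb_message` argument (`user`, `user_with_more_fields`, or `user_with_new_type`). A minimal sketch of how a harness might drive the seeder, mirroring the `system ok` line in `alter_sr.slt`; the initial record count of 20 is an assumption inferred from the expected total of 25 rows in that test, and the broker/registry/topic values are the ones used above.

```python
# Sketch: seed the schema-registry test topic with the extended pb.py.
import subprocess

SEEDER = "e2e_test/schema_registry/pb.py"
BROKER, REGISTRY, TOPIC = "message_queue:29092", "http://message_queue:8081", "sr_pb_test"

# Seed base `test.User` records first (count of 20 is assumed), then append
# records of the evolved `test.UserWithMoreFields` schema, as alter_sr.slt does.
subprocess.run(["python3", SEEDER, BROKER, REGISTRY, TOPIC, "20", "user"], check=True)
subprocess.run(["python3", SEEDER, BROKER, REGISTRY, TOPIC, "5", "user_with_more_fields"], check=True)
```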
diff --git a/e2e_test/schema_registry/pb.slt b/e2e_test/schema_registry/pb.slt
index fb40759d34ada..d9c0edca1b21c 100644
--- a/e2e_test/schema_registry/pb.slt
+++ b/e2e_test/schema_registry/pb.slt
@@ -1,5 +1,5 @@
# Before running this test, seed data into kafka:
-# python3 e2e_test/schema_registry/pb.py <broker_list> <schema_registry_url> <topic> <num_records>
+# python3 e2e_test/schema_registry/pb.py <broker_list> <schema_registry_url> <topic> <num_records> <pb_message>
# Create a table.
statement ok
diff --git a/e2e_test/schema_registry/protobuf/user.proto b/e2e_test/schema_registry/protobuf/user.proto
index e6c5f109bbd76..bbff4b97bac9c 100644
--- a/e2e_test/schema_registry/protobuf/user.proto
+++ b/e2e_test/schema_registry/protobuf/user.proto
@@ -17,3 +17,22 @@ enum Gender {
MALE = 0;
FEMALE = 1;
}
+
+message UserWithMoreFields {
+ int32 id = 1;
+ string name = 2;
+ string address = 3;
+ string city = 4;
+ Gender gender = 5;
+ google.protobuf.SourceContext sc = 6;
+ int32 age = 7; // new field here
+}
+
+message UserWithNewType {
+ int32 id = 1;
+ string name = 2;
+ string address = 3;
+ int32 city = 4; // change the type from string to int32
+ Gender gender = 5;
+ google.protobuf.SourceContext sc = 6;
+}
diff --git a/e2e_test/schema_registry/protobuf/user_pb2.py b/e2e_test/schema_registry/protobuf/user_pb2.py
index b87f3a5ea1d81..bd7b61e646fb1 100644
--- a/e2e_test/schema_registry/protobuf/user_pb2.py
+++ b/e2e_test/schema_registry/protobuf/user_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: user.proto
-# Protobuf Python Version: 4.25.0
+# Protobuf Python Version: 4.25.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -15,15 +15,19 @@
from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nuser.proto\x12\x04test\x1a$google/protobuf/source_context.proto\"\x89\x01\n\x04User\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\t\x12\x0c\n\x04\x63ity\x18\x04 \x01(\t\x12\x1c\n\x06gender\x18\x05 \x01(\x0e\x32\x0c.test.Gender\x12*\n\x02sc\x18\x06 \x01(\x0b\x32\x1e.google.protobuf.SourceContext*\x1e\n\x06Gender\x12\x08\n\x04MALE\x10\x00\x12\n\n\x06\x46\x45MALE\x10\x01\x62\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nuser.proto\x12\x04test\x1a$google/protobuf/source_context.proto\"\x89\x01\n\x04User\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\t\x12\x0c\n\x04\x63ity\x18\x04 \x01(\t\x12\x1c\n\x06gender\x18\x05 \x01(\x0e\x32\x0c.test.Gender\x12*\n\x02sc\x18\x06 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\"\xa4\x01\n\x12UserWithMoreFields\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\t\x12\x0c\n\x04\x63ity\x18\x04 \x01(\t\x12\x1c\n\x06gender\x18\x05 \x01(\x0e\x32\x0c.test.Gender\x12*\n\x02sc\x18\x06 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\x0b\n\x03\x61ge\x18\x07 \x01(\x05\"\x94\x01\n\x0fUserWithNewType\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\t\x12\x0c\n\x04\x63ity\x18\x04 \x01(\x05\x12\x1c\n\x06gender\x18\x05 \x01(\x0e\x32\x0c.test.Gender\x12*\n\x02sc\x18\x06 \x01(\x0b\x32\x1e.google.protobuf.SourceContext*\x1e\n\x06Gender\x12\x08\n\x04MALE\x10\x00\x12\n\n\x06\x46\x45MALE\x10\x01\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'user_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
- _globals['_GENDER']._serialized_start=198
- _globals['_GENDER']._serialized_end=228
+ _globals['_GENDER']._serialized_start=516
+ _globals['_GENDER']._serialized_end=546
_globals['_USER']._serialized_start=59
_globals['_USER']._serialized_end=196
+ _globals['_USERWITHMOREFIELDS']._serialized_start=199
+ _globals['_USERWITHMOREFIELDS']._serialized_end=363
+ _globals['_USERWITHNEWTYPE']._serialized_start=366
+ _globals['_USERWITHNEWTYPE']._serialized_end=514
# @@protoc_insertion_point(module_scope)
diff --git a/e2e_test/sink/remote/jdbc.check.pg.slt b/e2e_test/sink/remote/jdbc.check.pg.slt
index 7cf6faa7b0fb6..d52481bed4e6e 100644
--- a/e2e_test/sink/remote/jdbc.check.pg.slt
+++ b/e2e_test/sink/remote/jdbc.check.pg.slt
@@ -3,8 +3,8 @@
query I
select * from t_remote_0 order by id;
----
-1 Alex 28208 281620391 4986480304337356659 28162.0391 2.03 28162.0391 2023-03-20 10:18:30
-3 Carl 18300 1702307129 7878292368468104216 17023.07129 23.07 17023.07129 2023-03-20 10:18:32
+1 Alex 28208 281620391 4986480304337356659 28162.0391 2.03 28162.0391 1000-01-01 00:00:00
+3 Carl 18300 1702307129 7878292368468104216 17023.07129 23.07 17023.07129 9999-12-31 23:49:59.499999
4 Doris 17250 151951802 3946135584462581863 1519518.02 18.02 1519518.02 2023-03-21 10:18:30
5 Eve 9725 698160808 524334216698825611 69.8160808 69.81 69.8160808 2023-03-21 10:18:31
6 Frank 28131 1233587627 8492820454814063326 123358.7627 58.76 123358.7627 2023-03-21 10:18:32
@@ -12,8 +12,8 @@ select * from t_remote_0 order by id;
query II
select * from t_remote_1 order by id;
----
-1 Alex Text value 1 123 456 789 12.34 56.78 90.12 t 2023-05-22 12:34:56 2023-05-22 12:34:56 2023-05-22 12:34:56+00 2 years 3 mons 4 days 05:06:07 {"key": "value"} \xdeadbeef
-3 Varchar value 3 Text value 3 345 678 901 34.56 78.9 12.34 t 2023-05-24 12:34:56 2023-05-24 12:34:56 2023-05-24 12:34:56+00 2 years 3 mons 4 days 05:06:07 {"key": "value3"} \xcafebabe
+1 Alex Text value 1 123 456 789 12.34 56.78 90.12 t 1550-12-31 00:00:00.123456 2023-05-22 12:34:56 1970-01-01 00:00:01+00 2 years 3 mons 4 days 05:06:07 {"key": "value"} \xdeadbeef
+3 Varchar value 3 Text value 3 345 678 901 34.56 78.9 12.34 t 9999-12-31 12:34:56 2023-05-24 12:34:56 2023-05-24 12:34:56.123456+00 2 years 3 mons 4 days 05:06:07 {"key": "value3"} \xcafebabe
4 Varchar value 4 Text value 4 456 789 12 45.67 89.01 23.45 f 2023-05-25 23:45:01 2023-05-25 23:45:01 2023-05-25 23:45:01+00 2 years 3 mons 4 days 05:06:07 {"key": "value4"} \xbabec0de
5 Varchar value 5 Text value 5 567 890 123 56.78 90.12 34.56 t 2023-05-26 12:34:56 2023-05-26 12:34:56 2023-05-26 12:34:56+00 2 years 3 mons 4 days 05:06:07 {"key": "value5"} \xdeadbabe
6 Varchar value 6 Text value 6 789 123 456 67.89 34.56 78.91 f 2023-05-27 23:45:01 2023-05-27 23:45:01 2023-05-27 23:45:01+00 2 years 3 mons 4 days 05:06:07 {"key": "value6"} \xdeadbabe
diff --git a/e2e_test/sink/remote/jdbc.load.slt b/e2e_test/sink/remote/jdbc.load.slt
index a3bc63e48f7de..80f319443ce0b 100644
--- a/e2e_test/sink/remote/jdbc.load.slt
+++ b/e2e_test/sink/remote/jdbc.load.slt
@@ -84,7 +84,7 @@ CREATE SINK s_postgres_1 FROM mv_remote_1 WITH (
statement ok
CREATE SINK s_mysql_0 FROM mv_remote_0 WITH (
connector='jdbc',
- jdbc.url='jdbc:mysql://mysql:3306/test?user=mysqluser&password=mysqlpw',
+ jdbc.url='jdbc:mysql://mysql:3306/test?user=mysqluser&password=mysqlpw&connectionTimeZone=UTC',
table.name='t_remote_0',
primary_key='id',
type='upsert'
@@ -93,7 +93,7 @@ CREATE SINK s_mysql_0 FROM mv_remote_0 WITH (
statement ok
CREATE SINK s_mysql_1 FROM mv_remote_1 WITH (
connector='jdbc',
- jdbc.url='jdbc:mysql://mysql:3306/test?user=mysqluser&password=mysqlpw',
+ jdbc.url='jdbc:mysql://mysql:3306/test?user=mysqluser&password=mysqlpw&connectionTimeZone=UTC',
table.name='t_remote_1',
primary_key='id',
type='upsert'
@@ -112,7 +112,7 @@ CREATE SINK s2_postgres FROM rw_typed_data WITH (
statement ok
CREATE SINK s2_mysql FROM rw_typed_data WITH (
connector='jdbc',
- jdbc.url='jdbc:mysql://mysql:3306/test?user=mysqluser&password=mysqlpw',
+ jdbc.url='jdbc:mysql://mysql:3306/test?user=mysqluser&password=mysqlpw&connectionTimeZone=UTC',
table.name='t_types',
primary_key='id',
type='upsert'
@@ -177,9 +177,9 @@ INSERT INTO tt1 VALUES
statement ok
INSERT INTO t_remote_0 VALUES
- (1, 'Alice', 28208, 281620391, 4986480304337356659, 28162.0391, 2.03, 28162.0391, '2023-03-20 10:18:30'),
+ (1, 'Alice', 28208, 281620391, 4986480304337356659, 28162.0391, 2.03, 28162.0391, '1000-01-01 00:00:00'),
(2, 'Bob', 10580, 2131030003, 3074255027698877876, 21310.30003, 10.3, 21310.30003, '2023-03-20 10:18:31'),
- (3, 'Carl', 18300, 1702307129, 7878292368468104216, 17023.07129, 23.07, 17023.07129, '2023-03-20 10:18:32');
+ (3, 'Carl', 18300, 1702307129, 7878292368468104216, 17023.07129, 23.07, 17023.07129, '9999-12-31 23:49:59.499999');
statement ok
INSERT INTO t_remote_0 VALUES
@@ -189,9 +189,9 @@ INSERT INTO t_remote_0 VALUES
statement ok
INSERT INTO t_remote_1 VALUES
- (1, 'Varchar value 1', 'Text value 1', 123, 456, 789, 12.34, 56.78, 90.12, TRUE, '2023-05-22', '12:34:56', '2023-05-22 12:34:56', '2023-05-22 12:34:56', '2 years 3 months 4 days 5 hours 6 minutes 7 seconds', '{"key": "value"}', E'\\xDEADBEEF'),
+ (1, 'Varchar value 1', 'Text value 1', 123, 456, 789, 12.34, 56.78, 90.12, TRUE, '1550-12-31', '00:00:00.123456', '2023-05-22 12:34:56', '1970-01-01 00:00:01Z', '2 years 3 months 4 days 5 hours 6 minutes 7 seconds', '{"key": "value"}', E'\\xDEADBEEF'),
(2, 'Varchar value 2', 'Text value 2', 234, 567, 890, 23.45, 67.89, 01.23, FALSE, '2023-05-23', '23:45:01', '2023-05-23 23:45:01', '2023-05-23 23:45:01', '2 years 3 months 4 days 5 hours 6 minutes 7 seconds', '{"key": "value2"}', E'\\xFEEDBEEF'),
- (3, 'Varchar value 3', 'Text value 3', 345, 678, 901, 34.56, 78.90, 12.34, TRUE, '2023-05-24', '12:34:56', '2023-05-24 12:34:56', '2023-05-24 12:34:56', '2 years 3 months 4 days 5 hours 6 minutes 7 seconds', '{"key": "value3"}', E'\\xCAFEBABE');
+ (3, 'Varchar value 3', 'Text value 3', 345, 678, 901, 34.56, 78.90, 12.34, TRUE, '9999-12-31', '12:34:56', '2023-05-24 12:34:56', '2023-05-24 12:34:56.123456Z', '2 years 3 months 4 days 5 hours 6 minutes 7 seconds', '{"key": "value3"}', E'\\xCAFEBABE');
statement ok
INSERT INTO t_remote_1 VALUES
diff --git a/e2e_test/sink/remote/mysql_create_table.sql b/e2e_test/sink/remote/mysql_create_table.sql
index 9eab3d0f5e13a..078b57af7cd03 100644
--- a/e2e_test/sink/remote/mysql_create_table.sql
+++ b/e2e_test/sink/remote/mysql_create_table.sql
@@ -7,7 +7,7 @@ CREATE TABLE t_remote_0 (
v_decimal decimal,
v_float float,
v_double double,
- v_timestamp timestamp
+ v_timestamp DATETIME(6)
);
CREATE TABLE t_remote_1 (
@@ -22,9 +22,9 @@ CREATE TABLE t_remote_1 (
v_double DOUBLE,
v_boolean BOOLEAN,
v_date DATE,
- v_time TIME,
- v_timestamp TIMESTAMP,
- v_timestamptz TIMESTAMP,
+ v_time TIME(6),
+ v_timestamp DATETIME(6),
+ v_timestamptz TIMESTAMP(6),
v_interval VARCHAR(255),
v_jsonb JSON,
v_bytea BLOB
@@ -43,7 +43,7 @@ CREATE TABLE t_types (
boolean_column TINYINT,
date_column DATE,
time_column TIME,
- timestamp_column TIMESTAMP,
+ timestamp_column DATETIME,
interval_column VARCHAR(100),
jsonb_column JSON,
array_column LONGTEXT,
diff --git a/e2e_test/sink/remote/mysql_expected_result_0.tsv b/e2e_test/sink/remote/mysql_expected_result_0.tsv
index ddee16e372aa3..1c00e9d1b9bf7 100644
--- a/e2e_test/sink/remote/mysql_expected_result_0.tsv
+++ b/e2e_test/sink/remote/mysql_expected_result_0.tsv
@@ -1,5 +1,5 @@
-1 Alex 28208 281620391 4986480304337356659 28162 2.03 28162.0391 2023-03-20 10:18:30
-3 Carl 18300 1702307129 7878292368468104216 17023 23.07 17023.07129 2023-03-20 10:18:32
-4 Doris 17250 151951802 3946135584462581863 1519518 18.02 1519518.02 2023-03-21 10:18:30
-5 Eve 9725 698160808 524334216698825611 70 69.81 69.8160808 2023-03-21 10:18:31
-6 Frank 28131 1233587627 8492820454814063326 123359 58.76 123358.7627 2023-03-21 10:18:32
+1 Alex 28208 281620391 4986480304337356659 28162 2.03 28162.0391 1000-01-01 00:00:00.000000
+3 Carl 18300 1702307129 7878292368468104216 17023 23.07 17023.07129 9999-12-31 23:49:59.499999
+4 Doris 17250 151951802 3946135584462581863 1519518 18.02 1519518.02 2023-03-21 10:18:30.000000
+5 Eve 9725 698160808 524334216698825611 70 69.81 69.8160808 2023-03-21 10:18:31.000000
+6 Frank 28131 1233587627 8492820454814063326 123359 58.76 123358.7627 2023-03-21 10:18:32.000000
diff --git a/e2e_test/sink/remote/mysql_expected_result_1.tsv b/e2e_test/sink/remote/mysql_expected_result_1.tsv
index 0213697dd6e26..f6bc79f624ff5 100644
--- a/e2e_test/sink/remote/mysql_expected_result_1.tsv
+++ b/e2e_test/sink/remote/mysql_expected_result_1.tsv
@@ -1,5 +1,5 @@
-1 Alex Text value 1 123 456 789 12.34 56.78 90.12 1 2023-05-22 12:34:56 2023-05-22 12:34:56 2023-05-22 12:34:56 P2Y3M4DT5H6M7S {"key": "value"} 3q2+7w==
-3 Varchar value 3 Text value 3 345 678 901 34.56 78.9 12.34 1 2023-05-24 12:34:56 2023-05-24 12:34:56 2023-05-24 12:34:56 P2Y3M4DT5H6M7S {"key": "value3"} yv66vg==
-4 Varchar value 4 Text value 4 456 789 12 45.67 89.01 23.45 0 2023-05-25 23:45:01 2023-05-25 23:45:01 2023-05-25 23:45:01 P2Y3M4DT5H6M7S {"key": "value4"} ur7A3g==
-5 Varchar value 5 Text value 5 567 890 123 56.78 90.12 34.56 1 2023-05-26 12:34:56 2023-05-26 12:34:56 2023-05-26 12:34:56 P2Y3M4DT5H6M7S {"key": "value5"} 3q26vg==
-6 Varchar value 6 Text value 6 789 123 456 67.89 34.56 78.91 0 2023-05-27 23:45:01 2023-05-27 23:45:01 2023-05-27 23:45:01 P2Y3M4DT5H6M7S {"key": "value6"} 3q26vg==
+1 Alex Text value 1 123 456 789 12.34 56.78 90.12 1 1550-12-31 00:00:00.123456 2023-05-22 12:34:56.000000 1970-01-01 00:00:01.000000 P2Y3M4DT5H6M7S {"key": "value"} 3q2+7w==
+3 Varchar value 3 Text value 3 345 678 901 34.56 78.9 12.34 1 9999-12-31 12:34:56.000000 2023-05-24 12:34:56.000000 2023-05-24 12:34:56.123456 P2Y3M4DT5H6M7S {"key": "value3"} yv66vg==
+4 Varchar value 4 Text value 4 456 789 12 45.67 89.01 23.45 0 2023-05-25 23:45:01.000000 2023-05-25 23:45:01.000000 2023-05-25 23:45:01.000000 P2Y3M4DT5H6M7S {"key": "value4"} ur7A3g==
+5 Varchar value 5 Text value 5 567 890 123 56.78 90.12 34.56 1 2023-05-26 12:34:56.000000 2023-05-26 12:34:56.000000 2023-05-26 12:34:56.000000 P2Y3M4DT5H6M7S {"key": "value5"} 3q26vg==
+6 Varchar value 6 Text value 6 789 123 456 67.89 34.56 78.91 0 2023-05-27 23:45:01.000000 2023-05-27 23:45:01.000000 2023-05-27 23:45:01.000000 P2Y3M4DT5H6M7S {"key": "value6"} 3q26vg==
diff --git a/e2e_test/source/basic/kafka.slt b/e2e_test/source/basic/kafka.slt
index ab7616084db5f..261e1cabda041 100644
--- a/e2e_test/source/basic/kafka.slt
+++ b/e2e_test/source/basic/kafka.slt
@@ -680,10 +680,10 @@ select count(*) from s16
----
0
-statement error Feature is not yet implemented: Alter source with schema registry
+statement error Not supported: alter source with schema registry
alter source s18 add column v10 int;
-statement error Feature is not yet implemented: Alter source with schema registry
+statement error Not supported: alter source with schema registry
alter source s17 add column v10 int;
query III rowsort
diff --git a/e2e_test/source/basic/pubsub.slt b/e2e_test/source/basic/pubsub.slt
index e5d9911405797..b245d9b2aea89 100644
--- a/e2e_test/source/basic/pubsub.slt
+++ b/e2e_test/source/basic/pubsub.slt
@@ -2,16 +2,14 @@
statement error
CREATE TABLE s1 (v1 int, v2 varchar) WITH (
pubsub.subscription = 'test-subscription-1',
- pubsub.emulator_host = 'localhost:5981',
- pubsub.split_count = 3
+ pubsub.emulator_host = 'invalid_host:5981'
) FORMAT PLAIN ENCODE JSON;
statement ok
CREATE TABLE s1 (v1 int, v2 varchar) WITH (
connector = 'google_pubsub',
pubsub.subscription = 'test-subscription-1',
- pubsub.emulator_host = 'localhost:5980',
- pubsub.split_count = 3
+ pubsub.emulator_host = 'localhost:5980'
) FORMAT PLAIN ENCODE JSON;
statement ok
@@ -25,25 +23,14 @@ statement error
CREATE TABLE s2 (v1 int, v2 varchar) WITH (
connector = 'google_pubsub',
pubsub.subscription = 'test-subscription-not-2',
- pubsub.emulator_host = 'localhost:5980',
- pubsub.split_count = 3
+ pubsub.emulator_host = 'localhost:5980'
) FORMAT PLAIN ENCODE JSON;
statement ok
CREATE TABLE s2 (v1 int, v2 varchar) WITH (
connector = 'google_pubsub',
pubsub.subscription = 'test-subscription-2',
- pubsub.emulator_host = 'localhost:5980',
- pubsub.split_count = 3
-) FORMAT PLAIN ENCODE JSON;
-
-# fail with invalid split count
-statement error
-CREATE TABLE s3 (v1 int, v2 varchar) WITH (
- connector = 'google_pubsub',
- pubsub.subscription = 'test-subscription-3',
- pubsub.emulator_host = 'localhost:5980',
- pubsub.split_count = 0
+ pubsub.emulator_host = 'localhost:5980'
) FORMAT PLAIN ENCODE JSON;
# fail if both start_offset and start_snapshot are provided
@@ -52,7 +39,6 @@ CREATE TABLE s3 (v1 int, v2 varchar) WITH (
connector = 'google_pubsub',
pubsub.subscription = 'test-subscription-3',
pubsub.emulator_host = 'localhost:5980',
- pubsub.split_count = 2,
pubsub.start_offset = "121212",
pubsub.start_snapshot = "snapshot-that-doesnt-exist"
) FORMAT PLAIN ENCODE JSON;
diff --git a/e2e_test/udf/sql_udf.slt b/e2e_test/udf/sql_udf.slt
index 02fc23b2d7f02..717a1cf23a829 100644
--- a/e2e_test/udf/sql_udf.slt
+++ b/e2e_test/udf/sql_udf.slt
@@ -64,6 +64,19 @@ select fib(100);
statement error function fib calling stack depth limit exceeded
create materialized view foo_mv as select fib(100);
+statement ok
+create function regexp_replace_wrapper(varchar) returns varchar language sql as $$select regexp_replace($1, 'baz(...)', '这是🥵', 'ic')$$;
+
+statement ok
+create function print(INT) returns int language sql as 'select $1';
+
+# Adjust the input value of the calling function (i.e., `print` here) with the actual input parameter
+statement ok
+create function print_add_one(INT) returns int language sql as 'select print($1 + 1)';
+
+statement ok
+create function print_add_two(INT) returns int language sql as 'select print($1 + $1)';
+
# Call the defined sql udf
query I
select add(1, -1);
@@ -100,6 +113,11 @@ select call_regexp_replace();
----
💩💩💩💩💩foo🤔️bar亲爱的😭这是🥵爱情❤️🔥
+query T
+select regexp_replace_wrapper('💩💩💩💩💩foo🤔️bar亲爱的😭baz这不是爱情❤️🔥');
+----
+💩💩💩💩💩foo🤔️bar亲爱的😭这是🥵爱情❤️🔥
+
query T
select foo(114514);
----
@@ -118,6 +136,11 @@ select add_sub_wrapper(1, 1);
----
114514
+query III
+select print_add_one(1), print_add_one(114513), print_add_two(2);
+----
+2 114514 4
+
# Create a mock table
statement ok
create table t1 (c1 INT, c2 INT);
@@ -251,6 +274,18 @@ drop function add_sub_types;
statement ok
drop function fib;
+statement ok
+drop function print;
+
+statement ok
+drop function print_add_one;
+
+statement ok
+drop function print_add_two;
+
+statement ok
+drop function regexp_replace_wrapper;
+
# Drop the mock table
statement ok
drop table t1;
diff --git a/integration_tests/doris-sink/README.md b/integration_tests/doris-sink/README.md
index 75baa2d2449f1..b62c2d2e3adcf 100644
--- a/integration_tests/doris-sink/README.md
+++ b/integration_tests/doris-sink/README.md
@@ -45,16 +45,10 @@ GRANT ALL ON *.* TO 'users'@'%';
4. Execute the SQL queries in sequence:
-- append-only sql:
- - create_source.sql
- - create_mv.sql
- - create_sink.sql
-
-- upsert sql:
- - upsert/create_table.sql
- - upsert/create_mv.sql
- - upsert/create_sink.sql
- - upsert/insert_update_delete.sql
+- create_source.sql
+- create_mv.sql
+- create_sink.sql
+- update_delete.sql
We only support `upsert` with doris' `UNIQUE KEY`
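For reference, a minimal sketch of running the listed SQL files in order against the RisingWave frontend. It assumes a local `psql` client and the default frontend address used elsewhere in these demos (localhost:4566, database `dev`, user `root`); the file names come from the list above.

```python
# Sketch: execute the demo's SQL files in sequence against RisingWave.
import subprocess

SQL_FILES = ["create_source.sql", "create_mv.sql", "create_sink.sql", "update_delete.sql"]

for sql_file in SQL_FILES:
    subprocess.run(
        ["psql", "-h", "localhost", "-p", "4566", "-d", "dev", "-U", "root", "-f", sql_file],
        check=True,  # stop at the first failing script
    )
```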
diff --git a/integration_tests/doris-sink/create_mv.sql b/integration_tests/doris-sink/create_mv.sql
index c367e6f2baa94..6e466703b0769 100644
--- a/integration_tests/doris-sink/create_mv.sql
+++ b/integration_tests/doris-sink/create_mv.sql
@@ -5,3 +5,11 @@ SELECT
event_timestamp AT TIME ZONE 'Asia/Shanghai' as event_timestamp_local
FROM
user_behaviors;
+
+CREATE MATERIALIZED VIEW upsert_bhv_mv AS
+SELECT
+ user_id,
+ target_id,
+ event_timestamp AT TIME ZONE 'Asia/Shanghai' as event_timestamp_local
+FROM
+ upsert_user_behaviors;
diff --git a/integration_tests/doris-sink/create_sink.sql b/integration_tests/doris-sink/create_sink.sql
index fa0cfddf7bf16..7cd1ac24857e9 100644
--- a/integration_tests/doris-sink/create_sink.sql
+++ b/integration_tests/doris-sink/create_sink.sql
@@ -9,4 +9,17 @@ FROM
doris.database = 'demo',
doris.table='demo_bhv_table',
force_append_only='true'
-);
\ No newline at end of file
+);
+
+CREATE SINK upsert_doris_sink
+FROM
+ upsert_bhv_mv WITH (
+ connector = 'doris',
+ type = 'upsert',
+ doris.url = 'http://fe:8030',
+ doris.user = 'users',
+ doris.password = '123456',
+ doris.database = 'demo',
+ doris.table='upsert_table',
+ primary_key = 'user_id'
+);
diff --git a/integration_tests/doris-sink/create_source.sql b/integration_tests/doris-sink/create_source.sql
index ed7c02341638a..0e42308511121 100644
--- a/integration_tests/doris-sink/create_source.sql
+++ b/integration_tests/doris-sink/create_source.sql
@@ -14,3 +14,20 @@ CREATE table user_behaviors (
fields.user_id.end = '1000',
datagen.rows.per.second = '100'
) FORMAT PLAIN ENCODE JSON;
+
+CREATE table upsert_user_behaviors (
+ user_id int,
+ target_id VARCHAR,
+ target_type VARCHAR,
+ event_timestamp TIMESTAMPTZ,
+ behavior_type VARCHAR,
+ parent_target_type VARCHAR,
+ parent_target_id VARCHAR,
+ PRIMARY KEY(user_id)
+);
+
+INSERT INTO upsert_user_behaviors VALUES
+ (1,'1','1','2020-01-01T01:01:01Z','1','1','1'),
+ (2,'2','2','2020-01-01T01:01:02Z','2','2','2'),
+ (3,'3','3','2020-01-01T01:01:03Z','3','3','3'),
+ (4,'4','4','2020-01-01T01:01:04Z','4','4','4');
diff --git a/integration_tests/doris-sink/docker-compose.yml b/integration_tests/doris-sink/docker-compose.yml
index 74fecbee2baab..fc7cfd751e989 100644
--- a/integration_tests/doris-sink/docker-compose.yml
+++ b/integration_tests/doris-sink/docker-compose.yml
@@ -74,6 +74,15 @@ services:
networks:
mynetwork:
ipv4_address: 172.21.0.9
+ postgres:
+ image: postgres:latest
+ command: tail -f /dev/null
+ volumes:
+ - "./update_delete.sql:/update_delete.sql"
+ restart: on-failure
+ networks:
+ mynetwork:
+ ipv4_address: 172.21.0.11
volumes:
risingwave-standalone:
external: false
diff --git a/integration_tests/doris-sink/doris_prepare.sql b/integration_tests/doris-sink/doris_prepare.sql
index c95e8ac3f9b32..b65e419999caf 100644
--- a/integration_tests/doris-sink/doris_prepare.sql
+++ b/integration_tests/doris-sink/doris_prepare.sql
@@ -11,5 +11,15 @@ PROPERTIES (
"replication_allocation" = "tag.location.default: 1"
);
+CREATE table upsert_table(
+ user_id int,
+ target_id text,
+ event_timestamp_local datetime
+) UNIQUE KEY(`user_id`)
+DISTRIBUTED BY HASH(`user_id`) BUCKETS 1
+PROPERTIES (
+ "replication_allocation" = "tag.location.default: 1"
+);
+
CREATE USER 'users'@'%' IDENTIFIED BY '123456';
GRANT ALL ON *.* TO 'users'@'%';
diff --git a/integration_tests/doris-sink/sink_check.py b/integration_tests/doris-sink/sink_check.py
index 39109f4194fef..510cc867dcda4 100644
--- a/integration_tests/doris-sink/sink_check.py
+++ b/integration_tests/doris-sink/sink_check.py
@@ -1,7 +1,7 @@
import subprocess
import sys
-relations = ['demo.demo_bhv_table']
+relations = ['demo.demo_bhv_table', 'demo.upsert_table']
failed_cases = []
for rel in relations:
@@ -18,6 +18,30 @@
if rows < 1:
failed_cases.append(rel)
+# update data
+subprocess.run(["docker", "compose", "exec", "postgres", "bash", "-c", "psql -h risingwave-standalone -p 4566 -d dev -U root -f update_delete.sql"], check=True)
+
+# delete
+sql = f"SELECT COUNT(*) FROM demo.upsert_table;"
+command = f'mysql -uroot -P9030 -hfe -e "{sql}"'
+output = subprocess.check_output(
+ ["docker", "compose", "exec", "mysql", "bash", "-c", command])
+rows = int(output.decode('utf-8').split('\n')[1])
+print(f"{rows} rows in demo.upsert_table")
+if rows != 3:
+ print(f"rows expected 3, get {rows}")
+ failed_cases.append("delete demo.upsert_table")
+
+# update
+sql = f"SELECT target_id FROM demo.upsert_table WHERE user_id = 3;"
+command = f'mysql -uroot -P9030 -hfe -e "{sql}"'
+output = subprocess.check_output(
+ ["docker", "compose", "exec", "mysql", "bash", "-c", command])
+id = int(output.decode('utf-8').split('\n')[1])
+if id != 30:
+ print(f"target_id expected 30, get {id}")
+ failed_cases.append("update demo.upsert_table")
+
if len(failed_cases) != 0:
print(f"Data check failed for case {failed_cases}")
sys.exit(1)
diff --git a/integration_tests/doris-sink/update_delete.sql b/integration_tests/doris-sink/update_delete.sql
new file mode 100644
index 0000000000000..adabd5163ef44
--- /dev/null
+++ b/integration_tests/doris-sink/update_delete.sql
@@ -0,0 +1,5 @@
+DELETE FROM upsert_user_behaviors WHERE user_id = 2;
+
+UPDATE upsert_user_behaviors SET target_id = 30 WHERE user_id = 3;
+
+FLUSH;
diff --git a/integration_tests/doris-sink/upsert/create_mv.sql b/integration_tests/doris-sink/upsert/create_mv.sql
deleted file mode 100644
index c367e6f2baa94..0000000000000
--- a/integration_tests/doris-sink/upsert/create_mv.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-CREATE MATERIALIZED VIEW bhv_mv AS
-SELECT
- user_id,
- target_id,
- event_timestamp AT TIME ZONE 'Asia/Shanghai' as event_timestamp_local
-FROM
- user_behaviors;
diff --git a/integration_tests/doris-sink/upsert/create_sink.sql b/integration_tests/doris-sink/upsert/create_sink.sql
deleted file mode 100644
index e7bd5445ba557..0000000000000
--- a/integration_tests/doris-sink/upsert/create_sink.sql
+++ /dev/null
@@ -1,12 +0,0 @@
-CREATE SINK bhv_doris_sink
-FROM
- bhv_mv WITH (
- connector = 'doris',
- type = 'upsert',
- doris.url = 'http://fe:8030',
- doris.user = 'users',
- doris.password = '123456',
- doris.database = 'demo',
- doris.table='demo_bhv_table',
- primary_key = 'user_id'
-);
\ No newline at end of file
diff --git a/integration_tests/doris-sink/upsert/create_table.sql b/integration_tests/doris-sink/upsert/create_table.sql
deleted file mode 100644
index c6cfa87eed3c8..0000000000000
--- a/integration_tests/doris-sink/upsert/create_table.sql
+++ /dev/null
@@ -1,10 +0,0 @@
-CREATE table user_behaviors (
- user_id int,
- target_id VARCHAR,
- target_type VARCHAR,
- event_timestamp TIMESTAMPTZ,
- behavior_type VARCHAR,
- parent_target_type VARCHAR,
- parent_target_id VARCHAR,
- PRIMARY KEY(user_id)
-);
diff --git a/integration_tests/doris-sink/upsert/insert_update_delete.sql b/integration_tests/doris-sink/upsert/insert_update_delete.sql
deleted file mode 100644
index f21353c161154..0000000000000
--- a/integration_tests/doris-sink/upsert/insert_update_delete.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-INSERT INTO user_behaviors VALUES(1,'1','1','2020-01-01T01:01:01Z','1','1','1'),
-(2,'2','2','2020-01-01T01:01:02Z','2','2','2'),
-(3,'3','3','2020-01-01T01:01:03Z','3','3','3'),
-(4,'4','4','2020-01-01T01:01:04Z','4','4','4');
-
-DELETE FROM user_behaviors WHERE user_id = 2;
-
-UPDATE user_behaviors SET target_id = 30 WHERE user_id = 3;
diff --git a/integration_tests/presto-trino/README.md b/integration_tests/presto-trino/README.md
index fe3ca48f3a92d..9b5ded7eb4ed0 100644
--- a/integration_tests/presto-trino/README.md
+++ b/integration_tests/presto-trino/README.md
@@ -4,6 +4,7 @@
1. Start the cluster with `docker compose up -d` command.
The command will start a RisingWave cluster together with an integrated trino and presto instance.
+
2. Connect the RisingWave frontend via the psql client. Create and insert data into the RisingWave table.
```shell
psql -h localhost -p 4566 -d dev -U root
@@ -16,6 +17,7 @@ INSERT 0 1
dev=> flush;
FLUSH
```
+
3. Query RisingWave from presto or trino
```shell
# Start trino client
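A scripted counterpart of step 3, querying RisingWave through the bundled client containers the same way the new `sink_check.py` in this demo does; the table name `data_types` comes from `create_source.sql` below, and the client service names come from `docker-compose.yml`.

```python
# Sketch: query RisingWave from the trino and presto client containers.
import subprocess

for client in ["trino-client", "presto-client"]:
    output = subprocess.check_output(
        ["docker", "compose", "run", client, "--execute", "select count(*) from data_types"]
    )
    print(client, output.decode().strip())
```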
diff --git a/integration_tests/presto-trino/create_source.sql b/integration_tests/presto-trino/create_source.sql
new file mode 100644
index 0000000000000..8e792d0ce1910
--- /dev/null
+++ b/integration_tests/presto-trino/create_source.sql
@@ -0,0 +1,26 @@
+CREATE TABLE IF NOT EXISTS data_types (
+ id BIGINT PRIMARY KEY,
+ varchar_column VARCHAR,
+ text_column TEXT,
+ integer_column INTEGER,
+ smallint_column SMALLINT,
+ bigint_column BIGINT,
+ -- decimal_column DECIMAL, prestodb cannot handle postgres's decimal when the precision is unspecified, as the precision range exceeds its maximum precision.
+ real_column REAL,
+ double_column DOUBLE PRECISION,
+ boolean_column BOOLEAN,
+ date_column DATE,
+ time_column TIME,
+ timestamp_column TIMESTAMP,
+ timestamptz_column TIMESTAMPTZ,
+ jsonb_column JSONB,
+ bytea_column BYTEA
+);
+
+INSERT INTO data_types (id, varchar_column, text_column, integer_column, smallint_column, bigint_column, real_column, double_column, boolean_column, date_column, time_column, timestamp_column, timestamptz_column, jsonb_column, bytea_column)
+VALUES
+ (1, 'Varchar value 1', 'Text value 1', 123, 456, 789, 12.34, 90.12, TRUE, '2023-05-22', '12:34:56', '2023-05-22 12:34:56', '2023-05-22T12:34:56Z', '{"key": "value"}', E'\\xDEADBEEF'),
+ (2, 'Varchar value 2', 'Text value 2', 234, 567, 890, 23.45, 01.23, FALSE, '2023-05-23', '23:45:01', '2023-05-23 23:45:01', '2023-05-23T23:45:01Z', '{"key": "value2"}', E'\\xFEEDBEEF'),
+ (3, 'Varchar value 3', 'Text value 3', 345, 678, 901, 34.56, 12.34, TRUE, '2023-05-24', '12:34:56', '2023-05-24 12:34:56', '2023-05-24T12:34:56Z', '{"key": "value3"}', E'\\xCAFEBABE'),
+ (4, 'Varchar value 4', 'Text value 4', 456, 789, 012, 45.67, 23.45, FALSE, '2023-05-25', '23:45:01', '2023-05-25 23:45:01', '2023-05-25T23:45:01Z', '{"key": "value4"}', E'\\xBABEC0DE'),
+ (5, 'Varchar value 5', 'Text value 5', 567, 890, 123, 56.78, 34.56, TRUE, '2023-05-26', '12:34:56', '2023-05-26 12:34:56', '2023-05-26T12:34:56Z', '{"key": "value5"}', E'\\xDEADBABE');
diff --git a/integration_tests/presto-trino/data_check b/integration_tests/presto-trino/data_check
new file mode 100644
index 0000000000000..1a0524c095179
--- /dev/null
+++ b/integration_tests/presto-trino/data_check
@@ -0,0 +1 @@
+data_types
\ No newline at end of file
diff --git a/integration_tests/presto-trino/docker-compose.yml b/integration_tests/presto-trino/docker-compose.yml
index 6e6f21508eabd..b12785139ebad 100644
--- a/integration_tests/presto-trino/docker-compose.yml
+++ b/integration_tests/presto-trino/docker-compose.yml
@@ -33,14 +33,16 @@ services:
profiles: [ "client" ]
entrypoint: [ "trino", "--server", "trino:8080", "--catalog", "risingwave", "--schema", "public" ]
presto:
- image: ahanaio/prestodb-sandbox
+ image: prestodb/presto:0.284
container_name: presto
volumes:
- ./etc/risingwave.properties:/opt/presto-server/etc/catalog/risingwave.properties
+ - ./etc/config.properties:/opt/presto-server/etc/config.properties
+ - ./etc/jvm.config:/opt/presto-server/etc/jvm.config
ports:
- "8080:8080"
presto-client:
- image: ahanaio/prestodb-sandbox
+ image: prestodb/presto:0.284
profiles: [ "client" ]
entrypoint: [ "presto-cli", "--server", "presto:8080", "--catalog", "risingwave", "--schema", "public" ]
volumes:
diff --git a/integration_tests/presto-trino/etc/config.properties b/integration_tests/presto-trino/etc/config.properties
new file mode 100644
index 0000000000000..60005e3b6e098
--- /dev/null
+++ b/integration_tests/presto-trino/etc/config.properties
@@ -0,0 +1,7 @@
+coordinator=true
+node-scheduler.include-coordinator=true
+http-server.http.port=8080
+query.max-memory=5GB
+query.max-memory-per-node=1GB
+discovery-server.enabled=true
+discovery.uri=http://0.0.0.0:8080
diff --git a/integration_tests/presto-trino/etc/jvm.config b/integration_tests/presto-trino/etc/jvm.config
new file mode 100644
index 0000000000000..ff656248be3ce
--- /dev/null
+++ b/integration_tests/presto-trino/etc/jvm.config
@@ -0,0 +1,9 @@
+-server
+-Xmx16G
+-XX:+UseG1GC
+-XX:G1HeapRegionSize=32M
+-XX:+UseGCOverheadLimit
+-XX:+ExplicitGCInvokesConcurrent
+-XX:+HeapDumpOnOutOfMemoryError
+-XX:+ExitOnOutOfMemoryError
+-Djdk.attach.allowAttachSelf=true
\ No newline at end of file
diff --git a/integration_tests/presto-trino/sink_check.py b/integration_tests/presto-trino/sink_check.py
new file mode 100644
index 0000000000000..be98cbf5d77a5
--- /dev/null
+++ b/integration_tests/presto-trino/sink_check.py
@@ -0,0 +1,16 @@
+import subprocess
+import sys
+
+failed_cases = []
+for client_image in ['presto-client', 'trino-client']:
+ output = subprocess.check_output(
+ ["docker", "compose", "run", client_image,
+ "--execute", "select * from data_types"],
+ )
+ rows_cnt = len(output.splitlines())
+ if rows_cnt < 1:
+ failed_cases.append(client_image)
+
+if len(failed_cases) != 0:
+ print(f"Data check failed for case {failed_cases}")
+ sys.exit(1)
diff --git a/integration_tests/starrocks-sink/README.md b/integration_tests/starrocks-sink/README.md
index 817ab57481e43..30cb79623d1e8 100644
--- a/integration_tests/starrocks-sink/README.md
+++ b/integration_tests/starrocks-sink/README.md
@@ -37,16 +37,10 @@ GRANT ALL ON *.* TO 'users'@'%';
3. Execute the SQL queries in sequence:
-- append-only sql:
- - create_source.sql
- - create_mv.sql
- - create_sink.sql
-
-- upsert sql:
- - upsert/create_table.sql
- - upsert/create_mv.sql
- - upsert/create_sink.sql
- - upsert/insert_update_delete.sql
+- create_source.sql
+- create_mv.sql
+- create_sink.sql
+- update_delete.sql
We only support `upsert` with starrocks' `PRIMARY KEY`
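After running `update_delete.sql`, the upsert sink's effect on the StarRocks `PRIMARY KEY` table can be verified by hand. The sketch below mirrors the checks added to `sink_check.py` further down in this patch; host, port, and credentials are the demo defaults from `docker-compose.yml` and `starrocks_prepare.sql`.

```python
# Sketch: manually verify the upsert sink results in StarRocks.
import subprocess

def starrocks_query(sql: str) -> str:
    command = f'mysql -uroot -P9030 -h127.0.0.1 -e "{sql}"'
    out = subprocess.check_output(
        ["docker", "compose", "exec", "starrocks-fe", "bash", "-c", command])
    return out.decode("utf-8").split("\n")[1]  # skip the header row

assert int(starrocks_query("SELECT COUNT(*) FROM demo.upsert_table")) == 3       # row 2 deleted
assert int(starrocks_query("SELECT target_id FROM demo.upsert_table WHERE user_id = 3")) == 30  # row 3 updated
```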
diff --git a/integration_tests/starrocks-sink/create_mv.sql b/integration_tests/starrocks-sink/create_mv.sql
index c367e6f2baa94..6e466703b0769 100644
--- a/integration_tests/starrocks-sink/create_mv.sql
+++ b/integration_tests/starrocks-sink/create_mv.sql
@@ -5,3 +5,11 @@ SELECT
event_timestamp AT TIME ZONE 'Asia/Shanghai' as event_timestamp_local
FROM
user_behaviors;
+
+CREATE MATERIALIZED VIEW upsert_bhv_mv AS
+SELECT
+ user_id,
+ target_id,
+ event_timestamp AT TIME ZONE 'Asia/Shanghai' as event_timestamp_local
+FROM
+ upsert_user_behaviors;
diff --git a/integration_tests/starrocks-sink/create_sink.sql b/integration_tests/starrocks-sink/create_sink.sql
index 56d1b227512de..f2f5b5eac9653 100644
--- a/integration_tests/starrocks-sink/create_sink.sql
+++ b/integration_tests/starrocks-sink/create_sink.sql
@@ -11,4 +11,19 @@ FROM
starrocks.database = 'demo',
starrocks.table = 'demo_bhv_table',
force_append_only='true'
-);
\ No newline at end of file
+);
+
+CREATE SINK upsert_starrocks_sink
+FROM
+ upsert_bhv_mv WITH (
+ connector = 'starrocks',
+ type = 'upsert',
+ starrocks.host = 'starrocks-fe',
+ starrocks.mysqlport = '9030',
+ starrocks.httpport = '8030',
+ starrocks.user = 'users',
+ starrocks.password = '123456',
+ starrocks.database = 'demo',
+ starrocks.table = 'upsert_table',
+ primary_key = 'user_id'
+);
diff --git a/integration_tests/starrocks-sink/create_source.sql b/integration_tests/starrocks-sink/create_source.sql
index ed7c02341638a..0e42308511121 100644
--- a/integration_tests/starrocks-sink/create_source.sql
+++ b/integration_tests/starrocks-sink/create_source.sql
@@ -14,3 +14,20 @@ CREATE table user_behaviors (
fields.user_id.end = '1000',
datagen.rows.per.second = '100'
) FORMAT PLAIN ENCODE JSON;
+
+CREATE table upsert_user_behaviors (
+ user_id int,
+ target_id VARCHAR,
+ target_type VARCHAR,
+ event_timestamp TIMESTAMPTZ,
+ behavior_type VARCHAR,
+ parent_target_type VARCHAR,
+ parent_target_id VARCHAR,
+ PRIMARY KEY(user_id)
+);
+
+INSERT INTO upsert_user_behaviors VALUES
+ (1,'1','1','2020-01-01T01:01:01Z','1','1','1'),
+ (2,'2','2','2020-01-01T01:01:02Z','2','2','2'),
+ (3,'3','3','2020-01-01T01:01:03Z','3','3','3'),
+ (4,'4','4','2020-01-01T01:01:04Z','4','4','4');
diff --git a/integration_tests/starrocks-sink/docker-compose.yml b/integration_tests/starrocks-sink/docker-compose.yml
index 41dabac20dc7f..4210206aa7705 100644
--- a/integration_tests/starrocks-sink/docker-compose.yml
+++ b/integration_tests/starrocks-sink/docker-compose.yml
@@ -52,6 +52,12 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: prometheus-0
+ postgres:
+ image: postgres:latest
+ command: tail -f /dev/null
+ volumes:
+ - "./update_delete.sql:/update_delete.sql"
+ restart: on-failure
volumes:
risingwave-standalone:
external: false
diff --git a/integration_tests/starrocks-sink/sink_check.py b/integration_tests/starrocks-sink/sink_check.py
index 699304854dc1f..7ab27e1e01cd1 100644
--- a/integration_tests/starrocks-sink/sink_check.py
+++ b/integration_tests/starrocks-sink/sink_check.py
@@ -1,7 +1,7 @@
import subprocess
import sys
-relations = ['demo.demo_bhv_table']
+relations = ['demo.demo_bhv_table', 'demo.upsert_table']
failed_cases = []
for rel in relations:
@@ -18,6 +18,30 @@
if rows < 1:
failed_cases.append(rel)
+# update data
+subprocess.run(["docker", "compose", "exec", "postgres", "bash", "-c", "psql -h risingwave-standalone -p 4566 -d dev -U root -f update_delete.sql"], check=True)
+
+# delete
+sql = f"SELECT COUNT(*) FROM demo.upsert_table;"
+command = f'mysql -uroot -P9030 -h127.0.0.1 -e "{sql}"'
+output = subprocess.check_output(
+ ["docker", "compose", "exec", "starrocks-fe", "bash", "-c", command])
+rows = int(output.decode('utf-8').split('\n')[1])
+print(f"{rows} rows in demo.upsert_table")
+if rows != 3:
+ print(f"rows expected 3, get {rows}")
+ failed_cases.append("delete demo.upsert_table")
+
+# update
+sql = f"SELECT target_id FROM demo.upsert_table WHERE user_id = 3;"
+command = f'mysql -uroot -P9030 -h127.0.0.1 -e "{sql}"'
+output = subprocess.check_output(
+ ["docker", "compose", "exec", "starrocks-fe", "bash", "-c", command])
+id = int(output.decode('utf-8').split('\n')[1])
+if id != 30:
+ print(f"target_id expected 30, get {id}")
+ failed_cases.append("update demo.upsert_table")
+
if len(failed_cases) != 0:
print(f"Data check failed for case {failed_cases}")
sys.exit(1)
diff --git a/integration_tests/starrocks-sink/starrocks_prepare.sql b/integration_tests/starrocks-sink/starrocks_prepare.sql
index aadaf85289b3c..6b304534061fe 100644
--- a/integration_tests/starrocks-sink/starrocks_prepare.sql
+++ b/integration_tests/starrocks-sink/starrocks_prepare.sql
@@ -9,5 +9,13 @@ CREATE table demo_bhv_table(
PRIMARY KEY(`user_id`)
DISTRIBUTED BY HASH(`user_id`) properties("replication_num" = "1");
+CREATE table upsert_table(
+ user_id int,
+ target_id text,
+ event_timestamp_local datetime
+) ENGINE=OLAP
+PRIMARY KEY(`user_id`)
+DISTRIBUTED BY HASH(`user_id`) properties("replication_num" = "1");
+
CREATE USER 'users'@'%' IDENTIFIED BY '123456';
GRANT ALL ON *.* TO 'users'@'%';
diff --git a/integration_tests/starrocks-sink/update_delete.sql b/integration_tests/starrocks-sink/update_delete.sql
new file mode 100644
index 0000000000000..adabd5163ef44
--- /dev/null
+++ b/integration_tests/starrocks-sink/update_delete.sql
@@ -0,0 +1,5 @@
+DELETE FROM upsert_user_behaviors WHERE user_id = 2;
+
+UPDATE upsert_user_behaviors SET target_id = 30 WHERE user_id = 3;
+
+FLUSH;
diff --git a/integration_tests/starrocks-sink/upsert/create_mv.sql b/integration_tests/starrocks-sink/upsert/create_mv.sql
deleted file mode 100644
index c367e6f2baa94..0000000000000
--- a/integration_tests/starrocks-sink/upsert/create_mv.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-CREATE MATERIALIZED VIEW bhv_mv AS
-SELECT
- user_id,
- target_id,
- event_timestamp AT TIME ZONE 'Asia/Shanghai' as event_timestamp_local
-FROM
- user_behaviors;
diff --git a/integration_tests/starrocks-sink/upsert/create_sink.sql b/integration_tests/starrocks-sink/upsert/create_sink.sql
deleted file mode 100644
index d7557bc1bd4fc..0000000000000
--- a/integration_tests/starrocks-sink/upsert/create_sink.sql
+++ /dev/null
@@ -1,14 +0,0 @@
-CREATE SINK bhv_starrocks_sink
-FROM
- bhv_mv WITH (
- connector = 'starrocks',
- type = 'upsert',
- starrocks.host = 'starrocks-fe',
- starrocks.mysqlport = '9030',
- starrocks.httpport = '8030',
- starrocks.user = 'users',
- starrocks.password = '123456',
- starrocks.database = 'demo',
- starrocks.table = 'demo_bhv_table',
- primary_key = 'user_id'
-);
\ No newline at end of file
diff --git a/integration_tests/starrocks-sink/upsert/create_table.sql b/integration_tests/starrocks-sink/upsert/create_table.sql
deleted file mode 100644
index c6cfa87eed3c8..0000000000000
--- a/integration_tests/starrocks-sink/upsert/create_table.sql
+++ /dev/null
@@ -1,10 +0,0 @@
-CREATE table user_behaviors (
- user_id int,
- target_id VARCHAR,
- target_type VARCHAR,
- event_timestamp TIMESTAMPTZ,
- behavior_type VARCHAR,
- parent_target_type VARCHAR,
- parent_target_id VARCHAR,
- PRIMARY KEY(user_id)
-);
diff --git a/integration_tests/starrocks-sink/upsert/insert_update_delete.sql b/integration_tests/starrocks-sink/upsert/insert_update_delete.sql
deleted file mode 100644
index f21353c161154..0000000000000
--- a/integration_tests/starrocks-sink/upsert/insert_update_delete.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-INSERT INTO user_behaviors VALUES(1,'1','1','2020-01-01T01:01:01Z','1','1','1'),
-(2,'2','2','2020-01-01T01:01:02Z','2','2','2'),
-(3,'3','3','2020-01-01T01:01:03Z','3','3','3'),
-(4,'4','4','2020-01-01T01:01:04Z','4','4','4');
-
-DELETE FROM user_behaviors WHERE user_id = 2;
-
-UPDATE user_behaviors SET target_id = 30 WHERE user_id = 3;
diff --git a/java/com_risingwave_java_binding_Binding.h b/java/com_risingwave_java_binding_Binding.h
index 282255cacd2fc..606110c405282 100644
--- a/java/com_risingwave_java_binding_Binding.h
+++ b/java/com_risingwave_java_binding_Binding.h
@@ -138,11 +138,19 @@ JNIEXPORT jstring JNICALL Java_com_risingwave_java_binding_Binding_iteratorGetSt
/*
* Class: com_risingwave_java_binding_Binding
* Method: iteratorGetTimestampValue
- * Signature: (JI)Ljava/sql/Timestamp;
+ * Signature: (JI)Ljava/time/LocalDateTime;
*/
JNIEXPORT jobject JNICALL Java_com_risingwave_java_binding_Binding_iteratorGetTimestampValue
(JNIEnv *, jclass, jlong, jint);
+/*
+ * Class: com_risingwave_java_binding_Binding
+ * Method: iteratorGetTimestamptzValue
+ * Signature: (JI)Ljava/time/OffsetDateTime;
+ */
+JNIEXPORT jobject JNICALL Java_com_risingwave_java_binding_Binding_iteratorGetTimestamptzValue
+ (JNIEnv *, jclass, jlong, jint);
+
/*
* Class: com_risingwave_java_binding_Binding
* Method: iteratorGetDecimalValue
@@ -154,7 +162,7 @@ JNIEXPORT jobject JNICALL Java_com_risingwave_java_binding_Binding_iteratorGetDe
/*
* Class: com_risingwave_java_binding_Binding
* Method: iteratorGetTimeValue
- * Signature: (JI)Ljava/sql/Time;
+ * Signature: (JI)Ljava/time/LocalTime;
*/
JNIEXPORT jobject JNICALL Java_com_risingwave_java_binding_Binding_iteratorGetTimeValue
(JNIEnv *, jclass, jlong, jint);
@@ -162,7 +170,7 @@ JNIEXPORT jobject JNICALL Java_com_risingwave_java_binding_Binding_iteratorGetTi
/*
* Class: com_risingwave_java_binding_Binding
* Method: iteratorGetDateValue
- * Signature: (JI)Ljava/sql/Date;
+ * Signature: (JI)Ljava/time/LocalDate;
*/
JNIEXPORT jobject JNICALL Java_com_risingwave_java_binding_Binding_iteratorGetDateValue
(JNIEnv *, jclass, jlong, jint);
diff --git a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/JsonDeserializer.java b/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/JsonDeserializer.java
index a651b0c614236..c941b09efe95c 100644
--- a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/JsonDeserializer.java
+++ b/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/JsonDeserializer.java
@@ -23,10 +23,10 @@
import com.risingwave.proto.ConnectorServiceProto.SinkWriterStreamRequest.WriteBatch.JsonPayload;
import com.risingwave.proto.Data;
import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Time;
-import java.sql.Timestamp;
import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.OffsetDateTime;
import java.util.Base64;
import java.util.Map;
import java.util.stream.Collectors;
@@ -135,10 +135,10 @@ private static BigDecimal castDecimal(Object value) {
}
}
- private static Time castTime(Object value) {
+ private static LocalTime castTime(Object value) {
try {
Long milli = castLong(value);
- return new Time(milli);
+ return LocalTime.ofNanoOfDay(milli * 1_000_000L);
} catch (RuntimeException e) {
throw io.grpc.Status.INVALID_ARGUMENT
.withDescription("unable to cast into time from " + value.getClass())
@@ -146,10 +146,10 @@ private static Time castTime(Object value) {
}
}
- private static Date castDate(Object value) {
+ private static LocalDate castDate(Object value) {
try {
- Long days = castLong(value) - 1;
- return Date.valueOf(LocalDate.of(1, 1, 1).plusDays(days));
+ Long days = castLong(value);
+ return LocalDate.ofEpochDay(days);
} catch (RuntimeException e) {
throw io.grpc.Status.INVALID_ARGUMENT
.withDescription("unable to cast into date from " + value.getClass())
@@ -190,14 +190,21 @@ private static Object validateJsonDataTypes(Data.DataType.TypeName typeName, Obj
}
return value;
case TIMESTAMP:
- case TIMESTAMPTZ:
if (!(value instanceof String)) {
throw io.grpc.Status.INVALID_ARGUMENT
.withDescription(
"Expected timestamp in string, got " + value.getClass())
.asRuntimeException();
}
- return Timestamp.valueOf((String) value);
+ return LocalDateTime.parse((String) value);
+ case TIMESTAMPTZ:
+ if (!(value instanceof String)) {
+ throw io.grpc.Status.INVALID_ARGUMENT
+ .withDescription(
+ "Expected timestamptz in string, got " + value.getClass())
+ .asRuntimeException();
+ }
+ return OffsetDateTime.parse((String) value);
case TIME:
return castTime(value);
case DATE:
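In summary, after this change the JSON payload is expected to carry dates as days since the Unix epoch, times as milliseconds since midnight, and local/offset timestamps as ISO-8601 strings. A rough Python equivalent of the new casts, for illustration only; the Java code above is authoritative.

```python
# Rough Python equivalents of the updated casts in JsonDeserializer.
from datetime import date, time, datetime, timedelta

def cast_date(days: int) -> date:
    # LocalDate.ofEpochDay(days): days since 1970-01-01
    return date(1970, 1, 1) + timedelta(days=days)

def cast_time(millis: int) -> time:
    # LocalTime.ofNanoOfDay(millis * 1_000_000): milliseconds since midnight
    return (datetime.min + timedelta(milliseconds=millis)).time()

def cast_timestamp(s: str) -> datetime:
    # LocalDateTime.parse(s): ISO-8601 without an offset, e.g. "2023-05-22T12:34:56"
    return datetime.fromisoformat(s)

def cast_timestamptz(s: str) -> datetime:
    # OffsetDateTime.parse(s): ISO-8601 with an offset, e.g. "2023-05-22T12:34:56+00:00"
    return datetime.fromisoformat(s)

assert cast_date(0) == date(1970, 1, 1)
assert cast_time(1) == time(0, 0, 0, 1000)  # 1 ms -> 1000 microseconds
```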
diff --git a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/deserializer/StreamChunkDeserializer.java b/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/deserializer/StreamChunkDeserializer.java
index 3dd3961a46228..c17504493f193 100644
--- a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/deserializer/StreamChunkDeserializer.java
+++ b/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/deserializer/StreamChunkDeserializer.java
@@ -110,7 +110,6 @@ static ValueGetter[] buildValueGetter(TableSchema tableSchema) {
};
break;
case TIMESTAMP:
- case TIMESTAMPTZ:
ret[i] =
row -> {
if (row.isNull(index)) {
@@ -119,6 +118,15 @@ static ValueGetter[] buildValueGetter(TableSchema tableSchema) {
return row.getTimestamp(index);
};
break;
+ case TIMESTAMPTZ:
+ ret[i] =
+ row -> {
+ if (row.isNull(index)) {
+ return null;
+ }
+ return row.getTimestamptz(index);
+ };
+ break;
case TIME:
ret[i] =
row -> {
diff --git a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/flink/FlinkMockTest.java b/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/flink/FlinkMockTest.java
index 70112ee701bff..0043925a67be0 100644
--- a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/flink/FlinkMockTest.java
+++ b/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/flink/FlinkMockTest.java
@@ -26,6 +26,11 @@
import com.risingwave.proto.Data;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
import java.util.*;
import org.apache.flink.api.connector.sink.Committer;
import org.apache.flink.api.connector.sink.GlobalCommitter;
@@ -272,13 +277,22 @@ public void commitId(List ids) {
private TableSchema getTableSchema() {
return new TableSchema(
- Lists.newArrayList("id", "name"),
+ Lists.newArrayList(
+ "id", "name", "v_date", "v_time", "v_timestamp", "v_timestamptz"),
Lists.newArrayList(
Data.DataType.newBuilder()
.setTypeName(Data.DataType.TypeName.INT32)
.build(),
Data.DataType.newBuilder()
.setTypeName(Data.DataType.TypeName.VARCHAR)
+ .build(),
+ Data.DataType.newBuilder().setTypeName(Data.DataType.TypeName.DATE).build(),
+ Data.DataType.newBuilder().setTypeName(Data.DataType.TypeName.TIME).build(),
+ Data.DataType.newBuilder()
+ .setTypeName(Data.DataType.TypeName.TIMESTAMP)
+ .build(),
+ Data.DataType.newBuilder()
+ .setTypeName(Data.DataType.TypeName.TIMESTAMPTZ)
.build()),
Lists.newArrayList("id", "name"));
}
@@ -291,39 +305,97 @@ public void testSinkWriteV1() {
flinkDynamicAdapterFactory.createWriter(getTableSchema(), new HashMap<>());
List sinkRows =
java.util.Arrays.asList(
- new ArraySinkRow(Data.Op.INSERT, 1, "Alice"),
- new ArraySinkRow(Data.Op.INSERT, 2, "Bob"));
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 1,
+ "Alice",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 0),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)),
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 2,
+ "Bob",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)));
writer.write(sinkRows);
writer.barrier(true);
RowData rowData1 = mockStorage.commitMap.lastEntry().getValue().get(0);
- assertEquals(rowData1.getArity(), 2);
+ assertEquals(rowData1.getArity(), 6);
assertEquals(rowData1.getInt(0), 1);
assertEquals(rowData1.getString(1).toString(), "Alice");
+ assertEquals(rowData1.getInt(2), 0);
+ assertEquals(rowData1.getInt(3), 0);
+ assertEquals(
+ rowData1.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(
+ rowData1.getTimestamp(5, 0).toInstant(),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC).toInstant());
RowData rowData2 = mockStorage.commitMap.lastEntry().getValue().get(1);
- assertEquals(rowData2.getArity(), 2);
+ assertEquals(rowData2.getArity(), 6);
assertEquals(rowData2.getInt(0), 2);
assertEquals(rowData2.getString(1).toString(), "Bob");
+ assertEquals(rowData2.getInt(2), 0);
+ assertEquals(rowData2.getInt(3), 1);
+ assertEquals(
+ rowData2.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(
+ rowData2.getTimestamp(5, 0).toInstant(),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC).toInstant());
List sinkRows2 =
java.util.Arrays.asList(
- new ArraySinkRow(Data.Op.INSERT, 3, "xxx"),
- new ArraySinkRow(Data.Op.INSERT, 4, "hhh"));
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 3,
+ "xxx",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 0),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)),
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 4,
+ "hhh",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 0),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)));
writer.write(sinkRows2);
writer.barrier(true);
RowData rowData3 = mockStorage.commitMap.lastEntry().getValue().get(0);
- assertEquals(rowData3.getArity(), 2);
+ assertEquals(rowData3.getArity(), 6);
assertEquals(rowData3.getInt(0), 3);
assertEquals(rowData3.getString(1).toString(), "xxx");
+ assertEquals(rowData3.getInt(2), 0);
+ assertEquals(rowData3.getInt(3), 0);
+ assertEquals(
+ rowData3.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(
+ rowData3.getTimestamp(5, 0).toInstant(),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC).toInstant());
RowData rowData4 = mockStorage.commitMap.lastEntry().getValue().get(1);
- assertEquals(rowData4.getArity(), 2);
+ assertEquals(rowData4.getArity(), 6);
assertEquals(rowData4.getInt(0), 4);
assertEquals(rowData4.getString(1).toString(), "hhh");
+ assertEquals(rowData4.getInt(2), 0);
+ assertEquals(rowData4.getInt(3), 0);
+ assertEquals(
+ rowData4.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(rowData4.getTimestamp(5, 0).toString(), "1970-01-01T00:00:01.000001");
}
@Test
@@ -334,38 +406,96 @@ public void testSinkWriteV2() {
flinkDynamicAdapterFactory.createWriter(getTableSchema(), new HashMap<>());
List sinkRows =
java.util.Arrays.asList(
- new ArraySinkRow(Data.Op.INSERT, 1, "Alice"),
- new ArraySinkRow(Data.Op.INSERT, 2, "Bob"));
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 1,
+ "Alice",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 0),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)),
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 2,
+ "Bob",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)));
writer.write(sinkRows);
writer.barrier(true);
RowData rowData1 = mockStorage.commitMap.lastEntry().getValue().get(0);
- assertEquals(rowData1.getArity(), 2);
+ assertEquals(rowData1.getArity(), 6);
assertEquals(rowData1.getInt(0), 1);
assertEquals(rowData1.getString(1).toString(), "Alice");
+ assertEquals(rowData1.getInt(2), 0);
+ assertEquals(rowData1.getInt(3), 0);
+ assertEquals(
+ rowData1.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(
+ rowData1.getTimestamp(5, 0).toInstant(),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC).toInstant());
RowData rowData2 = mockStorage.commitMap.lastEntry().getValue().get(1);
- assertEquals(rowData2.getArity(), 2);
+ assertEquals(rowData2.getArity(), 6);
assertEquals(rowData2.getInt(0), 2);
assertEquals(rowData2.getString(1).toString(), "Bob");
+ assertEquals(rowData2.getInt(2), 0);
+ assertEquals(rowData2.getInt(3), 1);
+ assertEquals(
+ rowData2.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(
+ rowData2.getTimestamp(5, 0).toInstant(),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC).toInstant());
List<SinkRow> sinkRows2 =
java.util.Arrays.asList(
- new ArraySinkRow(Data.Op.INSERT, 3, "xxx"),
- new ArraySinkRow(Data.Op.INSERT, 4, "hhh"));
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 3,
+ "xxx",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 0),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)),
+ new ArraySinkRow(
+ Data.Op.INSERT,
+ 4,
+ "hhh",
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 0),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC)));
writer.write(sinkRows2);
writer.barrier(true);
RowData rowData3 = mockStorage.commitMap.lastEntry().getValue().get(0);
- assertEquals(rowData3.getArity(), 2);
+ assertEquals(rowData3.getArity(), 6);
assertEquals(rowData3.getInt(0), 3);
assertEquals(rowData3.getString(1).toString(), "xxx");
+ assertEquals(rowData3.getInt(2), 0);
+ assertEquals(rowData3.getInt(3), 0);
+ assertEquals(
+ rowData3.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(
+ rowData3.getTimestamp(5, 0).toInstant(),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC).toInstant());
RowData rowData4 = mockStorage.commitMap.lastEntry().getValue().get(1);
- assertEquals(rowData4.getArity(), 2);
+ assertEquals(rowData4.getArity(), 6);
assertEquals(rowData4.getInt(0), 4);
assertEquals(rowData4.getString(1).toString(), "hhh");
+ assertEquals(rowData4.getInt(2), 0);
+ assertEquals(rowData4.getInt(3), 0);
+ assertEquals(
+ rowData4.getTimestamp(4, 0).toLocalDateTime(),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000));
+ assertEquals(rowData4.getTimestamp(5, 0).toString(), "1970-01-01T00:00:01.000001");
}
}
diff --git a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/jdbc/JDBCSinkTest.java b/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/jdbc/JDBCSinkTest.java
index 000f941b88f1b..68e9f386b2152 100644
--- a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/jdbc/JDBCSinkTest.java
+++ b/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/jdbc/JDBCSinkTest.java
@@ -25,6 +25,11 @@
import com.risingwave.proto.Data.DataType.TypeName;
import com.risingwave.proto.Data.Op;
import java.sql.*;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
import java.util.List;
import org.junit.Test;
import org.testcontainers.containers.JdbcDatabaseContainer;
@@ -38,9 +43,9 @@ private enum TestType {
}
private static final String pgCreateStmt =
- "CREATE TABLE %s (id INT PRIMARY KEY, v_varchar VARCHAR(255), v_date DATE, v_time TIME, v_timestamp TIMESTAMP, v_jsonb JSONB, v_bytea BYTEA)";
+ "CREATE TABLE %s (id INT PRIMARY KEY, v_varchar VARCHAR(255), v_date DATE, v_time TIME, v_timestamp TIMESTAMP, v_timestamptz TIMESTAMPTZ, v_jsonb JSONB, v_bytea BYTEA)";
private static final String mysqlCreateStmt =
- "CREATE TABLE %s (id INT PRIMARY KEY, v_varchar VARCHAR(255), v_date DATE, v_time TIME, v_timestamp TIMESTAMP, v_jsonb JSON, v_bytea BLOB)";
+ "CREATE TABLE %s (id INT PRIMARY KEY, v_varchar VARCHAR(255), v_date DATE, v_time TIME(6), v_timestamp DATETIME(6), v_timestamptz TIMESTAMP(6), v_jsonb JSON, v_bytea BLOB)";
static void createMockTable(String jdbcUrl, String tableName, TestType testType)
throws SQLException {
@@ -60,13 +65,21 @@ static void createMockTable(String jdbcUrl, String tableName, TestType testType)
static TableSchema getTestTableSchema() {
return new TableSchema(
Lists.newArrayList(
- "id", "v_varchar", "v_date", "v_time", "v_timestamp", "v_jsonb", "v_bytea"),
+ "id",
+ "v_varchar",
+ "v_date",
+ "v_time",
+ "v_timestamp",
+ "v_timestamptz",
+ "v_jsonb",
+ "v_bytea"),
Lists.newArrayList(
Data.DataType.newBuilder().setTypeName(TypeName.INT32).build(),
Data.DataType.newBuilder().setTypeName(TypeName.VARCHAR).build(),
Data.DataType.newBuilder().setTypeName(TypeName.DATE).build(),
Data.DataType.newBuilder().setTypeName(TypeName.TIME).build(),
Data.DataType.newBuilder().setTypeName(TypeName.TIMESTAMP).build(),
+ Data.DataType.newBuilder().setTypeName(TypeName.TIMESTAMPTZ).build(),
Data.DataType.newBuilder().setTypeName(TypeName.JSONB).build(),
Data.DataType.newBuilder().setTypeName(TypeName.BYTEA).build()),
Lists.newArrayList("id"));
@@ -89,9 +102,10 @@ static void testJDBCSync(JdbcDatabaseContainer<?> container, TestType testType)
Op.INSERT,
1,
"Alice",
- new Date(1000000000),
- new Time(1000000000),
- new Timestamp(1000000000),
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
"{\"key\": \"password\", \"value\": \"Singularity123\"}",
"I want to sleep".getBytes())));
sink.barrier(true);
@@ -111,9 +125,10 @@ static void testJDBCSync(JdbcDatabaseContainer<?> container, TestType testType)
Op.INSERT,
2,
"Bob",
- new Date(1000000000),
- new Time(1000000000),
- new Timestamp(1000000000),
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
"{\"key\": \"password\", \"value\": \"Singularity123\"}",
"I want to sleep".getBytes())));
sink.barrier(true);
@@ -150,18 +165,20 @@ static void testJDBCWrite(JdbcDatabaseContainer<?> container, TestType testType)
Op.INSERT,
1,
"Alice",
- new Date(1000000000),
- new Time(1000000000),
- new Timestamp(1000000000),
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
"{\"key\": \"password\", \"value\": \"Singularity123\"}",
"I want to sleep".getBytes()),
new ArraySinkRow(
Op.INSERT,
2,
"Bob",
- new Date(1000000000),
- new Time(1000000000),
- new Timestamp(1000000000),
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
"{\"key\": \"password\", \"value\": \"Singularity123\"}",
"I want to sleep".getBytes())));
@@ -177,27 +194,30 @@ static void testJDBCWrite(JdbcDatabaseContainer<?> container, TestType testType)
Op.UPDATE_DELETE,
1,
"Alice",
- new Date(1000000000),
- new Time(1000000000),
- new Timestamp(1000000000),
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
"{\"key\": \"password\", \"value\": \"Singularity123\"}",
"I want to sleep".getBytes()),
new ArraySinkRow(
Op.UPDATE_INSERT,
1,
"Clare",
- new Date(2000000000),
- new Time(2000000000),
- new Timestamp(2000000000),
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
"{\"key\": \"password\", \"value\": \"Singularity123123123123\"}",
"I want to eat".getBytes()),
new ArraySinkRow(
Op.DELETE,
2,
"Bob",
- new Date(1000000000),
- new Time(1000000000),
- new Timestamp(1000000000),
+ LocalDate.ofEpochDay(0),
+ LocalTime.of(0, 0, 0, 1000),
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
"{\"key\": \"password\", \"value\": \"Singularity123\"}",
"I want to sleep".getBytes())));
@@ -207,13 +227,18 @@ static void testJDBCWrite(JdbcDatabaseContainer<?> container, TestType testType)
// check if rows are inserted
assertEquals(1, rs.getInt(1));
assertEquals("Clare", rs.getString(2));
- assertEquals(new Date(2000000000).toString(), rs.getDate(3).toString());
- assertEquals(new Time(2000000000).toString(), rs.getTime(4).toString());
- assertEquals(new Timestamp(2000000000), rs.getTimestamp(5));
+ assertEquals(LocalDate.ofEpochDay(0), rs.getObject(3, LocalDate.class));
+ assertEquals(LocalTime.of(0, 0, 0, 1000), rs.getObject(4, LocalTime.class));
+ assertEquals(
+ LocalDateTime.of(1970, 1, 1, 0, 0, 0, 1000),
+ rs.getObject(5, LocalDateTime.class));
+ assertEquals(
+ OffsetDateTime.of(1970, 1, 1, 0, 0, 1, 1000, ZoneOffset.UTC),
+ rs.getObject(6, OffsetDateTime.class));
assertEquals(
"{\"key\": \"password\", \"value\": \"Singularity123123123123\"}",
- rs.getString(6));
- assertEquals("I want to eat", new String(rs.getBytes(7)));
+ rs.getString(7));
+ assertEquals("I want to eat", new String(rs.getBytes(8)));
assertFalse(rs.next());
}
diff --git a/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraSink.java b/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraSink.java
index d50010d6e2635..b0b7fb93c7b51 100644
--- a/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraSink.java
+++ b/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraSink.java
@@ -34,6 +34,8 @@
public class CassandraSink extends SinkWriterBase {
private static final Logger LOG = LoggerFactory.getLogger(CassandraSink.class);
+ private static final Integer MAX_BATCH_SIZE = 1024 * 16;
+
private final CqlSession session;
private final List<SinkRow> updateRowCache = new ArrayList<>(1);
private final HashMap stmtMap;
@@ -122,6 +124,7 @@ private void write_append_only(Iterator<SinkRow> rows) {
.withDescription("Unknown operation: " + op)
.asRuntimeException();
}
+ tryCommit();
}
}
@@ -155,6 +158,13 @@ private void write_upsert(Iterator<SinkRow> rows) {
.withDescription("Unknown operation: " + op)
.asRuntimeException();
}
+ tryCommit();
+ }
+ }
+
+ private void tryCommit() {
+ if (batchBuilder.getStatementsCount() >= MAX_BATCH_SIZE) {
+ sync();
}
}
diff --git a/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraUtil.java b/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraUtil.java
index 3bb0789279373..a6be8f7fc89c1 100644
--- a/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraUtil.java
+++ b/java/connector-node/risingwave-sink-cassandra/src/main/java/com/risingwave/connector/CassandraUtil.java
@@ -24,9 +24,7 @@
import com.risingwave.proto.Data.DataType.TypeName;
import io.grpc.Status;
import java.nio.ByteBuffer;
-import java.sql.Date;
-import java.sql.Time;
-import java.sql.Timestamp;
+import java.time.OffsetDateTime;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -52,7 +50,10 @@ private static int getCorrespondingCassandraType(DataType dataType) {
case DECIMAL:
return com.datastax.oss.protocol.internal.ProtocolConstants.DataType.DECIMAL;
case TIMESTAMP:
- return com.datastax.oss.protocol.internal.ProtocolConstants.DataType.TIMESTAMP;
+ throw Status.INVALID_ARGUMENT
+ .withDescription(
+ "cassandra does not have a type corresponding to naive timestamp")
+ .asRuntimeException();
case TIMESTAMPTZ:
return com.datastax.oss.protocol.internal.ProtocolConstants.DataType.TIMESTAMP;
case DATE:
@@ -140,14 +141,16 @@ public static Object convertRow(Object value, TypeName typeName) {
case BOOLEAN:
case VARCHAR:
case DECIMAL:
+ case DATE:
+ case TIME:
return value;
case TIMESTAMP:
+ throw Status.INVALID_ARGUMENT
+ .withDescription(
+ "cassandra does not have a type corresponding to naive timestamp")
+ .asRuntimeException();
case TIMESTAMPTZ:
- return ((Timestamp) value).toInstant();
- case DATE:
- return ((Date) value).toLocalDate();
- case TIME:
- return ((Time) value).toLocalTime();
+ return ((OffsetDateTime) value).toInstant();
case INTERVAL:
return CqlDuration.from((String) value);
case BYTEA:
diff --git a/java/connector-node/risingwave-sink-deltalake/src/main/java/com/risingwave/connector/DeltaLakeSink.java b/java/connector-node/risingwave-sink-deltalake/src/main/java/com/risingwave/connector/DeltaLakeSink.java
index 86f4d6bdf09bc..3d2c23e4bcba4 100644
--- a/java/connector-node/risingwave-sink-deltalake/src/main/java/com/risingwave/connector/DeltaLakeSink.java
+++ b/java/connector-node/risingwave-sink-deltalake/src/main/java/com/risingwave/connector/DeltaLakeSink.java
@@ -25,9 +25,8 @@
import io.delta.standalone.actions.AddFile;
import io.delta.standalone.exceptions.DeltaConcurrentModificationException;
import java.io.IOException;
-import java.sql.Timestamp;
-import java.time.LocalDate;
-import java.time.temporal.ChronoUnit;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
import java.util.*;
import org.apache.avro.Conversions;
import org.apache.avro.Schema;
@@ -88,13 +87,11 @@ public void write(Iterator<SinkRow> rows) {
GenericRecord record = new GenericData.Record(this.sinkSchema);
for (int i = 0; i < this.sinkSchema.getFields().size(); i++) {
Object values;
- if (row.get(i) instanceof Timestamp) {
- values = ((Timestamp) row.get(i)).getTime();
- } else if (row.get(i) instanceof java.sql.Date) {
+ if (row.get(i) instanceof LocalDateTime) {
values =
- ChronoUnit.DAYS.between(
- LocalDate.ofEpochDay(0),
- ((java.sql.Date) row.get(i)).toLocalDate());
+ ((LocalDateTime) row.get(i))
+ .toInstant(ZoneOffset.UTC)
+ .toEpochMilli();
} else {
values = row.get(i);
}
diff --git a/java/connector-node/risingwave-sink-es-7/src/main/java/com/risingwave/connector/EsSink.java b/java/connector-node/risingwave-sink-es-7/src/main/java/com/risingwave/connector/EsSink.java
index 7a2e5742bb790..e40332a327112 100644
--- a/java/connector-node/risingwave-sink-es-7/src/main/java/com/risingwave/connector/EsSink.java
+++ b/java/connector-node/risingwave-sink-es-7/src/main/java/com/risingwave/connector/EsSink.java
@@ -20,7 +20,6 @@
import com.risingwave.connector.api.sink.SinkRow;
import com.risingwave.connector.api.sink.SinkWriterBase;
import io.grpc.Status;
-import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@@ -249,7 +248,7 @@ public BulkListener(RequestTracker requestTracker) {
/** This method is called just before bulk is executed. */
@Override
public void beforeBulk(long executionId, BulkRequest request) {
- LOG.info("Sending bulk of {} actions to Elasticsearch.", request.numberOfActions());
+ LOG.debug("Sending bulk of {} actions to Elasticsearch.", request.numberOfActions());
}
/** This method is called after bulk execution. */
@@ -263,7 +262,7 @@ public void afterBulk(long executionId, BulkRequest request, BulkResponse respon
this.requestTracker.addErrResult(errMessage);
} else {
this.requestTracker.addOkResult(request.numberOfActions());
- LOG.info("Sent bulk of {} actions to Elasticsearch.", request.numberOfActions());
+ LOG.debug("Sent bulk of {} actions to Elasticsearch.", request.numberOfActions());
}
}
@@ -353,10 +352,4 @@ public void drop() {
public RestHighLevelClient getClient() {
return client;
}
-
- private final SimpleDateFormat createSimpleDateFormat(String pattern, TimeZone timeZone) {
- SimpleDateFormat sdf = new SimpleDateFormat(pattern);
- sdf.setTimeZone(timeZone);
- return sdf;
- }
}
diff --git a/java/connector-node/risingwave-sink-mock-flink/risingwave-sink-mock-flink-runtime/src/main/java/com/risingwave/mock/flink/runtime/RowDataImpl.java b/java/connector-node/risingwave-sink-mock-flink/risingwave-sink-mock-flink-runtime/src/main/java/com/risingwave/mock/flink/runtime/RowDataImpl.java
index c124af13dcf79..7129fdd1e1900 100644
--- a/java/connector-node/risingwave-sink-mock-flink/risingwave-sink-mock-flink-runtime/src/main/java/com/risingwave/mock/flink/runtime/RowDataImpl.java
+++ b/java/connector-node/risingwave-sink-mock-flink/risingwave-sink-mock-flink-runtime/src/main/java/com/risingwave/mock/flink/runtime/RowDataImpl.java
@@ -18,9 +18,10 @@
import com.risingwave.connector.api.sink.SinkRow;
import io.grpc.Status;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.time.temporal.ChronoField;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Base64;
import org.apache.flink.table.data.*;
@@ -89,10 +90,15 @@ public short getShort(int i) {
@Override
public int getInt(int i) {
- if (sinkRow.get(i) instanceof Date) {
- return (int) ((Date) sinkRow.get(i)).toLocalDate().getLong(ChronoField.EPOCH_DAY);
+ Object value = sinkRow.get(i);
+ if (value instanceof LocalDate) {
+ return (int) ((LocalDate) value).toEpochDay();
+ } else if (value instanceof LocalTime) {
+ // number of milliseconds of the day
+ return (int) (((LocalTime) value).toNanoOfDay() / 1_000_000L);
+ } else {
+ return (int) value;
}
- return (int) sinkRow.get(i);
}
@Override
@@ -122,7 +128,14 @@ public DecimalData getDecimal(int i, int i1, int i2) {
@Override
public TimestampData getTimestamp(int i, int i1) {
- return TimestampData.fromInstant(((Timestamp) sinkRow.get(i)).toInstant());
+ Object value = sinkRow.get(i);
+ if (value instanceof LocalDateTime) {
+ return TimestampData.fromLocalDateTime((LocalDateTime) value);
+ } else if (value instanceof OffsetDateTime) {
+ return TimestampData.fromInstant(((OffsetDateTime) value).toInstant());
+ } else {
+ throw Status.INTERNAL.withDescription("unreachable").asRuntimeException();
+ }
}
@Override
diff --git a/java/connector-node/risingwave-source-cdc/src/main/java/com/risingwave/connector/cdc/debezium/converters/DatetimeTypeConverter.java b/java/connector-node/risingwave-source-cdc/src/main/java/com/risingwave/connector/cdc/debezium/converters/DatetimeTypeConverter.java
index 0b68010ce667d..e0b2472cae2e4 100644
--- a/java/connector-node/risingwave-source-cdc/src/main/java/com/risingwave/connector/cdc/debezium/converters/DatetimeTypeConverter.java
+++ b/java/connector-node/risingwave-source-cdc/src/main/java/com/risingwave/connector/cdc/debezium/converters/DatetimeTypeConverter.java
@@ -60,12 +60,10 @@ private String convertDate(Object input) {
public static void main(String[] args) {
var converter = new DatetimeTypeConverter();
var d1 = LocalDate.of(1988, 5, 4);
- var d2 = java.sql.Date.valueOf("1960-01-01");
Integer d3 = 8989;
System.out.println(converter.convertDate(null));
System.out.println(converter.convertDate(d1));
- System.out.println(converter.convertDate(d2));
System.out.println(converter.convertDate(d3));
}
}
diff --git a/java/java-binding-integration-test/src/main/java/com/risingwave/java/binding/Utils.java b/java/java-binding-integration-test/src/main/java/com/risingwave/java/binding/Utils.java
index 9eba0aa8fb038..ecf77ef6c832c 100644
--- a/java/java-binding-integration-test/src/main/java/com/risingwave/java/binding/Utils.java
+++ b/java/java-binding-integration-test/src/main/java/com/risingwave/java/binding/Utils.java
@@ -14,6 +14,8 @@
package com.risingwave.java.binding;
+import java.time.ZoneOffset;
+
public class Utils {
public static void validateRow(BaseRow row) {
// The validation of row data are according to the data generation rule
@@ -48,7 +50,7 @@ public static void validateRow(BaseRow row) {
((Short) rowIndex).toString().repeat((rowIndex % 10) + 1)));
}
- if (row.getTimestamp(7).getTime() != rowIndex * 1000) {
+ if (row.getTimestamp(7).toInstant(ZoneOffset.UTC).toEpochMilli() != rowIndex * 1000) {
throw new RuntimeException(
String.format("invalid Timestamp value: %s %s", row.getTimestamp(7), rowIndex));
}
diff --git a/java/java-binding/src/main/java/com/risingwave/java/binding/BaseRow.java b/java/java-binding/src/main/java/com/risingwave/java/binding/BaseRow.java
index ef4de24bbd0c0..39f128ab262ad 100644
--- a/java/java-binding/src/main/java/com/risingwave/java/binding/BaseRow.java
+++ b/java/java-binding/src/main/java/com/risingwave/java/binding/BaseRow.java
@@ -53,11 +53,15 @@ public String getString(int index) {
return Binding.iteratorGetStringValue(pointer, index);
}
- public java.sql.Timestamp getTimestamp(int index) {
+ public java.time.LocalDateTime getTimestamp(int index) {
return Binding.iteratorGetTimestampValue(pointer, index);
}
- public java.sql.Time getTime(int index) {
+ public java.time.OffsetDateTime getTimestamptz(int index) {
+ return Binding.iteratorGetTimestamptzValue(pointer, index);
+ }
+
+ public java.time.LocalTime getTime(int index) {
return Binding.iteratorGetTimeValue(pointer, index);
}
@@ -65,7 +69,7 @@ public java.math.BigDecimal getDecimal(int index) {
return Binding.iteratorGetDecimalValue(pointer, index);
}
- public java.sql.Date getDate(int index) {
+ public java.time.LocalDate getDate(int index) {
return Binding.iteratorGetDateValue(pointer, index);
}
diff --git a/java/java-binding/src/main/java/com/risingwave/java/binding/Binding.java b/java/java-binding/src/main/java/com/risingwave/java/binding/Binding.java
index 6d677c9ddf4c5..fbd952cc68e64 100644
--- a/java/java-binding/src/main/java/com/risingwave/java/binding/Binding.java
+++ b/java/java-binding/src/main/java/com/risingwave/java/binding/Binding.java
@@ -69,13 +69,15 @@ public static native void tracingSlf4jEvent(
static native String iteratorGetStringValue(long pointer, int index);
- static native java.sql.Timestamp iteratorGetTimestampValue(long pointer, int index);
+ static native java.time.LocalDateTime iteratorGetTimestampValue(long pointer, int index);
+
+ static native java.time.OffsetDateTime iteratorGetTimestamptzValue(long pointer, int index);
static native java.math.BigDecimal iteratorGetDecimalValue(long pointer, int index);
- static native java.sql.Time iteratorGetTimeValue(long pointer, int index);
+ static native java.time.LocalTime iteratorGetTimeValue(long pointer, int index);
- static native java.sql.Date iteratorGetDateValue(long pointer, int index);
+ static native java.time.LocalDate iteratorGetDateValue(long pointer, int index);
static native String iteratorGetIntervalValue(long pointer, int index);
diff --git a/java/tools/maven/checkstyle.xml b/java/tools/maven/checkstyle.xml
index 4bd0d510e0fad..f6816332723c6 100644
--- a/java/tools/maven/checkstyle.xml
+++ b/java/tools/maven/checkstyle.xml
@@ -191,10 +191,24 @@ This file is based on the checkstyle file of Apache Beam.
+
+
+
+
+
+
+
+
+
+
+
+
+