+ )}
))}
diff --git a/dashboard/lib/api/api.ts b/dashboard/lib/api/api.ts
index 6c5d885542454..af0116e31f3bb 100644
--- a/dashboard/lib/api/api.ts
+++ b/dashboard/lib/api/api.ts
@@ -26,9 +26,7 @@ export const PREDEFINED_API_ENDPOINTS = [
]
export const DEFAULT_API_ENDPOINT: string =
- process.env.NODE_ENV === "production"
- ? PROD_API_ENDPOINT
- : MOCK_API_ENDPOINT; // EXTERNAL_META_NODE_API_ENDPOINT to debug with RisingWave servers
+ process.env.NODE_ENV === "production" ? PROD_API_ENDPOINT : MOCK_API_ENDPOINT // EXTERNAL_META_NODE_API_ENDPOINT to debug with RisingWave servers
export const API_ENDPOINT_KEY = "risingwave.dashboard.api.endpoint"
diff --git a/dashboard/lib/api/streaming.ts b/dashboard/lib/api/streaming.ts
index 1a8e97081caa4..948cd567d3f2b 100644
--- a/dashboard/lib/api/streaming.ts
+++ b/dashboard/lib/api/streaming.ts
@@ -22,6 +22,7 @@ import {
Schema,
Sink,
Source,
+ Subscription,
Table,
View,
} from "../../proto/gen/catalog"
@@ -47,9 +48,9 @@ export interface Relation {
owner: number
schemaId: number
databaseId: number
- columns: (ColumnCatalog | Field)[]
// For display
+ columns?: (ColumnCatalog | Field)[]
ownerName?: string
schemaName?: string
databaseName?: string
@@ -66,6 +67,8 @@ export function relationType(x: Relation) {
return "SINK"
} else if ((x as Source).info !== undefined) {
return "SOURCE"
+ } else if ((x as Subscription).dependentTableId !== undefined) {
+ return "SUBSCRIPTION"
} else {
return "UNKNOWN"
}
@@ -98,7 +101,8 @@ export async function getRelations() {
await getTables(),
await getIndexes(),
await getSinks(),
- await getSources()
+ await getSources(),
+ await getSubscriptions()
)
relations = sortBy(relations, (x) => x.id)
return relations
@@ -150,6 +154,14 @@ export async function getViews() {
return views
}
+export async function getSubscriptions() {
+ let subscriptions: Subscription[] = (await api.get("/subscriptions")).map(
+ Subscription.fromJSON
+ )
+ subscriptions = sortBy(subscriptions, (x) => x.id)
+ return subscriptions
+}
+
export async function getUsers() {
let users: UserInfo[] = (await api.get("/users")).map(UserInfo.fromJSON)
users = sortBy(users, (x) => x.id)
diff --git a/dashboard/mock-server.js b/dashboard/mock-server.js
index 2db52df788e22..50c55e12686b8 100644
--- a/dashboard/mock-server.js
+++ b/dashboard/mock-server.js
@@ -45,6 +45,10 @@ app.get("/indexes", (req, res, next) => {
res.json(require("./mock/indexes.json"))
})
+app.get("/subscriptions", (req, res, next) => {
+  res.json(require("./mock/subscriptions.json"))
+})
+
app.get("/internal_tables", (req, res, next) => {
res.json(require("./mock/internal_tables.json"))
})
diff --git a/dashboard/package-lock.json b/dashboard/package-lock.json
index 1c462e1675207..5bf9ae127252e 100644
--- a/dashboard/package-lock.json
+++ b/dashboard/package-lock.json
@@ -6,6 +6,7 @@
"": {
"hasInstallScript": true,
"dependencies": {
+ "16": "^0.0.2",
"@chakra-ui/react": "^2.3.1",
"@emotion/react": "^11.10.4",
"@emotion/styled": "^11.10.4",
@@ -26,7 +27,7 @@
"fabric": "^5.2.1",
"framer-motion": "^6.5.1",
"lodash": "^4.17.21",
- "next": "^14.1.0",
+ "next": "^14.1.1",
"nuqs": "^1.14.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
@@ -2092,9 +2093,9 @@
}
},
"node_modules/@next/env": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.0.tgz",
- "integrity": "sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw=="
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.1.tgz",
+ "integrity": "sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA=="
},
"node_modules/@next/eslint-plugin-next": {
"version": "14.1.0",
@@ -2161,9 +2162,9 @@
}
},
"node_modules/@next/swc-darwin-arm64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.0.tgz",
- "integrity": "sha512-nUDn7TOGcIeyQni6lZHfzNoo9S0euXnu0jhsbMOmMJUBfgsnESdjN97kM7cBqQxZa8L/bM9om/S5/1dzCrW6wQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz",
+ "integrity": "sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==",
"cpu": [
"arm64"
],
@@ -2176,9 +2177,9 @@
}
},
"node_modules/@next/swc-darwin-x64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.0.tgz",
- "integrity": "sha512-1jgudN5haWxiAl3O1ljUS2GfupPmcftu2RYJqZiMJmmbBT5M1XDffjUtRUzP4W3cBHsrvkfOFdQ71hAreNQP6g==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz",
+ "integrity": "sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==",
"cpu": [
"x64"
],
@@ -2191,9 +2192,9 @@
}
},
"node_modules/@next/swc-linux-arm64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.0.tgz",
- "integrity": "sha512-RHo7Tcj+jllXUbK7xk2NyIDod3YcCPDZxj1WLIYxd709BQ7WuRYl3OWUNG+WUfqeQBds6kvZYlc42NJJTNi4tQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz",
+ "integrity": "sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==",
"cpu": [
"arm64"
],
@@ -2206,9 +2207,9 @@
}
},
"node_modules/@next/swc-linux-arm64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.0.tgz",
- "integrity": "sha512-v6kP8sHYxjO8RwHmWMJSq7VZP2nYCkRVQ0qolh2l6xroe9QjbgV8siTbduED4u0hlk0+tjS6/Tuy4n5XCp+l6g==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz",
+ "integrity": "sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==",
"cpu": [
"arm64"
],
@@ -2221,9 +2222,9 @@
}
},
"node_modules/@next/swc-linux-x64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.0.tgz",
- "integrity": "sha512-zJ2pnoFYB1F4vmEVlb/eSe+VH679zT1VdXlZKX+pE66grOgjmKJHKacf82g/sWE4MQ4Rk2FMBCRnX+l6/TVYzQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz",
+ "integrity": "sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==",
"cpu": [
"x64"
],
@@ -2236,9 +2237,9 @@
}
},
"node_modules/@next/swc-linux-x64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.0.tgz",
- "integrity": "sha512-rbaIYFt2X9YZBSbH/CwGAjbBG2/MrACCVu2X0+kSykHzHnYH5FjHxwXLkcoJ10cX0aWCEynpu+rP76x0914atg==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz",
+ "integrity": "sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==",
"cpu": [
"x64"
],
@@ -2251,9 +2252,9 @@
}
},
"node_modules/@next/swc-win32-arm64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.0.tgz",
- "integrity": "sha512-o1N5TsYc8f/HpGt39OUQpQ9AKIGApd3QLueu7hXk//2xq5Z9OxmV6sQfNp8C7qYmiOlHYODOGqNNa0e9jvchGQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz",
+ "integrity": "sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==",
"cpu": [
"arm64"
],
@@ -2266,9 +2267,9 @@
}
},
"node_modules/@next/swc-win32-ia32-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.0.tgz",
- "integrity": "sha512-XXIuB1DBRCFwNO6EEzCTMHT5pauwaSj4SWs7CYnME57eaReAKBXCnkUE80p/pAZcewm7hs+vGvNqDPacEXHVkw==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz",
+ "integrity": "sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==",
"cpu": [
"ia32"
],
@@ -2281,9 +2282,9 @@
}
},
"node_modules/@next/swc-win32-x64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.0.tgz",
- "integrity": "sha512-9WEbVRRAqJ3YFVqEZIxUqkiO8l1nool1LmNxygr5HWF8AcSYsEpneUDhmjUVJEzO2A04+oPtZdombzzPPkTtgg==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz",
+ "integrity": "sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==",
"cpu": [
"x64"
],
@@ -3350,6 +3351,14 @@
"resolved": "https://registry.npmjs.org/@zag-js/focus-visible/-/focus-visible-0.1.0.tgz",
"integrity": "sha512-PeaBcTmdZWcFf7n1aM+oiOdZc+sy14qi0emPIeUuGMTjbP0xLGrZu43kdpHnWSXy7/r4Ubp/vlg50MCV8+9Isg=="
},
+ "node_modules/16": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/16/-/16-0.0.2.tgz",
+ "integrity": "sha512-AhG4lpdn+/it+U5Xl1bm5SbaHYTH5NfU/vXZkP7E7CHjtVtITuFVZKa3AZP3gN38RDJHYYtEqWmqzCutlXaR7w==",
+ "dependencies": {
+ "numeric": "^1.2.6"
+ }
+ },
"node_modules/abab": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz",
@@ -8538,11 +8547,11 @@
}
},
"node_modules/next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/next/-/next-14.1.0.tgz",
- "integrity": "sha512-wlzrsbfeSU48YQBjZhDzOwhWhGsy+uQycR8bHAOt1LY1bn3zZEcDyHQOEoN3aWzQ8LHCAJ1nqrWCc9XF2+O45Q==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/next/-/next-14.1.1.tgz",
+ "integrity": "sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==",
"dependencies": {
- "@next/env": "14.1.0",
+ "@next/env": "14.1.1",
"@swc/helpers": "0.5.2",
"busboy": "1.6.0",
"caniuse-lite": "^1.0.30001579",
@@ -8557,15 +8566,15 @@
"node": ">=18.17.0"
},
"optionalDependencies": {
- "@next/swc-darwin-arm64": "14.1.0",
- "@next/swc-darwin-x64": "14.1.0",
- "@next/swc-linux-arm64-gnu": "14.1.0",
- "@next/swc-linux-arm64-musl": "14.1.0",
- "@next/swc-linux-x64-gnu": "14.1.0",
- "@next/swc-linux-x64-musl": "14.1.0",
- "@next/swc-win32-arm64-msvc": "14.1.0",
- "@next/swc-win32-ia32-msvc": "14.1.0",
- "@next/swc-win32-x64-msvc": "14.1.0"
+ "@next/swc-darwin-arm64": "14.1.1",
+ "@next/swc-darwin-x64": "14.1.1",
+ "@next/swc-linux-arm64-gnu": "14.1.1",
+ "@next/swc-linux-arm64-musl": "14.1.1",
+ "@next/swc-linux-x64-gnu": "14.1.1",
+ "@next/swc-linux-x64-musl": "14.1.1",
+ "@next/swc-win32-arm64-msvc": "14.1.1",
+ "@next/swc-win32-ia32-msvc": "14.1.1",
+ "@next/swc-win32-x64-msvc": "14.1.1"
},
"peerDependencies": {
"@opentelemetry/api": "^1.1.0",
@@ -8680,6 +8689,11 @@
"set-blocking": "^2.0.0"
}
},
+ "node_modules/numeric": {
+ "version": "1.2.6",
+ "resolved": "https://registry.npmjs.org/numeric/-/numeric-1.2.6.tgz",
+ "integrity": "sha512-avBiDAP8siMa7AfJgYyuxw1oyII4z2sswS23+O+ZfV28KrtNzy0wxUFwi4f3RyM4eeeXNs1CThxR7pb5QQcMiw=="
+ },
"node_modules/nuqs": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/nuqs/-/nuqs-1.14.1.tgz",
@@ -11586,6 +11600,14 @@
}
},
"dependencies": {
+ "16": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/16/-/16-0.0.2.tgz",
+ "integrity": "sha512-AhG4lpdn+/it+U5Xl1bm5SbaHYTH5NfU/vXZkP7E7CHjtVtITuFVZKa3AZP3gN38RDJHYYtEqWmqzCutlXaR7w==",
+ "requires": {
+ "numeric": "^1.2.6"
+ }
+ },
"@aashutoshrathi/word-wrap": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz",
@@ -13123,9 +13145,9 @@
}
},
"@next/env": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.0.tgz",
- "integrity": "sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw=="
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.1.tgz",
+ "integrity": "sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA=="
},
"@next/eslint-plugin-next": {
"version": "14.1.0",
@@ -13176,57 +13198,57 @@
}
},
"@next/swc-darwin-arm64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.0.tgz",
- "integrity": "sha512-nUDn7TOGcIeyQni6lZHfzNoo9S0euXnu0jhsbMOmMJUBfgsnESdjN97kM7cBqQxZa8L/bM9om/S5/1dzCrW6wQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz",
+ "integrity": "sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==",
"optional": true
},
"@next/swc-darwin-x64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.0.tgz",
- "integrity": "sha512-1jgudN5haWxiAl3O1ljUS2GfupPmcftu2RYJqZiMJmmbBT5M1XDffjUtRUzP4W3cBHsrvkfOFdQ71hAreNQP6g==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz",
+ "integrity": "sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==",
"optional": true
},
"@next/swc-linux-arm64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.0.tgz",
- "integrity": "sha512-RHo7Tcj+jllXUbK7xk2NyIDod3YcCPDZxj1WLIYxd709BQ7WuRYl3OWUNG+WUfqeQBds6kvZYlc42NJJTNi4tQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz",
+ "integrity": "sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==",
"optional": true
},
"@next/swc-linux-arm64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.0.tgz",
- "integrity": "sha512-v6kP8sHYxjO8RwHmWMJSq7VZP2nYCkRVQ0qolh2l6xroe9QjbgV8siTbduED4u0hlk0+tjS6/Tuy4n5XCp+l6g==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz",
+ "integrity": "sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==",
"optional": true
},
"@next/swc-linux-x64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.0.tgz",
- "integrity": "sha512-zJ2pnoFYB1F4vmEVlb/eSe+VH679zT1VdXlZKX+pE66grOgjmKJHKacf82g/sWE4MQ4Rk2FMBCRnX+l6/TVYzQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz",
+ "integrity": "sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==",
"optional": true
},
"@next/swc-linux-x64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.0.tgz",
- "integrity": "sha512-rbaIYFt2X9YZBSbH/CwGAjbBG2/MrACCVu2X0+kSykHzHnYH5FjHxwXLkcoJ10cX0aWCEynpu+rP76x0914atg==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz",
+ "integrity": "sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==",
"optional": true
},
"@next/swc-win32-arm64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.0.tgz",
- "integrity": "sha512-o1N5TsYc8f/HpGt39OUQpQ9AKIGApd3QLueu7hXk//2xq5Z9OxmV6sQfNp8C7qYmiOlHYODOGqNNa0e9jvchGQ==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz",
+ "integrity": "sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==",
"optional": true
},
"@next/swc-win32-ia32-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.0.tgz",
- "integrity": "sha512-XXIuB1DBRCFwNO6EEzCTMHT5pauwaSj4SWs7CYnME57eaReAKBXCnkUE80p/pAZcewm7hs+vGvNqDPacEXHVkw==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz",
+ "integrity": "sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==",
"optional": true
},
"@next/swc-win32-x64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.0.tgz",
- "integrity": "sha512-9WEbVRRAqJ3YFVqEZIxUqkiO8l1nool1LmNxygr5HWF8AcSYsEpneUDhmjUVJEzO2A04+oPtZdombzzPPkTtgg==",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz",
+ "integrity": "sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==",
"optional": true
},
"@nodelib/fs.scandir": {
@@ -17939,20 +17961,20 @@
"dev": true
},
"next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/next/-/next-14.1.0.tgz",
- "integrity": "sha512-wlzrsbfeSU48YQBjZhDzOwhWhGsy+uQycR8bHAOt1LY1bn3zZEcDyHQOEoN3aWzQ8LHCAJ1nqrWCc9XF2+O45Q==",
- "requires": {
- "@next/env": "14.1.0",
- "@next/swc-darwin-arm64": "14.1.0",
- "@next/swc-darwin-x64": "14.1.0",
- "@next/swc-linux-arm64-gnu": "14.1.0",
- "@next/swc-linux-arm64-musl": "14.1.0",
- "@next/swc-linux-x64-gnu": "14.1.0",
- "@next/swc-linux-x64-musl": "14.1.0",
- "@next/swc-win32-arm64-msvc": "14.1.0",
- "@next/swc-win32-ia32-msvc": "14.1.0",
- "@next/swc-win32-x64-msvc": "14.1.0",
+ "version": "14.1.1",
+ "resolved": "https://registry.npmjs.org/next/-/next-14.1.1.tgz",
+ "integrity": "sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==",
+ "requires": {
+ "@next/env": "14.1.1",
+ "@next/swc-darwin-arm64": "14.1.1",
+ "@next/swc-darwin-x64": "14.1.1",
+ "@next/swc-linux-arm64-gnu": "14.1.1",
+ "@next/swc-linux-arm64-musl": "14.1.1",
+ "@next/swc-linux-x64-gnu": "14.1.1",
+ "@next/swc-linux-x64-musl": "14.1.1",
+ "@next/swc-win32-arm64-msvc": "14.1.1",
+ "@next/swc-win32-ia32-msvc": "14.1.1",
+ "@next/swc-win32-x64-msvc": "14.1.1",
"@swc/helpers": "0.5.2",
"busboy": "1.6.0",
"caniuse-lite": "^1.0.30001579",
@@ -18034,6 +18056,11 @@
"set-blocking": "^2.0.0"
}
},
+ "numeric": {
+ "version": "1.2.6",
+ "resolved": "https://registry.npmjs.org/numeric/-/numeric-1.2.6.tgz",
+ "integrity": "sha512-avBiDAP8siMa7AfJgYyuxw1oyII4z2sswS23+O+ZfV28KrtNzy0wxUFwi4f3RyM4eeeXNs1CThxR7pb5QQcMiw=="
+ },
"nuqs": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/nuqs/-/nuqs-1.14.1.tgz",
diff --git a/dashboard/package.json b/dashboard/package.json
index 1e84bcb8abb2d..a3716f7802ccf 100644
--- a/dashboard/package.json
+++ b/dashboard/package.json
@@ -13,6 +13,7 @@
"clean": "rm -rf .next/ && rm -rf out/"
},
"dependencies": {
+ "16": "^0.0.2",
"@chakra-ui/react": "^2.3.1",
"@emotion/react": "^11.10.4",
"@emotion/styled": "^11.10.4",
@@ -33,7 +34,7 @@
"fabric": "^5.2.1",
"framer-motion": "^6.5.1",
"lodash": "^4.17.21",
- "next": "^14.1.0",
+ "next": "^14.1.1",
"nuqs": "^1.14.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
diff --git a/dashboard/pages/subscriptions.tsx b/dashboard/pages/subscriptions.tsx
new file mode 100644
index 0000000000000..b2daa38c3f95c
--- /dev/null
+++ b/dashboard/pages/subscriptions.tsx
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2024 RisingWave Labs
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+import { Column, Relations } from "../components/Relations"
+import { getSubscriptions } from "../lib/api/streaming"
+import { Subscription as RwSubscription } from "../proto/gen/catalog"
+
+export default function Subscriptions() {
+ const subscriptionRetentionSeconds: Column = {
+ name: "Retention Seconds",
+ width: 3,
+    content: (r) => (r as RwSubscription).retentionSeconds ?? "unknown",
+ }
+
+ const subscriptionDependentTableId: Column = {
+ name: "Dependent Table Id",
+ width: 3,
+    content: (r) => (r as RwSubscription).dependentTableId ?? "unknown",
+ }
+ return Relations("Subscriptions", getSubscriptions, [
+ subscriptionRetentionSeconds,
+ subscriptionDependentTableId,
+ ])
+}
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 3c09a02bdd4c3..de5276eac9cf8 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,15 +1,9 @@
-FROM ubuntu:22.04 AS base
+FROM ubuntu:24.04 AS base
ENV LANG en_US.utf8
RUN apt-get update \
- && apt-get -y install ca-certificates build-essential libsasl2-dev openjdk-11-jdk software-properties-common
-
-# Install Python 3.12
-RUN add-apt-repository ppa:deadsnakes/ppa -y && \
- apt-get update -yy && \
- DEBIAN_FRONTEND=noninteractive apt-get install python3.12 python3.12-dev -yy
-ENV PYO3_PYTHON=python3.12
+ && apt-get -y install ca-certificates build-essential libsasl2-dev openjdk-11-jdk software-properties-common python3.12 python3.12-dev
FROM base AS rust-base
diff --git a/docker/Dockerfile.hdfs b/docker/Dockerfile.hdfs
index 53a6da30fe6e0..5f6a9c4af1ff4 100644
--- a/docker/Dockerfile.hdfs
+++ b/docker/Dockerfile.hdfs
@@ -1,16 +1,9 @@
-FROM ubuntu:22.04 AS base
+FROM ubuntu:24.04 AS base
ENV LANG en_US.utf8
RUN apt-get update \
- && apt-get -y install ca-certificates build-essential libsasl2-dev openjdk-11-jdk software-properties-common
-
-# Install Python 3.12
-RUN add-apt-repository ppa:deadsnakes/ppa -y && \
- apt-get update -yy && \
- DEBIAN_FRONTEND=noninteractive apt-get install python3.12 python3.12-dev -yy
-ENV PYO3_PYTHON=python3.12
-
+ && apt-get -y install ca-certificates build-essential libsasl2-dev openjdk-11-jdk software-properties-common python3.12 python3.12-dev
FROM base AS dashboard-builder
@@ -117,7 +110,7 @@ RUN cd /risingwave/java && mvn -B package -Dmaven.test.skip=true -Dno-build-rust
mkdir -p /risingwave/bin/connector-node && \
tar -zxvf /risingwave/java/connector-node/assembly/target/risingwave-connector-1.0.0.tar.gz -C /risingwave/bin/connector-node
-FROM ubuntu:22.04 as image-base
+FROM ubuntu:24.04 as image-base
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get -y install ca-certificates openjdk-11-jdk wget libsasl2-dev && rm -rf /var/lib/{apt,dpkg,cache,log}/
FROM image-base as risingwave
diff --git a/docker/aws/Dockerfile b/docker/aws/Dockerfile
index b1ada55db4816..1de5f1c7cce08 100644
--- a/docker/aws/Dockerfile
+++ b/docker/aws/Dockerfile
@@ -1,4 +1,4 @@
-FROM ubuntu:22.04
+FROM ubuntu:24.04
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get -y install ca-certificates && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN mkdir -p /risingwave/bin
diff --git a/docker/docker-compose-distributed-etcd.yml b/docker/docker-compose-distributed-etcd.yml
new file mode 100644
index 0000000000000..d0297a132b8fc
--- /dev/null
+++ b/docker/docker-compose-distributed-etcd.yml
@@ -0,0 +1,379 @@
+---
+version: "3"
+x-image: &image
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
+services:
+ compactor-0:
+ <<: *image
+ command:
+ - compactor-node
+ - "--listen-addr"
+ - "0.0.0.0:6660"
+ - "--advertise-addr"
+ - "compactor-0:6660"
+ - "--prometheus-listener-addr"
+ - "0.0.0.0:1260"
+ - "--meta-address"
+ - "http://meta-node-0:5690"
+ - "--config-path"
+ - /risingwave.toml
+ expose:
+ - "6660"
+ - "1260"
+ ports: []
+ depends_on:
+ - meta-node-0
+ # - minio-0
+ volumes:
+ - "./risingwave.toml:/risingwave.toml"
+ environment:
+ RUST_BACKTRACE: "1"
+ # If ENABLE_TELEMETRY is not set, telemetry will start by default
+ ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}
+ container_name: compactor-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/6660; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ deploy:
+ resources:
+ limits:
+ memory: 2G
+ reservations:
+ memory: 1G
+ compute-node-0:
+ <<: *image
+ command:
+ - compute-node
+ - "--listen-addr"
+ - "0.0.0.0:5688"
+ - "--advertise-addr"
+ - "compute-node-0:5688"
+ - "--prometheus-listener-addr"
+ - "0.0.0.0:1222"
+ - "--meta-address"
+ - "http://meta-node-0:5690"
+ - "--config-path"
+ - /risingwave.toml
+ expose:
+ - "5688"
+ - "1222"
+ ports: []
+ depends_on:
+ - meta-node-0
+ # - minio-0
+ volumes:
+ - "./risingwave.toml:/risingwave.toml"
+ environment:
+ RUST_BACKTRACE: "1"
+ # If ENABLE_TELEMETRY is not set, telemetry will start by default
+ ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}
+ container_name: compute-node-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/5688; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ deploy:
+ resources:
+ limits:
+ memory: 26G
+ reservations:
+ memory: 26G
+ etcd-0:
+ image: "quay.io/coreos/etcd:v3.5.10"
+ command:
+ - /usr/local/bin/etcd
+ - "--listen-client-urls"
+ - "http://0.0.0.0:2388"
+ - "--advertise-client-urls"
+ - "http://etcd-0:2388"
+ - "--listen-peer-urls"
+ - "http://0.0.0.0:2389"
+ - "--initial-advertise-peer-urls"
+ - "http://etcd-0:2389"
+ - "--listen-metrics-urls"
+ - "http://0.0.0.0:2379"
+ - "--name"
+ - risedev-meta
+ - "--max-txn-ops"
+ - "999999"
+ - "--max-request-bytes"
+ - "10485760"
+ - "--auto-compaction-mode"
+ - periodic
+ - "--auto-compaction-retention"
+ - 1m
+ - "--snapshot-count"
+ - "10000"
+ - "--data-dir"
+ - /etcd-data
+ expose:
+ - "2388"
+ ports:
+ - "2388:2388"
+ - "2389:2389"
+ depends_on: []
+ volumes:
+ - "etcd-0:/etcd-data"
+ environment: {}
+ container_name: etcd-0
+ healthcheck:
+ test:
+ - CMD
+ - etcdctl
+ - --endpoints=http://localhost:2388
+ - endpoint
+ - health
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ frontend-node-0:
+ <<: *image
+ command:
+ - frontend-node
+ - "--listen-addr"
+ - "0.0.0.0:4566"
+ - "--meta-addr"
+ - "http://meta-node-0:5690"
+ - "--advertise-addr"
+ - "frontend-node-0:4566"
+ - "--config-path"
+ - /risingwave.toml
+ - "--prometheus-listener-addr"
+ - "0.0.0.0:2222"
+ expose:
+ - "4566"
+ ports:
+ - "4566:4566"
+ depends_on:
+ - meta-node-0
+ volumes:
+ - "./risingwave.toml:/risingwave.toml"
+ environment:
+ RUST_BACKTRACE: "1"
+ # If ENABLE_TELEMETRY is not set, telemetry will start by default
+ ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}
+ container_name: frontend-node-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ deploy:
+ resources:
+ limits:
+ memory: 2G
+ reservations:
+ memory: 1G
+ grafana-0:
+ image: "grafana/grafana-oss:latest"
+ command: []
+ expose:
+ - "3001"
+ ports:
+ - "3001:3001"
+ depends_on: []
+ volumes:
+ - "grafana-0:/var/lib/grafana"
+ - "./grafana.ini:/etc/grafana/grafana.ini"
+ - "./grafana-risedev-datasource.yml:/etc/grafana/provisioning/datasources/grafana-risedev-datasource.yml"
+ - "./grafana-risedev-dashboard.yml:/etc/grafana/provisioning/dashboards/grafana-risedev-dashboard.yml"
+ - "./dashboards:/dashboards"
+ environment: {}
+ container_name: grafana-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/3001; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ meta-node-0:
+ <<: *image
+ command:
+ - meta-node
+ - "--listen-addr"
+ - "0.0.0.0:5690"
+ - "--advertise-addr"
+ - "meta-node-0:5690"
+ - "--dashboard-host"
+ - "0.0.0.0:5691"
+ - "--prometheus-host"
+ - "0.0.0.0:1250"
+ - "--prometheus-endpoint"
+ - "http://prometheus-0:9500"
+ - "--backend"
+ - etcd
+ - "--etcd-endpoints"
+ - "etcd-0:2388"
+ - "--state-store"
+ - "hummock+minio://hummockadmin:hummockadmin@minio-0:9301/hummock001"
+ - "--data-directory"
+ - "hummock_001"
+ - "--config-path"
+ - /risingwave.toml
+ expose:
+ - "5690"
+ - "1250"
+ - "5691"
+ ports:
+ - "5690:5690"
+ - "5691:5691"
+ depends_on:
+ - "etcd-0"
+ volumes:
+ - "./risingwave.toml:/risingwave.toml"
+ environment:
+ RUST_BACKTRACE: "1"
+ # If ENABLE_TELEMETRY is not set, telemetry will start by default
+ ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}
+ container_name: meta-node-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/5690; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ deploy:
+ resources:
+ limits:
+ memory: 2G
+ reservations:
+ memory: 1G
+ minio-0:
+ image: "quay.io/minio/minio:latest"
+ command:
+ - server
+ - "--address"
+ - "0.0.0.0:9301"
+ - "--console-address"
+ - "0.0.0.0:9400"
+ - /data
+ expose:
+ - "9301"
+ - "9400"
+ ports:
+ - "9301:9301"
+ - "9400:9400"
+ depends_on: []
+ volumes:
+ - "minio-0:/data"
+ entrypoint: "
+
+ /bin/sh -c '
+
+ set -e
+
+ mkdir -p \"/data/hummock001\"
+
+ /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\"
+
+ '"
+ environment:
+ MINIO_CI_CD: "1"
+ MINIO_PROMETHEUS_AUTH_TYPE: public
+ MINIO_PROMETHEUS_URL: "http://prometheus-0:9500"
+ MINIO_ROOT_PASSWORD: hummockadmin
+ MINIO_ROOT_USER: hummockadmin
+ MINIO_DOMAIN: "minio-0"
+ container_name: minio-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/9301; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ prometheus-0:
+ image: "prom/prometheus:latest"
+ command:
+ - "--config.file=/etc/prometheus/prometheus.yml"
+ - "--storage.tsdb.path=/prometheus"
+ - "--web.console.libraries=/usr/share/prometheus/console_libraries"
+ - "--web.console.templates=/usr/share/prometheus/consoles"
+ - "--web.listen-address=0.0.0.0:9500"
+ - "--storage.tsdb.retention.time=30d"
+ expose:
+ - "9500"
+ ports:
+ - "9500:9500"
+ depends_on: []
+ volumes:
+ - "prometheus-0:/prometheus"
+ - "./prometheus.yaml:/etc/prometheus/prometheus.yml"
+ environment: {}
+ container_name: prometheus-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - sh -c 'printf "GET /-/healthy HTTP/1.0\n\n" | nc localhost 9500; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+ message_queue:
+ image: "docker.vectorized.io/vectorized/redpanda:latest"
+ command:
+ - redpanda
+ - start
+ - "--smp"
+ - "1"
+ - "--reserve-memory"
+ - 0M
+ - "--memory"
+ - 4G
+ - "--overprovisioned"
+ - "--node-id"
+ - "0"
+ - "--check=false"
+ - "--kafka-addr"
+ - "PLAINTEXT://0.0.0.0:29092,OUTSIDE://0.0.0.0:9092"
+ - "--advertise-kafka-addr"
+ - "PLAINTEXT://message_queue:29092,OUTSIDE://localhost:9092"
+ expose:
+ - "29092"
+ - "9092"
+ - "9644"
+ ports:
+ - "29092:29092"
+ - "9092:9092"
+ - "9644:9644"
+ - "8081:8081"
+ depends_on: []
+ volumes:
+ - "message_queue:/var/lib/redpanda/data"
+ environment: {}
+ container_name: message_queue
+ healthcheck:
+ test: curl -f localhost:9644/v1/status/ready
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+volumes:
+ etcd-0:
+ external: false
+ grafana-0:
+ external: false
+ minio-0:
+ external: false
+ prometheus-0:
+ external: false
+ message_queue:
+ external: false
diff --git a/docker/docker-compose-distributed.yml b/docker/docker-compose-distributed.yml
index 55cb1cbcffe3c..6a71b3488c41c 100644
--- a/docker/docker-compose-distributed.yml
+++ b/docker/docker-compose-distributed.yml
@@ -87,52 +87,22 @@ services:
memory: 26G
reservations:
memory: 26G
- etcd-0:
- image: "quay.io/coreos/etcd:v3.5.10"
- command:
- - /usr/local/bin/etcd
- - "--listen-client-urls"
- - "http://0.0.0.0:2388"
- - "--advertise-client-urls"
- - "http://etcd-0:2388"
- - "--listen-peer-urls"
- - "http://0.0.0.0:2389"
- - "--initial-advertise-peer-urls"
- - "http://etcd-0:2389"
- - "--listen-metrics-urls"
- - "http://0.0.0.0:2379"
- - "--name"
- - risedev-meta
- - "--max-txn-ops"
- - "999999"
- - "--max-request-bytes"
- - "10485760"
- - "--auto-compaction-mode"
- - periodic
- - "--auto-compaction-retention"
- - 1m
- - "--snapshot-count"
- - "10000"
- - "--data-dir"
- - /etcd-data
+ postgres-0:
+ image: "postgres:15-alpine"
+ environment:
+ - POSTGRES_HOST_AUTH_METHOD=trust
+ - POSTGRES_USER=postgres
+ - POSTGRES_DB=metadata
+ - POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
expose:
- - "2388"
+ - "5432"
ports:
- - "2388:2388"
- - "2389:2389"
- depends_on: []
+ - "8432:5432"
volumes:
- - "etcd-0:/etcd-data"
- environment: {}
- container_name: etcd-0
+ - "postgres-0:/var/lib/postgresql/data"
healthcheck:
- test:
- - CMD
- - etcdctl
- - --endpoints=http://localhost:2388
- - endpoint
- - health
- interval: 1s
+ test: [ "CMD-SHELL", "pg_isready -U postgres" ]
+ interval: 2s
timeout: 5s
retries: 5
restart: always
@@ -216,9 +186,9 @@ services:
- "--prometheus-endpoint"
- "http://prometheus-0:9500"
- "--backend"
- - etcd
- - "--etcd-endpoints"
- - "etcd-0:2388"
+ - sql
+ - "--sql-endpoint"
+ - "postgres://postgres:@postgres-0:5432/metadata"
- "--state-store"
- "hummock+minio://hummockadmin:hummockadmin@minio-0:9301/hummock001"
- "--data-directory"
@@ -233,7 +203,8 @@ services:
- "5690:5690"
- "5691:5691"
depends_on:
- - "etcd-0"
+ - "postgres-0"
+ - "minio-0"
volumes:
- "./risingwave.toml:/risingwave.toml"
environment:
@@ -367,7 +338,7 @@ services:
retries: 5
restart: always
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/docker/docker-compose-etcd.yml b/docker/docker-compose-etcd.yml
new file mode 100644
index 0000000000000..05d8d16ffdf98
--- /dev/null
+++ b/docker/docker-compose-etcd.yml
@@ -0,0 +1,278 @@
+---
+version: "3"
+x-image: &image
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
+services:
+ risingwave-standalone:
+ <<: *image
+ command: "standalone --meta-opts=\" \
+ --listen-addr 0.0.0.0:5690 \
+ --advertise-addr 0.0.0.0:5690 \
+ --dashboard-host 0.0.0.0:5691 \
+ --prometheus-host 0.0.0.0:1250 \
+ --prometheus-endpoint http://prometheus-0:9500 \
+ --backend etcd \
+ --etcd-endpoints etcd-0:2388 \
+ --state-store hummock+minio://hummockadmin:hummockadmin@minio-0:9301/hummock001 \
+ --data-directory hummock_001 \
+ --config-path /risingwave.toml\" \
+ --compute-opts=\" \
+ --config-path /risingwave.toml \
+ --listen-addr 0.0.0.0:5688 \
+ --prometheus-listener-addr 0.0.0.0:1250 \
+ --advertise-addr 0.0.0.0:5688 \
+ --async-stack-trace verbose \
+ #--parallelism 4 \
+ #--total-memory-bytes 8589934592 \
+ --role both \
+ --meta-address http://0.0.0.0:5690\" \
+ --frontend-opts=\" \
+ --config-path /risingwave.toml \
+ --listen-addr 0.0.0.0:4566 \
+ --advertise-addr 0.0.0.0:4566 \
+ --prometheus-listener-addr 0.0.0.0:1250 \
+ --health-check-listener-addr 0.0.0.0:6786 \
+ --meta-addr http://0.0.0.0:5690\" \
+ --compactor-opts=\" \
+ --listen-addr 0.0.0.0:6660 \
+ --prometheus-listener-addr 0.0.0.0:1250 \
+ --advertise-addr 0.0.0.0:6660 \
+ --meta-address http://0.0.0.0:5690\""
+ expose:
+ - "6660"
+ - "4566"
+ - "5688"
+ - "5690"
+ - "1250"
+ - "5691"
+ ports:
+ - "4566:4566"
+ - "5690:5690"
+ - "5691:5691"
+ - "1250:1250"
+ depends_on:
+ - etcd-0
+ - minio-0
+ volumes:
+ - "./risingwave.toml:/risingwave.toml"
+ environment:
+ RUST_BACKTRACE: "1"
+ # If ENABLE_TELEMETRY is not set, telemetry will start by default
+ ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}
+ container_name: risingwave-standalone
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/6660; exit $$?;'
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/5688; exit $$?;'
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;'
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/5690; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ restart: always
+ deploy:
+ resources:
+ limits:
+ memory: 28G
+ reservations:
+ memory: 28G
+
+ etcd-0:
+ image: "quay.io/coreos/etcd:v3.5.10"
+ command:
+ - /usr/local/bin/etcd
+ - "--listen-client-urls"
+ - "http://0.0.0.0:2388"
+ - "--advertise-client-urls"
+ - "http://etcd-0:2388"
+ - "--listen-peer-urls"
+ - "http://0.0.0.0:2389"
+ - "--initial-advertise-peer-urls"
+ - "http://etcd-0:2389"
+ - "--listen-metrics-urls"
+ - "http://0.0.0.0:2379"
+ - "--name"
+ - risedev-meta
+ - "--max-txn-ops"
+ - "999999"
+ - "--max-request-bytes"
+ - "10485760"
+ - "--auto-compaction-mode"
+ - periodic
+ - "--auto-compaction-retention"
+ - 1m
+ - "--snapshot-count"
+ - "10000"
+ - "--data-dir"
+ - /etcd-data
+ expose:
+ - "2388"
+ ports:
+ - "2388:2388"
+ - "2389:2389"
+ depends_on: []
+ volumes:
+ - "etcd-0:/etcd-data"
+ environment: {}
+ container_name: etcd-0
+ healthcheck:
+ test:
+ - CMD
+ - etcdctl
+ - --endpoints=http://localhost:2388
+ - endpoint
+ - health
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+
+ grafana-0:
+ image: "grafana/grafana-oss:latest"
+ command: []
+ expose:
+ - "3001"
+ ports:
+ - "3001:3001"
+ depends_on: []
+ volumes:
+ - "grafana-0:/var/lib/grafana"
+ - "./grafana.ini:/etc/grafana/grafana.ini"
+ - "./grafana-risedev-datasource.yml:/etc/grafana/provisioning/datasources/grafana-risedev-datasource.yml"
+ - "./grafana-risedev-dashboard.yml:/etc/grafana/provisioning/dashboards/grafana-risedev-dashboard.yml"
+ - "./dashboards:/dashboards"
+ environment: {}
+ container_name: grafana-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/3001; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+
+ minio-0:
+ image: "quay.io/minio/minio:latest"
+ command:
+ - server
+ - "--address"
+ - "0.0.0.0:9301"
+ - "--console-address"
+ - "0.0.0.0:9400"
+ - /data
+ expose:
+ - "9301"
+ - "9400"
+ ports:
+ - "9301:9301"
+ - "9400:9400"
+ depends_on: []
+ volumes:
+ - "minio-0:/data"
+ entrypoint: "
+
+ /bin/sh -c '
+
+ set -e
+
+ mkdir -p \"/data/hummock001\"
+
+ /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\"
+
+ '"
+ environment:
+ MINIO_CI_CD: "1"
+ MINIO_PROMETHEUS_AUTH_TYPE: public
+ MINIO_PROMETHEUS_URL: "http://prometheus-0:9500"
+ MINIO_ROOT_PASSWORD: hummockadmin
+ MINIO_ROOT_USER: hummockadmin
+ MINIO_DOMAIN: "minio-0"
+ container_name: minio-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/9301; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+
+ prometheus-0:
+ image: "prom/prometheus:latest"
+ command:
+ - "--config.file=/etc/prometheus/prometheus.yml"
+ - "--storage.tsdb.path=/prometheus"
+ - "--web.console.libraries=/usr/share/prometheus/console_libraries"
+ - "--web.console.templates=/usr/share/prometheus/consoles"
+ - "--web.listen-address=0.0.0.0:9500"
+ - "--storage.tsdb.retention.time=30d"
+ expose:
+ - "9500"
+ ports:
+ - "9500:9500"
+ depends_on: []
+ volumes:
+ - "prometheus-0:/prometheus"
+ - "./prometheus.yaml:/etc/prometheus/prometheus.yml"
+ environment: {}
+ container_name: prometheus-0
+ healthcheck:
+ test:
+ - CMD-SHELL
+ - sh -c 'printf "GET /-/healthy HTTP/1.0\n\n" | nc localhost 9500; exit $$?;'
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+
+ message_queue:
+ image: "docker.vectorized.io/vectorized/redpanda:latest"
+ command:
+ - redpanda
+ - start
+ - "--smp"
+ - "1"
+ - "--reserve-memory"
+ - 0M
+ - "--memory"
+ - 4G
+ - "--overprovisioned"
+ - "--node-id"
+ - "0"
+ - "--check=false"
+ - "--kafka-addr"
+ - "PLAINTEXT://0.0.0.0:29092,OUTSIDE://0.0.0.0:9092"
+ - "--advertise-kafka-addr"
+ - "PLAINTEXT://message_queue:29092,OUTSIDE://localhost:9092"
+ expose:
+ - "29092"
+ - "9092"
+ - "9644"
+ ports:
+ - "29092:29092"
+ - "9092:9092"
+ - "9644:9644"
+ - "8081:8081"
+ depends_on: []
+ volumes:
+ - "message_queue:/var/lib/redpanda/data"
+ environment: {}
+ container_name: message_queue
+ healthcheck:
+ test: curl -f localhost:9644/v1/status/ready
+ interval: 1s
+ timeout: 5s
+ retries: 5
+ restart: always
+volumes:
+ etcd-0:
+ external: false
+ grafana-0:
+ external: false
+ minio-0:
+ external: false
+ prometheus-0:
+ external: false
+ message_queue:
+ external: false
diff --git a/docker/docker-compose-with-azblob.yml b/docker/docker-compose-with-azblob.yml
index e0b44c5768011..a1035180d6a68 100644
--- a/docker/docker-compose-with-azblob.yml
+++ b/docker/docker-compose-with-azblob.yml
@@ -1,7 +1,7 @@
---
version: "3"
x-image: &image
- image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.1}
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
services:
risingwave-standalone:
<<: *image
@@ -11,8 +11,8 @@ services:
--dashboard-host 0.0.0.0:5691 \
--prometheus-host 0.0.0.0:1250 \
--prometheus-endpoint http://prometheus-0:9500 \
- --backend etcd \
- --etcd-endpoints etcd-0:2388 \
+ --backend sql \
+ --sql-endpoint postgres://postgres:@postgres-0:5432/metadata \
--state-store hummock+azblob:// \
--data-directory hummock_001 \
--config-path /risingwave.toml\" \
@@ -51,7 +51,7 @@ services:
- "5691:5691"
- "1250:1250"
depends_on:
- - etcd-0
+ - postgres-0
env_file: multiple_object_storage.env
volumes:
- "./risingwave.toml:/risingwave.toml"
@@ -76,10 +76,10 @@ services:
memory: 28G
reservations:
memory: 28G
- etcd-0:
+ postgres-0:
extends:
file: docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: docker-compose.yml
@@ -93,7 +93,7 @@ services:
file: docker-compose.yml
service: message_queue
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/docker/docker-compose-with-gcs.yml b/docker/docker-compose-with-gcs.yml
index 847172c2d09c1..64807f252e536 100644
--- a/docker/docker-compose-with-gcs.yml
+++ b/docker/docker-compose-with-gcs.yml
@@ -1,7 +1,7 @@
---
version: "3"
x-image: &image
- image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.1}
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
services:
risingwave-standalone:
<<: *image
@@ -11,8 +11,8 @@ services:
--dashboard-host 0.0.0.0:5691 \
--prometheus-host 0.0.0.0:1250 \
--prometheus-endpoint http://prometheus-0:9500 \
- --backend etcd \
- --etcd-endpoints etcd-0:2388 \
+ --backend sql \
+ --sql-endpoint postgres://postgres:@postgres-0:5432/metadata \
--state-store hummock+gcs:// \
--data-directory hummock_001 \
--config-path /risingwave.toml\" \
@@ -51,7 +51,7 @@ services:
- "5691:5691"
- "1250:1250"
depends_on:
- - etcd-0
+ - postgres-0
env_file: multiple_object_storage.env
volumes:
- "./risingwave.toml:/risingwave.toml"
@@ -76,10 +76,10 @@ services:
memory: 28G
reservations:
memory: 28G
- etcd-0:
+ postgres-0:
extends:
file: docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: docker-compose.yml
@@ -93,7 +93,7 @@ services:
file: docker-compose.yml
service: message_queue
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/docker/docker-compose-with-local-fs.yml b/docker/docker-compose-with-local-fs.yml
index b45e624c619b3..c44995e063dea 100644
--- a/docker/docker-compose-with-local-fs.yml
+++ b/docker/docker-compose-with-local-fs.yml
@@ -1,7 +1,7 @@
---
version: "3"
x-image: &image
- image: ${RW_IMAGE:-risingwavelabs/risingwave:nightly-20231211}
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
services:
risingwave-standalone:
<<: *image
@@ -11,8 +11,8 @@ services:
--dashboard-host 0.0.0.0:5691 \
--prometheus-host 0.0.0.0:1250 \
--prometheus-endpoint http://prometheus-0:9500 \
- --backend etcd \
- --etcd-endpoints etcd-0:2388 \
+ --backend sql \
+ --sql-endpoint postgres://postgres:@postgres-0:5432/metadata \
--state-store hummock+fs:// \
--data-directory hummock_001 \
--config-path /risingwave.toml\" \
@@ -50,7 +50,7 @@ services:
- "5691:5691"
- "1250:1250"
depends_on:
- - etcd-0
+ - postgres-0
volumes:
- "./risingwave.toml:/risingwave.toml"
environment:
@@ -74,10 +74,10 @@ services:
memory:
reservations:
memory:
- etcd-0:
+ postgres-0:
extends:
file: docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: docker-compose.yml
@@ -87,7 +87,7 @@ services:
file: docker-compose.yml
service: prometheus-0
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/docker/docker-compose-with-obs.yml b/docker/docker-compose-with-obs.yml
index 5d0df0ca4f72d..634de89172b41 100644
--- a/docker/docker-compose-with-obs.yml
+++ b/docker/docker-compose-with-obs.yml
@@ -1,7 +1,7 @@
---
version: "3"
x-image: &image
- image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.1}
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
services:
risingwave-standalone:
<<: *image
@@ -11,8 +11,8 @@ services:
--dashboard-host 0.0.0.0:5691 \
--prometheus-host 0.0.0.0:1250 \
--prometheus-endpoint http://prometheus-0:9500 \
- --backend etcd \
- --etcd-endpoints etcd-0:2388 \
+ --backend sql \
+ --sql-endpoint postgres://postgres:@postgres-0:5432/metadata \
--state-store hummock+obs:// \
--data-directory hummock_001 \
--config-path /risingwave.toml\" \
@@ -51,7 +51,7 @@ services:
- "5691:5691"
- "1250:1250"
depends_on:
- - etcd-0
+ - postgres-0
env_file: multiple_object_storage.env
volumes:
- "./risingwave.toml:/risingwave.toml"
@@ -76,10 +76,10 @@ services:
memory: 28G
reservations:
memory: 28G
- etcd-0:
+ postgres-0:
extends:
file: docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: docker-compose.yml
@@ -93,7 +93,7 @@ services:
file: docker-compose.yml
service: message_queue
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/docker/docker-compose-with-oss.yml b/docker/docker-compose-with-oss.yml
index 7296a7074d5a6..53466e51711ed 100644
--- a/docker/docker-compose-with-oss.yml
+++ b/docker/docker-compose-with-oss.yml
@@ -1,7 +1,7 @@
---
version: "3"
x-image: &image
- image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.1}
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
services:
risingwave-standalone:
<<: *image
@@ -11,8 +11,8 @@ services:
--dashboard-host 0.0.0.0:5691 \
--prometheus-host 0.0.0.0:1250 \
--prometheus-endpoint http://prometheus-0:9500 \
- --backend etcd \
- --etcd-endpoints etcd-0:2388 \
+ --backend sql \
+ --sql-endpoint postgres://postgres:@postgres-0:5432/metadata \
--state-store hummock+oss:// \
--data-directory hummock_001 \
--config-path /risingwave.toml\" \
@@ -51,7 +51,7 @@ services:
- "5691:5691"
- "1250:1250"
depends_on:
- - etcd-0
+ - postgres-0
env_file: multiple_object_storage.env
volumes:
- "./risingwave.toml:/risingwave.toml"
@@ -76,10 +76,10 @@ services:
memory: 28G
reservations:
memory: 28G
- etcd-0:
+ postgres-0:
extends:
file: docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: docker-compose.yml
@@ -93,7 +93,7 @@ services:
file: docker-compose.yml
service: message_queue
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/docker/docker-compose-with-s3.yml b/docker/docker-compose-with-s3.yml
index 815489f82493e..34e1b02ddd38f 100644
--- a/docker/docker-compose-with-s3.yml
+++ b/docker/docker-compose-with-s3.yml
@@ -1,7 +1,7 @@
---
version: "3"
x-image: &image
- image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.1}
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
services:
risingwave-standalone:
<<: *image
@@ -11,8 +11,8 @@ services:
--dashboard-host 0.0.0.0:5691 \
--prometheus-host 0.0.0.0:1250 \
--prometheus-endpoint http://prometheus-0:9500 \
- --backend etcd \
- --etcd-endpoints etcd-0:2388 \
+ --backend sql \
+ --sql-endpoint postgres://postgres:@postgres-0:5432/metadata \
--state-store hummock+s3:// \
--data-directory hummock_001 \
--config-path /risingwave.toml\" \
@@ -51,7 +51,7 @@ services:
- "5691:5691"
- "1250:1250"
depends_on:
- - etcd-0
+ - postgres-0
env_file: aws.env
volumes:
- "./risingwave.toml:/risingwave.toml"
@@ -76,10 +76,10 @@ services:
memory: 28G
reservations:
memory: 28G
- etcd-0:
+ postgres-0:
extends:
file: docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: docker-compose.yml
@@ -93,7 +93,7 @@ services:
file: docker-compose.yml
service: message_queue
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 6259a5757b14f..a94324439a42c 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -1,7 +1,7 @@
---
version: "3"
x-image: &image
- image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.1}
+ image: ${RW_IMAGE:-risingwavelabs/risingwave:v1.8.2}
services:
risingwave-standalone:
<<: *image
@@ -11,8 +11,8 @@ services:
--dashboard-host 0.0.0.0:5691 \
--prometheus-host 0.0.0.0:1250 \
--prometheus-endpoint http://prometheus-0:9500 \
- --backend etcd \
- --etcd-endpoints etcd-0:2388 \
+ --backend sql \
+ --sql-endpoint postgres://postgres:@postgres-0:5432/metadata \
--state-store hummock+minio://hummockadmin:hummockadmin@minio-0:9301/hummock001 \
--data-directory hummock_001 \
--config-path /risingwave.toml\" \
@@ -51,7 +51,7 @@ services:
- "5691:5691"
- "1250:1250"
depends_on:
- - etcd-0
+ - postgres-0
- minio-0
volumes:
- "./risingwave.toml:/risingwave.toml"
@@ -77,52 +77,22 @@ services:
reservations:
memory: 28G
- etcd-0:
- image: "quay.io/coreos/etcd:v3.5.10"
- command:
- - /usr/local/bin/etcd
- - "--listen-client-urls"
- - "http://0.0.0.0:2388"
- - "--advertise-client-urls"
- - "http://etcd-0:2388"
- - "--listen-peer-urls"
- - "http://0.0.0.0:2389"
- - "--initial-advertise-peer-urls"
- - "http://etcd-0:2389"
- - "--listen-metrics-urls"
- - "http://0.0.0.0:2379"
- - "--name"
- - risedev-meta
- - "--max-txn-ops"
- - "999999"
- - "--max-request-bytes"
- - "10485760"
- - "--auto-compaction-mode"
- - periodic
- - "--auto-compaction-retention"
- - 1m
- - "--snapshot-count"
- - "10000"
- - "--data-dir"
- - /etcd-data
+ postgres-0:
+ image: "postgres:15-alpine"
+ environment:
+ - POSTGRES_HOST_AUTH_METHOD=trust
+ - POSTGRES_USER=postgres
+ - POSTGRES_DB=metadata
+ - POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
expose:
- - "2388"
+ - "5432"
ports:
- - "2388:2388"
- - "2389:2389"
- depends_on: []
+ - "8432:5432"
volumes:
- - "etcd-0:/etcd-data"
- environment: {}
- container_name: etcd-0
+ - "postgres-0:/var/lib/postgresql/data"
healthcheck:
- test:
- - CMD
- - etcdctl
- - --endpoints=http://localhost:2388
- - endpoint
- - health
- interval: 1s
+ test: [ "CMD-SHELL", "pg_isready -U postgres" ]
+ interval: 2s
timeout: 5s
retries: 5
restart: always
@@ -266,7 +236,7 @@ services:
retries: 5
restart: always
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/e2e_test/backfill/rate_limit/slow-udf.slt b/e2e_test/backfill/rate_limit/slow-udf.slt
new file mode 100644
index 0000000000000..a2b1a6fc63130
--- /dev/null
+++ b/e2e_test/backfill/rate_limit/slow-udf.slt
@@ -0,0 +1,49 @@
+statement ok
+create table t(v1 int);
+
+statement ok
+insert into t select 2 from generate_series(1, 1000000);
+
+statement ok
+set streaming_rate_limit=1;
+
+statement ok
+set background_ddl=true;
+
+statement ok
+CREATE FUNCTION delay(secs int) RETURNS int LANGUAGE python AS $$
+def delay(n):
+ import time
+ time.sleep(n)
+ return n
+$$;
+
+statement ok
+create sink m1 as select delay(v1) from t with (connector='blackhole');
+
+statement ok
+set background_ddl = false;
+
+statement ok
+set streaming_rate_limit=default;
+
+statement ok
+flush;
+
+statement ok
+flush;
+
+statement ok
+flush;
+
+statement ok
+flush;
+
+statement ok
+drop sink m1;
+
+statement ok
+drop function delay;
+
+statement ok
+drop table t;
\ No newline at end of file
diff --git a/e2e_test/batch/catalog/has_privilege.slt.part b/e2e_test/batch/catalog/has_privilege.slt.part
new file mode 100644
index 0000000000000..a742db0c51d6f
--- /dev/null
+++ b/e2e_test/batch/catalog/has_privilege.slt.part
@@ -0,0 +1,264 @@
+statement ok
+CREATE USER test_user;
+
+statement ok
+CREATE SCHEMA test_schema;
+
+statement ok
+CREATE TABLE foo (id INT, name VARCHAR);
+
+statement ok
+CREATE VIEW foo_view AS SELECT * FROM foo;
+
+statement ok
+CREATE INDEX foo_index ON foo(id);
+
+statement ok
+CREATE MATERIALIZED VIEW foo_mv AS SELECT * FROM foo;
+
+statement ok
+CREATE SOURCE foo_source (a int, b int) with (
+ connector = 'datagen',
+ datagen.rows.per.second = '1',
+ datagen.split.num = '1'
+);
+
+statement ok
+CREATE TABLE bar (id INT);
+
+statement ok
+GRANT ALL PRIVILEGES ON foo TO test_user GRANTED BY root;
+
+statement ok
+GRANT INSERT ON bar TO test_user WITH GRANT OPTION GRANTED BY root;
+
+statement ok
+GRANT INSERT ON foo_view TO test_user WITH GRANT OPTION GRANTED BY root;
+
+statement ok
+GRANT SELECT ON ALL TABLES IN SCHEMA public TO test_user WITH GRANT OPTION GRANTED BY root;
+
+statement ok
+GRANT SELECT ON ALL MATERIALIZED VIEWS IN SCHEMA public TO test_user WITH GRANT OPTION GRANTED BY root;
+
+statement ok
+GRANT SELECT ON ALL SOURCES IN SCHEMA public TO test_user WITH GRANT OPTION GRANTED BY root;
+
+statement ok
+GRANT CREATE ON SCHEMA test_schema TO test_user;
+
+query error table not found: bar_err
+GRANT INSERT ON bar_err TO test_user WITH GRANT OPTION GRANTED BY root;
+
+query error Invalid parameter user: User test_user_err not found
+SELECT has_table_privilege('test_user_err', 'foo', 'SELECT');
+
+query error Invalid parameter name: class not found: foo_err
+SELECT has_table_privilege('test_user', 'foo_err', 'SELECT');
+
+query error Invalid parameter privilege: unrecognized privilege type: "SELE CT"
+SELECT has_table_privilege('test_user', 'foo', 'SELE CT');
+
+query error Invalid parameter privilege: unrecognized privilege type: "SELECT INSERT"
+SELECT has_table_privilege('test_user', 'foo', 'SELECT INSERT');
+
+query error Invalid parameter privilege
+SELECT has_table_privilege('test_user', 'foo', 'SELECT, INSERT WITH GRANT OPTION');
+
+query error Invalid parameter user: User test_user_err not found
+SELECT has_schema_privilege('test_user_err', 'test_schema', 'CREATE');
+
+query error Invalid parameter schema: schema not found: test_schema_err
+SELECT has_schema_privilege('test_user', 'test_schema_err', 'CREATE');
+
+query error Invalid parameter privilege: unrecognized privilege type: "INSERT"
+SELECT has_schema_privilege('test_user', 'test_schema', 'INSERT');
+
+query error Invalid parameter privilege: unrecognized privilege type: "DELETE"
+SELECT has_any_column_privilege('test_user', 'foo_mv'::regclass, 'DELETE');
+
+query I
+SELECT has_table_privilege('test_user', 'foo', 'SELECT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo', 'SELECT WITH GRANT OPTION');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo', 'INSERT WITH GRANT OPTION');
+----
+f
+
+query I
+SELECT has_table_privilege('test_user', 'foo', 'INSERT, SELECT WITH GRANT OPTION');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo', 'DELETE, INSERT, SELECT WITH GRANT OPTION');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo', 'DELETE WITH GRANT OPTION, INSERT, SELECT WITH GRANT OPTION');
+----
+f
+
+query I
+SELECT has_table_privilege('test_user', 'foo_view', 'SELECT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo_view'::regclass, 'INSERT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo_view'::regclass, 'UPDATE');
+----
+f
+
+query I
+SELECT has_any_column_privilege('test_user', 'foo_view'::regclass, 'INSERT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo_mv', 'SELECT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo_mv'::regclass, 'SELECT WITH GRANT OPTION');
+----
+t
+
+query I
+SELECT has_any_column_privilege('test_user', 'foo_mv'::regclass, 'SELECT WITH GRANT OPTION');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo_mv', 'INSERT');
+----
+f
+
+query I
+SELECT has_table_privilege('test_user', 'foo_source'::regclass, 'SELECT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo_source', 'INSERT');
+----
+f
+
+# Indexes are granted by `GRANT SELECT ON ALL MATERIALIZED VIEWS`
+query I
+SELECT has_table_privilege('test_user', 'foo_index'::regclass, 'SELECT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'foo_index', 'INSERT');
+----
+f
+
+query I
+SELECT has_table_privilege('test_user', 'bar', 'INSERT');
+----
+t
+
+query I
+SELECT has_table_privilege('bar', 'INSERT');
+----
+t
+
+query I
+SELECT has_table_privilege('bar'::regclass, 'SELECT');
+----
+t
+
+query I
+SELECT has_table_privilege('bar'::regclass, 'SELECT');
+----
+t
+
+query I
+SELECT has_table_privilege('test_user', 'bar', 'UPDATE');
+----
+f
+
+query I
+SELECT has_table_privilege('test_user', 'bar'::regclass, 'INSERT WITH GRANT OPTION');
+----
+t
+
+query I
+SELECT has_schema_privilege('public', 'USAGE');
+----
+t
+
+query I
+SELECT has_schema_privilege('test_user', 'test_schema', 'USAGE');
+----
+f
+
+query I
+SELECT has_schema_privilege('test_user', 'test_schema', 'CREATE');
+----
+t
+
+statement ok
+REVOKE SELECT ON ALL TABLES IN SCHEMA public FROM test_user GRANTED BY root;
+
+query I
+SELECT has_table_privilege('test_user', 'bar'::regclass, 'SELECT');
+----
+f
+
+query I
+SELECT has_table_privilege('test_user', 'foo_view', 'SELECT');
+----
+f
+
+query I
+SELECT has_table_privilege('test_user', 'foo_view', 'INSERT');
+----
+t
+
+statement ok
+REVOKE INSERT ON foo_view FROM test_user GRANTED BY root;
+
+query I
+SELECT has_table_privilege('test_user', 'foo_view', 'INSERT');
+----
+f
+
+statement ok
+DROP SOURCE foo_source;
+
+statement ok
+DROP MATERIALIZED VIEW foo_mv;
+
+statement ok
+DROP INDEX foo_index;
+
+statement ok
+DROP VIEW foo_view;
+
+statement ok
+DROP TABLE foo;
+
+statement ok
+DROP TABLE bar;
+
+statement ok
+DROP SCHEMA test_schema;
+
+statement ok
+DROP USER test_user;
diff --git a/e2e_test/error_ui/simple/main.slt b/e2e_test/error_ui/simple/main.slt
index 8ef82e1f0d1c7..6bcbbde608cf8 100644
--- a/e2e_test/error_ui/simple/main.slt
+++ b/e2e_test/error_ui/simple/main.slt
@@ -13,8 +13,10 @@ create function int_42() returns int as int_42 using link '555.0.0.1:8815';
----
db error: ERROR: Failed to run the query
-Caused by:
- Flight service error: invalid address: 555.0.0.1:8815, err: failed to parse address: http://555.0.0.1:8815: invalid IPv4 address
+Caused by these errors (recent errors listed first):
+ 1: Expr error
+ 2: UDF error
+ 3: Flight service error: invalid address: 555.0.0.1:8815, err: failed to parse address: http://555.0.0.1:8815: invalid IPv4 address
statement error
diff --git a/e2e_test/sink/remote/jdbc.check.pg.slt b/e2e_test/sink/remote/jdbc.check.pg.slt
index d532610c391e5..1ec8c827d939b 100644
--- a/e2e_test/sink/remote/jdbc.check.pg.slt
+++ b/e2e_test/sink/remote/jdbc.check.pg.slt
@@ -46,7 +46,7 @@ select * from t1_uuid;
221 74605c5a-a7bb-4b3b-8742-2a12e9709dea hello world
-query T
+query TIT
select * from sk_t1_uuid
----
-21189447-8736-44bd-b254-26b5dec91da9
+21189447-8736-44bd-b254-26b5dec91da9 2 bb
diff --git a/e2e_test/sink/remote/jdbc.load.slt b/e2e_test/sink/remote/jdbc.load.slt
index 97bdabaa47515..9a4ede4e032ed 100644
--- a/e2e_test/sink/remote/jdbc.load.slt
+++ b/e2e_test/sink/remote/jdbc.load.slt
@@ -166,22 +166,22 @@ INSERT INTO t1_uuid values (221, '74605c5a-a7bb-4b3b-8742-2a12e9709dea', 'hello
statement ok
-CREATE TABLE t1_test_uuid_delete (id varchar, primary key(id));
+CREATE TABLE t1_test_uuid_delete (id varchar, v1 int, v2 varchar, primary key(id, v2));
statement ok
-INSERT INTO t1_test_uuid_delete VALUES ('fb48ecc1-917f-4f4b-ab6d-d8e37809caf8'), ('21189447-8736-44bd-b254-26b5dec91da9');
+INSERT INTO t1_test_uuid_delete VALUES ('fb48ecc1-917f-4f4b-ab6d-d8e37809caf8', 1, 'aa'), ('21189447-8736-44bd-b254-26b5dec91da9', 2, 'bb');
statement ok
CREATE SINK sk_t1_uuid FROM t1_test_uuid_delete WITH (
connector='jdbc',
jdbc.url='jdbc:postgresql://db:5432/test?user=test&password=connector',
table.name='sk_t1_uuid',
- primary_key='id',
+ primary_key='id, v2',
type='upsert'
);
statement ok
-DELETE FROM t1_test_uuid_delete WHERE ID='fb48ecc1-917f-4f4b-ab6d-d8e37809caf8';
+DELETE FROM t1_test_uuid_delete WHERE id='fb48ecc1-917f-4f4b-ab6d-d8e37809caf8' AND v2='aa';
statement ok
diff --git a/e2e_test/sink/remote/pg_create_table.sql b/e2e_test/sink/remote/pg_create_table.sql
index dab753ee05d6d..ee272ef747a7a 100644
--- a/e2e_test/sink/remote/pg_create_table.sql
+++ b/e2e_test/sink/remote/pg_create_table.sql
@@ -84,4 +84,4 @@ CREATE TABLE biz.t2 (
"aBc" INTEGER PRIMARY KEY
);
-CREATE TABLE sk_t1_uuid (id uuid, primary key(id));
+CREATE TABLE sk_t1_uuid (id uuid, v1 int, v2 varchar, primary key(id, v2));
diff --git a/e2e_test/source/cdc/cdc.check_new_rows.slt b/e2e_test/source/cdc/cdc.check_new_rows.slt
index a9daaa5f458fb..77c8b6b5448ca 100644
--- a/e2e_test/source/cdc/cdc.check_new_rows.slt
+++ b/e2e_test/source/cdc/cdc.check_new_rows.slt
@@ -125,30 +125,58 @@ query II
select id, my_int from list_with_null_shared order by id;
----
1 {1,2,NULL}
-2 {NULL,-1,-2}
+2 {NULL,3,4}
+3 {NULL,-3,-4}
+4 {-4,-5,-6}
-# will fix in https://github.com/risingwavelabs/risingwave/pull/16416
+# my_num: varchar[]
query II
select id, my_num from list_with_null_shared order by id;
----
-1 {1.1,POSITIVE_INFINITY,NULL}
+1 NULL
+2 {2.2,0,NULL}
+3 NULL
+4 {NULL,-99999999999999999.9999}
+
+# my_num_1: numeric[]
+query II
+select id, my_num_1 from list_with_null_shared order by id;
+----
+1 NULL
+2 {2.2,0,NULL}
+3 NULL
+4 {NULL,-99999999999999999.9999}
+
+# my_num_2: rw_int256[]
+query II
+select id, my_num_2 from list_with_null_shared order by id;
+----
+1 NULL
2 NULL
+3 NULL
+4 NULL
# Due to the bug in Debezium, if a enum list contains `NULL`, the list will be converted to `NULL`
query II
select id, my_mood from list_with_null_shared order by id;
----
1 NULL
-2 NULL
+2 {happy,ok,sad}
+3 NULL
+4 NULL
query II
select id, my_uuid from list_with_null_shared order by id;
----
1 {bb488f9b-330d-4012-b849-12adeb49e57e,bb488f9b-330d-4012-b849-12adeb49e57f,NULL}
-2 {NULL,471acecf-a4b4-4ed3-a211-7fb2291f159f,9bc35adf-fb11-4130-944c-e7eadb96b829}
+2 {2de296df-eda7-4202-a81f-1036100ef4f6,2977afbc-0b12-459c-a36f-f623fc9e9840}
+3 {NULL,471acecf-a4b4-4ed3-a211-7fb2291f159f,9bc35adf-fb11-4130-944c-e7eadb96b829}
+4 {b2e4636d-fa03-4ad4-bf16-029a79dca3e2}
query II
select id, my_bytea from list_with_null_shared order by id;
----
1 {"\\x00","\\x01",NULL}
-2 {NULL,"\\x99","\\xaa"}
+2 {"\\x00","\\x01","\\x02"}
+3 {NULL,"\\x99","\\xaa"}
+4 {"\\x88","\\x99","\\xaa"}
diff --git a/e2e_test/source/cdc/cdc.share_stream.slt b/e2e_test/source/cdc/cdc.share_stream.slt
index 8eb48a8c81dc4..480c707fb6f42 100644
--- a/e2e_test/source/cdc/cdc.share_stream.slt
+++ b/e2e_test/source/cdc/cdc.share_stream.slt
@@ -283,6 +283,13 @@ CREATE TABLE numeric_to_varchar_shared (
PRIMARY KEY (id)
) FROM pg_source TABLE 'public.numeric_table';
+statement ok
+CREATE TABLE numeric_to_numeric_shared (
+ id int,
+ num numeric,
+ PRIMARY KEY (id)
+) FROM pg_source TABLE 'public.numeric_table';
+
statement ok
CREATE TABLE numeric_list_to_rw_int256_list_shared (
id int,
@@ -297,6 +304,13 @@ CREATE TABLE numeric_list_to_varchar_list_shared (
PRIMARY KEY (id)
) FROM pg_source TABLE 'public.numeric_list';
+statement ok
+CREATE TABLE numeric_list_to_numeric_list_shared (
+ id int,
+ num numeric[],
+ PRIMARY KEY (id)
+) FROM pg_source TABLE 'public.numeric_list';
+
statement ok
CREATE TABLE enum_to_varchar_shared (
id int,
@@ -310,6 +324,8 @@ CREATE TABLE list_with_null_shared (
id int,
my_int int[],
my_num varchar[],
+ my_num_1 numeric[],
+ my_num_2 rw_int256[],
my_mood varchar[],
my_uuid varchar[],
my_bytea bytea[],
@@ -327,6 +343,8 @@ insert into numeric_table values(105, 115792089237316195423570985008687907853269
insert into numeric_table values(106, 'NaN'::numeric);
insert into numeric_table values(107, 'Infinity'::numeric);
INSERT INTO enum_table VALUES (2, 'ok');
+insert into numeric_list values(3, '{3.14, 6, 57896044618658097711785492504343953926634992332820282019728792003956564819967, 57896044618658097711785492504343953926634992332820282019728792003956564819968, 115792089237316195423570985008687907853269984665640564039457584007913129639936.555555}');
+INSERT INTO numeric_list values(4, '{nan, infinity, 524596}');
"
sleep 3s
@@ -366,6 +384,23 @@ select * from numeric_to_rw_int256_shared order by id;
106 NULL
107 NULL
+query II
+select * from numeric_to_numeric_shared order by id;
+----
+1 3.14
+2 NULL
+3 NULL
+4 NULL
+5 NULL
+6 NaN
+7 Infinity
+102 NULL
+103 NULL
+104 NULL
+105 NULL
+106 NaN
+107 Infinity
+
system ok
psql -c "
DELETE FROM numeric_table WHERE id IN (102, 103, 104, 105, 106, 107);
@@ -375,13 +410,25 @@ query II
select * from numeric_list_to_varchar_list_shared order by id;
----
1 {3.14,6,57896044618658097711785492504343953926634992332820282019728792003956564819967,57896044618658097711785492504343953926634992332820282019728792003956564819968,115792089237316195423570985008687907853269984665640564039457584007913129639936.555555}
-2 {NAN,POSITIVE_INFINITY,NEGATIVE_INFINITY}
+2 NULL
+3 {3.14,6,57896044618658097711785492504343953926634992332820282019728792003956564819967,57896044618658097711785492504343953926634992332820282019728792003956564819968,115792089237316195423570985008687907853269984665640564039457584007913129639936.555555}
+4 NULL
query II
select * from numeric_list_to_rw_int256_list_shared order by id;
----
-1 {NULL,6,57896044618658097711785492504343953926634992332820282019728792003956564819967,NULL,NULL}
-2 {NULL,NULL,NULL}
+1 NULL
+2 NULL
+3 NULL
+4 NULL
+
+query II
+select * from numeric_list_to_numeric_list_shared order by id;
+----
+1 NULL
+2 NULL
+3 NULL
+4 NULL
query II
select * from enum_to_varchar_shared order by id;
diff --git a/e2e_test/source/cdc/postgres_cdc.sql b/e2e_test/source/cdc/postgres_cdc.sql
index 2a34a52051096..6a60644ad6980 100644
--- a/e2e_test/source/cdc/postgres_cdc.sql
+++ b/e2e_test/source/cdc/postgres_cdc.sql
@@ -97,5 +97,6 @@ CREATE TABLE enum_table (
);
INSERT INTO enum_table VALUES (1, 'happy');
-CREATE TABLE list_with_null(id int primary key, my_int int[], my_num numeric[], my_mood mood[], my_uuid uuid[], my_bytea bytea[]);
-INSERT INTO list_with_null VALUES (1, '{1,2,NULL}', '{1.1,inf,NULL}', '{happy,ok,NULL}', '{bb488f9b-330d-4012-b849-12adeb49e57e,bb488f9b-330d-4012-b849-12adeb49e57f, NULL}', '{\\x00,\\x01,NULL}');
+CREATE TABLE list_with_null(id int primary key, my_int int[], my_num numeric[], my_num_1 numeric[], my_num_2 numeric[], my_mood mood[], my_uuid uuid[], my_bytea bytea[]);
+INSERT INTO list_with_null VALUES (1, '{1,2,NULL}', '{1.1,inf,NULL}', '{1.1,inf,NULL}', '{1.1,inf,NULL}', '{happy,ok,NULL}', '{bb488f9b-330d-4012-b849-12adeb49e57e,bb488f9b-330d-4012-b849-12adeb49e57f, NULL}', '{\\x00,\\x01,NULL}');
+INSERT INTO list_with_null VALUES (2, '{NULL,3,4}', '{2.2,0,NULL}' , '{2.2,0,NULL}', '{2.2,0,NULL}', '{happy,ok,sad}', '{2de296df-eda7-4202-a81f-1036100ef4f6,2977afbc-0b12-459c-a36f-f623fc9e9840}', '{\\x00,\\x01,\\x02}');
diff --git a/e2e_test/source/cdc/postgres_cdc_insert.sql b/e2e_test/source/cdc/postgres_cdc_insert.sql
index 6b4fde2e71244..a02a35a020965 100644
--- a/e2e_test/source/cdc/postgres_cdc_insert.sql
+++ b/e2e_test/source/cdc/postgres_cdc_insert.sql
@@ -23,4 +23,5 @@ insert into numeric_table values(107, 'Infinity'::numeric);
INSERT INTO enum_table VALUES (3, 'sad');
--- to avoid escaping issues of psql -c "", we insert this row here and check the result in check_new_rows.slt
-INSERT INTO list_with_null VALUES (2, '{NULL,-1,-2}', '{NULL,nan,-inf}', '{NULL,sad,ok}', '{NULL,471acecf-a4b4-4ed3-a211-7fb2291f159f,9bc35adf-fb11-4130-944c-e7eadb96b829}', '{NULL,\\x99,\\xAA}');
+INSERT INTO list_with_null VALUES (3, '{NULL,-3,-4}', '{NULL,nan,-inf}', '{NULL,nan,-inf}', '{NULL,nan,-inf}', '{NULL,sad,ok}', '{NULL,471acecf-a4b4-4ed3-a211-7fb2291f159f,9bc35adf-fb11-4130-944c-e7eadb96b829}', '{NULL,\\x99,\\xAA}');
+INSERT INTO list_with_null VALUES (4, '{-4,-5,-6}', '{NULL,-99999999999999999.9999}', '{NULL,-99999999999999999.9999}', '{NULL,-99999999999999999.9999}', '{NULL,sad,ok}', '{b2e4636d-fa03-4ad4-bf16-029a79dca3e2}', '{\\x88,\\x99,\\xAA}');
diff --git a/e2e_test/source_inline/cdc/mysql/mysql_create_drop.slt b/e2e_test/source_inline/cdc/mysql/mysql_create_drop.slt
index da3d82083755f..10854d97b6440 100644
--- a/e2e_test/source_inline/cdc/mysql/mysql_create_drop.slt
+++ b/e2e_test/source_inline/cdc/mysql/mysql_create_drop.slt
@@ -5,6 +5,11 @@ control substitution on
statement ok
ALTER SYSTEM SET max_concurrent_creating_streaming_jobs TO 1;
+system ok
+mysql -e "
+ SET GLOBAL time_zone = '+01:00';
+"
+
system ok
mysql -e "
DROP DATABASE IF EXISTS testdb1; CREATE DATABASE testdb1;
@@ -159,53 +164,53 @@ sleep 5s
query IT
select * from tt1;
----
-1 2023-10-23 10:00:00+00:00
+1 2023-10-23 09:00:00+00:00
query IT
select * from tt2;
----
-2 2023-10-23 11:00:00+00:00
+2 2023-10-23 10:00:00+00:00
query IT
select * from tt3;
----
-3 2023-10-23 12:00:00+00:00
+3 2023-10-23 11:00:00+00:00
query IT
select * from tt4;
----
-4 2023-10-23 13:00:00+00:00
+4 2023-10-23 12:00:00+00:00
query IT
select * from tt5;
----
-5 2023-10-23 14:00:00+00:00
+5 2023-10-23 13:00:00+00:00
query IT
select * from tt1_shared;
----
-1 2023-10-23 10:00:00+00:00
+1 2023-10-23 09:00:00+00:00
query IT
select * from tt2_shared;
----
-2 2023-10-23 11:00:00+00:00
+2 2023-10-23 10:00:00+00:00
query IT
select * from tt3_shared;
----
-3 2023-10-23 12:00:00+00:00
+3 2023-10-23 11:00:00+00:00
query IT
select * from tt4_shared;
----
-4 2023-10-23 13:00:00+00:00
+4 2023-10-23 12:00:00+00:00
query IT
select * from tt5_shared;
----
-5 2023-10-23 14:00:00+00:00
+5 2023-10-23 13:00:00+00:00
statement ok
drop table tt1;
@@ -224,3 +229,8 @@ drop table tt5;
statement ok
drop source s cascade;
+
+system ok
+mysql -e "
+ SET GLOBAL time_zone = '+00:00';
+"
diff --git a/e2e_test/source_inline/kafka/shared_source.slt b/e2e_test/source_inline/kafka/shared_source.slt
index 57ab0b95de9b5..f180e6e0d8351 100644
--- a/e2e_test/source_inline/kafka/shared_source.slt
+++ b/e2e_test/source_inline/kafka/shared_source.slt
@@ -28,7 +28,7 @@ select count(*) from rw_internal_tables where name like '%s0%';
sleep 1s
-# Ingestion does not start (state table is empty), even after sleep
+# SourceExecutor's ingestion does not start (state table is empty), even after sleep
system ok
internal_table.mjs --name s0 --type source
----
@@ -41,28 +41,21 @@ create materialized view mv_1 as select * from s0;
# Wait enough time to ensure SourceExecutor consumes all Kafka data.
sleep 2s
-# Ingestion started
+# SourceExecutor's ingestion started, but it only starts from latest.
system ok
internal_table.mjs --name s0 --type source
----
-0,"{""split_info"": {""partition"": 0, ""start_offset"": 0, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
-1,"{""split_info"": {""partition"": 1, ""start_offset"": 0, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
-2,"{""split_info"": {""partition"": 2, ""start_offset"": 0, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
-3,"{""split_info"": {""partition"": 3, ""start_offset"": 0, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
-
+(empty)
-# The result is non-deterministic:
-# If the upstream row comes before the backfill row, it will be ignored, and the result state is Backfilling.
-# If the upstream row comes after the backfill row, the result state is Finished.
-# Uncomment below and run manually to see the result.
-# system ok
-# internal_table.mjs --name mv_1 --type sourcebackfill
-# ----
-# 0,"{""Backfilling"": ""0""}"
-# 1,"{""Backfilling"": ""0""}"
-# 2,"{""Backfilling"": ""0""}"
-# 3,"{""Backfilling"": ""0""}"
+# offset 0 must be backfilled, not from upstream.
+system ok
+internal_table.mjs --name mv_1 --type sourcebackfill
+----
+0,"{""Backfilling"": ""0""}"
+1,"{""Backfilling"": ""0""}"
+2,"{""Backfilling"": ""0""}"
+3,"{""Backfilling"": ""0""}"
# This does not affect the behavior for CREATE MATERIALIZED VIEW below. It also uses the shared source, and creates SourceBackfillExecutor.
@@ -108,6 +101,16 @@ EOF
sleep 2s
+# SourceExecutor's ingestion finally got new data now.
+system ok
+internal_table.mjs --name s0 --type source
+----
+0,"{""split_info"": {""partition"": 0, ""start_offset"": 1, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
+1,"{""split_info"": {""partition"": 1, ""start_offset"": 1, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
+2,"{""split_info"": {""partition"": 2, ""start_offset"": 1, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
+3,"{""split_info"": {""partition"": 3, ""start_offset"": 1, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
+
+
query IT rowsort
select v1, v2 from s0;
----
@@ -143,7 +146,9 @@ internal_table.mjs --name s0 --type source
3,"{""split_info"": {""partition"": 3, ""start_offset"": 1, ""stop_offset"": null, ""topic"": ""shared_source""}, ""split_type"": ""kafka""}"
-# Same as above, the result is still non-deterministic: Some partitions may be: "{""Backfilling"": ""1""}"
+# The result is non-deterministic:
+# If the upstream row comes before the backfill row, it will be ignored, and the result state is "{""Backfilling"": ""1""}".
+# If the upstream row comes after the backfill row, the result state is Finished.
# Uncomment below and run manually to see the result.
# system ok
diff --git a/scripts/source/prepare_ci_pubsub/src/main.rs b/e2e_test/source_inline/pubsub/prepare-data.rs
old mode 100644
new mode 100755
similarity index 60%
rename from scripts/source/prepare_ci_pubsub/src/main.rs
rename to e2e_test/source_inline/pubsub/prepare-data.rs
index 5357e2d4b6065..81bd1ba73e41e
--- a/scripts/source/prepare_ci_pubsub/src/main.rs
+++ b/e2e_test/source_inline/pubsub/prepare-data.rs
@@ -1,6 +1,19 @@
-use std::fs::File;
-use std::io::prelude::*;
-use std::io::BufReader;
+#!/usr/bin/env -S cargo -Zscript
+```cargo
+[dependencies]
+anyhow = "1"
+google-cloud-googleapis = { version = "0.12", features = ["pubsub"] }
+google-cloud-pubsub = "0.24"
+tokio = { version = "0.2", package = "madsim-tokio", features = [
+ "rt",
+ "rt-multi-thread",
+ "sync",
+ "macros",
+ "time",
+ "signal",
+ "fs",
+] }
+```
use google_cloud_googleapis::pubsub::v1::PubsubMessage;
use google_cloud_pubsub::client::Client;
@@ -38,21 +51,11 @@ async fn main() -> anyhow::Result<()> {
.await?;
}
- let path = std::env::current_exe()?
- .parent()
- .and_then(|p| p.parent())
- .and_then(|p| p.parent())
- .unwrap()
- .join("scripts/source/test_data/pubsub_1_test_topic.1");
-
- let file = File::open(path)?;
- let file = BufReader::new(file);
-
let publisher = topic.new_publisher(Default::default());
- for line in file.lines().map_while(Result::ok) {
+ for line in DATA.lines() {
let a = publisher
.publish(PubsubMessage {
- data: line.clone().into_bytes(),
+ data: line.to_string().into_bytes(),
..Default::default()
})
.await;
@@ -62,3 +65,25 @@ async fn main() -> anyhow::Result<()> {
Ok(())
}
+
+const DATA: &str = r#"{"v1":1,"v2":"name0"}
+{"v1":2,"v2":"name0"}
+{"v1":6,"v2":"name3"}
+{"v1":0,"v2":"name5"}
+{"v1":5,"v2":"name8"}
+{"v1":6,"v2":"name4"}
+{"v1":8,"v2":"name9"}
+{"v1":9,"v2":"name2"}
+{"v1":4,"v2":"name6"}
+{"v1":5,"v2":"name3"}
+{"v1":8,"v2":"name8"}
+{"v1":9,"v2":"name2"}
+{"v1":2,"v2":"name3"}
+{"v1":4,"v2":"name7"}
+{"v1":7,"v2":"name0"}
+{"v1":0,"v2":"name9"}
+{"v1":3,"v2":"name2"}
+{"v1":7,"v2":"name5"}
+{"v1":1,"v2":"name7"}
+{"v1":3,"v2":"name9"}
+"#;
diff --git a/e2e_test/source/basic/pubsub.slt b/e2e_test/source_inline/pubsub/pubsub.slt
similarity index 65%
rename from e2e_test/source/basic/pubsub.slt
rename to e2e_test/source_inline/pubsub/pubsub.slt
index b245d9b2aea89..cfb5f551cf36a 100644
--- a/e2e_test/source/basic/pubsub.slt
+++ b/e2e_test/source_inline/pubsub/pubsub.slt
@@ -1,15 +1,20 @@
+control substitution on
+
+system ok
+e2e_test/source_inline/pubsub/prepare-data.rs
+
# fail with invalid emulator_host
-statement error
+statement error failed to lookup address information
CREATE TABLE s1 (v1 int, v2 varchar) WITH (
+ connector = 'google_pubsub',
pubsub.subscription = 'test-subscription-1',
pubsub.emulator_host = 'invalid_host:5981'
) FORMAT PLAIN ENCODE JSON;
statement ok
CREATE TABLE s1 (v1 int, v2 varchar) WITH (
- connector = 'google_pubsub',
+ ${RISEDEV_PUBSUB_WITH_OPTIONS_COMMON},
pubsub.subscription = 'test-subscription-1',
- pubsub.emulator_host = 'localhost:5980'
) FORMAT PLAIN ENCODE JSON;
statement ok
@@ -18,29 +23,25 @@ SELECT * FROM s1;
statement ok
DROP TABLE s1;
-# fail with invalid subscription
-statement error
+statement error subscription test-subscription-not-exist does not exist
CREATE TABLE s2 (v1 int, v2 varchar) WITH (
- connector = 'google_pubsub',
- pubsub.subscription = 'test-subscription-not-2',
- pubsub.emulator_host = 'localhost:5980'
+ ${RISEDEV_PUBSUB_WITH_OPTIONS_COMMON},
+ pubsub.subscription = 'test-subscription-not-exist',
) FORMAT PLAIN ENCODE JSON;
statement ok
CREATE TABLE s2 (v1 int, v2 varchar) WITH (
- connector = 'google_pubsub',
+ ${RISEDEV_PUBSUB_WITH_OPTIONS_COMMON},
pubsub.subscription = 'test-subscription-2',
- pubsub.emulator_host = 'localhost:5980'
) FORMAT PLAIN ENCODE JSON;
# fail if both start_offset and start_snapshot are provided
-statement error
+statement error specify at most one of start_offset or start_snapshot
CREATE TABLE s3 (v1 int, v2 varchar) WITH (
- connector = 'google_pubsub',
+ ${RISEDEV_PUBSUB_WITH_OPTIONS_COMMON},
pubsub.subscription = 'test-subscription-3',
- pubsub.emulator_host = 'localhost:5980',
- pubsub.start_offset = "121212",
- pubsub.start_snapshot = "snapshot-that-doesnt-exist"
+ pubsub.start_offset.nanos = '121212',
+ pubsub.start_snapshot = 'snapshot-that-doesnt-exist'
) FORMAT PLAIN ENCODE JSON;
# wait for source
diff --git a/e2e_test/udf/external_udf.slt b/e2e_test/udf/external_udf.slt
index 096a605709d67..7a38506f81563 100644
--- a/e2e_test/udf/external_udf.slt
+++ b/e2e_test/udf/external_udf.slt
@@ -1,7 +1,7 @@
# Before running this test:
# python3 e2e_test/udf/test.py
# or:
-# cd java/udf-example && mvn package && java -jar target/risingwave-udf-example.jar
+# cd e2e_test/udf/java && mvn package && java -jar target/risingwave-udf-example.jar
# Create a function.
statement ok
diff --git a/java/udf-example/README.md b/e2e_test/udf/java/README.md
similarity index 100%
rename from java/udf-example/README.md
rename to e2e_test/udf/java/README.md
diff --git a/java/udf-example/pom.xml b/e2e_test/udf/java/pom.xml
similarity index 86%
rename from java/udf-example/pom.xml
rename to e2e_test/udf/java/pom.xml
index 8bf51cd108128..7ecd7c54dca17 100644
--- a/java/udf-example/pom.xml
+++ b/e2e_test/udf/java/pom.xml
@@ -5,17 +5,9 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4.0.0
-
-
- com.risingwave
- risingwave-java-root
- 0.1.0-SNAPSHOT
- ../pom.xml
-
-
com.risingwaverisingwave-udf-example
- 0.1.1-SNAPSHOT
+ 0.1.0-SNAPSHOTudf-examplehttps://docs.risingwave.com/docs/current/udf-java
@@ -31,7 +23,7 @@
com.risingwaverisingwave-udf
- 0.1.3-SNAPSHOT
+ 0.2.0com.google.code.gson
diff --git a/java/udf-example/src/main/java/com/example/UdfExample.java b/e2e_test/udf/java/src/main/java/com/example/UdfExample.java
similarity index 99%
rename from java/udf-example/src/main/java/com/example/UdfExample.java
rename to e2e_test/udf/java/src/main/java/com/example/UdfExample.java
index 883dc5035514c..1702e244bf1ff 100644
--- a/java/udf-example/src/main/java/com/example/UdfExample.java
+++ b/e2e_test/udf/java/src/main/java/com/example/UdfExample.java
@@ -33,7 +33,7 @@
public class UdfExample {
public static void main(String[] args) throws IOException {
- try (var server = new UdfServer("0.0.0.0", 8815)) {
+ try (var server = new UdfServer("localhost", 8815)) {
server.addFunction("int_42", new Int42());
server.addFunction("float_to_decimal", new FloatToDecimal());
server.addFunction("sleep", new Sleep());
diff --git a/e2e_test/udf/requirements.txt b/e2e_test/udf/requirements.txt
index 8642e2b1ec254..36688db1ed1ee 100644
--- a/e2e_test/udf/requirements.txt
+++ b/e2e_test/udf/requirements.txt
@@ -1,2 +1,3 @@
flask
-waitress
\ No newline at end of file
+waitress
+arrow_udf==0.2.1
\ No newline at end of file
diff --git a/e2e_test/udf/test.py b/e2e_test/udf/test.py
index 6195476a80004..4443a81a6e74d 100644
--- a/e2e_test/udf/test.py
+++ b/e2e_test/udf/test.py
@@ -19,9 +19,7 @@
from typing import Iterator, List, Optional, Tuple, Any
from decimal import Decimal
-sys.path.append("src/expr/udf/python") # noqa
-
-from risingwave.udf import udf, udtf, UdfServer
+from arrow_udf import udf, udtf, UdfServer
@udf(input_types=[], result_type="INT")
@@ -47,13 +45,21 @@ def gcd3(x: int, y: int, z: int) -> int:
return gcd(gcd(x, y), z)
-@udf(input_types=["BYTEA"], result_type="STRUCT")
+@udf(
+ input_types=["BYTEA"],
+ result_type="STRUCT",
+)
def extract_tcp_info(tcp_packet: bytes):
src_addr, dst_addr = struct.unpack("!4s4s", tcp_packet[12:20])
src_port, dst_port = struct.unpack("!HH", tcp_packet[20:24])
src_addr = socket.inet_ntoa(src_addr)
dst_addr = socket.inet_ntoa(dst_addr)
- return src_addr, dst_addr, src_port, dst_port
+ return {
+ "src_addr": src_addr,
+ "dst_addr": dst_addr,
+ "src_port": src_port,
+ "dst_port": dst_port,
+ }
@udtf(input_types="INT", result_types="INT")
@@ -84,7 +90,7 @@ def hex_to_dec(hex: Optional[str]) -> Optional[Decimal]:
return dec
-@udf(input_types=["FLOAT8"], result_type="DECIMAL")
+@udf(input_types=["FLOAT64"], result_type="DECIMAL")
def float_to_decimal(f: float) -> Decimal:
return Decimal(f)
@@ -120,21 +126,49 @@ def jsonb_array_identity(list: List[Any]) -> List[Any]:
return list
-@udf(input_types="STRUCT", result_type="STRUCT")
+@udf(
+ input_types="STRUCT",
+ result_type="STRUCT",
+)
def jsonb_array_struct_identity(v: Tuple[List[Any], int]) -> Tuple[List[Any], int]:
return v
-ALL_TYPES = "BOOLEAN,SMALLINT,INT,BIGINT,FLOAT4,FLOAT8,DECIMAL,DATE,TIME,TIMESTAMP,INTERVAL,VARCHAR,BYTEA,JSONB".split(
- ","
-) + [
- "STRUCT"
-]
-
-
@udf(
- input_types=ALL_TYPES,
- result_type=f"struct<{','.join(ALL_TYPES)}>",
+ input_types=[
+ "boolean",
+ "int16",
+ "int32",
+ "int64",
+ "float32",
+ "float64",
+ "decimal",
+ "date32",
+ "time64",
+ "timestamp",
+ "interval",
+ "string",
+ "binary",
+ "json",
+ "struct",
+ ],
+ result_type="""struct<
+ boolean: boolean,
+ int16: int16,
+ int32: int32,
+ int64: int64,
+ float32: float32,
+ float64: float64,
+ decimal: decimal,
+ date32: date32,
+ time64: time64,
+ timestamp: timestamp,
+ interval: interval,
+ string: string,
+ binary: binary,
+ json: json,
+ struct: struct,
+ >""",
)
def return_all(
bool,
@@ -153,28 +187,60 @@ def return_all(
jsonb,
struct,
):
- return (
- bool,
- i16,
- i32,
- i64,
- f32,
- f64,
- decimal,
- date,
- time,
- timestamp,
- interval,
- varchar,
- bytea,
- jsonb,
- struct,
- )
+ return {
+ "boolean": bool,
+ "int16": i16,
+ "int32": i32,
+ "int64": i64,
+ "float32": f32,
+ "float64": f64,
+ "decimal": decimal,
+ "date32": date,
+ "time64": time,
+ "timestamp": timestamp,
+ "interval": interval,
+ "string": varchar,
+ "binary": bytea,
+ "json": jsonb,
+ "struct": struct,
+ }
@udf(
- input_types=[t + "[]" for t in ALL_TYPES],
- result_type=f"struct<{','.join(t + '[]' for t in ALL_TYPES)}>",
+ input_types=[
+ "boolean[]",
+ "int16[]",
+ "int32[]",
+ "int64[]",
+ "float32[]",
+ "float64[]",
+ "decimal[]",
+ "date32[]",
+ "time64[]",
+ "timestamp[]",
+ "interval[]",
+ "string[]",
+ "binary[]",
+ "json[]",
+ "struct[]",
+ ],
+ result_type="""struct<
+ boolean: boolean[],
+ int16: int16[],
+ int32: int32[],
+ int64: int64[],
+ float32: float32[],
+ float64: float64[],
+ decimal: decimal[],
+ date32: date32[],
+ time64: time64[],
+ timestamp: timestamp[],
+ interval: interval[],
+ string: string[],
+ binary: binary[],
+ json: json[],
+ struct: struct[],
+ >""",
)
def return_all_arrays(
bool,
@@ -193,23 +259,23 @@ def return_all_arrays(
jsonb,
struct,
):
- return (
- bool,
- i16,
- i32,
- i64,
- f32,
- f64,
- decimal,
- date,
- time,
- timestamp,
- interval,
- varchar,
- bytea,
- jsonb,
- struct,
- )
+ return {
+ "boolean": bool,
+ "int16": i16,
+ "int32": i32,
+ "int64": i64,
+ "float32": f32,
+ "float64": f64,
+ "decimal": decimal,
+ "date32": date,
+ "time64": time,
+ "timestamp": timestamp,
+ "interval": interval,
+ "string": varchar,
+ "binary": bytea,
+ "json": jsonb,
+ "struct": struct,
+ }
if __name__ == "__main__":
diff --git a/e2e_test/udf/wasm/Cargo.toml b/e2e_test/udf/wasm/Cargo.toml
index 250bd8132ca53..54c7da45b1af8 100644
--- a/e2e_test/udf/wasm/Cargo.toml
+++ b/e2e_test/udf/wasm/Cargo.toml
@@ -8,7 +8,7 @@ edition = "2021"
crate-type = ["cdylib"]
[dependencies]
-arrow-udf = "0.2"
+arrow-udf = "0.3"
genawaiter = "0.99"
rust_decimal = "1"
serde_json = "1"
diff --git a/integration_tests/ad-click/docker-compose.yml b/integration_tests/ad-click/docker-compose.yml
index f1a2bbc8419cb..62d5c3fb76517 100644
--- a/integration_tests/ad-click/docker-compose.yml
+++ b/integration_tests/ad-click/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -37,7 +37,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/ad-ctr/docker-compose.yml b/integration_tests/ad-ctr/docker-compose.yml
index fb297a8fec8ab..0298f014db11a 100644
--- a/integration_tests/ad-ctr/docker-compose.yml
+++ b/integration_tests/ad-ctr/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -37,7 +37,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/big-query-sink/docker-compose.yml b/integration_tests/big-query-sink/docker-compose.yml
index ca16ca8f90b3b..6c93903df8bba 100644
--- a/integration_tests/big-query-sink/docker-compose.yml
+++ b/integration_tests/big-query-sink/docker-compose.yml
@@ -7,10 +7,10 @@ services:
service: risingwave-standalone
volumes:
- "../../gcp-rwctest.json:/gcp-rwctest.json"
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -32,7 +32,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/cassandra-and-scylladb-sink/docker-compose.yml b/integration_tests/cassandra-and-scylladb-sink/docker-compose.yml
index 425e086c56ba0..0fa224ddab9d0 100644
--- a/integration_tests/cassandra-and-scylladb-sink/docker-compose.yml
+++ b/integration_tests/cassandra-and-scylladb-sink/docker-compose.yml
@@ -22,10 +22,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -45,7 +45,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/cdn-metrics/docker-compose.yml b/integration_tests/cdn-metrics/docker-compose.yml
index 69eb63a448eb2..87adef35f8cf4 100644
--- a/integration_tests/cdn-metrics/docker-compose.yml
+++ b/integration_tests/cdn-metrics/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -37,7 +37,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/citus-cdc/docker-compose.yml b/integration_tests/citus-cdc/docker-compose.yml
index d77e0e80dbfa9..6ce8341047ee4 100644
--- a/integration_tests/citus-cdc/docker-compose.yml
+++ b/integration_tests/citus-cdc/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -90,7 +90,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/clickhouse-sink/docker-compose.yml b/integration_tests/clickhouse-sink/docker-compose.yml
index 76b0f7fe607f5..1cf61ff8dfa30 100644
--- a/integration_tests/clickhouse-sink/docker-compose.yml
+++ b/integration_tests/clickhouse-sink/docker-compose.yml
@@ -17,10 +17,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -36,7 +36,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/clickstream/docker-compose.yml b/integration_tests/clickstream/docker-compose.yml
index faf66b770af7d..857c93f0d7577 100644
--- a/integration_tests/clickstream/docker-compose.yml
+++ b/integration_tests/clickstream/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -37,7 +37,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/client-library/csharp/csharp.csproj b/integration_tests/client-library/csharp/csharp.csproj
index 2d7ae2b7e2f48..f57b4028aabe9 100644
--- a/integration_tests/client-library/csharp/csharp.csproj
+++ b/integration_tests/client-library/csharp/csharp.csproj
@@ -12,7 +12,7 @@
-
+
diff --git a/integration_tests/client-library/docker-compose.yml b/integration_tests/client-library/docker-compose.yml
index d2e4663fdc378..c6868eaa42140 100644
--- a/integration_tests/client-library/docker-compose.yml
+++ b/integration_tests/client-library/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -63,7 +63,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/cockroach-sink/docker-compose.yml b/integration_tests/cockroach-sink/docker-compose.yml
index a205dca9e19cf..b6b0c8d9e6c5f 100644
--- a/integration_tests/cockroach-sink/docker-compose.yml
+++ b/integration_tests/cockroach-sink/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -38,7 +38,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/debezium-mongo/docker-compose.yaml b/integration_tests/debezium-mongo/docker-compose.yaml
index 4ae90ea22eb94..886e4622ad6ac 100644
--- a/integration_tests/debezium-mongo/docker-compose.yaml
+++ b/integration_tests/debezium-mongo/docker-compose.yaml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
diff --git a/integration_tests/debezium-mysql/docker-compose.yml b/integration_tests/debezium-mysql/docker-compose.yml
index 77ff7689b78f8..3462e5e3d09d1 100644
--- a/integration_tests/debezium-mysql/docker-compose.yml
+++ b/integration_tests/debezium-mysql/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -78,7 +78,7 @@ volumes:
external: false
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/debezium-postgres/docker-compose.yml b/integration_tests/debezium-postgres/docker-compose.yml
index ef9094e432f6f..c81c33fb3e455 100644
--- a/integration_tests/debezium-postgres/docker-compose.yml
+++ b/integration_tests/debezium-postgres/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -88,7 +88,7 @@ volumes:
external: false
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/debezium-sqlserver/docker-compose.yml b/integration_tests/debezium-sqlserver/docker-compose.yml
index 848d2dc5393ab..e88cb36e548b7 100644
--- a/integration_tests/debezium-sqlserver/docker-compose.yml
+++ b/integration_tests/debezium-sqlserver/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -76,7 +76,7 @@ volumes:
external: false
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/deltalake-sink/docker-compose.yml b/integration_tests/deltalake-sink/docker-compose.yml
index a486e2336b502..70b1e3c22e325 100644
--- a/integration_tests/deltalake-sink/docker-compose.yml
+++ b/integration_tests/deltalake-sink/docker-compose.yml
@@ -13,10 +13,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -32,7 +32,7 @@ services:
volumes:
compute-node-0:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/doris-sink/docker-compose.yml b/integration_tests/doris-sink/docker-compose.yml
index fc7cfd751e989..e1a7f1ef5e90e 100644
--- a/integration_tests/doris-sink/docker-compose.yml
+++ b/integration_tests/doris-sink/docker-compose.yml
@@ -35,10 +35,10 @@ services:
networks:
mynetwork:
ipv4_address: 172.21.0.4
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
mynetwork:
ipv4_address: 172.21.0.5
@@ -86,7 +86,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/elasticsearch-sink/docker-compose.yml b/integration_tests/elasticsearch-sink/docker-compose.yml
index 195e8d0070eb8..c885b7136a606 100644
--- a/integration_tests/elasticsearch-sink/docker-compose.yml
+++ b/integration_tests/elasticsearch-sink/docker-compose.yml
@@ -29,10 +29,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -48,7 +48,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/feature-store/docker-compose.yml b/integration_tests/feature-store/docker-compose.yml
index b1767f8d04864..71633cce20a19 100644
--- a/integration_tests/feature-store/docker-compose.yml
+++ b/integration_tests/feature-store/docker-compose.yml
@@ -73,10 +73,10 @@ services:
[kafka,meta-node-0,frontend-node-0]
volumes:
- ".log:/opt/feature-store/.log"
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose-distributed.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose-distributed.yml
@@ -98,7 +98,7 @@ services:
file: ../../docker/docker-compose-distributed.yml
service: prometheus-0
volumes:
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/http-sink/docker-compose.yml b/integration_tests/http-sink/docker-compose.yml
index 8fba5ff352dc0..12546c4f5dd28 100644
--- a/integration_tests/http-sink/docker-compose.yml
+++ b/integration_tests/http-sink/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -24,7 +24,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/iceberg-cdc/docker-compose.yml b/integration_tests/iceberg-cdc/docker-compose.yml
index e483003907746..703571c2f10a5 100644
--- a/integration_tests/iceberg-cdc/docker-compose.yml
+++ b/integration_tests/iceberg-cdc/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -127,7 +127,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/iceberg-sink/docker-compose.yml b/integration_tests/iceberg-sink/docker-compose.yml
index c7182b873d85b..91cec5dd24430 100644
--- a/integration_tests/iceberg-sink/docker-compose.yml
+++ b/integration_tests/iceberg-sink/docker-compose.yml
@@ -53,10 +53,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -161,7 +161,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml b/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml
index 3314083c1077b..449d6043e6e95 100644
--- a/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml
+++ b/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml
@@ -75,10 +75,10 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
@@ -106,7 +106,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml b/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml
index 3f2bb75479563..714fcdf6e0b95 100644
--- a/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml
+++ b/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml
@@ -76,17 +76,17 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/iceberg-sink2/docker/rest/docker-compose.yml b/integration_tests/iceberg-sink2/docker/rest/docker-compose.yml
index 025db74c23cae..ee3e1da9b62da 100644
--- a/integration_tests/iceberg-sink2/docker/rest/docker-compose.yml
+++ b/integration_tests/iceberg-sink2/docker/rest/docker-compose.yml
@@ -80,17 +80,17 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml b/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml
index 8d6d6f72d53c5..4e0ec11eff500 100644
--- a/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml
+++ b/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml
@@ -56,17 +56,17 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/iceberg-source/docker/hive/docker-compose.yml b/integration_tests/iceberg-source/docker/hive/docker-compose.yml
index 3314083c1077b..449d6043e6e95 100644
--- a/integration_tests/iceberg-source/docker/hive/docker-compose.yml
+++ b/integration_tests/iceberg-source/docker/hive/docker-compose.yml
@@ -75,10 +75,10 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
@@ -106,7 +106,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/iceberg-source/docker/jdbc/docker-compose.yml b/integration_tests/iceberg-source/docker/jdbc/docker-compose.yml
index 3f2bb75479563..714fcdf6e0b95 100644
--- a/integration_tests/iceberg-source/docker/jdbc/docker-compose.yml
+++ b/integration_tests/iceberg-source/docker/jdbc/docker-compose.yml
@@ -76,17 +76,17 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/iceberg-source/docker/rest/docker-compose.yml b/integration_tests/iceberg-source/docker/rest/docker-compose.yml
index 025db74c23cae..ee3e1da9b62da 100644
--- a/integration_tests/iceberg-source/docker/rest/docker-compose.yml
+++ b/integration_tests/iceberg-source/docker/rest/docker-compose.yml
@@ -80,17 +80,17 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/iceberg-source/docker/storage/docker-compose.yml b/integration_tests/iceberg-source/docker/storage/docker-compose.yml
index 8d6d6f72d53c5..4e0ec11eff500 100644
--- a/integration_tests/iceberg-source/docker/storage/docker-compose.yml
+++ b/integration_tests/iceberg-source/docker/storage/docker-compose.yml
@@ -56,17 +56,17 @@ services:
networks:
iceberg_net:
- etcd-0:
+ postgres-0:
extends:
file: ../../../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
networks:
iceberg_net:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
minio-0:
external: false
diff --git a/integration_tests/kafka-cdc-sink/docker-compose.yml b/integration_tests/kafka-cdc-sink/docker-compose.yml
index c8c31d7762cf6..81f892354b8a0 100644
--- a/integration_tests/kafka-cdc-sink/docker-compose.yml
+++ b/integration_tests/kafka-cdc-sink/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -133,7 +133,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/kafka-cdc/docker-compose.yml b/integration_tests/kafka-cdc/docker-compose.yml
index d62fa2acd9df3..f42c4399178d0 100644
--- a/integration_tests/kafka-cdc/docker-compose.yml
+++ b/integration_tests/kafka-cdc/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -38,7 +38,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/kinesis-s3-source/docker-compose.yml b/integration_tests/kinesis-s3-source/docker-compose.yml
index 9108537309bd7..dc91e2095cbde 100644
--- a/integration_tests/kinesis-s3-source/docker-compose.yml
+++ b/integration_tests/kinesis-s3-source/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -50,7 +50,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/livestream/docker-compose.yml b/integration_tests/livestream/docker-compose.yml
index 1f3e0736a4def..8dffce371562a 100644
--- a/integration_tests/livestream/docker-compose.yml
+++ b/integration_tests/livestream/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -37,7 +37,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/mindsdb/docker-compose.yml b/integration_tests/mindsdb/docker-compose.yml
index c6eeec5c75ee6..40fe4e6192fa3 100644
--- a/integration_tests/mindsdb/docker-compose.yml
+++ b/integration_tests/mindsdb/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -52,7 +52,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/mongodb-cdc/docker-compose.yaml b/integration_tests/mongodb-cdc/docker-compose.yaml
index 60d477945b38b..eaf519b440569 100644
--- a/integration_tests/mongodb-cdc/docker-compose.yaml
+++ b/integration_tests/mongodb-cdc/docker-compose.yaml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -37,7 +37,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/mqtt/docker-compose.yml b/integration_tests/mqtt/docker-compose.yml
index 9db7e7c04f8fc..04f73404be6aa 100644
--- a/integration_tests/mqtt/docker-compose.yml
+++ b/integration_tests/mqtt/docker-compose.yml
@@ -13,10 +13,10 @@ services:
- echo "running command"; printf 'allow_anonymous true\nlistener 1883 0.0.0.0' > /mosquitto/config/mosquitto.conf; echo "starting service..."; cat /mosquitto/config/mosquitto.conf;/docker-entrypoint.sh;/usr/sbin/mosquitto -c /mosquitto/config/mosquitto.conf
ports:
- 1883:1883
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -36,7 +36,7 @@ services:
volumes:
compute-node-0:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/mysql-cdc/docker-compose.yml b/integration_tests/mysql-cdc/docker-compose.yml
index 5cd3003669d7b..c0bba2ccc008b 100644
--- a/integration_tests/mysql-cdc/docker-compose.yml
+++ b/integration_tests/mysql-cdc/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -70,7 +70,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/mysql-sink/docker-compose.yml b/integration_tests/mysql-sink/docker-compose.yml
index 97d3d78ce4cb0..3e1fc5544276f 100644
--- a/integration_tests/mysql-sink/docker-compose.yml
+++ b/integration_tests/mysql-sink/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -41,7 +41,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/nats/docker-compose.yml b/integration_tests/nats/docker-compose.yml
index 2010ec2d8b4ed..891c865744747 100644
--- a/integration_tests/nats/docker-compose.yml
+++ b/integration_tests/nats/docker-compose.yml
@@ -10,10 +10,10 @@ services:
ports:
- "4222:4222"
command: -js
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -50,7 +50,7 @@ services:
volumes:
compute-node-0:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/pinot-sink/docker-compose.yml b/integration_tests/pinot-sink/docker-compose.yml
index 222c1d7e39735..fc4ad250880ce 100644
--- a/integration_tests/pinot-sink/docker-compose.yml
+++ b/integration_tests/pinot-sink/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -84,7 +84,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/postgres-cdc/docker-compose.yml b/integration_tests/postgres-cdc/docker-compose.yml
index 1898d20a8c9ae..7650da0779178 100644
--- a/integration_tests/postgres-cdc/docker-compose.yml
+++ b/integration_tests/postgres-cdc/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -30,7 +30,7 @@ services:
- POSTGRES_PASSWORD=123456
- POSTGRES_DB=mydb
ports:
- - 8432:5432
+ - 5432:5432 # NOTE(review): confirm no host-port clash with the postgres-0 meta-store service this patch adds to the same compose file
healthcheck:
test: [ "CMD-SHELL", "pg_isready --username=myuser --dbname=mydb" ]
interval: 5s
@@ -78,7 +78,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/postgres-sink/docker-compose.yml b/integration_tests/postgres-sink/docker-compose.yml
index e443965c2e5be..4d8638fdc3c07 100644
--- a/integration_tests/postgres-sink/docker-compose.yml
+++ b/integration_tests/postgres-sink/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -44,7 +44,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/presto-trino/docker-compose.yml b/integration_tests/presto-trino/docker-compose.yml
index b12785139ebad..a56135a4ae597 100644
--- a/integration_tests/presto-trino/docker-compose.yml
+++ b/integration_tests/presto-trino/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -48,7 +48,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/prometheus/docker-compose.yml b/integration_tests/prometheus/docker-compose.yml
index 0beb75839d4cd..de3249df9253a 100644
--- a/integration_tests/prometheus/docker-compose.yml
+++ b/integration_tests/prometheus/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -67,7 +67,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/prometheus/prometheus.yaml b/integration_tests/prometheus/prometheus.yaml
index c70ddd434064e..7d955a835b32d 100644
--- a/integration_tests/prometheus/prometheus.yaml
+++ b/integration_tests/prometheus/prometheus.yaml
@@ -27,7 +27,7 @@ scrape_configs:
- job_name: etcd
static_configs:
- - targets: ["etcd-0:2379"]
+ - targets: ["postgres-0:2379"] # NOTE(review): job is still named 'etcd' and 2379 is etcd's port — Postgres does not serve Prometheus metrics on 2379; confirm this scrape job should be removed or repointed at a postgres exporter
- job_name: redpanda
static_configs:
diff --git a/integration_tests/redis-sink/docker-compose.yml b/integration_tests/redis-sink/docker-compose.yml
index 0fd33048a29bf..dce27ae99895c 100644
--- a/integration_tests/redis-sink/docker-compose.yml
+++ b/integration_tests/redis-sink/docker-compose.yml
@@ -16,10 +16,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -39,7 +39,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/schema-registry/docker-compose.yml b/integration_tests/schema-registry/docker-compose.yml
index 3020a96790fbf..80d4b90e4f7d2 100644
--- a/integration_tests/schema-registry/docker-compose.yml
+++ b/integration_tests/schema-registry/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -62,7 +62,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/starrocks-sink/docker-compose.yml b/integration_tests/starrocks-sink/docker-compose.yml
index 81ef7c277dad0..70918713643d6 100644
--- a/integration_tests/starrocks-sink/docker-compose.yml
+++ b/integration_tests/starrocks-sink/docker-compose.yml
@@ -37,10 +37,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -62,7 +62,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/superset/docker-compose.yml b/integration_tests/superset/docker-compose.yml
index 271e79755aab3..746a80fb9a064 100644
--- a/integration_tests/superset/docker-compose.yml
+++ b/integration_tests/superset/docker-compose.yml
@@ -15,10 +15,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -112,7 +112,7 @@ volumes:
external: false
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/tidb-cdc-sink/docker-compose.yml b/integration_tests/tidb-cdc-sink/docker-compose.yml
index 9177936d47ab8..f1b4cb0ebdd7b 100644
--- a/integration_tests/tidb-cdc-sink/docker-compose.yml
+++ b/integration_tests/tidb-cdc-sink/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -204,7 +204,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/twitter-pulsar/docker-compose.yml b/integration_tests/twitter-pulsar/docker-compose.yml
index 8061425d48d38..d684be6b876a8 100644
--- a/integration_tests/twitter-pulsar/docker-compose.yml
+++ b/integration_tests/twitter-pulsar/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -42,7 +42,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/twitter/docker-compose.yml b/integration_tests/twitter/docker-compose.yml
index e5637b7f2664f..37b2723cb8e50 100644
--- a/integration_tests/twitter/docker-compose.yml
+++ b/integration_tests/twitter/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -37,7 +37,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/upsert-avro/docker-compose.yml b/integration_tests/upsert-avro/docker-compose.yml
index 695c8f8fcb043..291528f6fb319 100644
--- a/integration_tests/upsert-avro/docker-compose.yml
+++ b/integration_tests/upsert-avro/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose.yml
@@ -43,7 +43,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/integration_tests/vector/docker-compose.yml b/integration_tests/vector/docker-compose.yml
index 2179cd66542c4..4c2e6100b714a 100644
--- a/integration_tests/vector/docker-compose.yml
+++ b/integration_tests/vector/docker-compose.yml
@@ -5,10 +5,10 @@ services:
extends:
file: ../../docker/docker-compose.yml
service: risingwave-standalone
- etcd-0:
+ postgres-0:
extends:
file: ../../docker/docker-compose-distributed.yml
- service: etcd-0
+ service: postgres-0
grafana-0:
extends:
file: ../../docker/docker-compose-distributed.yml
@@ -34,7 +34,7 @@ services:
volumes:
risingwave-standalone:
external: false
- etcd-0:
+ postgres-0:
external: false
grafana-0:
external: false
diff --git a/java/connector-node/python-client/.gitignore b/java/connector-node/python-client/.gitignore
index 600d2d33badf4..536c32383d754 100644
--- a/java/connector-node/python-client/.gitignore
+++ b/java/connector-node/python-client/.gitignore
@@ -1 +1,2 @@
-.vscode
\ No newline at end of file
+.vscode
+sink-client-venv/
diff --git a/java/connector-node/python-client/integration_tests.py b/java/connector-node/python-client/integration_tests.py
index b16b5eaf34ad4..909859afc218a 100644
--- a/java/connector-node/python-client/integration_tests.py
+++ b/java/connector-node/python-client/integration_tests.py
@@ -117,7 +117,7 @@ def load_stream_chunk_payload(input_file):
return payloads
-def test_sink(prop, format, payload_input, table_schema, is_coordinated=False):
+def test_sink(prop, payload_input, table_schema, is_coordinated=False):
sink_param = connector_service_pb2.SinkParam(
sink_id=0,
properties=prop,
@@ -128,7 +128,6 @@ def test_sink(prop, format, payload_input, table_schema, is_coordinated=False):
request_list = [
connector_service_pb2.SinkWriterStreamRequest(
start=connector_service_pb2.SinkWriterStreamRequest.StartSink(
- format=format,
sink_param=sink_param,
)
)
@@ -291,9 +290,6 @@ def test_stream_chunk_data_format(param):
parser.add_argument(
"--deltalake_sink", action="store_true", help="run deltalake sink test"
)
- parser.add_argument(
- "--input_file", default="./data/sink_input.json", help="input data to run tests"
- )
parser.add_argument(
"--input_binary_file",
default="./data/sink_input",
@@ -302,29 +298,18 @@ def test_stream_chunk_data_format(param):
parser.add_argument(
"--es_sink", action="store_true", help="run elasticsearch sink test"
)
- parser.add_argument(
- "--data_format_use_json", default=True, help="choose json or streamchunk"
- )
args = parser.parse_args()
- use_json = args.data_format_use_json == True or args.data_format_use_json == "True"
- if use_json:
- payload = load_json_payload(args.input_file)
- format = connector_service_pb2.SinkPayloadFormat.JSON
- else:
- payload = load_stream_chunk_payload(args.input_binary_file)
- format = connector_service_pb2.SinkPayloadFormat.STREAM_CHUNK
+ payload = load_stream_chunk_payload(args.input_binary_file)
# stream chunk format
if args.stream_chunk_format_test:
param = {
- "format": format,
"payload_input": payload,
"table_schema": make_mock_schema_stream_chunk(),
}
test_stream_chunk_data_format(param)
param = {
- "format": format,
"payload_input": payload,
"table_schema": make_mock_schema(),
}
@@ -337,7 +322,5 @@ def test_stream_chunk_data_format(param):
test_deltalake_sink(param)
if args.es_sink:
test_elasticsearch_sink(param)
-
- # json format
if args.upsert_iceberg_sink:
test_upsert_iceberg_sink(param)
diff --git a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/JsonDeserializer.java b/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/JsonDeserializer.java
deleted file mode 100644
index c941b09efe95c..0000000000000
--- a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/JsonDeserializer.java
+++ /dev/null
@@ -1,246 +0,0 @@
-// Copyright 2024 RisingWave Labs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.risingwave.connector;
-
-import static io.grpc.Status.INVALID_ARGUMENT;
-
-import com.google.gson.Gson;
-import com.risingwave.connector.api.TableSchema;
-import com.risingwave.connector.api.sink.*;
-import com.risingwave.proto.ConnectorServiceProto;
-import com.risingwave.proto.ConnectorServiceProto.SinkWriterStreamRequest.WriteBatch.JsonPayload;
-import com.risingwave.proto.Data;
-import java.math.BigDecimal;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.LocalTime;
-import java.time.OffsetDateTime;
-import java.util.Base64;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-public class JsonDeserializer implements Deserializer {
- private final TableSchema tableSchema;
-
- public JsonDeserializer(TableSchema tableSchema) {
- this.tableSchema = tableSchema;
- }
-
- // Encoding here should be consistent with `datum_to_json_object()` in
- // src/connector/src/sink/mod.rs
- @Override
- public CloseableIterable deserialize(
- ConnectorServiceProto.SinkWriterStreamRequest.WriteBatch writeBatch) {
- if (!writeBatch.hasJsonPayload()) {
- throw INVALID_ARGUMENT
- .withDescription("expected JsonPayload, got " + writeBatch.getPayloadCase())
- .asRuntimeException();
- }
- JsonPayload jsonPayload = writeBatch.getJsonPayload();
- return new TrivialCloseIterable<>(
- jsonPayload.getRowOpsList().stream()
- .map(
- rowOp -> {
- Map columnValues =
- new Gson().fromJson(rowOp.getLine(), Map.class);
- Object[] values = new Object[columnValues.size()];
- for (String columnName : tableSchema.getColumnNames()) {
- if (!columnValues.containsKey(columnName)) {
- throw INVALID_ARGUMENT
- .withDescription(
- "column "
- + columnName
- + " not found in json")
- .asRuntimeException();
- }
- Data.DataType.TypeName typeName =
- tableSchema.getColumnType(columnName);
- values[tableSchema.getColumnIndex(columnName)] =
- validateJsonDataTypes(
- typeName, columnValues.get(columnName));
- }
- return (SinkRow) new ArraySinkRow(rowOp.getOpType(), values);
- })
- .collect(Collectors.toList()));
- }
-
- private static Long castLong(Object value) {
- if (value instanceof Integer) {
- return ((Integer) value).longValue();
- } else if (value instanceof Double) {
- double d = (Double) value;
- if (d % 1.0 != 0.0) {
-
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription(
- "unable to cast into long from non-integer double value: " + d)
- .asRuntimeException();
- }
- return ((Double) value).longValue();
- } else if (value instanceof Long) {
- return (Long) value;
- } else if (value instanceof Short) {
- return ((Short) value).longValue();
- } else if (value instanceof Float) {
- double f = (Float) value;
- if (f % 1.0 != 0.0) {
-
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription(
- "unable to cast into long from non-integer float value: " + f)
- .asRuntimeException();
- }
- return ((Float) value).longValue();
- } else {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("unable to cast into long from " + value.getClass())
- .asRuntimeException();
- }
- }
-
- private static Double castDouble(Object value) {
- if (value instanceof Double) {
- return (Double) value;
- } else if (value instanceof Float) {
- return ((Float) value).doubleValue();
- } else {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("unable to cast into double from " + value.getClass())
- .asRuntimeException();
- }
- }
-
- private static BigDecimal castDecimal(Object value) {
- if (value instanceof String) {
- // FIXME(eric): See `datum_to_json_object()` in src/connector/src/sink/mod.rs
- return new BigDecimal((String) value);
- } else if (value instanceof BigDecimal) {
- return (BigDecimal) value;
- } else {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("unable to cast into double from " + value.getClass())
- .asRuntimeException();
- }
- }
-
- private static LocalTime castTime(Object value) {
- try {
- Long milli = castLong(value);
- return LocalTime.ofNanoOfDay(milli * 1_000_000L);
- } catch (RuntimeException e) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("unable to cast into time from " + value.getClass())
- .asRuntimeException();
- }
- }
-
- private static LocalDate castDate(Object value) {
- try {
- Long days = castLong(value);
- return LocalDate.ofEpochDay(days);
- } catch (RuntimeException e) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("unable to cast into date from " + value.getClass())
- .asRuntimeException();
- }
- }
-
- private static Object validateJsonDataTypes(Data.DataType.TypeName typeName, Object value) {
- // value might be null
- if (value == null) {
- return null;
- }
- switch (typeName) {
- case INT16:
- return castLong(value).shortValue();
- case INT32:
- return castLong(value).intValue();
- case INT64:
- return castLong(value);
- case VARCHAR:
- if (!(value instanceof String)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("Expected string, got " + value.getClass())
- .asRuntimeException();
- }
- return value;
- case DOUBLE:
- return castDouble(value);
- case FLOAT:
- return castDouble(value).floatValue();
- case DECIMAL:
- return castDecimal(value);
- case BOOLEAN:
- if (!(value instanceof Boolean)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("Expected boolean, got " + value.getClass())
- .asRuntimeException();
- }
- return value;
- case TIMESTAMP:
- if (!(value instanceof String)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription(
- "Expected timestamp in string, got " + value.getClass())
- .asRuntimeException();
- }
- return LocalDateTime.parse((String) value);
- case TIMESTAMPTZ:
- if (!(value instanceof String)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription(
- "Expected timestamptz in string, got " + value.getClass())
- .asRuntimeException();
- }
- return OffsetDateTime.parse((String) value);
- case TIME:
- return castTime(value);
- case DATE:
- return castDate(value);
- case INTERVAL:
- if (!(value instanceof String)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("Expected interval, got " + value.getClass())
- .asRuntimeException();
- }
- return value;
- case JSONB:
- if (!(value instanceof String)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("Expected jsonb, got " + value.getClass())
- .asRuntimeException();
- }
- return value;
- case BYTEA:
- if (!(value instanceof String)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("Expected bytea, got " + value.getClass())
- .asRuntimeException();
- }
- return Base64.getDecoder().decode((String) value);
- case LIST:
- if (!(value instanceof java.util.ArrayList>)) {
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("Expected list, got " + value.getClass())
- .asRuntimeException();
- }
- return ((java.util.ArrayList>) value).toArray();
- default:
- throw io.grpc.Status.INVALID_ARGUMENT
- .withDescription("unsupported type " + typeName)
- .asRuntimeException();
- }
- }
-}
diff --git a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/SinkWriterStreamObserver.java b/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/SinkWriterStreamObserver.java
index cd61da38d6cb5..53dfe326fbd9d 100644
--- a/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/SinkWriterStreamObserver.java
+++ b/java/connector-node/risingwave-connector-service/src/main/java/com/risingwave/connector/SinkWriterStreamObserver.java
@@ -206,19 +206,7 @@ private void bindSink(ConnectorServiceProto.SinkWriterStreamRequest.StartSink st
String connectorName = getConnectorName(sinkParam);
SinkFactory sinkFactory = SinkUtils.getSinkFactory(connectorName);
sink = sinkFactory.createWriter(tableSchema, sinkParam.getPropertiesMap());
- switch (startSink.getFormat()) {
- case FORMAT_UNSPECIFIED:
- case UNRECOGNIZED:
- throw INVALID_ARGUMENT
- .withDescription("should specify payload format in request")
- .asRuntimeException();
- case JSON:
- deserializer = new JsonDeserializer(tableSchema);
- break;
- case STREAM_CHUNK:
- deserializer = new StreamChunkDeserializer(tableSchema);
- break;
- }
+ deserializer = new StreamChunkDeserializer(tableSchema);
this.connectorName = connectorName.toUpperCase();
ConnectorNodeMetrics.incActiveSinkConnections(connectorName, "node1");
}
diff --git a/java/connector-node/risingwave-connector-service/src/main/resources/mysql.properties b/java/connector-node/risingwave-connector-service/src/main/resources/mysql.properties
index f77dd3c1ea4f6..0c62a51986b1c 100644
--- a/java/connector-node/risingwave-connector-service/src/main/resources/mysql.properties
+++ b/java/connector-node/risingwave-connector-service/src/main/resources/mysql.properties
@@ -25,5 +25,6 @@ name=${hostname}:${port}:${database.name}.${table.name:-RW_CDC_Sharing}
# In sharing cdc mode, transaction metadata will be enabled in frontend
provide.transaction.metadata=${transactional:-false}
## Pass-through driver properties
-# set connector timezone to UTC(+00:00)
+# force connection session timezone to UTC(+00:00)
driver.connectionTimeZone=+00:00
+driver.forceConnectionTimeZoneToSession=true
diff --git a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/DeserializerTest.java b/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/DeserializerTest.java
deleted file mode 100644
index 9284a2ef8fd20..0000000000000
--- a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/DeserializerTest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2024 RisingWave Labs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.risingwave.connector.sink;
-
-import com.risingwave.connector.JsonDeserializer;
-import com.risingwave.connector.TestUtils;
-import com.risingwave.connector.api.sink.SinkRow;
-import com.risingwave.proto.ConnectorServiceProto;
-import com.risingwave.proto.ConnectorServiceProto.SinkWriterStreamRequest.WriteBatch.JsonPayload;
-import com.risingwave.proto.Data;
-import junit.framework.TestCase;
-
-public class DeserializerTest extends TestCase {
- public void testJsonDeserializer() {
- JsonDeserializer deserializer = new JsonDeserializer(TestUtils.getMockTableSchema());
- JsonPayload jsonPayload =
- JsonPayload.newBuilder()
- .addRowOps(
- JsonPayload.RowOp.newBuilder()
- .setOpType(Data.Op.INSERT)
- .setLine("{\"id\": 1, \"name\": \"John\"}")
- .build())
- .build();
- ConnectorServiceProto.SinkWriterStreamRequest.WriteBatch writeBatch =
- ConnectorServiceProto.SinkWriterStreamRequest.WriteBatch.newBuilder()
- .setJsonPayload(jsonPayload)
- .build();
- SinkRow outcome = deserializer.deserialize(writeBatch).iterator().next();
- assertEquals(outcome.get(0), 1);
- assertEquals(outcome.get(1), "John");
- }
-}
diff --git a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/SinkStreamObserverTest.java b/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/SinkStreamObserverTest.java
index f0dcc4c1c4930..885fc7eb927a3 100644
--- a/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/SinkStreamObserverTest.java
+++ b/java/connector-node/risingwave-connector-test/src/test/java/com/risingwave/connector/sink/SinkStreamObserverTest.java
@@ -14,10 +14,10 @@
package com.risingwave.connector.sink;
+import com.google.protobuf.ByteString;
import com.risingwave.connector.SinkWriterStreamObserver;
import com.risingwave.connector.TestUtils;
import com.risingwave.proto.ConnectorServiceProto;
-import com.risingwave.proto.Data.Op;
import io.grpc.stub.StreamObserver;
import java.util.Map;
import org.junit.Assert;
@@ -94,7 +94,6 @@ public void testOnNext_syncValidation() {
.setStart(
ConnectorServiceProto.SinkWriterStreamRequest.StartSink.newBuilder()
.setSinkParam(fileSinkParam)
- .setFormat(ConnectorServiceProto.SinkPayloadFormat.JSON)
.build())
.build();
ConnectorServiceProto.SinkWriterStreamRequest firstSync =
@@ -138,7 +137,6 @@ public void testOnNext_startEpochValidation() {
.setStart(
ConnectorServiceProto.SinkWriterStreamRequest.StartSink.newBuilder()
.setSinkParam(fileSinkParam)
- .setFormat(ConnectorServiceProto.SinkPayloadFormat.JSON)
.build())
.build();
ConnectorServiceProto.SinkWriterStreamRequest firstSync =
@@ -156,6 +154,8 @@ public void testOnNext_startEpochValidation() {
sinkWriterStreamObserver.onNext(firstSync);
}
+ // WARN! This test is skipped in the CI pipeline; see
+ // `.github/workflows/connector-node-integration.yml`
@Test
public void testOnNext_writeValidation() {
SinkWriterStreamObserver sinkWriterStreamObserver;
@@ -164,10 +164,16 @@ public void testOnNext_writeValidation() {
ConnectorServiceProto.SinkWriterStreamRequest.newBuilder()
.setStart(
ConnectorServiceProto.SinkWriterStreamRequest.StartSink.newBuilder()
- .setFormat(ConnectorServiceProto.SinkPayloadFormat.JSON)
.setSinkParam(fileSinkParam))
.build();
+ // Encoded StreamChunk: 1 'test'
+ byte[] data1 =
+ new byte[] {
+ 8, 1, 18, 1, 1, 26, 20, 8, 2, 18, 6, 8, 1, 18, 2, 1, 1, 26, 8, 8, 1, 18, 4, 0,
+ 0, 0, 1, 26, 42, 8, 6, 18, 6, 8, 1, 18, 2, 1, 1, 26, 20, 8, 1, 18, 16, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 26, 8, 8, 1, 18, 4, 116, 101, 115, 116
+ };
ConnectorServiceProto.SinkWriterStreamRequest firstWrite =
ConnectorServiceProto.SinkWriterStreamRequest.newBuilder()
.setWriteBatch(
@@ -175,19 +181,11 @@ public void testOnNext_writeValidation() {
.newBuilder()
.setEpoch(0)
.setBatchId(1)
- .setJsonPayload(
+ .setStreamChunkPayload(
ConnectorServiceProto.SinkWriterStreamRequest
- .WriteBatch.JsonPayload.newBuilder()
- .addRowOps(
- ConnectorServiceProto
- .SinkWriterStreamRequest
- .WriteBatch.JsonPayload
- .RowOp.newBuilder()
- .setOpType(Op.INSERT)
- .setLine(
- "{\"id\": 1, \"name\": \"test\"}")
- .build()))
- .build())
+ .WriteBatch.StreamChunkPayload.newBuilder()
+ .setBinaryData(ByteString.copyFrom(data1))
+ .build()))
.build();
ConnectorServiceProto.SinkWriterStreamRequest firstSync =
@@ -199,6 +197,13 @@ public void testOnNext_writeValidation() {
.build())
.build();
+ // Encoded StreamChunk: 2 'test'
+ byte[] data2 =
+ new byte[] {
+ 8, 1, 18, 1, 1, 26, 20, 8, 2, 18, 6, 8, 1, 18, 2, 1, 1, 26, 8, 8, 1, 18, 4, 0,
+ 0, 0, 2, 26, 42, 8, 6, 18, 6, 8, 1, 18, 2, 1, 1, 26, 20, 8, 1, 18, 16, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 26, 8, 8, 1, 18, 4, 116, 101, 115, 116
+ };
ConnectorServiceProto.SinkWriterStreamRequest secondWrite =
ConnectorServiceProto.SinkWriterStreamRequest.newBuilder()
.setWriteBatch(
@@ -206,19 +211,11 @@ public void testOnNext_writeValidation() {
.newBuilder()
.setEpoch(1)
.setBatchId(2)
- .setJsonPayload(
+ .setStreamChunkPayload(
ConnectorServiceProto.SinkWriterStreamRequest
- .WriteBatch.JsonPayload.newBuilder()
- .addRowOps(
- ConnectorServiceProto
- .SinkWriterStreamRequest
- .WriteBatch.JsonPayload
- .RowOp.newBuilder()
- .setOpType(Op.INSERT)
- .setLine(
- "{\"id\": 2, \"name\": \"test\"}")
- .build()))
- .build())
+ .WriteBatch.StreamChunkPayload.newBuilder()
+ .setBinaryData(ByteString.copyFrom(data2))
+ .build()))
.build();
ConnectorServiceProto.SinkWriterStreamRequest secondWriteWrongEpoch =
@@ -228,19 +225,11 @@ public void testOnNext_writeValidation() {
.newBuilder()
.setEpoch(2)
.setBatchId(3)
- .setJsonPayload(
+ .setStreamChunkPayload(
ConnectorServiceProto.SinkWriterStreamRequest
- .WriteBatch.JsonPayload.newBuilder()
- .addRowOps(
- ConnectorServiceProto
- .SinkWriterStreamRequest
- .WriteBatch.JsonPayload
- .RowOp.newBuilder()
- .setOpType(Op.INSERT)
- .setLine(
- "{\"id\": 2, \"name\": \"test\"}")
- .build()))
- .build())
+ .WriteBatch.StreamChunkPayload.newBuilder()
+ .setBinaryData(ByteString.copyFrom(data2))
+ .build()))
.build();
boolean exceptionThrown = false;
@@ -251,7 +240,10 @@ public void testOnNext_writeValidation() {
sinkWriterStreamObserver.onNext(firstWrite);
} catch (RuntimeException e) {
exceptionThrown = true;
- Assert.assertTrue(e.getMessage().toLowerCase().contains("batch id"));
+ if (!e.getMessage().toLowerCase().contains("batch id")) {
+ e.printStackTrace();
+ Assert.fail("Expected `batch id`, but got " + e.getMessage());
+ }
}
if (!exceptionThrown) {
Assert.fail("Expected exception not thrown: `invalid batch id`");
@@ -267,7 +259,10 @@ public void testOnNext_writeValidation() {
sinkWriterStreamObserver.onNext(secondWriteWrongEpoch);
} catch (RuntimeException e) {
exceptionThrown = true;
- Assert.assertTrue(e.getMessage().toLowerCase().contains("invalid epoch"));
+ if (!e.getMessage().toLowerCase().contains("invalid epoch")) {
+ e.printStackTrace();
+ Assert.fail("Expected `invalid epoch`, but got " + e.getMessage());
+ }
}
if (!exceptionThrown) {
Assert.fail(
diff --git a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/JDBCSink.java b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/JDBCSink.java
index 4672f628c1769..d854e561878f4 100644
--- a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/JDBCSink.java
+++ b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/JDBCSink.java
@@ -53,9 +53,8 @@ public JDBCSink(JDBCSinkConfig config, TableSchema tableSchema) {
this.config = config;
try {
conn = JdbcUtils.getConnection(config.getJdbcUrl());
- // Retrieve primary keys and column type mappings from the database
- this.pkColumnNames =
- getPkColumnNames(conn, config.getTableName(), config.getSchemaName());
+ // Table schema has been validated before, so we get the PK from it directly
+ this.pkColumnNames = tableSchema.getPrimaryKeys();
// column name -> java.sql.Types
Map columnTypeMapping =
getColumnTypeMapping(conn, config.getTableName(), config.getSchemaName());
@@ -72,9 +71,10 @@ public JDBCSink(JDBCSinkConfig config, TableSchema tableSchema) {
.collect(Collectors.toList());
LOG.info(
- "schema = {}, table = {}, columnSqlTypes = {}, pkIndices = {}",
+ "schema = {}, table = {}, tableSchema = {}, columnSqlTypes = {}, pkIndices = {}",
config.getSchemaName(),
config.getTableName(),
+ tableSchema,
columnSqlTypes,
pkIndices);
@@ -125,28 +125,6 @@ private static Map getColumnTypeMapping(
return columnTypeMap;
}
- private static List getPkColumnNames(
- Connection conn, String tableName, String schemaName) {
- List pkColumnNames = new ArrayList<>();
- try {
- var pks = conn.getMetaData().getPrimaryKeys(null, schemaName, tableName);
- while (pks.next()) {
- pkColumnNames.add(pks.getString(JDBC_COLUMN_NAME_KEY));
- }
- } catch (SQLException e) {
- throw Status.INTERNAL
- .withDescription(
- String.format(ERROR_REPORT_TEMPLATE, e.getSQLState(), e.getMessage()))
- .asRuntimeException();
- }
- LOG.info(
- "schema = {}, table = {}: detected pk column = {}",
- schemaName,
- tableName,
- pkColumnNames);
- return pkColumnNames;
- }
-
@Override
public boolean write(Iterable rows) {
final int maxRetryCount = 4;
@@ -311,7 +289,7 @@ public void prepareDelete(SinkRow row) {
.asRuntimeException();
}
try {
- jdbcDialect.bindDeleteStatement(deleteStatement, row);
+ jdbcDialect.bindDeleteStatement(deleteStatement, tableSchema, row);
deleteStatement.addBatch();
} catch (SQLException e) {
throw Status.INTERNAL
@@ -362,7 +340,7 @@ private void executeStatement(PreparedStatement stmt) throws SQLException {
if (stmt == null) {
return;
}
- LOG.debug("Executing statement: {}", stmt);
+ LOG.info("Executing statement: {}", stmt);
stmt.executeBatch();
stmt.clearParameters();
}
diff --git a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/JdbcDialect.java b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/JdbcDialect.java
index 3a091ff33f895..308f9927457a2 100644
--- a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/JdbcDialect.java
+++ b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/JdbcDialect.java
@@ -112,5 +112,6 @@ void bindInsertIntoStatement(
throws SQLException;
/** Bind the values of primary key fields to the {@code DELETE} statement. */
- void bindDeleteStatement(PreparedStatement stmt, SinkRow row) throws SQLException;
+ void bindDeleteStatement(PreparedStatement stmt, TableSchema tableSchema, SinkRow row)
+ throws SQLException;
}
diff --git a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/MySqlDialect.java b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/MySqlDialect.java
index b79a2f9f35094..2c4ea73448a60 100644
--- a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/MySqlDialect.java
+++ b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/MySqlDialect.java
@@ -26,18 +26,17 @@
public class MySqlDialect implements JdbcDialect {
- private final int[] columnSqlTypes;
private final int[] pkIndices;
private final int[] pkColumnSqlTypes;
public MySqlDialect(List columnSqlTypes, List pkIndices) {
- this.columnSqlTypes = columnSqlTypes.stream().mapToInt(i -> i).toArray();
+ var columnSqlTypesArr = columnSqlTypes.stream().mapToInt(i -> i).toArray();
this.pkIndices = pkIndices.stream().mapToInt(i -> i).toArray();
// derive sql types for pk columns
var pkColumnSqlTypes = new int[pkIndices.size()];
for (int i = 0; i < pkIndices.size(); i++) {
- pkColumnSqlTypes[i] = this.columnSqlTypes[this.pkIndices[i]];
+ pkColumnSqlTypes[i] = columnSqlTypesArr[this.pkIndices[i]];
}
this.pkColumnSqlTypes = pkColumnSqlTypes;
}
@@ -118,12 +117,13 @@ public void bindInsertIntoStatement(
}
@Override
- public void bindDeleteStatement(PreparedStatement stmt, SinkRow row) throws SQLException {
+ public void bindDeleteStatement(PreparedStatement stmt, TableSchema tableSchema, SinkRow row)
+ throws SQLException {
// set the values of primary key fields
int placeholderIdx = 1;
- for (int idx : pkIndices) {
- Object pkField = row.get(idx);
- stmt.setObject(placeholderIdx++, pkField, pkColumnSqlTypes[idx]);
+ for (int i = 0; i < pkIndices.length; ++i) {
+ Object pkField = row.get(pkIndices[i]);
+ stmt.setObject(placeholderIdx++, pkField, pkColumnSqlTypes[i]);
}
}
}
diff --git a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/PostgresDialect.java b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/PostgresDialect.java
index 022a54f8c356e..6264d9c5eac18 100644
--- a/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/PostgresDialect.java
+++ b/java/connector-node/risingwave-sink-jdbc/src/main/java/com/risingwave/connector/jdbc/PostgresDialect.java
@@ -31,18 +31,10 @@ public class PostgresDialect implements JdbcDialect {
private final int[] columnSqlTypes;
private final int[] pkIndices;
- private final int[] pkColumnSqlTypes;
public PostgresDialect(List columnSqlTypes, List pkIndices) {
this.columnSqlTypes = columnSqlTypes.stream().mapToInt(i -> i).toArray();
this.pkIndices = pkIndices.stream().mapToInt(i -> i).toArray();
-
- // derive sql types for pk columns
- var pkColumnSqlTypes = new int[pkIndices.size()];
- for (int i = 0; i < pkIndices.size(); i++) {
- pkColumnSqlTypes[i] = this.columnSqlTypes[this.pkIndices[i]];
- }
- this.pkColumnSqlTypes = pkColumnSqlTypes;
}
private static final HashMap RW_TYPE_TO_JDBC_TYPE_NAME;
@@ -166,12 +158,13 @@ public void bindInsertIntoStatement(
}
@Override
- public void bindDeleteStatement(PreparedStatement stmt, SinkRow row) throws SQLException {
+ public void bindDeleteStatement(PreparedStatement stmt, TableSchema tableSchema, SinkRow row)
+ throws SQLException {
// set the values of primary key fields
int placeholderIdx = 1;
- for (int idx : pkIndices) {
- Object pkField = row.get(idx);
- stmt.setObject(placeholderIdx++, pkField, pkColumnSqlTypes[idx]);
+ for (int pkIdx : pkIndices) {
+ Object pkField = row.get(pkIdx);
+ stmt.setObject(placeholderIdx++, pkField, columnSqlTypes[pkIdx]);
}
}
}
diff --git a/java/dev.md b/java/dev.md
index ac20c30fe69fa..148fde173baad 100644
--- a/java/dev.md
+++ b/java/dev.md
@@ -56,9 +56,3 @@ Config with the following. It may work.
"java.format.settings.profile": "Android"
}
```
-
-## Deploy UDF Library to Maven
-
-```sh
-mvn clean deploy --pl udf --am
-```
\ No newline at end of file
diff --git a/java/pom.xml b/java/pom.xml
index 922c62ead69e5..f1ee457ef3b84 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -37,8 +37,6 @@
proto
- udf
- udf-examplejava-bindingcommon-utilsjava-binding-integration-test
@@ -572,4 +570,4 @@
https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/
-
+
\ No newline at end of file
diff --git a/java/udf/CHANGELOG.md b/java/udf/CHANGELOG.md
deleted file mode 100644
index fb1f055783225..0000000000000
--- a/java/udf/CHANGELOG.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
-and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
-## [Unreleased]
-
-## [0.1.3] - 2023-12-06
-
-### Fixed
-
-- Fix decimal type output.
-
-## [0.1.2] - 2023-12-04
-
-### Fixed
-
-- Fix index-out-of-bound error when string or string list is large.
-- Fix memory leak.
-
-## [0.1.1] - 2023-12-03
-
-### Added
-
-- Support struct in struct and struct[] in struct.
-
-### Changed
-
-- Bump Arrow version to 14.
-
-### Fixed
-
-- Fix unconstrained decimal type.
-
-## [0.1.0] - 2023-09-01
-
-- Initial release.
\ No newline at end of file
diff --git a/java/udf/README.md b/java/udf/README.md
deleted file mode 100644
index 200b897b8b890..0000000000000
--- a/java/udf/README.md
+++ /dev/null
@@ -1,274 +0,0 @@
-# RisingWave Java UDF SDK
-
-This library provides a Java SDK for creating user-defined functions (UDF) in RisingWave.
-
-## Introduction
-
-RisingWave supports user-defined functions implemented as external functions.
-With the RisingWave Java UDF SDK, users can define custom UDFs using Java and start a Java process as a UDF server.
-RisingWave can then remotely access the UDF server to execute the defined functions.
-
-## Installation
-
-To install the RisingWave Java UDF SDK:
-
-```sh
-git clone https://github.com/risingwavelabs/risingwave.git
-cd risingwave/java/udf
-mvn install
-```
-
-Or you can add the following dependency to your `pom.xml` file:
-
-```xml
-
-
- com.risingwave
- risingwave-udf
- 0.1.0
-
-
-```
-
-
-## Creating a New Project
-
-> NOTE: You can also start from the [udf-example](../udf-example) project without creating the project from scratch.
-
-To create a new project using the RisingWave Java UDF SDK, follow these steps:
-
-```sh
-mvn archetype:generate -DgroupId=com.example -DartifactId=udf-example -DarchetypeArtifactId=maven-archetype-quickstart -DarchetypeVersion=1.4 -DinteractiveMode=false
-```
-
-Configure your `pom.xml` file as follows:
-
-```xml
-
-
- 4.0.0
- com.example
- udf-example
- 1.0-SNAPSHOT
-
-
-
- com.risingwave
- risingwave-udf
- 0.1.0
-
-
-
-```
-
-The `--add-opens` flag must be added when running unit tests through Maven:
-
-```xml
-
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
- 3.0.0
-
- --add-opens=java.base/java.nio=ALL-UNNAMED
-
-
-
-
-```
-
-## Scalar Functions
-
-A user-defined scalar function maps zero, one, or multiple scalar values to a new scalar value.
-
-In order to define a scalar function, one has to create a new class that implements the `ScalarFunction`
-interface in `com.risingwave.functions` and implement exactly one evaluation method named `eval(...)`.
-This method must be declared public and non-static.
-
-Any [data type](#data-types) listed in the data types section can be used as a parameter or return type of an evaluation method.
-
-Here's an example of a scalar function that calculates the greatest common divisor (GCD) of two integers:
-
-```java
-import com.risingwave.functions.ScalarFunction;
-
-public class Gcd implements ScalarFunction {
- public int eval(int a, int b) {
- while (b != 0) {
- int temp = b;
- b = a % b;
- a = temp;
- }
- return a;
- }
-}
-```
-
-> **NOTE:** Differences with Flink
-> 1. The `ScalarFunction` is an interface instead of an abstract class.
-> 2. Multiple overloaded `eval` methods are not supported.
-> 3. Variable arguments such as `eval(Integer...)` are not supported.
-
-## Table Functions
-
-A user-defined table function maps zero, one, or multiple scalar values to one or multiple
-rows (structured types).
-
-In order to define a table function, one has to create a new class that implements the `TableFunction`
-interface in `com.risingwave.functions` and implement exactly one evaluation method named `eval(...)`.
-This method must be declared public and non-static.
-
-The return type must be an `Iterator` of any [data type](#data-types) listed in the data types section.
-Similar to scalar functions, input and output data types are automatically extracted using reflection.
-This includes the generic argument T of the return value for determining an output data type.
-
-Here's an example of a table function that generates a series of integers:
-
-```java
-import com.risingwave.functions.TableFunction;
-
-public class Series implements TableFunction {
- public Iterator eval(int n) {
- return java.util.stream.IntStream.range(0, n).iterator();
- }
-}
-```
-
-> **NOTE:** Differences with Flink
-> 1. The `TableFunction` is an interface instead of an abstract class. It has no generic arguments.
-> 2. Instead of calling `collect` to emit a row, the `eval` method returns an `Iterator` of the output rows.
-> 3. Multiple overloaded `eval` methods are not supported.
-> 4. Variable arguments such as `eval(Integer...)` are not supported.
-> 5. In SQL, table functions can be used in the `FROM` clause directly. `JOIN LATERAL TABLE` is not supported.
-
-## UDF Server
-
-To create a UDF server and register functions:
-
-```java
-import com.risingwave.functions.UdfServer;
-
-public class App {
- public static void main(String[] args) {
- try (var server = new UdfServer("0.0.0.0", 8815)) {
- // register functions
- server.addFunction("gcd", new Gcd());
- server.addFunction("series", new Series());
- // start the server
- server.start();
- server.awaitTermination();
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-}
-```
-
-To run the UDF server, execute the following command:
-
-```sh
-_JAVA_OPTIONS="--add-opens=java.base/java.nio=ALL-UNNAMED" mvn exec:java -Dexec.mainClass="com.example.App"
-```
-
-## Creating Functions in RisingWave
-
-```sql
-create function gcd(int, int) returns int
-as gcd using link 'http://localhost:8815';
-
-create function series(int) returns table (x int)
-as series using link 'http://localhost:8815';
-```
-
-For more detailed information and examples, please refer to the official RisingWave [documentation](https://www.risingwave.dev/docs/current/user-defined-functions/#4-declare-your-functions-in-risingwave).
-
-## Using Functions in RisingWave
-
-Once the user-defined functions are created in RisingWave, you can use them in SQL queries just like any built-in functions. Here are a few examples:
-
-```sql
-select gcd(25, 15);
-
-select * from series(10);
-```
-
-## Data Types
-
-The RisingWave Java UDF SDK supports the following data types:
-
-| SQL Type | Java Type | Notes |
-| ---------------- | --------------------------------------- | ------------------ |
-| BOOLEAN | boolean, Boolean | |
-| SMALLINT | short, Short | |
-| INT | int, Integer | |
-| BIGINT | long, Long | |
-| REAL | float, Float | |
-| DOUBLE PRECISION | double, Double | |
-| DECIMAL | BigDecimal | |
-| DATE | java.time.LocalDate | |
-| TIME | java.time.LocalTime | |
-| TIMESTAMP | java.time.LocalDateTime | |
-| INTERVAL | com.risingwave.functions.PeriodDuration | |
-| VARCHAR | String | |
-| BYTEA | byte[] | |
-| JSONB | String | Use `@DataTypeHint("JSONB") String` as the type. See [example](#jsonb). |
-| T[] | T'[] | `T` can be any of the above SQL types. `T'` should be the corresponding Java type.|
-| STRUCT<> | user-defined class | Define a data class as the type. See [example](#struct-type). |
-| ...others | | Not supported yet. |
-
-### JSONB
-
-```java
-import com.google.gson.Gson;
-
-// Returns the i-th element of a JSON array.
-public class JsonbAccess implements ScalarFunction {
- static Gson gson = new Gson();
-
- public @DataTypeHint("JSONB") String eval(@DataTypeHint("JSONB") String json, int index) {
- if (json == null)
- return null;
- var array = gson.fromJson(json, Object[].class);
- if (index >= array.length || index < 0)
- return null;
- var obj = array[index];
- return gson.toJson(obj);
- }
-}
-```
-
-```sql
-create function jsonb_access(jsonb, int) returns jsonb
-as jsonb_access using link 'http://localhost:8815';
-```
-
-### Struct Type
-
-```java
-// Split a socket address into host and port.
-public static class IpPort implements ScalarFunction {
- public static class SocketAddr {
- public String host;
- public short port;
- }
-
- public SocketAddr eval(String addr) {
- var socketAddr = new SocketAddr();
- var parts = addr.split(":");
- socketAddr.host = parts[0];
- socketAddr.port = Short.parseShort(parts[1]);
- return socketAddr;
- }
-}
-```
-
-```sql
-create function ip_port(varchar) returns struct
-as ip_port using link 'http://localhost:8815';
-```
-
-## Full Example
-
-You can checkout [udf-example](../udf-example) and use it as a template to create your own UDFs.
diff --git a/java/udf/pom.xml b/java/udf/pom.xml
deleted file mode 100644
index f747603ca8429..0000000000000
--- a/java/udf/pom.xml
+++ /dev/null
@@ -1,58 +0,0 @@
-
- 4.0.0
-
- com.risingwave
- risingwave-udf
- jar
- 0.1.3-SNAPSHOT
-
-
- risingwave-java-root
- com.risingwave
- 0.1.0-SNAPSHOT
- ../pom.xml
-
-
- RisingWave Java UDF SDK
- https://docs.risingwave.com/docs/current/udf-java
-
-
-
- org.junit.jupiter
- junit-jupiter-engine
- 5.9.1
- test
-
-
- org.apache.arrow
- arrow-vector
- 14.0.0
-
-
- org.apache.arrow
- flight-core
- 14.0.0
-
-
- org.slf4j
- slf4j-api
- 2.0.7
-
-
- org.slf4j
- slf4j-simple
- 2.0.7
-
-
-
-
-
- kr.motd.maven
- os-maven-plugin
- 1.7.0
-
-
-
-
\ No newline at end of file
diff --git a/java/udf/src/main/java/com/risingwave/functions/DataTypeHint.java b/java/udf/src/main/java/com/risingwave/functions/DataTypeHint.java
deleted file mode 100644
index 7baf0fe4c6115..0000000000000
--- a/java/udf/src/main/java/com/risingwave/functions/DataTypeHint.java
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2024 RisingWave Labs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.risingwave.functions;
-
-import java.lang.annotation.*;
-
-@Retention(RetentionPolicy.RUNTIME)
-@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER})
-public @interface DataTypeHint {
- String value();
-}
diff --git a/java/udf/src/main/java/com/risingwave/functions/PeriodDuration.java b/java/udf/src/main/java/com/risingwave/functions/PeriodDuration.java
deleted file mode 100644
index 6d704100f6f35..0000000000000
--- a/java/udf/src/main/java/com/risingwave/functions/PeriodDuration.java
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2024 RisingWave Labs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.risingwave.functions;
-
-import java.time.Duration;
-import java.time.Period;
-
-/** Combination of Period and Duration. */
-public class PeriodDuration extends org.apache.arrow.vector.PeriodDuration {
- public PeriodDuration(Period period, Duration duration) {
- super(period, duration);
- }
-
- PeriodDuration(org.apache.arrow.vector.PeriodDuration base) {
- super(base.getPeriod(), base.getDuration());
- }
-}
diff --git a/java/udf/src/main/java/com/risingwave/functions/ScalarFunction.java b/java/udf/src/main/java/com/risingwave/functions/ScalarFunction.java
deleted file mode 100644
index 5f3fcaf287330..0000000000000
--- a/java/udf/src/main/java/com/risingwave/functions/ScalarFunction.java
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright 2024 RisingWave Labs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.risingwave.functions;
-
-/**
- * Base interface for a user-defined scalar function. A user-defined scalar function maps zero, one,
- * or multiple scalar values to a new scalar value.
- *
- *
The behavior of a {@link ScalarFunction} can be defined by implementing a custom evaluation
- * method. An evaluation method must be declared publicly, not static, and named eval.
- * Multiple overloaded methods named eval are not supported yet.
- *
- *
By default, input and output data types are automatically extracted using reflection.
- *
- *
The following examples show how to specify a scalar function:
- *
- *
{@code
- * // a function that accepts two INT arguments and computes a sum
- * class SumFunction implements ScalarFunction {
- * public Integer eval(Integer a, Integer b) {
- * return a + b;
- * }
- * }
- *
- * // a function that returns a struct type
- * class StructFunction implements ScalarFunction {
- * public static class KeyValue {
- * public String key;
- * public int value;
- * }
- *
- * public KeyValue eval(int a) {
- * KeyValue kv = new KeyValue();
- * kv.key = a.toString();
- * kv.value = a;
- * return kv;
- * }
- * }
- * }
- */
-public interface ScalarFunction extends UserDefinedFunction {}
diff --git a/java/udf/src/main/java/com/risingwave/functions/ScalarFunctionBatch.java b/java/udf/src/main/java/com/risingwave/functions/ScalarFunctionBatch.java
deleted file mode 100644
index 5d837d3b370f9..0000000000000
--- a/java/udf/src/main/java/com/risingwave/functions/ScalarFunctionBatch.java
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2024 RisingWave Labs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.risingwave.functions;
-
-import java.lang.invoke.MethodHandle;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.function.Function;
-import org.apache.arrow.memory.BufferAllocator;
-import org.apache.arrow.vector.VectorSchemaRoot;
-
-/** Batch-processing wrapper over a user-defined scalar function. */
-class ScalarFunctionBatch extends UserDefinedFunctionBatch {
- ScalarFunction function;
- MethodHandle methodHandle;
- Function