diff --git a/.licenserc.yaml b/.licenserc.yaml index 137943a1f6dd0..93449f89340cb 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -17,6 +17,7 @@ header: - "**/*.d.ts" - "src/sqlparser/**/*.rs" - "java/connector-node/risingwave-source-cdc/src/main/java/com/risingwave/connector/cdc/debezium/internal/*.java" + - "java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/**/*.java" - "src/meta/model_v2/migration/**/*.rs" - "lints/ui/**" diff --git a/ci/scripts/gen-integration-test-yaml.py b/ci/scripts/gen-integration-test-yaml.py index b8bc655b60bb1..8f39ab6edb180 100644 --- a/ci/scripts/gen-integration-test-yaml.py +++ b/ci/scripts/gen-integration-test-yaml.py @@ -15,6 +15,7 @@ 'mysql-sink': ['json'], 'postgres-sink': ['json'], 'iceberg-cdc': ['json'], + 'iceberg-sink': ['none'], 'twitter': ['json', 'protobuf'], 'twitter-pulsar': ['json'], 'debezium-mysql': ['json'], diff --git a/ci/workflows/pull-request.yml b/ci/workflows/pull-request.yml index a67f915d943cc..f0239d5aa3feb 100644 --- a/ci/workflows/pull-request.yml +++ b/ci/workflows/pull-request.yml @@ -214,7 +214,7 @@ steps: config: ci/docker-compose.yml mount-buildkite-agent: true - ./ci/plugins/upload-failure-logs - timeout_in_minutes: 5 + timeout_in_minutes: 10 retry: *auto-retry - label: "end-to-end iceberg sink v2 test" @@ -229,7 +229,7 @@ steps: config: ci/docker-compose.yml mount-buildkite-agent: true - ./ci/plugins/upload-failure-logs - timeout_in_minutes: 10 + timeout_in_minutes: 15 retry: *auto-retry - label: "end-to-end iceberg cdc test" @@ -244,7 +244,7 @@ steps: config: ci/docker-compose.yml mount-buildkite-agent: true - ./ci/plugins/upload-failure-logs - timeout_in_minutes: 10 + timeout_in_minutes: 15 retry: *auto-retry - label: "end-to-end pulsar sink test" diff --git a/e2e_test/iceberg/test_case/cdc/load.slt b/e2e_test/iceberg/test_case/cdc/load.slt index 12abdd283397d..e9f1d815d20cb 100644 --- a/e2e_test/iceberg/test_case/cdc/load.slt +++ b/e2e_test/iceberg/test_case/cdc/load.slt @@ -24,6 +24,7 @@ CREATE SINK s1 AS select * from products WITH ( connector = 'iceberg', type = 'upsert', force_append_only = 'false', + catalog.name = 'demo', database.name = 'demo_db', table.name = 'demo_table', catalog.type = 'storage', diff --git a/e2e_test/iceberg/test_case/iceberg_sink_append_only.slt b/e2e_test/iceberg/test_case/iceberg_sink_append_only.slt index f3156a9b40ca5..69cd931dbf836 100644 --- a/e2e_test/iceberg/test_case/iceberg_sink_append_only.slt +++ b/e2e_test/iceberg/test_case/iceberg_sink_append_only.slt @@ -25,6 +25,7 @@ CREATE SINK s6 AS select * from mv6 WITH ( force_append_only = 'true', database.name = 'demo_db', table.name = 'demo_table', + catalog.name = 'demo', catalog.type = 'storage', warehouse.path = 's3://icebergdata/demo', s3.endpoint = 'http://127.0.0.1:9301', diff --git a/e2e_test/iceberg/test_case/iceberg_sink_upsert.slt b/e2e_test/iceberg/test_case/iceberg_sink_upsert.slt index 8d70c9b2e19a6..1d0be3d56463e 100644 --- a/e2e_test/iceberg/test_case/iceberg_sink_upsert.slt +++ b/e2e_test/iceberg/test_case/iceberg_sink_upsert.slt @@ -12,6 +12,7 @@ CREATE SINK s6 AS select mv6.id as id, mv6.v1 as v1, mv6.v2 as v2, mv6.v3 as v3, connector = 'iceberg', type = 'upsert', force_append_only = 'false', + catalog.name = 'demo', database.name = 'demo_db', table.name = 'demo_table', catalog.type = 'storage', diff --git a/e2e_test/sink/iceberg_sink.slt b/e2e_test/sink/iceberg_sink.slt index 4935032e88285..d8efe836b4995 100644 --- a/e2e_test/sink/iceberg_sink.slt +++ b/e2e_test/sink/iceberg_sink.slt 
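The hunk below threads the new `catalog.name` property into an existing `CREATE SINK` definition. For reference, a minimal sketch of the full statement as it would read after this patch, assembled from the context lines here and the sibling `.slt` tests in this patch — the properties marked `-- assumed` are not visible in this hunk, so treat them as illustrative rather than authoritative:

```sql
CREATE SINK s6 AS SELECT mv6.v1 AS v1, mv6.v2 AS v2, mv6.v3 AS v3 FROM mv6 WITH (
    connector = 'iceberg',                     -- assumed; defined earlier in this file
    type = 'upsert',                           -- assumed; defined earlier in this file
    warehouse.path = 's3://icebergdata/demo',  -- assumed to match the sibling tests
    s3.endpoint = 'http://127.0.0.1:9301',     -- assumed to match the sibling tests
    s3.access.key = 'hummockadmin',
    s3.secret.key = 'hummockadmin',
    s3.region = 'us-east-1',
    catalog.name = 'demo',                     -- the property this patch adds
    catalog.type = 'storage',
    database.name = 'demo_db',
    table.name = 'demo_table'
);
```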
@@ -14,6 +14,7 @@ CREATE SINK s6 AS select mv6.v1 as v1, mv6.v2 as v2, mv6.v3 as v3 from mv6 WITH
     s3.access.key = 'hummockadmin',
     s3.secret.key = 'hummockadmin',
     s3.region = 'us-east-1',
+    catalog.name = 'demo',
     catalog.type = 'storage',
     database.name='demo_db',
     table.name='demo_table'
diff --git a/integration_tests/iceberg-sink2/README.md b/integration_tests/iceberg-sink2/README.md
index 54f34ea646e0e..496111fca71fe 100644
--- a/integration_tests/iceberg-sink2/README.md
+++ b/integration_tests/iceberg-sink2/README.md
@@ -1,7 +1,15 @@
-Use following steps to run:
+# How to run the test
+
-1. ./risedev d full-iceberg-bench
-2. cd docker; docker compose up -d
-3. poetry update
-4. poetry run python init.py
-5. poetry run python check.py
\ No newline at end of file
+Run the following commands to run the test:
+
+```bash
+cd python
+poetry update
+poetry run python main.py
+```
+
+# How to override the RisingWave image version
+
+```bash
+export RW_IMAGE=
+```
\ No newline at end of file
diff --git a/integration_tests/iceberg-sink2/docker/docker-compose.yml b/integration_tests/iceberg-sink2/docker/docker-compose.yml
deleted file mode 100644
index 566ce2895bf32..0000000000000
--- a/integration_tests/iceberg-sink2/docker/docker-compose.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-version: "3"
-
-services:
-  spark:
-    image: apache/spark:3.4.1
-    container_name: spark
-    user: root
-    healthcheck:
-      test: netstat -ltn | grep -c ":15002"
-      interval: 5s
-      retries: 120
-    ports:
-      - "15002:15002"
-    networks:
-      iceberg_net:
-    environment:
-      - SPARK_HOME=/opt/spark
-      - PYSPARK_PYTHON=/usr/bin/python3.9
-      - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/spark/bin:/opt/spark/sbin
-      - AWS_ACCESS_KEY_ID=admin
-      - AWS_SECRET_ACCESS_KEY=password
-      - AWS_REGION=us-east-1
-    volumes:
-      - './spark-script:/spark-script'
-    command: [ "bash", "/spark-script/spark-connect-server.sh" ]
-
-networks:
-  iceberg_net:
diff --git a/integration_tests/iceberg-sink2/docker/hive/config.ini b/integration_tests/iceberg-sink2/docker/hive/config.ini
new file mode 100644
index 0000000000000..d644f3c0d46a1
--- /dev/null
+++ b/integration_tests/iceberg-sink2/docker/hive/config.ini
@@ -0,0 +1,20 @@
+[risingwave]
+db=dev
+user=root
+host=127.0.0.1
+port=4566
+
+[sink]
+connector = iceberg
+type=append-only
+force_append_only = true
+catalog.type = hive
+catalog.uri = thrift://metastore:9083
+warehouse.path = s3://icebergdata/demo
+s3.endpoint=http://minio-0:9301
+s3.access.key = hummockadmin
+s3.secret.key = hummockadmin
+s3.region = ap-southeast-1
+catalog.name = demo
+database.name=s1
+table.name=t1
\ No newline at end of file
diff --git a/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml b/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml
new file mode 100644
index 0000000000000..527c963ab0477
--- /dev/null
+++ b/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml
@@ -0,0 +1,116 @@
+version: '3.8'
+
+services:
+  postgres:
+    image: postgres:16.1
+    environment:
+      POSTGRES_USER: admin
+      POSTGRES_PASSWORD: 123456
+      POSTGRES_DB: metastore_db
+    expose:
+      - 5432
+    ports:
+      - "5432:5432"
+    networks:
+      iceberg_net:
+  spark:
+    depends_on:
+      - minio-0
+      - metastore
+    image: ghcr.io/icelake-io/icelake-spark:1.0
+    environment:
+      - AWS_ACCESS_KEY_ID=hummockadmin
+      - AWS_SECRET_ACCESS_KEY=hummockadmin
+      - AWS_REGION=us-east-1
+      - SPARK_HOME=/opt/spark
+      - PYSPARK_PYTHON=/usr/bin/python3.9
+      - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/spark/bin:/opt/spark/sbin
+    user: root
+
networks: + iceberg_net: + links: + - minio-0:icebergdata.minio-0 + expose: + - 15002 + healthcheck: + test: netstat -ltn | grep -c 15002 + interval: 1s + retries: 1200 + volumes: + - ./spark-script:/spark-script + entrypoint: [ "/spark-script/spark-connect-server.sh" ] + + risingwave-standalone: + extends: + file: ../../../../docker/docker-compose.yml + service: risingwave-standalone + healthcheck: + test: + - CMD-SHELL + - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;' + interval: 1s + timeout: 30s + environment: + - AWS_REGION=us-east-1 + links: + - minio-0:icebergdata.minio-0 + networks: + iceberg_net: + + minio-0: + extends: + file: ../../../../docker/docker-compose.yml + service: minio-0 + entrypoint: " + /bin/sh -c ' + + set -e + + mkdir -p \"/data/icebergdata/demo\" + mkdir -p \"/data/hummock001\" + + /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\" + + '" + networks: + iceberg_net: + + etcd-0: + extends: + file: ../../../../docker/docker-compose.yml + service: etcd-0 + networks: + iceberg_net: + + metastore: + image: naushadh/hive-metastore + depends_on: + - postgres + environment: + - DATABASE_HOST=postgres + - DATABASE_DB=metastore_db + - DATABASE_USER=admin + - DATABASE_PASSWORD=123456 + - AWS_ACCESS_KEY_ID=hummockadmin + - AWS_SECRET_ACCESS_KEY=hummockadmin + - S3_ENDPOINT_URL=http://minio-0:9301 + - S3_BUCKET=icebergdata + - S3_PREFIX=demo + ports: + - "9083:9083" + expose: + - 9083 + networks: + iceberg_net: + +volumes: + risingwave-standalone: + external: false + etcd-0: + external: false + minio-0: + external: false + +networks: + iceberg_net: + name: iceberg \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/hive/spark-script/spark-connect-server.sh b/integration_tests/iceberg-sink2/docker/hive/spark-script/spark-connect-server.sh new file mode 100755 index 0000000000000..210a0663bea6e --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/hive/spark-script/spark-connect-server.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +set -ex + +JARS=$(find /opt/spark/deps -type f -name "*.jar" | tr '\n' ':') + +/opt/spark/sbin/start-connect-server.sh \ + --master local[3] \ + --driver-class-path $JARS \ + --conf spark.driver.bindAddress=0.0.0.0 \ + --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ + --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \ + --conf spark.sql.catalog.demo.catalog-impl=org.apache.iceberg.hive.HiveCatalog \ + --conf spark.sql.catalog.demo.uri=thrift://metastore:9083 \ + --conf spark.sql.catalog.demo.clients=10 \ + --conf spark.sql.catalog.demo.warehouse=s3a://icebergdata/demo \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://minio-0:9301 \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=hummockadmin \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=hummockadmin \ + --conf spark.sql.defaultCatalog=demo + +tail -f /opt/spark/logs/spark*.out \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/jdbc/config.ini b/integration_tests/iceberg-sink2/docker/jdbc/config.ini new file mode 100644 index 0000000000000..a4fbd29bd3346 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/jdbc/config.ini @@ -0,0 +1,22 @@ +[risingwave] +db=dev +user=root +host=127.0.0.1 +port=4566 + +[sink] +connector = iceberg +type=append-only +force_append_only = true +warehouse.path = s3://icebergdata/demo +s3.endpoint=http://minio-0:9301 
+s3.access.key = hummockadmin +s3.secret.key = hummockadmin +s3.region = ap-southeast-1 +catalog.name = demo +catalog.type = jdbc +catalog.uri = jdbc:postgresql://postgres:5432/iceberg +catalog.jdbc.user = admin +catalog.jdbc.password = 123456 +database.name=s1 +table.name=t1 \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml b/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml new file mode 100644 index 0000000000000..8f9dee095ed72 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml @@ -0,0 +1,96 @@ +version: '3.8' + +services: + postgres: + image: postgres:16.1 + environment: + POSTGRES_USER: admin + POSTGRES_PASSWORD: 123456 + POSTGRES_DB: iceberg + expose: + - 5432 + ports: + - "5432:5432" + networks: + iceberg_net: + + spark: + depends_on: + - minio-0 + - postgres + image: ghcr.io/icelake-io/icelake-spark:1.0 + environment: + - AWS_ACCESS_KEY_ID=hummockadmin + - AWS_SECRET_ACCESS_KEY=hummockadmin + - AWS_REGION=us-east-1 + - SPARK_HOME=/opt/spark + - PYSPARK_PYTHON=/usr/bin/python3.9 + - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/spark/bin:/opt/spark/sbin + user: root + networks: + iceberg_net: + links: + - minio-0:icebergdata.minio-0 + expose: + - 15002 + healthcheck: + test: netstat -ltn | grep -c 15002 + interval: 1s + retries: 1200 + volumes: + - ./spark-script:/spark-script + entrypoint: [ "/spark-script/spark-connect-server.sh" ] + + risingwave-standalone: + extends: + file: ../../../../docker/docker-compose.yml + service: risingwave-standalone + healthcheck: + test: + - CMD-SHELL + - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;' + interval: 1s + timeout: 30s + environment: + - AWS_REGION=us-east-1 + links: + - minio-0:icebergdata.minio-0 + networks: + iceberg_net: + + minio-0: + extends: + file: ../../../../docker/docker-compose.yml + service: minio-0 + entrypoint: " + /bin/sh -c ' + + set -e + + mkdir -p \"/data/icebergdata/demo\" + mkdir -p \"/data/hummock001\" + + /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\" + + '" + networks: + iceberg_net: + + etcd-0: + extends: + file: ../../../../docker/docker-compose.yml + service: etcd-0 + networks: + iceberg_net: + +volumes: + risingwave-standalone: + external: false + etcd-0: + external: false + minio-0: + external: false + +networks: + iceberg_net: + name: iceberg \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/jdbc/spark-script/spark-connect-server.sh b/integration_tests/iceberg-sink2/docker/jdbc/spark-script/spark-connect-server.sh new file mode 100755 index 0000000000000..8c3f752dc6414 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/jdbc/spark-script/spark-connect-server.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +set -ex + +JARS=$(find /opt/spark/deps -type f -name "*.jar" | tr '\n' ':') + +/opt/spark/sbin/start-connect-server.sh \ + --master local[3] \ + --driver-class-path $JARS \ + --conf spark.driver.bindAddress=0.0.0.0 \ + --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ + --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \ + --conf spark.sql.catalog.demo.catalog-impl=org.apache.iceberg.jdbc.JdbcCatalog \ + --conf spark.sql.catalog.demo.io-impl=org.apache.iceberg.aws.s3.S3FileIO \ + --conf spark.sql.catalog.demo.warehouse=s3://icebergdata/demo \ + --conf spark.sql.catalog.demo.uri=jdbc:postgresql://postgres:5432/iceberg \ + --conf spark.sql.catalog.demo.jdbc.user=admin \ + 
--conf spark.sql.catalog.demo.jdbc.password=123456 \ + --conf spark.sql.catalog.demo.s3.endpoint=http://minio-0:9301 \ + --conf spark.sql.catalog.demo.s3.path.style.access=true \ + --conf spark.sql.catalog.demo.s3.access.key=hummockadmin \ + --conf spark.sql.catalog.demo.s3.secret.key=hummockadmin \ + --conf spark.sql.defaultCatalog=demo + +tail -f /opt/spark/logs/spark*.out \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/rest/config.ini b/integration_tests/iceberg-sink2/docker/rest/config.ini index ac2a551c5c141..28b1e4c159f5b 100644 --- a/integration_tests/iceberg-sink2/docker/rest/config.ini +++ b/integration_tests/iceberg-sink2/docker/rest/config.ini @@ -1,7 +1,3 @@ -[default] -aws_key=hummockadmin -aws_secret=hummockadmin - [risingwave] db=dev user=root @@ -9,15 +5,16 @@ host=127.0.0.1 port=4566 [sink] -connector = iceberg +connector=iceberg type=append-only -force_append_only = true -catalog.type = rest -catalog.uri = http://rest:8181 -warehouse.path = s3://icebergdata/demo/s1/t1 +force_append_only=true s3.endpoint=http://minio-0:9301 s3.access.key = hummockadmin s3.secret.key = hummockadmin s3.region = ap-southeast-1 +catalog.type=rest +catalog.name=demo +catalog.uri = http://rest:8181 +warehouse.path = s3://icebergdata/demo database.name=s1 -table.name=s1.t1 \ No newline at end of file +table.name=t1 \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/spark-script/.gitignore b/integration_tests/iceberg-sink2/docker/spark-script/.gitignore deleted file mode 100644 index 51dcf07222856..0000000000000 --- a/integration_tests/iceberg-sink2/docker/spark-script/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -derby.log -metastore_db -.ivy \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/spark-script/init-table.sql b/integration_tests/iceberg-sink2/docker/spark-script/init-table.sql deleted file mode 100644 index 8f605921ce9e7..0000000000000 --- a/integration_tests/iceberg-sink2/docker/spark-script/init-table.sql +++ /dev/null @@ -1,18 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS s1; - -USE s1; - -DROP TABLE IF EXISTS t1; - -CREATE TABLE t1 -( - id bigint, - name string, - distance bigint -) USING iceberg -TBLPROPERTIES ('format-version'='2'); - -INSERT INTO t1 VALUES (1, "a", 100), (2, "b", 200); - - - diff --git a/integration_tests/iceberg-sink2/docker/spark-script/insert-table.sql b/integration_tests/iceberg-sink2/docker/spark-script/insert-table.sql deleted file mode 100644 index ebeb17d270941..0000000000000 --- a/integration_tests/iceberg-sink2/docker/spark-script/insert-table.sql +++ /dev/null @@ -1,6 +0,0 @@ -USE s1; - -INSERT INTO t1 VALUES (3, "a", 300), (4, "b", 400); - - - diff --git a/integration_tests/iceberg-sink2/docker/spark-script/inspect-table.sql b/integration_tests/iceberg-sink2/docker/spark-script/inspect-table.sql deleted file mode 100644 index b626a74f4e53e..0000000000000 --- a/integration_tests/iceberg-sink2/docker/spark-script/inspect-table.sql +++ /dev/null @@ -1,7 +0,0 @@ -DESCRIBE demo.s1.t1.files; - -SELECT * FROM demo.s1.t1.files; - -DESCRIBE demo.s1.t1.manifests; - -SELECT * FROM demo.s1.t1.manifests; diff --git a/integration_tests/iceberg-sink2/docker/spark-script/query-table.sql b/integration_tests/iceberg-sink2/docker/spark-script/query-table.sql deleted file mode 100644 index 7de932bec5d49..0000000000000 --- a/integration_tests/iceberg-sink2/docker/spark-script/query-table.sql +++ /dev/null @@ -1 +0,0 @@ -SELECT * FROM demo.s1.t1 ORDER BY id ASC; diff --git 
a/integration_tests/iceberg-sink2/docker/spark-script/run-sql-file.sh b/integration_tests/iceberg-sink2/docker/spark-script/run-sql-file.sh deleted file mode 100644 index 685f228e4e016..0000000000000 --- a/integration_tests/iceberg-sink2/docker/spark-script/run-sql-file.sh +++ /dev/null @@ -1,27 +0,0 @@ -set -ex - -ICEBERG_VERSION=1.3.1 -DEPENDENCIES="org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:$ICEBERG_VERSION,org.apache.hadoop:hadoop-aws:3.3.2" - -## add AWS dependency -#AWS_SDK_VERSION=2.20.18 -#AWS_MAVEN_GROUP=software.amazon.awssdk -#AWS_PACKAGES=( -# "bundle" -#) -#for pkg in "${AWS_PACKAGES[@]}"; do -# DEPENDENCIES+=",$AWS_MAVEN_GROUP:$pkg:$AWS_SDK_VERSION" -#done - -spark-sql --packages $DEPENDENCIES \ - --master local[3] \ - --files /spark-script/log4j.properties \ - --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ - --conf spark.sql.catalog.demo.type=hadoop \ - --conf spark.sql.catalog.demo.warehouse=s3a://icebergdata/demo \ - --conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://minio:9000 \ - --conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \ - --conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=admin \ - --conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=password \ - --conf spark.sql.defaultCatalog=demo \ - -f /spark-script/$1.sql diff --git a/integration_tests/iceberg-sink2/docker/spark-script/spark-connect-server.sh b/integration_tests/iceberg-sink2/docker/spark-script/spark-connect-server.sh deleted file mode 100755 index 2ca7e5026fea5..0000000000000 --- a/integration_tests/iceberg-sink2/docker/spark-script/spark-connect-server.sh +++ /dev/null @@ -1,20 +0,0 @@ -set -ex - -ICEBERG_VERSION=1.3.1 -SPARK_VERSION=3.4.1 - -PACKAGES="org.apache.iceberg:iceberg-spark-runtime-3.4_2.12:$ICEBERG_VERSION,org.apache.hadoop:hadoop-aws:3.3.2" -PACKAGES="$PACKAGES,org.apache.spark:spark-connect_2.12:$SPARK_VERSION" - -/opt/spark/sbin/start-connect-server.sh --packages $PACKAGES \ - --master local[3] \ - --conf spark.driver.bindAddress=0.0.0.0 \ - --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ - --conf spark.sql.catalog.demo.type=hadoop \ - --conf spark.sql.catalog.demo.warehouse=s3a://renjie-iceberg-bench/wh \ - --conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \ - --conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=admin \ - --conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=password \ - --conf spark.sql.defaultCatalog=demo - -tail -f /opt/spark/logs/spark*.out \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/storage/config.ini b/integration_tests/iceberg-sink2/docker/storage/config.ini new file mode 100644 index 0000000000000..13e912b8fc3b8 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/storage/config.ini @@ -0,0 +1,19 @@ +[risingwave] +db=dev +user=root +host=127.0.0.1 +port=4566 + +[sink] +connector = iceberg +type=append-only +force_append_only = true +s3.endpoint=http://minio-0:9301 +s3.access.key = hummockadmin +s3.secret.key = hummockadmin +s3.region = ap-southeast-1 +catalog.type = storage +catalog.name = demo +warehouse.path = s3://icebergdata/demo +database.name=s1 +table.name=t1 \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml b/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml new file mode 100644 index 0000000000000..7a29e5f83c145 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml @@ -0,0 +1,75 @@ +version: '3.8' + 
+services: + spark: + depends_on: + - minio-0 + image: ghcr.io/icelake-io/icelake-spark:latest + environment: + - SPARK_HOME=/opt/spark + - PYSPARK_PYTHON=/usr/bin/python3.9 + - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/spark/bin:/opt/spark/sbin + user: root + networks: + iceberg_net: + links: + - minio-0:icebergdata.minio-0 + expose: + - 15002 + healthcheck: + test: netstat -ltn | grep -c 15002 + interval: 1s + retries: 1200 + volumes: + - ./spark-script:/spark-script + entrypoint: ["/spark-script/spark-connect-server.sh"] + + risingwave-standalone: + extends: + file: ../../../../docker/docker-compose.yml + service: risingwave-standalone + healthcheck: + test: + - CMD-SHELL + - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;' + interval: 1s + timeout: 30s + networks: + iceberg_net: + + minio-0: + extends: + file: ../../../../docker/docker-compose.yml + service: minio-0 + entrypoint: " + + /bin/sh -c ' + + set -e + + mkdir -p \"/data/icebergdata/demo\" + mkdir -p \"/data/hummock001\" + + /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\" + + '" + networks: + iceberg_net: + + etcd-0: + extends: + file: ../../../../docker/docker-compose.yml + service: etcd-0 + networks: + iceberg_net: + +volumes: + risingwave-standalone: + external: false + etcd-0: + external: false + minio-0: + external: false + +networks: + iceberg_net: \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/storage/spark-script/spark-connect-server.sh b/integration_tests/iceberg-sink2/docker/storage/spark-script/spark-connect-server.sh new file mode 100755 index 0000000000000..d37ed983fc236 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/storage/spark-script/spark-connect-server.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +set -ex + +JARS=$(find /opt/spark/deps -type f -name "*.jar" | tr '\n' ':') + +/opt/spark/sbin/start-connect-server.sh \ + --master local[3] \ + --driver-class-path $JARS \ + --conf spark.driver.bindAddress=0.0.0.0 \ + --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \ + --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ + --conf spark.sql.catalog.demo.type=hadoop \ + --conf spark.sql.catalog.demo.warehouse=s3a://icebergdata/demo \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://minio-0:9301 \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=hummockadmin \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=hummockadmin \ + --conf spark.sql.defaultCatalog=demo + +tail -f /opt/spark/logs/spark*.out \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/python/main.py b/integration_tests/iceberg-sink2/python/main.py index 9ad070b0c1b9b..ff0c90d4cf752 100644 --- a/integration_tests/iceberg-sink2/python/main.py +++ b/integration_tests/iceberg-sink2/python/main.py @@ -6,28 +6,37 @@ import time -def case_dir(case_name): - return f"../docker/{case_name}" +def read_config(filename): + config = configparser.ConfigParser() + config.read(filename) + print({section: dict(config[section]) for section in config.sections()}) + return config -def start_docker(case_name): - subprocess.run(["docker-compose", "up", "-d", "--wait"], cwd=case_dir(case_name), check=False) +class DockerCompose(object): + def __init__(self, case_name: str): + self.case_name = case_name + def case_dir(self): + return f"../docker/{self.case_name}" -def stop_docker(case_name): - 
subprocess.run(["docker", "compose", "down", "-v", "--remove-orphans"], cwd=case_dir(case_name), - capture_output=True, - check=True) + def get_ip(self, container_name): + return subprocess.check_output([ + "docker", "inspect", "-f", "{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}", + container_name], cwd=self.case_dir()).decode("utf-8").rstrip() + def __enter__(self): + subprocess.run(["docker-compose", "up", "-d", "--wait"], cwd=self.case_dir(), check=False) + return self -def get_ip(case_name, container_name): - return subprocess.check_output(["docker", "inspect", "-f", "{{range.NetworkSettings.Networks}}{{.IPAddress}}{{" - "end}}", - container_name], cwd=case_dir(case_name)).decode("utf-8").rstrip() + def __exit__(self, exc_type, exc_val, exc_tb): + subprocess.run(["docker", "compose", "down", "-v", "--remove-orphans"], cwd=self.case_dir(), + capture_output=True, + check=True) -def init_spark_table(case_name): - spark_ip = get_ip(case_dir(case_name), f"{case_name}-spark-1") +def init_spark_table(docker): + spark_ip = docker.get_ip(f"{docker.case_name}-spark-1") url = f"sc://{spark_ip}:15002" print(f"Spark url is {url}") spark = SparkSession.builder.remote(url).getOrCreate() @@ -52,10 +61,8 @@ def init_spark_table(case_name): spark.sql(sql) -def init_risingwave_mv(config): - aws_key = config['default']['aws_key'] - aws_secret = config['default']['aws_secret'] - +def init_risingwave_mv(docker): + config = read_config(f"{docker.case_dir()}/config.ini") sink_config = config['sink'] sink_param = ",\n".join([f"{k}='{v}'" for k, v in sink_config.items()]) sqls = [ @@ -105,8 +112,8 @@ def init_risingwave_mv(config): cursor.execute(sql) -def check_spark_table(case_name): - spark_ip = get_ip(case_dir(case_name), f"{case_name}-spark-1") +def check_spark_table(docker): + spark_ip = docker.get_ip(f"{docker.case_name}-spark-1") url = f"sc://{spark_ip}:15002" print(f"Spark url is {url}") spark = SparkSession.builder.remote(url).getOrCreate() @@ -118,19 +125,20 @@ def check_spark_table(case_name): for sql in sqls: print(f"Executing sql: {sql}") result = spark.sql(sql).collect() - print(f"Result is {result}") + assert result[0][0] > 100, f"Inserted result is too small: {result[0][0]}, test failed" + + +def run_case(case): + with DockerCompose(case) as docker: + init_spark_table(docker) + init_risingwave_mv(docker) + print("Let risingwave to run") + time.sleep(5) + check_spark_table(docker) if __name__ == "__main__": - case_name = "rest" - config = configparser.ConfigParser() - config.read(f"{case_dir(case_name)}/config.ini") - print({section: dict(config[section]) for section in config.sections()}) - start_docker(case_name) - print("Waiting for docker to be ready") - init_spark_table(case_name) - init_risingwave_mv(config) - print("Let risingwave to run") - time.sleep(3) - check_spark_table(case_name) - stop_docker(case_name) + case_names = ["rest", "storage", "jdbc", "hive"] + for case_name in case_names: + print(f"Running test case: {case_name}") + run_case(case_name) diff --git a/integration_tests/iceberg-sink2/python/poetry.lock b/integration_tests/iceberg-sink2/python/poetry.lock index cf29a6f294aad..fc560537f1e64 100644 --- a/integration_tests/iceberg-sink2/python/poetry.lock +++ b/integration_tests/iceberg-sink2/python/poetry.lock @@ -19,163 +19,167 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.60.0" +version = "1.62.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = 
"grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, - {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, - {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, - {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, - {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, - {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, - {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, - {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, - {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, - {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, - {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, - {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, - {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, - {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, - {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, - {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, - {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, - {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, - {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, - {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, - {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, - {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, - {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, - {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, + {file = "grpcio-1.62.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:136ffd79791b1eddda8d827b607a6285474ff8a1a5735c4947b58c481e5e4271"}, + {file = "grpcio-1.62.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d6a56ba703be6b6267bf19423d888600c3f574ac7c2cc5e6220af90662a4d6b0"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:4cd356211579043fce9f52acc861e519316fff93980a212c8109cca8f47366b6"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e803e9b58d8f9b4ff0ea991611a8d51b31c68d2e24572cd1fe85e99e8cc1b4f8"}, + {file = "grpcio-1.62.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4c04fe33039b35b97c02d2901a164bbbb2f21fb9c4e2a45a959f0b044c3512c"}, + {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:95370c71b8c9062f9ea033a0867c4c73d6f0ff35113ebd2618171ec1f1e903e0"}, + {file = "grpcio-1.62.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c912688acc05e4ff012c8891803659d6a8a8b5106f0f66e0aed3fb7e77898fa6"}, + {file = "grpcio-1.62.0-cp310-cp310-win32.whl", hash = "sha256:821a44bd63d0f04e33cf4ddf33c14cae176346486b0df08b41a6132b976de5fc"}, + {file = "grpcio-1.62.0-cp310-cp310-win_amd64.whl", hash = "sha256:81531632f93fece32b2762247c4c169021177e58e725494f9a746ca62c83acaa"}, + {file = "grpcio-1.62.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3fa15850a6aba230eed06b236287c50d65a98f05054a0f01ccedf8e1cc89d57f"}, + {file = "grpcio-1.62.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:36df33080cd7897623feff57831eb83c98b84640b016ce443305977fac7566fb"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7a195531828b46ea9c4623c47e1dc45650fc7206f8a71825898dd4c9004b0928"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab140a3542bbcea37162bdfc12ce0d47a3cda3f2d91b752a124cc9fe6776a9e2"}, + {file = "grpcio-1.62.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9d6c3223914abb51ac564dc9c3782d23ca445d2864321b9059d62d47144021"}, + {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fbe0c20ce9a1cff75cfb828b21f08d0a1ca527b67f2443174af6626798a754a4"}, + {file = "grpcio-1.62.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38f69de9c28c1e7a8fd24e4af4264726637b72f27c2099eaea6e513e7142b47e"}, + {file = "grpcio-1.62.0-cp311-cp311-win32.whl", hash = "sha256:ce1aafdf8d3f58cb67664f42a617af0e34555fe955450d42c19e4a6ad41c84bd"}, + {file = "grpcio-1.62.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:eef1d16ac26c5325e7d39f5452ea98d6988c700c427c52cbc7ce3201e6d93334"}, + {file = "grpcio-1.62.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8aab8f90b2a41208c0a071ec39a6e5dbba16fd827455aaa070fec241624ccef8"}, + {file = "grpcio-1.62.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:62aa1659d8b6aad7329ede5d5b077e3d71bf488d85795db517118c390358d5f6"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0d7ae7fc7dbbf2d78d6323641ded767d9ec6d121aaf931ec4a5c50797b886532"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f359d635ee9428f0294bea062bb60c478a8ddc44b0b6f8e1f42997e5dc12e2ee"}, + {file = "grpcio-1.62.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d48e5b1f8f4204889f1acf30bb57c30378e17c8d20df5acbe8029e985f735c"}, + {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:662d3df5314ecde3184cf87ddd2c3a66095b3acbb2d57a8cada571747af03873"}, + {file = "grpcio-1.62.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92cdb616be44c8ac23a57cce0243af0137a10aa82234f23cd46e69e115071388"}, + {file = "grpcio-1.62.0-cp312-cp312-win32.whl", hash = "sha256:0b9179478b09ee22f4a36b40ca87ad43376acdccc816ce7c2193a9061bf35701"}, + {file = "grpcio-1.62.0-cp312-cp312-win_amd64.whl", hash = "sha256:614c3ed234208e76991992342bab725f379cc81c7dd5035ee1de2f7e3f7a9842"}, + {file = "grpcio-1.62.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:7e1f51e2a460b7394670fdb615e26d31d3260015154ea4f1501a45047abe06c9"}, + {file = "grpcio-1.62.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:bcff647e7fe25495e7719f779cc219bbb90b9e79fbd1ce5bda6aae2567f469f2"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:56ca7ba0b51ed0de1646f1735154143dcbdf9ec2dbe8cc6645def299bb527ca1"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e84bfb2a734e4a234b116be208d6f0214e68dcf7804306f97962f93c22a1839"}, + {file = "grpcio-1.62.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c1488b31a521fbba50ae86423f5306668d6f3a46d124f7819c603979fc538c4"}, + {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98d8f4eb91f1ce0735bf0b67c3b2a4fea68b52b2fd13dc4318583181f9219b4b"}, + {file = "grpcio-1.62.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b3d3d755cfa331d6090e13aac276d4a3fb828bf935449dc16c3d554bf366136b"}, + {file = "grpcio-1.62.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a33f2bfd8a58a02aab93f94f6c61279be0f48f99fcca20ebaee67576cd57307b"}, + {file = "grpcio-1.62.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:5e709f7c8028ce0443bddc290fb9c967c1e0e9159ef7a030e8c21cac1feabd35"}, + {file = "grpcio-1.62.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:2f3d9a4d0abb57e5f49ed5039d3ed375826c2635751ab89dcc25932ff683bbb6"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:62ccb92f594d3d9fcd00064b149a0187c246b11e46ff1b7935191f169227f04c"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921148f57c2e4b076af59a815467d399b7447f6e0ee10ef6d2601eb1e9c7f402"}, + {file = "grpcio-1.62.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f897b16190b46bc4d4aaf0a32a4b819d559a37a756d7c6b571e9562c360eed72"}, + {file = "grpcio-1.62.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1bc8449084fe395575ed24809752e1dc4592bb70900a03ca42bf236ed5bf008f"}, + {file = 
"grpcio-1.62.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81d444e5e182be4c7856cd33a610154fe9ea1726bd071d07e7ba13fafd202e38"}, + {file = "grpcio-1.62.0-cp38-cp38-win32.whl", hash = "sha256:88f41f33da3840b4a9bbec68079096d4caf629e2c6ed3a72112159d570d98ebe"}, + {file = "grpcio-1.62.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc2836cb829895ee190813446dce63df67e6ed7b9bf76060262c55fcd097d270"}, + {file = "grpcio-1.62.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fcc98cff4084467839d0a20d16abc2a76005f3d1b38062464d088c07f500d170"}, + {file = "grpcio-1.62.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:0d3dee701e48ee76b7d6fbbba18ba8bc142e5b231ef7d3d97065204702224e0e"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b7a6be562dd18e5d5bec146ae9537f20ae1253beb971c0164f1e8a2f5a27e829"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29cb592c4ce64a023712875368bcae13938c7f03e99f080407e20ffe0a9aa33b"}, + {file = "grpcio-1.62.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eda79574aec8ec4d00768dcb07daba60ed08ef32583b62b90bbf274b3c279f7"}, + {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7eea57444a354ee217fda23f4b479a4cdfea35fb918ca0d8a0e73c271e52c09c"}, + {file = "grpcio-1.62.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e97f37a3b7c89f9125b92d22e9c8323f4e76e7993ba7049b9f4ccbe8bae958a"}, + {file = "grpcio-1.62.0-cp39-cp39-win32.whl", hash = "sha256:39cd45bd82a2e510e591ca2ddbe22352e8413378852ae814549c162cf3992a93"}, + {file = "grpcio-1.62.0-cp39-cp39-win_amd64.whl", hash = "sha256:b71c65427bf0ec6a8b48c68c17356cb9fbfc96b1130d20a07cb462f4e4dcdcd5"}, + {file = "grpcio-1.62.0.tar.gz", hash = "sha256:748496af9238ac78dcd98cce65421f1adce28c3979393e3609683fcd7f3880d7"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.60.0)"] +protobuf = ["grpcio-tools (>=1.62.0)"] [[package]] name = "grpcio-status" -version = "1.60.0" +version = "1.62.0" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.60.0.tar.gz", hash = "sha256:f10e0b6db3adc0fdc244b71962814ee982996ef06186446b5695b9fa635aa1ab"}, - {file = "grpcio_status-1.60.0-py3-none-any.whl", hash = "sha256:7d383fa36e59c1e61d380d91350badd4d12ac56e4de2c2b831b050362c3c572e"}, + {file = "grpcio-status-1.62.0.tar.gz", hash = "sha256:0d693e9c09880daeaac060d0c3dba1ae470a43c99e5d20dfeafd62cf7e08a85d"}, + {file = "grpcio_status-1.62.0-py3-none-any.whl", hash = "sha256:3baac03fcd737310e67758c4082a188107f771d32855bce203331cd4c9aa687a"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.60.0" +grpcio = ">=1.62.0" protobuf = ">=4.21.6" [[package]] name = "numpy" -version = "1.26.3" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, - {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, - {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, - {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, - {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, - {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, - {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, - {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, - {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, - {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, - {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] name = "pandas" -version = "2.1.4" +version = "2.2.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, - {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, - {file = 
"pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, - {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, - {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, - {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, - {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, - {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, - {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, - {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, - {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, - {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, + {file = 
"pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, + {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, ] [package.dependencies] @@ -185,50 +189,50 @@ numpy = [ ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = 
["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "protobuf" -version = "4.25.2" +version = "4.25.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = 
"sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] @@ -325,51 +329,51 @@ files = [ [[package]] name = "pyarrow" -version = "14.0.2" +version = "15.0.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - 
{file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, + {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, + {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, + {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, + {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, + {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, + {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, ] [package.dependencies] -numpy = ">=1.16.6" +numpy = ">=1.16.6,<2" [[package]] name = "pyspark" @@ -413,13 +417,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -435,13 +439,13 @@ files = [ [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [metadata] diff --git a/integration_tests/iceberg-sink2/run.sh b/integration_tests/iceberg-sink2/run.sh new file mode 100755 index 0000000000000..166f98e0355b0 --- /dev/null +++ b/integration_tests/iceberg-sink2/run.sh @@ -0,0 +1,4 @@ +# Don't remove the `--quiet` option since poetry has a bug when printing output, see +# https://github.com/python-poetry/poetry/issues/3412 +"$HOME"/.local/bin/poetry update --quiet +"$HOME"/.local/bin/poetry run python main.py \ No newline at end of file diff --git a/integration_tests/scripts/run_demos.py b/integration_tests/scripts/run_demos.py index 87967daa50b2c..d6fd4ecbed9c3 100644 --- a/integration_tests/scripts/run_demos.py +++ b/integration_tests/scripts/run_demos.py @@ -42,7 +42,7 @@ def run_demo(demo: str, format: str, 
wait_time=40): sleep(10) continue # Fallback to default version when the protobuf version doesn't exist. - sql_file = os.path.join(demo_dir, fname) + sql_file = os.path.join(demo_dir, fname) if not os.path.exists(sql_file): continue run_sql_file(sql_file, demo_dir) @@ -55,7 +55,16 @@ def iceberg_cdc_demo(): project_dir = dirname(file_dir) demo_dir = os.path.join(project_dir, demo) print("Running demo: iceberg-cdc") - subprocess.run(["bash","./run_test.sh"], cwd=demo_dir, check=True) + subprocess.run(["bash", "./run_test.sh"], cwd=demo_dir, check=True) + + +def iceberg_sink_demo(): + demo = "iceberg-sink2" + file_dir = dirname(abspath(__file__)) + project_dir = dirname(file_dir) + demo_dir = os.path.join(project_dir, demo) + print("Running demo: iceberg-sink2") + subprocess.run(["bash", "./run.sh"], cwd=demo_dir, check=True) arg_parser = argparse.ArgumentParser(description="Run the demo") @@ -75,5 +84,7 @@ def iceberg_cdc_demo(): if args.case == "iceberg-cdc": iceberg_cdc_demo() +elif args.case == "iceberg-sink": + iceberg_sink_demo() else: run_demo(args.case, args.format) diff --git a/java/connector-node/assembly/assembly.xml b/java/connector-node/assembly/assembly.xml index 9cf457d8a0b6d..26df6e8a71af9 100644 --- a/java/connector-node/assembly/assembly.xml +++ b/java/connector-node/assembly/assembly.xml @@ -42,9 +42,13 @@ *:risingwave-sink-es-7 *:risingwave-sink-cassandra *:risingwave-sink-jdbc + *:risingwave-sink-iceberg *:risingwave-sink-mock-flink-http-sink + + org.apache.iceberg:iceberg-common + true true diff --git a/java/connector-node/risingwave-sink-iceberg/pom.xml b/java/connector-node/risingwave-sink-iceberg/pom.xml index 753691c05dd6d..9f733d830a475 100644 --- a/java/connector-node/risingwave-sink-iceberg/pom.xml +++ b/java/connector-node/risingwave-sink-iceberg/pom.xml @@ -65,11 +65,36 @@ iceberg-data ${iceberg.version} + + org.apache.iceberg + iceberg-hive-metastore + ${iceberg.version} + + + org.apache.hive + hive-metastore + org.apache.parquet parquet-avro 1.12.3 + + org.apache.hadoop + hadoop-common + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.apache.hadoop + hadoop-mapreduce-client-common + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + org.apache.iceberg iceberg-aws @@ -78,13 +103,37 @@ software.amazon.awssdk s3 - 2.18.20 software.amazon.awssdk - url-connection-client - 2.18.20 + sts + + + org.postgresql + postgresql + + + mysql + mysql-connector-java + + + org.xerial + sqlite-jdbc + + + software.amazon.awssdk + apache-client + + + junit + junit + test + + + org.assertj + assertj-core + 3.24.2 + test - diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java index 583747f3b2f3f..e8c900b37e88d 100644 --- a/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java @@ -74,6 +74,15 @@ public static JniCatalogWrapper create(String name, String klassName, String[] p checkArgument( props.length % 2 == 0, "props should be key-value pairs, but length is: " + props.length); + + // Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader()); + System.out.println("Current thread name is: " + Thread.currentThread().getName()); + + // try { + // 
Thread.currentThread().getContextClassLoader().loadClass(klassName);
+        // } catch (ClassNotFoundException e) {
+        //     throw new RuntimeException(e);
+        // }
         try {
             HashMap<String, String> config = new HashMap<>(props.length / 2);
             for (int i = 0; i < props.length; i += 2) {
diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynClasses.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynClasses.java
new file mode 100644
index 0000000000000..65a75e774d2ff
--- /dev/null
+++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynClasses.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iceberg.common;
+
+import org.apache.iceberg.relocated.com.google.common.base.Joiner;
+import org.apache.iceberg.relocated.com.google.common.collect.Sets;
+
+import java.util.Set;
+
+public class DynClasses {
+
+    private DynClasses() {}
+
+    public static Builder builder() {
+        return new Builder();
+    }
+
+    public static class Builder {
+        private Class<?> foundClass = null;
+        private boolean nullOk = false;
+        private Set<String> classNames = Sets.newLinkedHashSet();
+
+        private Builder() {}
+
+        /**
+         * Set the {@link ClassLoader} used to lookup classes by name.
+         *
+         * <p>If not set, the current thread's ClassLoader is used.
+         *
+         * @param newLoader a ClassLoader
+         * @return this Builder for method chaining
+         */
+        public Builder loader(ClassLoader newLoader) {
+            return this;
+        }
+
+        /**
+         * Checks for an implementation of the class by name.
+         *
+         * @param className name of a class
+         * @return this Builder for method chaining
+         */
+        public Builder impl(String className) {
+            classNames.add(className);
+
+            if (foundClass != null) {
+                return this;
+            }
+
+            try {
+                this.foundClass = Class.forName(className);
+            } catch (ClassNotFoundException e) {
+                // not the right implementation
+            }
+
+            return this;
+        }
+
+        /**
+         * Instructs this builder to return null if no class is found, rather than throwing an
+         * Exception.
+         *
+         * @return this Builder for method chaining
+         */
+        public Builder orNull() {
+            this.nullOk = true;
+            return this;
+        }
+
+        /**
+         * Returns the first implementation or throws ClassNotFoundException if one was not found.
+         *
+         * @param <S> Java superclass
+         * @return a {@link Class} for the first implementation found
+         * @throws ClassNotFoundException if no implementation was found
+         */
+        @SuppressWarnings("unchecked")
+        public <S> Class<? extends S> buildChecked() throws ClassNotFoundException {
+            if (!nullOk && foundClass == null) {
+                throw new ClassNotFoundException(
+                        "Cannot find class; alternatives: " + Joiner.on(", ").join(classNames));
+            }
+            return (Class<? extends S>) foundClass;
+        }
+
+        /**
+         * Returns the first implementation or throws RuntimeException if one was not found.
+         *
+         * @param <S> Java superclass
+         * @return a {@link Class} for the first implementation found
+         * @throws RuntimeException if no implementation was found
+         */
+        @SuppressWarnings("unchecked")
+        public <S> Class<? extends S> build() {
+            if (!nullOk && foundClass == null) {
+                throw new RuntimeException(
+                        "Cannot find class; alternatives: " + Joiner.on(", ").join(classNames));
+            }
+            return (Class<? extends S>) foundClass;
+        }
+    }
+}
diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynConstructors.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynConstructors.java
new file mode 100644
index 0000000000000..61566f4e191ff
--- /dev/null
+++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynConstructors.java
@@ -0,0 +1,298 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
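The Dyn* classes in this patch are vendored copies of Iceberg's reflection helpers; the visible difference from upstream is that loader(ClassLoader) is a no-op, so lookups always go through Class.forName on the default classpath. As a quick orientation for reviewers, a minimal usage sketch of DynClasses (not part of the patch; DynClassesExample and com.example.MissingCodec are made-up names):

import org.apache.iceberg.common.DynClasses;

public class DynClassesExample {
    public static void main(String[] args) {
        // Candidates are tried in order; the first one present on the
        // classpath wins. orNull() makes build() return null instead of
        // throwing when nothing matches.
        Class<?> impl =
                DynClasses.builder()
                        .impl("com.example.MissingCodec") // hypothetical, absent
                        .impl("java.util.ArrayList")      // present in the JDK
                        .orNull()
                        .build();
        System.out.println(impl); // -> class java.util.ArrayList
    }
}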
+ */ +package org.apache.iceberg.common; + +import org.apache.iceberg.relocated.com.google.common.base.Preconditions; +import org.apache.iceberg.relocated.com.google.common.base.Throwables; +import org.apache.iceberg.relocated.com.google.common.collect.Maps; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; +import java.util.Map; + +/** Copied from parquet-common */ +public class DynConstructors { + + private DynConstructors() {} + + public static class Ctor extends DynMethods.UnboundMethod { + private final Constructor ctor; + private final Class constructed; + + private Ctor(Constructor constructor, Class constructed) { + super(null, "newInstance"); + this.ctor = constructor; + this.constructed = constructed; + } + + public Class getConstructedClass() { + return constructed; + } + + public C newInstanceChecked(Object... args) throws Exception { + try { + if (args.length > ctor.getParameterCount()) { + return ctor.newInstance(Arrays.copyOfRange(args, 0, ctor.getParameterCount())); + } else { + return ctor.newInstance(args); + } + } catch (InstantiationException | IllegalAccessException e) { + throw e; + } catch (InvocationTargetException e) { + Throwables.propagateIfInstanceOf(e.getCause(), Exception.class); + Throwables.propagateIfInstanceOf(e.getCause(), RuntimeException.class); + throw Throwables.propagate(e.getCause()); + } + } + + public C newInstance(Object... args) { + try { + return newInstanceChecked(args); + } catch (Exception e) { + Throwables.propagateIfInstanceOf(e, RuntimeException.class); + throw Throwables.propagate(e); + } + } + + @Override + @SuppressWarnings("unchecked") + public R invoke(Object target, Object... args) { + Preconditions.checkArgument( + target == null, "Invalid call to constructor: target must be null"); + return (R) newInstance(args); + } + + @Override + @SuppressWarnings("unchecked") + public R invokeChecked(Object target, Object... args) throws Exception { + Preconditions.checkArgument( + target == null, "Invalid call to constructor: target must be null"); + return (R) newInstanceChecked(args); + } + + @Override + public DynMethods.BoundMethod bind(Object receiver) { + throw new IllegalStateException("Cannot bind constructors"); + } + + @Override + public boolean isStatic() { + return true; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(constructor=" + ctor + ", class=" + constructed + ")"; + } + } + + public static Builder builder() { + return new Builder(); + } + + public static Builder builder(Class baseClass) { + return new Builder(baseClass); + } + + public static class Builder { + private final Class baseClass; + private Ctor ctor = null; + private Map problems = Maps.newHashMap(); + + public Builder(Class baseClass) { + this.baseClass = baseClass; + } + + public Builder() { + this.baseClass = null; + } + + /** + * Set the {@link ClassLoader} used to lookup classes by name. + * + *
<p>
If not set, the current thread's ClassLoader is used. + * + * @param newLoader a ClassLoader + * @return this Builder for method chaining + */ + public Builder loader(ClassLoader newLoader) { + return this; + } + + public Builder impl(String className, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + impl(targetClass, types); + } catch (NoClassDefFoundError | ClassNotFoundException e) { + // cannot load this implementation + problems.put(className, e); + } + return this; + } + + public Builder impl(Class targetClass, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + ctor = new Ctor(targetClass.getConstructor(types), targetClass); + } catch (NoSuchMethodException e) { + // not the right implementation + problems.put(methodName(targetClass, types), e); + } + return this; + } + + public Builder hiddenImpl(Class... types) { + hiddenImpl(baseClass, types); + return this; + } + + @SuppressWarnings("unchecked") + public Builder hiddenImpl(String className, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + hiddenImpl(targetClass, types); + } catch (NoClassDefFoundError | ClassNotFoundException e) { + // cannot load this implementation + problems.put(className, e); + } + return this; + } + + public Builder hiddenImpl(Class targetClass, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + Constructor hidden = targetClass.getDeclaredConstructor(types); + AccessController.doPrivileged(new MakeAccessible(hidden)); + ctor = new Ctor(hidden, targetClass); + } catch (SecurityException e) { + // unusable + problems.put(methodName(targetClass, types), e); + } catch (NoSuchMethodException e) { + // not the right implementation + problems.put(methodName(targetClass, types), e); + } + return this; + } + + @SuppressWarnings("unchecked") + public Ctor buildChecked() throws NoSuchMethodException { + if (ctor != null) { + return ctor; + } + throw buildCheckedException(baseClass, problems); + } + + @SuppressWarnings("unchecked") + public Ctor build() { + if (ctor != null) { + return ctor; + } + throw buildRuntimeException(baseClass, problems); + } + } + + private static class MakeAccessible implements PrivilegedAction { + private Constructor hidden; + + MakeAccessible(Constructor hidden) { + this.hidden = hidden; + } + + @Override + public Void run() { + hidden.setAccessible(true); + return null; + } + } + + private static NoSuchMethodException buildCheckedException( + Class baseClass, Map problems) { + NoSuchMethodException exc = + new NoSuchMethodException( + "Cannot find constructor for " + baseClass + "\n" + formatProblems(problems)); + problems.values().forEach(exc::addSuppressed); + return exc; + } + + private static RuntimeException buildRuntimeException( + Class baseClass, Map problems) { + RuntimeException exc = + new RuntimeException( + "Cannot find constructor for " + baseClass + "\n" + formatProblems(problems)); + problems.values().forEach(exc::addSuppressed); + return exc; + } + + private static String formatProblems(Map problems) { + StringBuilder sb = new StringBuilder(); + boolean first = true; + for (Map.Entry problem : problems.entrySet()) { + if (first) { + first = false; + } else { + 
sb.append("\n"); + } + sb.append("\tMissing ") + .append(problem.getKey()) + .append(" [") + .append(problem.getValue().getClass().getName()) + .append(": ") + .append(problem.getValue().getMessage()) + .append("]"); + } + return sb.toString(); + } + + private static String methodName(Class targetClass, Class... types) { + StringBuilder sb = new StringBuilder(); + sb.append(targetClass.getName()).append("("); + boolean first = true; + for (Class type : types) { + if (first) { + first = false; + } else { + sb.append(","); + } + sb.append(type.getName()); + } + sb.append(")"); + return sb.toString(); + } +} diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynFields.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynFields.java new file mode 100644 index 0000000000000..80b6af8cb7f93 --- /dev/null +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynFields.java @@ -0,0 +1,428 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.common; + +import org.apache.iceberg.relocated.com.google.common.base.Joiner; +import org.apache.iceberg.relocated.com.google.common.base.MoreObjects; +import org.apache.iceberg.relocated.com.google.common.base.Preconditions; +import org.apache.iceberg.relocated.com.google.common.base.Throwables; +import org.apache.iceberg.relocated.com.google.common.collect.Sets; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Set; + +public class DynFields { + + private DynFields() {} + + /** + * Convenience wrapper class around {@link Field}. + * + *
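DynConstructors follows the same builder pattern, but resolves a constructor and wraps it in a typed Ctor. A hedged sketch of typical use (illustrative only, not part of the patch):

import org.apache.iceberg.common.DynConstructors;

public class DynConstructorsExample {
    public static void main(String[] args) {
        // Locate StringBuilder(String) reflectively and instantiate it
        // through a Ctor typed to the CharSequence base class.
        DynConstructors.Ctor<CharSequence> ctor =
                DynConstructors.builder(CharSequence.class)
                        .impl("java.lang.StringBuilder", String.class)
                        .build(); // throws RuntimeException if nothing matched
        CharSequence cs = ctor.newInstance("iceberg");
        System.out.println(cs); // -> iceberg
    }
}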
<p>
Allows callers to invoke the wrapped method with all Exceptions wrapped by RuntimeException, + * or with a single Exception catch block. + */ + public static class UnboundField { + private final Field field; + private final String name; + + private UnboundField(Field field, String name) { + this.field = field; + this.name = name; + } + + @SuppressWarnings("unchecked") + public T get(Object target) { + try { + return (T) field.get(target); + } catch (IllegalAccessException e) { + throw Throwables.propagate(e); + } + } + + public void set(Object target, T value) { + try { + field.set(target, value); + } catch (IllegalAccessException e) { + throw Throwables.propagate(e); + } + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("class", field.getDeclaringClass().toString()) + .add("name", name) + .add("type", field.getType()) + .toString(); + } + + /** + * Returns this method as a BoundMethod for the given receiver. + * + * @param target an Object on which to get or set this field + * @return a {@link BoundField} for this field and the target + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + */ + public BoundField bind(Object target) { + Preconditions.checkState( + !isStatic() || this == AlwaysNull.INSTANCE, "Cannot bind static field %s", name); + Preconditions.checkArgument( + field.getDeclaringClass().isAssignableFrom(target.getClass()), + "Cannot bind field %s to instance of %s", + name, + target.getClass()); + + return new BoundField<>(this, target); + } + + /** + * Returns this field as a StaticField. + * + * @return a {@link StaticField} for this field + * @throws IllegalStateException if the method is not static + */ + public StaticField asStatic() { + Preconditions.checkState(isStatic(), "Field %s is not static", name); + return new StaticField<>(this); + } + + /** Returns whether the field is a static field. */ + public boolean isStatic() { + return Modifier.isStatic(field.getModifiers()); + } + + /** Returns whether the field is always null. 
*/ + public boolean isAlwaysNull() { + return this == AlwaysNull.INSTANCE; + } + } + + private static class AlwaysNull extends UnboundField { + private static final AlwaysNull INSTANCE = new AlwaysNull(); + + private AlwaysNull() { + super(null, "AlwaysNull"); + } + + @Override + public Void get(Object target) { + return null; + } + + @Override + public void set(Object target, Void value) {} + + @Override + public String toString() { + return "Field(AlwaysNull)"; + } + + @Override + public boolean isStatic() { + return true; + } + + @Override + public boolean isAlwaysNull() { + return true; + } + } + + public static class StaticField { + private final UnboundField field; + + private StaticField(UnboundField field) { + this.field = field; + } + + public T get() { + return field.get(null); + } + + public void set(T value) { + field.set(null, value); + } + } + + public static class BoundField { + private final UnboundField field; + private final Object target; + + private BoundField(UnboundField field, Object target) { + this.field = field; + this.target = target; + } + + public T get() { + return field.get(target); + } + + public void set(T value) { + field.set(target, value); + } + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private UnboundField field = null; + private final Set candidates = Sets.newHashSet(); + private boolean defaultAlwaysNull = false; + + private Builder() {} + + /** + * Set the {@link ClassLoader} used to lookup classes by name. + * + *
<p>
If not set, the current thread's ClassLoader is used. + * + * @param newLoader a ClassLoader + * @return this Builder for method chaining + */ + public Builder loader(ClassLoader newLoader) { + return this; + } + + /** + * Instructs this builder to return AlwaysNull if no implementation is found. + * + * @return this Builder for method chaining + */ + public Builder defaultAlwaysNull() { + this.defaultAlwaysNull = true; + return this; + } + + /** + * Checks for an implementation, first finding the class by name. + * + * @param className name of a class + * @param fieldName name of the field + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder impl(String className, String fieldName) { + // don't do any work if an implementation has been found + if (field != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + impl(targetClass, fieldName); + } catch (ClassNotFoundException e) { + // not the right implementation + candidates.add(className + "." + fieldName); + } + return this; + } + + /** + * Checks for an implementation. + * + * @param targetClass a class instance + * @param fieldName name of a field (different from constructor) + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder impl(Class targetClass, String fieldName) { + // don't do any work if an implementation has been found + if (field != null || targetClass == null) { + return this; + } + + try { + this.field = new UnboundField<>(targetClass.getField(fieldName), fieldName); + } catch (NoSuchFieldException e) { + // not the right implementation + candidates.add(targetClass.getName() + "." + fieldName); + } + return this; + } + + /** + * Checks for a hidden implementation, first finding the class by name. + * + * @param className name of a class + * @param fieldName name of a field (different from constructor) + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder hiddenImpl(String className, String fieldName) { + // don't do any work if an implementation has been found + if (field != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + hiddenImpl(targetClass, fieldName); + } catch (ClassNotFoundException e) { + // not the right implementation + candidates.add(className + "." + fieldName); + } + return this; + } + + /** + * Checks for a hidden implementation. + * + * @param targetClass a class instance + * @param fieldName name of a field (different from constructor) + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder hiddenImpl(Class targetClass, String fieldName) { + // don't do any work if an implementation has been found + if (field != null || targetClass == null) { + return this; + } + + try { + Field hidden = targetClass.getDeclaredField(fieldName); + AccessController.doPrivileged(new MakeFieldAccessible(hidden)); + this.field = new UnboundField(hidden, fieldName); + } catch (SecurityException | NoSuchFieldException e) { + // unusable + candidates.add(targetClass.getName() + "." + fieldName); + } + return this; + } + + /** + * Returns the first valid implementation as a UnboundField or throws a NoSuchFieldException if + * there is none. 
+ * + * @param Java class stored in the field + * @return a {@link UnboundField} with a valid implementation + * @throws NoSuchFieldException if no implementation was found + */ + @SuppressWarnings("unchecked") + public UnboundField buildChecked() throws NoSuchFieldException { + if (field != null) { + return (UnboundField) field; + } else if (defaultAlwaysNull) { + return (UnboundField) AlwaysNull.INSTANCE; + } else { + throw new NoSuchFieldException( + "Cannot find field from candidates: " + Joiner.on(", ").join(candidates)); + } + } + + /** + * Returns the first valid implementation as a BoundMethod or throws a NoSuchMethodException if + * there is none. + * + * @param target an Object on which to get and set the field + * @param Java class stored in the field + * @return a {@link BoundField} with a valid implementation and target + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + * @throws NoSuchFieldException if no implementation was found + */ + public BoundField buildChecked(Object target) throws NoSuchFieldException { + return this.buildChecked().bind(target); + } + + /** + * Returns the first valid implementation as a UnboundField or throws a NoSuchFieldException if + * there is none. + * + * @param Java class stored in the field + * @return a {@link UnboundField} with a valid implementation + * @throws RuntimeException if no implementation was found + */ + @SuppressWarnings("unchecked") + public UnboundField build() { + if (field != null) { + return (UnboundField) field; + } else if (defaultAlwaysNull) { + return (UnboundField) AlwaysNull.INSTANCE; + } else { + throw new RuntimeException( + "Cannot find field from candidates: " + Joiner.on(", ").join(candidates)); + } + } + + /** + * Returns the first valid implementation as a BoundMethod or throws a RuntimeException if there + * is none. + * + * @param target an Object on which to get and set the field + * @param Java class stored in the field + * @return a {@link BoundField} with a valid implementation and target + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + * @throws RuntimeException if no implementation was found + */ + public BoundField build(Object target) { + return this.build().bind(target); + } + + /** + * Returns the first valid implementation as a StaticField or throws a NoSuchFieldException if + * there is none. + * + * @param Java class stored in the field + * @return a {@link StaticField} with a valid implementation + * @throws IllegalStateException if the method is not static + * @throws NoSuchFieldException if no implementation was found + */ + public StaticField buildStaticChecked() throws NoSuchFieldException { + return this.buildChecked().asStatic(); + } + + /** + * Returns the first valid implementation as a StaticField or throws a RuntimeException if there + * is none. 
+ * + * @param Java class stored in the field + * @return a {@link StaticField} with a valid implementation + * @throws IllegalStateException if the method is not static + * @throws RuntimeException if no implementation was found + */ + public StaticField buildStatic() { + return this.build().asStatic(); + } + } + + private static class MakeFieldAccessible implements PrivilegedAction { + private Field hidden; + + MakeFieldAccessible(Field hidden) { + this.hidden = hidden; + } + + @Override + public Void run() { + hidden.setAccessible(true); + return null; + } + } +} diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynMethods.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynMethods.java new file mode 100644 index 0000000000000..281c3d34ed304 --- /dev/null +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynMethods.java @@ -0,0 +1,522 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.common; + +import org.apache.iceberg.relocated.com.google.common.base.Preconditions; +import org.apache.iceberg.relocated.com.google.common.base.Throwables; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; + +/** Copied from parquet-common */ +public class DynMethods { + + private DynMethods() {} + + /** + * Convenience wrapper class around {@link Method}. + * + *
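DynFields rounds out the field side: it resolves a (possibly private) field by name and hands back an accessor bound to a target instance. A minimal sketch, assuming a hypothetical Config class with a private field:

import org.apache.iceberg.common.DynFields;

public class DynFieldsExample {
    static class Config {
        private String endpoint = "http://127.0.0.1:9301";
    }

    public static void main(String[] args) {
        Config conf = new Config();
        // hiddenImpl() reaches private fields via setAccessible(true);
        // build(target) returns a BoundField pinned to this instance.
        DynFields.BoundField<String> endpoint =
                DynFields.builder()
                        .hiddenImpl(Config.class, "endpoint")
                        .build(conf);
        endpoint.set("http://minio:9000");
        System.out.println(endpoint.get()); // -> http://minio:9000
    }
}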
<p>
Allows callers to invoke the wrapped method with all Exceptions wrapped by RuntimeException, + * or with a single Exception catch block. + */ + public static class UnboundMethod { + + private final Method method; + private final String name; + private final int argLength; + + UnboundMethod(Method method, String name) { + this.method = method; + this.name = name; + this.argLength = + (method == null || method.isVarArgs()) ? -1 : method.getParameterTypes().length; + } + + @SuppressWarnings("unchecked") + public R invokeChecked(Object target, Object... args) throws Exception { + try { + if (argLength < 0) { + return (R) method.invoke(target, args); + } else { + return (R) method.invoke(target, Arrays.copyOfRange(args, 0, argLength)); + } + + } catch (InvocationTargetException e) { + Throwables.propagateIfInstanceOf(e.getCause(), Exception.class); + Throwables.propagateIfInstanceOf(e.getCause(), RuntimeException.class); + throw Throwables.propagate(e.getCause()); + } + } + + public R invoke(Object target, Object... args) { + try { + return this.invokeChecked(target, args); + } catch (Exception e) { + Throwables.propagateIfInstanceOf(e, RuntimeException.class); + throw Throwables.propagate(e); + } + } + + /** + * Returns this method as a BoundMethod for the given receiver. + * + * @param receiver an Object to receive the method invocation + * @return a {@link BoundMethod} for this method and the receiver + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + */ + public BoundMethod bind(Object receiver) { + Preconditions.checkState( + !isStatic(), "Cannot bind static method %s", method.toGenericString()); + Preconditions.checkArgument( + method.getDeclaringClass().isAssignableFrom(receiver.getClass()), + "Cannot bind %s to instance of %s", + method.toGenericString(), + receiver.getClass()); + + return new BoundMethod(this, receiver); + } + + /** Returns whether the method is a static method. */ + public boolean isStatic() { + return Modifier.isStatic(method.getModifiers()); + } + + /** Returns whether the method is a noop. */ + public boolean isNoop() { + return this == NOOP; + } + + /** + * Returns this method as a StaticMethod. + * + * @return a {@link StaticMethod} for this method + * @throws IllegalStateException if the method is not static + */ + public StaticMethod asStatic() { + Preconditions.checkState(isStatic(), "Method is not static"); + return new StaticMethod(this); + } + + @Override + public String toString() { + return "DynMethods.UnboundMethod(name=" + name + " method=" + method.toGenericString() + ")"; + } + + /** Singleton {@link UnboundMethod}, performs no operation and returns null. */ + private static final UnboundMethod NOOP = + new UnboundMethod(null, "NOOP") { + @Override + public R invokeChecked(Object target, Object... args) throws Exception { + return null; + } + + @Override + public BoundMethod bind(Object receiver) { + return new BoundMethod(this, receiver); + } + + @Override + public StaticMethod asStatic() { + return new StaticMethod(this); + } + + @Override + public boolean isStatic() { + return true; + } + + @Override + public String toString() { + return "DynMethods.UnboundMethod(NOOP)"; + } + }; + } + + public static class BoundMethod { + private final UnboundMethod method; + private final Object receiver; + + private BoundMethod(UnboundMethod method, Object receiver) { + this.method = method; + this.receiver = receiver; + } + + public R invokeChecked(Object... 
args) throws Exception { + return method.invokeChecked(receiver, args); + } + + public R invoke(Object... args) { + return method.invoke(receiver, args); + } + } + + public static class StaticMethod { + private final UnboundMethod method; + + private StaticMethod(UnboundMethod method) { + this.method = method; + } + + public R invokeChecked(Object... args) throws Exception { + return method.invokeChecked(null, args); + } + + public R invoke(Object... args) { + return method.invoke(null, args); + } + } + + /** + * Constructs a new builder for calling methods dynamically. + * + * @param methodName name of the method the builder will locate + * @return a Builder for finding a method + */ + public static Builder builder(String methodName) { + return new Builder(methodName); + } + + public static class Builder { + private final String name; + private UnboundMethod method = null; + + public Builder(String methodName) { + this.name = methodName; + } + + /** + * Set the {@link ClassLoader} used to lookup classes by name. + * + *

If not set, the current thread's ClassLoader is used. + * + * @param newLoader a ClassLoader + * @return this Builder for method chaining + */ + public Builder loader(ClassLoader newLoader) { + return this; + } + + /** + * If no implementation has been found, adds a NOOP method. + * + *

Note: calls to impl will not match after this method is called! + * + * @return this Builder for method chaining + */ + public Builder orNoop() { + if (method == null) { + this.method = UnboundMethod.NOOP; + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + * @param className name of a class + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(String className, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + impl(targetClass, methodName, argClasses); + } catch (ClassNotFoundException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + *

The name passed to the constructor is the method name used. + * + * @param className name of a class + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(String className, Class... argClasses) { + impl(className, name, argClasses); + return this; + } + + /** + * Checks for a method implementation. + * + * @param targetClass a class instance + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(Class targetClass, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + this.method = new UnboundMethod(targetClass.getMethod(methodName, argClasses), name); + } catch (NoSuchMethodException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for a method implementation. + * + *

The name passed to the constructor is the method name used. + * + * @param targetClass a class instance + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(Class targetClass, Class... argClasses) { + impl(targetClass, name, argClasses); + return this; + } + + public Builder ctorImpl(Class targetClass, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + this.method = new DynConstructors.Builder().impl(targetClass, argClasses).buildChecked(); + } catch (NoSuchMethodException e) { + // not the right implementation + } + return this; + } + + public Builder ctorImpl(String className, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + this.method = new DynConstructors.Builder().impl(className, argClasses).buildChecked(); + } catch (NoSuchMethodException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + * @param className name of a class + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder hiddenImpl(String className, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + Class targetClass = Class.forName(className ); + hiddenImpl(targetClass, methodName, argClasses); + } catch (ClassNotFoundException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + *

The name passed to the constructor is the method name used. + * + * @param className name of a class + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder hiddenImpl(String className, Class... argClasses) { + hiddenImpl(className, name, argClasses); + return this; + } + + /** + * Checks for a method implementation. + * + * @param targetClass a class instance + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder hiddenImpl(Class targetClass, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + Method hidden = targetClass.getDeclaredMethod(methodName, argClasses); + AccessController.doPrivileged(new MakeAccessible(hidden)); + this.method = new UnboundMethod(hidden, name); + } catch (SecurityException | NoSuchMethodException e) { + // unusable or not the right implementation + } + return this; + } + + /** + * Checks for a method implementation. + * + *

The name passed to the constructor is the method name used. + * + * @param targetClass a class instance + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder hiddenImpl(Class targetClass, Class... argClasses) { + hiddenImpl(targetClass, name, argClasses); + return this; + } + + /** + * Returns the first valid implementation as a UnboundMethod or throws a RuntimeError if there + * is none. + * + * @return a {@link UnboundMethod} with a valid implementation + * @throws RuntimeException if no implementation was found + */ + public UnboundMethod build() { + if (method != null) { + return method; + } else { + throw new RuntimeException("Cannot find method: " + name); + } + } + + /** + * Returns the first valid implementation as a BoundMethod or throws a RuntimeError if there is + * none. + * + * @param receiver an Object to receive the method invocation + * @return a {@link BoundMethod} with a valid implementation and receiver + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + * @throws RuntimeException if no implementation was found + */ + public BoundMethod build(Object receiver) { + return build().bind(receiver); + } + + /** + * Returns the first valid implementation as a UnboundMethod or throws a NoSuchMethodException + * if there is none. + * + * @return a {@link UnboundMethod} with a valid implementation + * @throws NoSuchMethodException if no implementation was found + */ + public UnboundMethod buildChecked() throws NoSuchMethodException { + if (method != null) { + return method; + } else { + throw new NoSuchMethodException("Cannot find method: " + name); + } + } + + /** + * Returns the first valid implementation as a BoundMethod or throws a NoSuchMethodException if + * there is none. + * + * @param receiver an Object to receive the method invocation + * @return a {@link BoundMethod} with a valid implementation and receiver + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + * @throws NoSuchMethodException if no implementation was found + */ + public BoundMethod buildChecked(Object receiver) throws NoSuchMethodException { + return buildChecked().bind(receiver); + } + + /** + * Returns the first valid implementation as a StaticMethod or throws a NoSuchMethodException if + * there is none. + * + * @return a {@link StaticMethod} with a valid implementation + * @throws IllegalStateException if the method is not static + * @throws NoSuchMethodException if no implementation was found + */ + public StaticMethod buildStaticChecked() throws NoSuchMethodException { + return buildChecked().asStatic(); + } + + /** + * Returns the first valid implementation as a StaticMethod or throws a RuntimeException if + * there is none. 
+     *
+     * @return a {@link StaticMethod} with a valid implementation
+     * @throws IllegalStateException if the method is not static
+     * @throws RuntimeException if no implementation was found
+     */
+    public StaticMethod buildStatic() {
+      return build().asStatic();
+    }
+  }
+
+  private static class MakeAccessible implements PrivilegedAction<Void> {
+    private Method hidden;
+
+    MakeAccessible(Method hidden) {
+      this.hidden = hidden;
+    }
+
+    @Override
+    public Void run() {
+      hidden.setAccessible(true);
+      return null;
+    }
+  }
+}
diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/README.md b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/README.md
new file mode 100644
index 0000000000000..e817f66e2d93e
--- /dev/null
+++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/README.md
@@ -0,0 +1,6 @@
+# Why do we need this package?
+
+In this package we override classes from `iceberg-common`, because the original `iceberg-common` uses `Thread.getContextClassLoader` to load classes dynamically.
+While this works well in most cases, it fails when invoked through JNI: threads attached via JNI are given the bootstrap class loader by default, and `Thread.getContextClassLoader`
+inherits the parent thread's class loader, so every thread created from JNI code ends up with the bootstrap class loader. We could call `Thread.setContextClassLoader` with the system class loader
+manually, but that is not possible in all cases, since Iceberg uses thread pools internally, which we have no way to hook into.
\ No newline at end of file
diff --git a/java/connector-node/risingwave-sink-iceberg/src/test/java/com/risingwave/connector/catalog/JniCatalogWrapperTest.java b/java/connector-node/risingwave-sink-iceberg/src/test/java/com/risingwave/connector/catalog/JniCatalogWrapperTest.java
new file mode 100644
index 0000000000000..0edf0bce9a7d6
--- /dev/null
+++ b/java/connector-node/risingwave-sink-iceberg/src/test/java/com/risingwave/connector/catalog/JniCatalogWrapperTest.java
@@ -0,0 +1,44 @@
+// Copyright 2024 RisingWave Labs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
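[Editor's note] To make the README's class-loader point concrete, here is a minimal, hypothetical sketch that is not part of this PR. The catalog class name is only an example and must be on the application class path; setting the context class loader to `null` simulates what a JNI-attached thread sees by default:

```java
// Hypothetical illustration of the pitfall described in README.md above: on a
// JNI-attached thread the context class loader is the bootstrap loader, so
// loading application classes through it fails, while the caller's own
// defining class loader still works.
public class ContextClassLoaderPitfall {
    public static void main(String[] args) throws Exception {
        Thread jniLike = new Thread(() -> {
            try {
                // Mirrors what Thread.getContextClassLoader() returns on a raw JNI thread.
                ClassLoader ctx = Thread.currentThread().getContextClassLoader();
                Class.forName("org.apache.iceberg.jdbc.JdbcCatalog", true, ctx);
                System.out.println("context loader lookup succeeded");
            } catch (ClassNotFoundException e) {
                System.out.println("context loader lookup failed: " + e);
            }
            try {
                // The vendored DynMethods/DynFields copies resolve classes like this
                // instead: plain Class.forName uses the caller's defining class loader.
                Class.forName("org.apache.iceberg.jdbc.JdbcCatalog");
                System.out.println("caller's loader lookup succeeded");
            } catch (ClassNotFoundException e) {
                System.out.println("caller's loader lookup failed: " + e);
            }
        });
        // Simulate a JNI-attached thread: null means the bootstrap class loader.
        jniLike.setContextClassLoader(null);
        jniLike.start();
        jniLike.join();
    }
}
```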
+
+package com.risingwave.connector.catalog;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+@Ignore
+public class JniCatalogWrapperTest {
+    @Test
+    public void testJdbc() throws Exception {
+        System.setProperty("aws.region", "us-east-1");
+        JniCatalogWrapper catalog =
+                JniCatalogWrapper.create(
+                        "demo",
+                        "org.apache.iceberg.jdbc.JdbcCatalog",
+                        new String[] {
+                            "uri", "jdbc:postgresql://172.17.0.3:5432/iceberg",
+                            "jdbc.user", "admin",
+                            "jdbc.password", "123456",
+                            "warehouse", "s3://icebergdata/demo",
+                            "io-impl", "org.apache.iceberg.aws.s3.S3FileIO",
+                            "s3.endpoint", "http://172.17.0.2:9301",
+                            "s3.region", "us-east-1",
+                            "s3.path-style-access", "true",
+                            "s3.access-key-id", "hummockadmin",
+                            "s3.secret-access-key", "hummockadmin",
+                        });
+
+        System.out.println(catalog.loadTable("s1.t1"));
+    }
+}
diff --git a/java/pom.xml b/java/pom.xml
index c6e39b34cfc0b..5c59a2571012d 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -84,6 +84,9 @@
         4.15.0
         1.18.0
         1.17.6
+        <sqlite.version>3.45.0.0</sqlite.version>
+        <aws.version>2.21.42</aws.version>
+        <hive.version>3.1.3</hive.version>
@@ -324,6 +327,51 @@
                 simpleclient_httpserver
                 0.5.0
+            <dependency>
+                <groupId>org.xerial</groupId>
+                <artifactId>sqlite-jdbc</artifactId>
+                <version>${sqlite.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>software.amazon.awssdk</groupId>
+                <artifactId>s3</artifactId>
+                <version>${aws.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>software.amazon.awssdk</groupId>
+                <artifactId>sts</artifactId>
+                <version>${aws.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>software.amazon.awssdk</groupId>
+                <artifactId>apache-client</artifactId>
+                <version>${aws.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-common</artifactId>
+                <version>${hadoop.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hive</groupId>
+                <artifactId>hive-metastore</artifactId>
+                <version>${hive.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-mapreduce-client-core</artifactId>
+                <version>${hadoop.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-mapreduce-client-common</artifactId>
+                <version>${hadoop.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+                <version>${hadoop.version}</version>
+            </dependency>
                 <groupId>org.apache.spark</groupId>
                 <artifactId>spark-sql_2.12</artifactId>
@@ -381,6 +429,7 @@
                     /tools/maven/checkstyle.xml
                     true
                    true
+                    <excludes>**/org/apache/iceberg/common/*</excludes>
@@ -396,6 +445,9 @@
+                    <excludes>
+                        <exclude>**/org/apache/iceberg/common/**</exclude>
+                    </excludes>
diff --git a/src/connector/src/sink/iceberg/jni_catalog.rs b/src/connector/src/sink/iceberg/jni_catalog.rs
index 08a876fb62ac8..d88a63d398c65 100644
--- a/src/connector/src/sink/iceberg/jni_catalog.rs
+++ b/src/connector/src/sink/iceberg/jni_catalog.rs
@@ -60,7 +60,7 @@ impl Catalog for JniCatalog {
         let metadata_location = resp.metadata_location.clone().ok_or_else(|| {
             icelake::Error::new(
-                icelake::ErrorKind::IcebergFeatureUnsupported,
+                ErrorKind::IcebergFeatureUnsupported,
                 "Loading uncommitted table is not supported!",
             )
         })?;
@@ -155,8 +155,8 @@ impl JniCatalog {
             JObject::null(),
         )?;
         for (i, (key, value)) in java_catalog_props.iter().enumerate() {
-            let key_j_str = env.new_string(key).unwrap();
-            let value_j_str = env.new_string(value).unwrap();
+            let key_j_str = env.new_string(key)?;
+            let value_j_str = env.new_string(value)?;
             env.set_object_array_element(&props, i as i32 * 2, key_j_str)?;
             env.set_object_array_element(&props, i as i32 * 2 + 1, value_j_str)?;
         }
@@ -165,18 +165,15 @@ impl JniCatalog {
             .call_static_method(
                 "com/risingwave/connector/catalog/JniCatalogWrapper",
                 "create",
-                "(Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)V",
+                "(Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)Lcom/risingwave/connector/catalog/JniCatalogWrapper;",
                 &[
                     (&env.new_string(name.to_string()).unwrap()).into(),
                     (&env.new_string(catalog_impl.to_string()).unwrap()).into(),
                     (&props).into(),
                 ],
-            )
-            .unwrap();
+            )?;

-        let jni_catalog = env
-            .new_global_ref(jni_catalog_wrapper.l().unwrap())
-            .unwrap();
+        let jni_catalog = env.new_global_ref(jni_catalog_wrapper.l().unwrap())?;

         Ok(Arc::new(Self {
             java_catalog: jni_catalog,
diff --git a/src/connector/src/sink/iceberg/mod.rs b/src/connector/src/sink/iceberg/mod.rs
index 0c3840af1f3bb..ce4269851cec7 100644
--- a/src/connector/src/sink/iceberg/mod.rs
+++ b/src/connector/src/sink/iceberg/mod.rs
@@ -67,10 +67,7 @@ use crate::sink::{Result, SinkCommitCoordinator, SinkParam};
 /// This iceberg sink is WIP. When it ready, we will change this name to "iceberg".
 pub const ICEBERG_SINK: &str = "iceberg";

-static RW_CATALOG_NAME: &str = "risingwave";
-
-#[derive(Debug, Clone, Deserialize, WithOptions, Default)]
-#[serde(deny_unknown_fields)]
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, WithOptions, Default)]
 pub struct IcebergConfig {
     pub connector: String, // Avoid deny unknown field. Must be "iceberg"
@@ -83,7 +80,13 @@ pub struct IcebergConfig {
     pub table_name: String, // Full name of table, must include schema name

     #[serde(rename = "database.name")]
-    pub database_name: String, // Database name of table
+    pub database_name: Option<String>,
+    // Database name of table
+
+    // Catalog name, can be omitted for storage catalog, but
+    // must be set for other catalogs.
+    #[serde(rename = "catalog.name")]
+    pub catalog_name: Option<String>,

     // Catalog type supported by iceberg, such as "storage", "rest".
     // If not set, we use "storage" as default.
@@ -167,11 +170,20 @@ impl IcebergConfig {
             }
         }

-        // All configs starts with "catalog." will be treated as java configs.
+        if config.catalog_name.is_none() && config.catalog_type.as_deref() != Some("storage") {
+            return Err(SinkError::Config(anyhow!(
+                "catalog.name must be set for non-storage catalog"
+            )));
+        }
+
+        // All configs starting with "catalog." will be treated as java configs.
         config.java_catalog_props = values
             .iter()
             .filter(|(k, _v)| {
-                k.starts_with("catalog.") && k != &"catalog.uri" && k != &"catalog.type"
+                k.starts_with("catalog.")
+                    && k != &"catalog.uri"
+                    && k != &"catalog.type"
+                    && k != &"catalog.name"
             })
             .map(|(k, v)| (k[8..].to_string(), v.to_string()))
             .collect();
@@ -183,18 +195,36 @@ impl IcebergConfig {
         self.catalog_type.as_deref().unwrap_or("storage")
     }

+    fn catalog_name(&self) -> String {
+        self.catalog_name
+            .as_ref()
+            .map(|s| s.to_string())
+            .unwrap_or_else(|| "risingwave".to_string())
+    }
+
+    fn full_table_name(&self) -> Result<TableIdentifier> {
+        let ret = if let Some(database_name) = &self.database_name {
+            TableIdentifier::new(vec![database_name, &self.table_name])
+        } else {
+            TableIdentifier::new(vec![&self.table_name])
+        };
+
+        ret.context("Failed to create table identifier")
+            .map_err(|e| SinkError::Iceberg(anyhow!(e)))
+    }
+
     fn build_iceberg_configs(&self) -> Result<HashMap<String, String>> {
         let mut iceberg_configs = HashMap::new();

         let catalog_type = self.catalog_type().to_string();

         iceberg_configs.insert(CATALOG_TYPE.to_string(), catalog_type.clone());
-        iceberg_configs.insert(CATALOG_NAME.to_string(), RW_CATALOG_NAME.to_string());
+        iceberg_configs.insert(CATALOG_NAME.to_string(), self.catalog_name());

         match catalog_type.as_str() {
             "storage" => {
                 iceberg_configs.insert(
-                    format!("iceberg.catalog.{}.warehouse", RW_CATALOG_NAME),
+                    format!("iceberg.catalog.{}.warehouse", self.catalog_name()),
                     self.path.clone(),
                 );
             }
@@ -202,7 +232,7 @@ impl IcebergConfig {
                 let uri = self.uri.clone().ok_or_else(|| {
                     SinkError::Iceberg(anyhow!("`catalog.uri` must be set in rest catalog"))
                 })?;
-                iceberg_configs.insert(format!("iceberg.catalog.{}.uri", RW_CATALOG_NAME), uri);
+                iceberg_configs.insert(format!("iceberg.catalog.{}.uri", self.catalog_name()), uri);
             }
             _ => {
                 return Err(SinkError::Iceberg(anyhow!(
@@ -248,7 +278,11 @@ impl IcebergConfig {
         };

         iceberg_configs.insert("iceberg.table.io.bucket".to_string(), bucket);
iceberg_configs.insert("iceberg.table.io.root".to_string(), root); + + // Only storage catalog should set this. + if catalog_type == "storage" { + iceberg_configs.insert("iceberg.table.io.root".to_string(), root); + } // #TODO // Support load config file iceberg_configs.insert( @@ -266,7 +300,7 @@ impl IcebergConfig { let catalog_type = self.catalog_type().to_string(); iceberg_configs.insert(CATALOG_TYPE.to_string(), catalog_type.clone()); - iceberg_configs.insert(CATALOG_NAME.to_string(), "risingwave".to_string()); + iceberg_configs.insert(CATALOG_NAME.to_string(), self.catalog_name()); if let Some(region) = &self.region { iceberg_configs.insert( @@ -291,7 +325,7 @@ impl IcebergConfig { self.secret_key.clone().to_string(), ); - let (bucket, root) = { + let (bucket, _) = { let url = Url::parse(&self.path).map_err(|e| SinkError::Iceberg(anyhow!(e)))?; let bucket = url .host_str() @@ -307,7 +341,6 @@ impl IcebergConfig { }; iceberg_configs.insert("iceberg.table.io.bucket".to_string(), bucket); - iceberg_configs.insert("iceberg.table.io.root".to_string(), root); // #TODO // Support load config file iceberg_configs.insert( @@ -335,14 +368,15 @@ impl IcebergConfig { ); if let Some(endpoint) = &self.endpoint { - iceberg_configs.insert("s3.endpoint".to_string(), endpoint.clone().to_string()); + java_catalog_configs + .insert("s3.endpoint".to_string(), endpoint.clone().to_string()); } - iceberg_configs.insert( + java_catalog_configs.insert( "s3.access-key-id".to_string(), self.access_key.clone().to_string(), ); - iceberg_configs.insert( + java_catalog_configs.insert( "s3.secret-access-key".to_string(), self.secret_key.clone().to_string(), ); @@ -358,25 +392,18 @@ impl IcebergConfig { let catalog = load_catalog(&iceberg_configs).await?; Ok(catalog) } - catalog_type - if catalog_type == "hive" - || catalog_type == "sql" - || catalog_type == "glue" - || catalog_type == "dynamodb" => - { + catalog_type if catalog_type == "hive" || catalog_type == "jdbc" => { // Create java catalog let (base_catalog_config, java_catalog_props) = self.build_jni_catalog_configs()?; let catalog_impl = match catalog_type { "hive" => "org.apache.iceberg.hive.HiveCatalog", - "sql" => "org.apache.iceberg.jdbc.JdbcCatalog", - "glue" => "org.apache.iceberg.aws.glue.GlueCatalog", - "dynamodb" => "org.apache.iceberg.aws.dynamodb.DynamoDbCatalog", + "jdbc" => "org.apache.iceberg.jdbc.JdbcCatalog", _ => unreachable!(), }; jni_catalog::JniCatalog::build( base_catalog_config, - "risingwave", + self.catalog_name(), catalog_impl, java_catalog_props, ) @@ -384,7 +411,7 @@ impl IcebergConfig { "mock" => Ok(Arc::new(MockCatalog {})), _ => { bail!( - "Unsupported catalog type: {}, only support `storage`, `rest`, `hive`, `sql`, `glue`, `dynamodb`", + "Unsupported catalog type: {}, only support `storage`, `rest`, `hive`, `jdbc`", self.catalog_type() ) } @@ -397,12 +424,9 @@ impl IcebergConfig { .await .context("Unable to load iceberg catalog")?; - let table_id = TableIdentifier::new( - vec![self.database_name.as_str()] - .into_iter() - .chain(self.table_name.split('.')), - ) - .context("Unable to parse table name")?; + let table_id = self + .full_table_name() + .context("Unable to parse table name")?; catalog.load_table(&table_id).await.map_err(Into::into) } @@ -827,7 +851,7 @@ impl WriteResult { let delete_files: Vec; if let serde_json::Value::Array(values) = values .remove(DATA_FILES) - .ok_or_else(|| anyhow!("icberg sink metadata should have data_files object"))? 
+ .ok_or_else(|| anyhow!("iceberg sink metadata should have data_files object"))? { data_files = values .into_iter() @@ -835,11 +859,11 @@ impl WriteResult { .collect::, icelake::Error>>() .unwrap(); } else { - bail!("icberg sink metadata should have data_files object"); + bail!("iceberg sink metadata should have data_files object"); } if let serde_json::Value::Array(values) = values .remove(DELETE_FILES) - .ok_or_else(|| anyhow!("icberg sink metadata should have data_files object"))? + .ok_or_else(|| anyhow!("iceberg sink metadata should have data_files object"))? { delete_files = values .into_iter() @@ -982,8 +1006,11 @@ pub fn try_matches_arrow_schema(rw_schema: &Schema, arrow_schema: &ArrowSchema) #[cfg(test)] mod test { + use std::collections::HashMap; + use risingwave_common::catalog::Field; + use crate::sink::iceberg::IcebergConfig; use crate::source::DataType; #[test] @@ -1018,4 +1045,167 @@ mod test { ]); try_matches_arrow_schema(&risingwave_schema, &arrow_schema).unwrap(); } + + #[test] + fn test_parse_iceberg_config() { + let values = [ + ("connector", "iceberg"), + ("type", "upsert"), + ("primary_key", "v1"), + ("warehouse.path", "s3://iceberg"), + ("s3.endpoint", "http://127.0.0.1:9301"), + ("s3.access.key", "hummockadmin"), + ("s3.secret.key", "hummockadmin"), + ("s3.region", "us-east-1"), + ("catalog.type", "jdbc"), + ("catalog.name", "demo"), + ("catalog.uri", "jdbc://postgresql://postgres:5432/iceberg"), + ("catalog.jdbc.user", "admin"), + ("catalog.jdbc.password", "123456"), + ("database.name", "demo_db"), + ("table.name", "demo_table"), + ] + .into_iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect(); + + let iceberg_config = IcebergConfig::from_hashmap(values).unwrap(); + + let expected_iceberg_config = IcebergConfig { + connector: "iceberg".to_string(), + r#type: "upsert".to_string(), + force_append_only: false, + table_name: "demo_table".to_string(), + database_name: Some("demo_db".to_string()), + catalog_name: Some("demo".to_string()), + catalog_type: Some("jdbc".to_string()), + path: "s3://iceberg".to_string(), + uri: Some("jdbc://postgresql://postgres:5432/iceberg".to_string()), + region: Some("us-east-1".to_string()), + endpoint: Some("http://127.0.0.1:9301".to_string()), + access_key: "hummockadmin".to_string(), + secret_key: "hummockadmin".to_string(), + primary_key: Some(vec!["v1".to_string()]), + java_catalog_props: [("jdbc.user", "admin"), ("jdbc.password", "123456")] + .into_iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect(), + }; + + assert_eq!(iceberg_config, expected_iceberg_config); + + assert_eq!( + &iceberg_config.full_table_name().unwrap().to_string(), + "demo_db.demo_table" + ); + } + + async fn test_create_catalog(configs: HashMap) { + let iceberg_config = IcebergConfig::from_hashmap(configs).unwrap(); + + let table = iceberg_config.load_table().await.unwrap(); + + println!("{:?}", table.table_name()); + } + + #[tokio::test] + #[ignore] + async fn test_storage_catalog() { + let values = [ + ("connector", "iceberg"), + ("type", "append-only"), + ("force_append_only", "true"), + ("s3.endpoint", "http://127.0.0.1:9301"), + ("s3.access.key", "hummockadmin"), + ("s3.secret.key", "hummockadmin"), + ("s3.region", "us-east-1"), + ("catalog.name", "demo"), + ("catalog.type", "storage"), + ("warehouse.path", "s3://icebergdata/demo"), + ("database.name", "s1"), + ("table.name", "t1"), + ] + .into_iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect(); + + test_create_catalog(values).await; + } + + #[tokio::test] + 
+    #[ignore]
+    async fn test_rest_catalog() {
+        let values = [
+            ("connector", "iceberg"),
+            ("type", "append-only"),
+            ("force_append_only", "true"),
+            ("s3.endpoint", "http://127.0.0.1:9301"),
+            ("s3.access.key", "hummockadmin"),
+            ("s3.secret.key", "hummockadmin"),
+            ("s3.region", "us-east-1"),
+            ("catalog.name", "demo"),
+            ("catalog.type", "rest"),
+            ("catalog.uri", "http://192.168.167.4:8181"),
+            ("warehouse.path", "s3://icebergdata/demo"),
+            ("database.name", "s1"),
+            ("table.name", "t1"),
+        ]
+        .into_iter()
+        .map(|(k, v)| (k.to_string(), v.to_string()))
+        .collect();
+
+        test_create_catalog(values).await;
+    }
+
+    #[tokio::test]
+    #[ignore]
+    async fn test_jdbc_catalog() {
+        let values = [
+            ("connector", "iceberg"),
+            ("type", "append-only"),
+            ("force_append_only", "true"),
+            ("s3.endpoint", "http://127.0.0.1:9301"),
+            ("s3.access.key", "hummockadmin"),
+            ("s3.secret.key", "hummockadmin"),
+            ("s3.region", "us-east-1"),
+            ("catalog.name", "demo"),
+            ("catalog.type", "jdbc"),
+            ("catalog.uri", "jdbc:postgresql://localhost:5432/iceberg"),
+            ("catalog.jdbc.user", "admin"),
+            ("catalog.jdbc.password", "123456"),
+            ("warehouse.path", "s3://icebergdata/demo"),
+            ("database.name", "s1"),
+            ("table.name", "t1"),
+        ]
+        .into_iter()
+        .map(|(k, v)| (k.to_string(), v.to_string()))
+        .collect();
+
+        test_create_catalog(values).await;
+    }
+
+    #[tokio::test]
+    #[ignore]
+    async fn test_hive_catalog() {
+        let values = [
+            ("connector", "iceberg"),
+            ("type", "append-only"),
+            ("force_append_only", "true"),
+            ("s3.endpoint", "http://127.0.0.1:9301"),
+            ("s3.access.key", "hummockadmin"),
+            ("s3.secret.key", "hummockadmin"),
+            ("s3.region", "us-east-1"),
+            ("catalog.name", "demo"),
+            ("catalog.type", "hive"),
+            ("catalog.uri", "thrift://localhost:9083"),
+            ("warehouse.path", "s3://icebergdata/demo"),
+            ("database.name", "s1"),
+            ("table.name", "t1"),
+        ]
+        .into_iter()
+        .map(|(k, v)| (k.to_string(), v.to_string()))
+        .collect();
+
+        test_create_catalog(values).await;
+    }
 }
diff --git a/src/connector/with_options_sink.yaml b/src/connector/with_options_sink.yaml
index 2b23913a1fc32..35b06ec33fb76 100644
--- a/src/connector/with_options_sink.yaml
+++ b/src/connector/with_options_sink.yaml
@@ -123,7 +123,10 @@ IcebergConfig:
       required: true
     - name: database.name
       field_type: String
-      required: true
+      required: false
+    - name: catalog.name
+      field_type: String
+      required: false
     - name: catalog.type
       field_type: String
       required: false
diff --git a/src/frontend/planner_test/tests/testdata/input/sink.yaml b/src/frontend/planner_test/tests/testdata/input/sink.yaml
index f9241cdc7c9a9..851a4de205fae 100644
--- a/src/frontend/planner_test/tests/testdata/input/sink.yaml
+++ b/src/frontend/planner_test/tests/testdata/input/sink.yaml
@@ -89,6 +89,7 @@
         type = 'append-only',
         force_append_only = 'true',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'sparse_table',
         warehouse.path = 's3://icebergdata/demo',
@@ -107,6 +108,7 @@
         type = 'append-only',
         force_append_only = 'true',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'range_table',
         warehouse.path = 's3://icebergdata/demo',
@@ -124,6 +126,7 @@
         connector = 'iceberg',
         type = 'upsert',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'sparse_table',
         warehouse.path = 's3://icebergdata/demo',
@@ -142,6 +145,7 @@
         connector = 'iceberg',
         type = 'upsert',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'range_table',
         warehouse.path = 's3://icebergdata/demo',
diff --git a/src/frontend/planner_test/tests/testdata/output/sink.yaml b/src/frontend/planner_test/tests/testdata/output/sink.yaml
index f733646a905aa..60b4a0165ba69 100644
--- a/src/frontend/planner_test/tests/testdata/output/sink.yaml
+++ b/src/frontend/planner_test/tests/testdata/output/sink.yaml
@@ -184,6 +184,7 @@
         type = 'append-only',
         force_append_only = 'true',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'sparse_table',
         warehouse.path = 's3://icebergdata/demo',
@@ -205,6 +206,7 @@
         type = 'append-only',
         force_append_only = 'true',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'range_table',
         warehouse.path = 's3://icebergdata/demo',
@@ -223,6 +225,7 @@
         connector = 'iceberg',
         type = 'upsert',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'sparse_table',
         warehouse.path = 's3://icebergdata/demo',
@@ -244,6 +247,7 @@
         connector = 'iceberg',
         type = 'upsert',
         catalog.type = 'mock',
+        catalog.name = 'demo',
         database.name = 'demo_db',
         table.name = 'range_table',
         warehouse.path = 's3://icebergdata/demo',
diff --git a/src/frontend/src/handler/create_source.rs b/src/frontend/src/handler/create_source.rs
index c8cfd938c23a2..726013fe1c18b 100644
--- a/src/frontend/src/handler/create_source.rs
+++ b/src/frontend/src/handler/create_source.rs
@@ -1155,7 +1155,7 @@ pub async fn check_iceberg_source(
     };

     let iceberg_config = IcebergConfig {
-        database_name: properties.database_name,
+        database_name: Some(properties.database_name),
         table_name: properties.table_name,
         catalog_type: Some(properties.catalog_type),
         path: properties.warehouse_path,
diff --git a/src/jni_core/src/jvm_runtime.rs b/src/jni_core/src/jvm_runtime.rs
index d0193a7717d2c..99438f818620a 100644
--- a/src/jni_core/src/jvm_runtime.rs
+++ b/src/jni_core/src/jvm_runtime.rs
@@ -216,12 +216,14 @@ pub fn execute_with_jni_env(
     let ret = f(&mut env);

     match env.exception_check() {
-        Ok(true) => env
-            .exception_clear()
-            .inspect_err(|e| {
+        Ok(true) => {
+            env.exception_describe().inspect_err(|e| {
+                tracing::warn!(error = %e.as_report(), "Failed to describe jvm exception");
+            })?;
+            env.exception_clear().inspect_err(|e| {
                 tracing::warn!(error = %e.as_report(), "Exception occurred but failed to clear");
-            })
-            .unwrap(),
+            })?;
+        }
         Ok(false) => {
             // No exception, do nothing
         }
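[Editor's note] The Rust changes in `jni_catalog.rs` construct the Java-side catalog by calling the same static factory exercised by `JniCatalogWrapperTest` above, flattening the catalog properties into a `String[]` of alternating keys and values. A minimal sketch of that call path from plain Java follows; `JniCatalogWrapper.create` and `loadTable` match the signatures shown in the test, while the URIs, warehouse path, and property values are placeholders:

```java
import com.risingwave.connector.catalog.JniCatalogWrapper;

import java.util.LinkedHashMap;
import java.util.Map;

public class JniCatalogWrapperUsage {
    public static void main(String[] args) throws Exception {
        // The Rust side flattens catalog properties into alternating key/value
        // slots, exactly as the loop in jni_catalog.rs fills the String array.
        Map<String, String> props = new LinkedHashMap<>();
        props.put("uri", "jdbc:postgresql://127.0.0.1:5432/iceberg"); // placeholder
        props.put("warehouse", "s3://icebergdata/demo");              // placeholder

        String[] flattened = new String[props.size() * 2];
        int i = 0;
        for (Map.Entry<String, String> e : props.entrySet()) {
            flattened[i++] = e.getKey();
            flattened[i++] = e.getValue();
        }

        // "demo" is the catalog name (the new catalog.name sink option); the
        // second argument is the Iceberg catalog implementation class, chosen
        // in mod.rs from the catalog.type ("hive" or "jdbc").
        JniCatalogWrapper catalog =
                JniCatalogWrapper.create("demo", "org.apache.iceberg.jdbc.JdbcCatalog", flattened);
        System.out.println(catalog.loadTable("s1.t1"));
    }
}
```

This also shows why `catalog.name` was made configurable: the name is forwarded verbatim to the Java catalog, with `IcebergConfig::catalog_name()` falling back to `risingwave` only when the option is unset.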