diff --git a/Makefile.toml b/Makefile.toml index 983b304d74e51..fd173d7c0e1d6 100644 --- a/Makefile.toml +++ b/Makefile.toml @@ -874,13 +874,13 @@ description = "🌟 Run unit tests" [tasks.build-connector-node] category = "RiseDev - Build" dependencies = ["prepare"] -condition = { env_set = [ - "ENABLE_BUILD_RW_CONNECTOR", -], files_modified = { input = [ - "./java/connector-node/**/*", -], output = [ - "./java/connector-node/assembly/target/**/*", -] } } +#condition = { env_set = [ +# "ENABLE_BUILD_RW_CONNECTOR", +#], files_modified = { input = [ +# "./java/connector-node/**/*", +#], output = [ +# "./java/connector-node/assembly/target/**/*", +#] } } description = "Build RisingWave Connector from source" script = ''' #!/usr/bin/env bash diff --git a/integration_tests/iceberg-sink2/docker/hive/config.ini b/integration_tests/iceberg-sink2/docker/hive/config.ini new file mode 100644 index 0000000000000..400521c9129b5 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/hive/config.ini @@ -0,0 +1,24 @@ +[default] +aws_key=hummockadmin +aws_secret=hummockadmin + +[risingwave] +db=dev +user=root +host=127.0.0.1 +port=4566 + +[sink] +connector = iceberg +type=append-only +force_append_only = true +catalog.type = hive +catalog.uri = thrift://metastore:9083 +warehouse.path = s3://icebergdata/demo +s3.endpoint=http://minio-0:9301 +s3.access.key = hummockadmin +s3.secret.key = hummockadmin +s3.region = ap-southeast-1 +catalog.name = demo +database.name=s1 +table.name=t1 \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml b/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml new file mode 100644 index 0000000000000..fa495c7b7c802 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/hive/docker-compose.yml @@ -0,0 +1,111 @@ +version: '3.8' + +services: + postgres: + image: postgres:16.1 + environment: + POSTGRES_USER: admin + POSTGRES_PASSWORD: 123456 + POSTGRES_DB: metastore_db + expose: + - 5432 + ports: + - "5432:5432" + networks: + iceberg_net: + spark: + depends_on: + - minio-0 + - metastore + image: ghcr.io/icelake-io/icelake-spark:1.0 + environment: + - AWS_ACCESS_KEY_ID=hummockadmin + - AWS_SECRET_ACCESS_KEY=hummockadmin + - AWS_REGION=us-east-1 + - SPARK_HOME=/opt/spark + - PYSPARK_PYTHON=/usr/bin/python3.9 + - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/spark/bin:/opt/spark/sbin + user: root + networks: + iceberg_net: + links: + - minio-0:icebergdata.minio-0 + expose: + - 15002 + healthcheck: + test: netstat -ltn | grep -c 15002 + interval: 1s + retries: 1200 + volumes: + - ./spark-script:/spark-script + entrypoint: [ "/spark-script/spark-connect-server.sh" ] + + risingwave-standalone: + extends: + file: ../../../../docker/docker-compose.yml + service: risingwave-standalone + healthcheck: + test: + - CMD-SHELL + - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;' + interval: 1s + timeout: 30s + networks: + iceberg_net: + + minio-0: + extends: + file: ../../../../docker/docker-compose.yml + service: minio-0 + entrypoint: " + /bin/sh -c ' + + set -e + + mkdir -p \"/data/icebergdata/demo\" + mkdir -p \"/data/hummock001\" + + /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\" + + '" + networks: + iceberg_net: + + etcd-0: + extends: + file: ../../../../docker/docker-compose.yml + service: etcd-0 + networks: + iceberg_net: + + metastore: + image: naushadh/hive-metastore + depends_on: + - postgres + environment: + - DATABASE_HOST=postgres + - DATABASE_DB=metastore_db + - 
DATABASE_USER=admin + - DATABASE_PASSWORD=123456 + - AWS_ACCESS_KEY_ID=hummockadmin + - AWS_SECRET_ACCESS_KEY=hummockadmin + - S3_ENDPOINT_URL=http://minio-0:9301 + - S3_BUCKET=icebergdata + - S3_PREFIX=demo + ports: + - "9083:9083" + expose: + - 9083 + networks: + iceberg_net: + +volumes: + risingwave-standalone: + external: false + etcd-0: + external: false + minio-0: + external: false + +networks: + iceberg_net: \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/hive/spark-script/spark-connect-server.sh b/integration_tests/iceberg-sink2/docker/hive/spark-script/spark-connect-server.sh new file mode 100755 index 0000000000000..210a0663bea6e --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/hive/spark-script/spark-connect-server.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +set -ex + +JARS=$(find /opt/spark/deps -type f -name "*.jar" | tr '\n' ':') + +/opt/spark/sbin/start-connect-server.sh \ + --master local[3] \ + --driver-class-path $JARS \ + --conf spark.driver.bindAddress=0.0.0.0 \ + --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ + --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \ + --conf spark.sql.catalog.demo.catalog-impl=org.apache.iceberg.hive.HiveCatalog \ + --conf spark.sql.catalog.demo.uri=thrift://metastore:9083 \ + --conf spark.sql.catalog.demo.clients=10 \ + --conf spark.sql.catalog.demo.warehouse=s3a://icebergdata/demo \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://minio-0:9301 \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=hummockadmin \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=hummockadmin \ + --conf spark.sql.defaultCatalog=demo + +tail -f /opt/spark/logs/spark*.out \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/jdbc/config.ini b/integration_tests/iceberg-sink2/docker/jdbc/config.ini new file mode 100644 index 0000000000000..8360cada94c15 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/jdbc/config.ini @@ -0,0 +1,26 @@ +[default] +aws_key=hummockadmin +aws_secret=hummockadmin + +[risingwave] +db=dev +user=root +host=127.0.0.1 +port=4566 + +[sink] +connector = iceberg +type=append-only +force_append_only = true +catalog.type = jdbc +catalog.uri = jdbc:postgresql://postgres:5432/iceberg +catalog.jdbc.user=admin +catalog.jdbc.password=123456 +warehouse.path = s3://icebergdata/demo +s3.endpoint=http://minio-0:9301 +s3.access.key = hummockadmin +s3.secret.key = hummockadmin +s3.region = ap-southeast-1 +catalog.name = demo +database.name=s1 +table.name=t1 \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml b/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml new file mode 100644 index 0000000000000..c9f9701bc934c --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/jdbc/docker-compose.yml @@ -0,0 +1,91 @@ +version: '3.8' + +services: + postgres: + image: postgres:16.1 + environment: + POSTGRES_USER: admin + POSTGRES_PASSWORD: 123456 + POSTGRES_DB: iceberg + expose: + - 5432 + ports: + - "5432:5432" + networks: + iceberg_net: + + spark: + depends_on: + - minio-0 + - postgres + image: ghcr.io/icelake-io/icelake-spark:1.0 + environment: + - AWS_ACCESS_KEY_ID=hummockadmin + - AWS_SECRET_ACCESS_KEY=hummockadmin + - AWS_REGION=us-east-1 + - SPARK_HOME=/opt/spark + - PYSPARK_PYTHON=/usr/bin/python3.9 + - 
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/spark/bin:/opt/spark/sbin + user: root + networks: + iceberg_net: + links: + - minio-0:icebergdata.minio-0 + expose: + - 15002 + healthcheck: + test: netstat -ltn | grep -c 15002 + interval: 1s + retries: 1200 + volumes: + - ./spark-script:/spark-script + entrypoint: ["/spark-script/spark-connect-server.sh"] + + risingwave-standalone: + extends: + file: ../../../../docker/docker-compose.yml + service: risingwave-standalone + healthcheck: + test: + - CMD-SHELL + - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;' + interval: 1s + timeout: 30s + networks: + iceberg_net: + + minio-0: + extends: + file: ../../../../docker/docker-compose.yml + service: minio-0 + entrypoint: " + /bin/sh -c ' + + set -e + + mkdir -p \"/data/icebergdata/demo\" + mkdir -p \"/data/hummock001\" + + /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\" + + '" + networks: + iceberg_net: + + etcd-0: + extends: + file: ../../../../docker/docker-compose.yml + service: etcd-0 + networks: + iceberg_net: + +volumes: + risingwave-standalone: + external: false + etcd-0: + external: false + minio-0: + external: false + +networks: + iceberg_net: \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/jdbc/spark-script/spark-connect-server.sh b/integration_tests/iceberg-sink2/docker/jdbc/spark-script/spark-connect-server.sh new file mode 100755 index 0000000000000..2e894dfb42d70 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/jdbc/spark-script/spark-connect-server.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +set -ex + +JARS=$(find /opt/spark/deps -type f -name "*.jar" | tr '\n' ':') + +/opt/spark/sbin/start-connect-server.sh \ + --master local[3] \ + --driver-class-path $JARS \ + --conf spark.driver.extraJavaOptions="-Djdbc.drivers=org.postgresql.Driver" \ + --conf spark.driver.bindAddress=0.0.0.0 \ + --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ + --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \ + --conf spark.sql.catalog.demo.catalog-impl=org.apache.iceberg.jdbc.JdbcCatalog \ + --conf spark.sql.catalog.demo.io-impl=org.apache.iceberg.aws.s3.S3FileIO \ + --conf spark.sql.catalog.demo.warehouse=s3://icebergdata/demo \ + --conf spark.sql.catalog.demo.uri=jdbc:postgresql://postgres:5432/iceberg \ + --conf spark.sql.catalog.demo.jdbc.user=admin \ + --conf spark.sql.catalog.demo.jdbc.password=123456 \ + --conf spark.sql.catalog.demo.s3.endpoint=http://minio-0:9301 \ + --conf spark.sql.catalog.demo.s3.path.style.access=true \ + --conf spark.sql.catalog.demo.s3.access.key=hummockadmin \ + --conf spark.sql.catalog.demo.s3.secret.key=hummockadmin \ + --conf spark.sql.defaultCatalog=demo + +tail -f /opt/spark/logs/spark*.out \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/rest/config.ini b/integration_tests/iceberg-sink2/docker/rest/config.ini index ac2a551c5c141..2dfc77c2bc765 100644 --- a/integration_tests/iceberg-sink2/docker/rest/config.ini +++ b/integration_tests/iceberg-sink2/docker/rest/config.ini @@ -20,4 +20,4 @@ s3.access.key = hummockadmin s3.secret.key = hummockadmin s3.region = ap-southeast-1 database.name=s1 -table.name=s1.t1 \ No newline at end of file +table.name=t1 \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/storage/config.ini b/integration_tests/iceberg-sink2/docker/storage/config.ini new file mode 100644 index 0000000000000..1d77bc55e40ed --- /dev/null +++ 
b/integration_tests/iceberg-sink2/docker/storage/config.ini @@ -0,0 +1,22 @@ +[default] +aws_key=hummockadmin +aws_secret=hummockadmin + +[risingwave] +db=dev +user=root +host=127.0.0.1 +port=4566 + +[sink] +connector = iceberg +type=append-only +force_append_only = true +catalog.type = storage +warehouse.path = s3://icebergdata/demo +s3.endpoint=http://minio-0:9301 +s3.access.key = hummockadmin +s3.secret.key = hummockadmin +s3.region = ap-southeast-1 +database.name=s1 +table.name=t1 \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml b/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml new file mode 100644 index 0000000000000..7a29e5f83c145 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/storage/docker-compose.yml @@ -0,0 +1,75 @@ +version: '3.8' + +services: + spark: + depends_on: + - minio-0 + image: ghcr.io/icelake-io/icelake-spark:latest + environment: + - SPARK_HOME=/opt/spark + - PYSPARK_PYTHON=/usr/bin/python3.9 + - PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/spark/bin:/opt/spark/sbin + user: root + networks: + iceberg_net: + links: + - minio-0:icebergdata.minio-0 + expose: + - 15002 + healthcheck: + test: netstat -ltn | grep -c 15002 + interval: 1s + retries: 1200 + volumes: + - ./spark-script:/spark-script + entrypoint: ["/spark-script/spark-connect-server.sh"] + + risingwave-standalone: + extends: + file: ../../../../docker/docker-compose.yml + service: risingwave-standalone + healthcheck: + test: + - CMD-SHELL + - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;' + interval: 1s + timeout: 30s + networks: + iceberg_net: + + minio-0: + extends: + file: ../../../../docker/docker-compose.yml + service: minio-0 + entrypoint: " + + /bin/sh -c ' + + set -e + + mkdir -p \"/data/icebergdata/demo\" + mkdir -p \"/data/hummock001\" + + /usr/bin/docker-entrypoint.sh \"$$0\" \"$$@\" + + '" + networks: + iceberg_net: + + etcd-0: + extends: + file: ../../../../docker/docker-compose.yml + service: etcd-0 + networks: + iceberg_net: + +volumes: + risingwave-standalone: + external: false + etcd-0: + external: false + minio-0: + external: false + +networks: + iceberg_net: \ No newline at end of file diff --git a/integration_tests/iceberg-sink2/docker/storage/spark-script/spark-connect-server.sh b/integration_tests/iceberg-sink2/docker/storage/spark-script/spark-connect-server.sh new file mode 100755 index 0000000000000..d37ed983fc236 --- /dev/null +++ b/integration_tests/iceberg-sink2/docker/storage/spark-script/spark-connect-server.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +set -ex + +JARS=$(find /opt/spark/deps -type f -name "*.jar" | tr '\n' ':') + +/opt/spark/sbin/start-connect-server.sh \ + --master local[3] \ + --driver-class-path $JARS \ + --conf spark.driver.bindAddress=0.0.0.0 \ + --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \ + --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \ + --conf spark.sql.catalog.demo.type=hadoop \ + --conf spark.sql.catalog.demo.warehouse=s3a://icebergdata/demo \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://minio-0:9301 \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.path.style.access=true \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=hummockadmin \ + --conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=hummockadmin \ + --conf spark.sql.defaultCatalog=demo + +tail -f /opt/spark/logs/spark*.out \ No newline at end of file diff --git 
a/integration_tests/iceberg-sink2/python/main.py b/integration_tests/iceberg-sink2/python/main.py index 9ad070b0c1b9b..9e1f8d3830283 100644 --- a/integration_tests/iceberg-sink2/python/main.py +++ b/integration_tests/iceberg-sink2/python/main.py @@ -122,7 +122,10 @@ def check_spark_table(case_name): if __name__ == "__main__": - case_name = "rest" +# case_name = "rest" +# case_name = "storage" +# case_name = "jdbc" + case_name = "hive" config = configparser.ConfigParser() config.read(f"{case_dir(case_name)}/config.ini") print({section: dict(config[section]) for section in config.sections()}) diff --git a/integration_tests/iceberg-sink2/python/poetry.lock b/integration_tests/iceberg-sink2/python/poetry.lock index cf29a6f294aad..72915c4014cc4 100644 --- a/integration_tests/iceberg-sink2/python/poetry.lock +++ b/integration_tests/iceberg-sink2/python/poetry.lock @@ -146,36 +146,40 @@ files = [ [[package]] name = "pandas" -version = "2.1.4" +version = "2.2.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, - {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, - {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, - {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, - {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, - {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, - {file 
= "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, - {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, - {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, - {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, - {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = 
"pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, + {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, ] [package.dependencies] @@ -185,31 +189,31 @@ numpy = [ ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib 
(>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "protobuf" @@ -325,51 +329,51 @@ files = [ [[package]] name = "pyarrow" -version = "14.0.2" +version = "15.0.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = 
"pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = 
"sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, + {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, + {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, + {file = 
"pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, + {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, + {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, + {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, + {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, ] [package.dependencies] -numpy = ">=1.16.6" +numpy = ">=1.16.6,<2" [[package]] name = "pyspark" diff --git a/java/connector-node/assembly/assembly.xml b/java/connector-node/assembly/assembly.xml index 9cf457d8a0b6d..26df6e8a71af9 100644 --- a/java/connector-node/assembly/assembly.xml +++ b/java/connector-node/assembly/assembly.xml @@ -42,9 +42,13 @@ *:risingwave-sink-es-7 *:risingwave-sink-cassandra *:risingwave-sink-jdbc + *:risingwave-sink-iceberg *:risingwave-sink-mock-flink-http-sink + + org.apache.iceberg:iceberg-common + true true diff --git 
a/java/connector-node/risingwave-sink-iceberg/pom.xml b/java/connector-node/risingwave-sink-iceberg/pom.xml index 753691c05dd6d..9f733d830a475 100644 --- a/java/connector-node/risingwave-sink-iceberg/pom.xml +++ b/java/connector-node/risingwave-sink-iceberg/pom.xml @@ -65,11 +65,36 @@ iceberg-data ${iceberg.version} + + org.apache.iceberg + iceberg-hive-metastore + ${iceberg.version} + + + org.apache.hive + hive-metastore + org.apache.parquet parquet-avro 1.12.3 + + org.apache.hadoop + hadoop-common + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.apache.hadoop + hadoop-mapreduce-client-common + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + org.apache.iceberg iceberg-aws @@ -78,13 +103,37 @@ software.amazon.awssdk s3 - 2.18.20 software.amazon.awssdk - url-connection-client - 2.18.20 + sts + + + org.postgresql + postgresql + + + mysql + mysql-connector-java + + + org.xerial + sqlite-jdbc + + + software.amazon.awssdk + apache-client + + + junit + junit + test + + + org.assertj + assertj-core + 3.24.2 + test - diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java index 583747f3b2f3f..e8c900b37e88d 100644 --- a/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/com/risingwave/connector/catalog/JniCatalogWrapper.java @@ -74,6 +74,15 @@ public static JniCatalogWrapper create(String name, String klassName, String[] p checkArgument( props.length % 2 == 0, "props should be key-value pairs, but length is: " + props.length); + + // Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader()); + System.out.println("Current thread name is: " + Thread.currentThread().getName()); + + // try { + // Thread.currentThread().getContextClassLoader().loadClass(klassName); + // } catch (ClassNotFoundException e) { + // throw new RuntimeException(e); + // } try { HashMap config = new HashMap<>(props.length / 2); for (int i = 0; i < props.length; i += 2) { diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynClasses.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynClasses.java new file mode 100644 index 0000000000000..65a75e774d2ff --- /dev/null +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynClasses.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.iceberg.common; + +import org.apache.iceberg.relocated.com.google.common.base.Joiner; +import org.apache.iceberg.relocated.com.google.common.collect.Sets; + +import java.util.Set; + +public class DynClasses { + + private DynClasses() {} + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private Class foundClass = null; + private boolean nullOk = false; + private Set classNames = Sets.newLinkedHashSet(); + + private Builder() {} + + /** + * Set the {@link ClassLoader} used to lookup classes by name. + * + *
<p>
If not set, the current thread's ClassLoader is used. + * + * @param newLoader a ClassLoader + * @return this Builder for method chaining + */ + public Builder loader(ClassLoader newLoader) { + return this; + } + + /** + * Checks for an implementation of the class by name. + * + * @param className name of a class + * @return this Builder for method chaining + */ + public Builder impl(String className) { + classNames.add(className); + + if (foundClass != null) { + return this; + } + + try { + this.foundClass = Class.forName(className); + } catch (ClassNotFoundException e) { + // not the right implementation + } + + return this; + } + + /** + * Instructs this builder to return null if no class is found, rather than throwing an + * Exception. + * + * @return this Builder for method chaining + */ + public Builder orNull() { + this.nullOk = true; + return this; + } + + /** + * Returns the first implementation or throws ClassNotFoundException if one was not found. + * + * @param Java superclass + * @return a {@link Class} for the first implementation found + * @throws ClassNotFoundException if no implementation was found + */ + @SuppressWarnings("unchecked") + public Class buildChecked() throws ClassNotFoundException { + if (!nullOk && foundClass == null) { + throw new ClassNotFoundException( + "Cannot find class; alternatives: " + Joiner.on(", ").join(classNames)); + } + return (Class) foundClass; + } + + /** + * Returns the first implementation or throws RuntimeException if one was not found. + * + * @param Java superclass + * @return a {@link Class} for the first implementation found + * @throws RuntimeException if no implementation was found + */ + @SuppressWarnings("unchecked") + public Class build() { + if (!nullOk && foundClass == null) { + throw new RuntimeException( + "Cannot find class; alternatives: " + Joiner.on(", ").join(classNames)); + } + return (Class) foundClass; + } + } +} diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynConstructors.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynConstructors.java new file mode 100644 index 0000000000000..61566f4e191ff --- /dev/null +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynConstructors.java @@ -0,0 +1,298 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.iceberg.common; + +import org.apache.iceberg.relocated.com.google.common.base.Preconditions; +import org.apache.iceberg.relocated.com.google.common.base.Throwables; +import org.apache.iceberg.relocated.com.google.common.collect.Maps; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; +import java.util.Map; + +/** Copied from parquet-common */ +public class DynConstructors { + + private DynConstructors() {} + + public static class Ctor extends DynMethods.UnboundMethod { + private final Constructor ctor; + private final Class constructed; + + private Ctor(Constructor constructor, Class constructed) { + super(null, "newInstance"); + this.ctor = constructor; + this.constructed = constructed; + } + + public Class getConstructedClass() { + return constructed; + } + + public C newInstanceChecked(Object... args) throws Exception { + try { + if (args.length > ctor.getParameterCount()) { + return ctor.newInstance(Arrays.copyOfRange(args, 0, ctor.getParameterCount())); + } else { + return ctor.newInstance(args); + } + } catch (InstantiationException | IllegalAccessException e) { + throw e; + } catch (InvocationTargetException e) { + Throwables.propagateIfInstanceOf(e.getCause(), Exception.class); + Throwables.propagateIfInstanceOf(e.getCause(), RuntimeException.class); + throw Throwables.propagate(e.getCause()); + } + } + + public C newInstance(Object... args) { + try { + return newInstanceChecked(args); + } catch (Exception e) { + Throwables.propagateIfInstanceOf(e, RuntimeException.class); + throw Throwables.propagate(e); + } + } + + @Override + @SuppressWarnings("unchecked") + public R invoke(Object target, Object... args) { + Preconditions.checkArgument( + target == null, "Invalid call to constructor: target must be null"); + return (R) newInstance(args); + } + + @Override + @SuppressWarnings("unchecked") + public R invokeChecked(Object target, Object... args) throws Exception { + Preconditions.checkArgument( + target == null, "Invalid call to constructor: target must be null"); + return (R) newInstanceChecked(args); + } + + @Override + public DynMethods.BoundMethod bind(Object receiver) { + throw new IllegalStateException("Cannot bind constructors"); + } + + @Override + public boolean isStatic() { + return true; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(constructor=" + ctor + ", class=" + constructed + ")"; + } + } + + public static Builder builder() { + return new Builder(); + } + + public static Builder builder(Class baseClass) { + return new Builder(baseClass); + } + + public static class Builder { + private final Class baseClass; + private Ctor ctor = null; + private Map problems = Maps.newHashMap(); + + public Builder(Class baseClass) { + this.baseClass = baseClass; + } + + public Builder() { + this.baseClass = null; + } + + /** + * Set the {@link ClassLoader} used to lookup classes by name. + * + *
<p>
If not set, the current thread's ClassLoader is used. + * + * @param newLoader a ClassLoader + * @return this Builder for method chaining + */ + public Builder loader(ClassLoader newLoader) { + return this; + } + + public Builder impl(String className, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + impl(targetClass, types); + } catch (NoClassDefFoundError | ClassNotFoundException e) { + // cannot load this implementation + problems.put(className, e); + } + return this; + } + + public Builder impl(Class targetClass, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + ctor = new Ctor(targetClass.getConstructor(types), targetClass); + } catch (NoSuchMethodException e) { + // not the right implementation + problems.put(methodName(targetClass, types), e); + } + return this; + } + + public Builder hiddenImpl(Class... types) { + hiddenImpl(baseClass, types); + return this; + } + + @SuppressWarnings("unchecked") + public Builder hiddenImpl(String className, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + hiddenImpl(targetClass, types); + } catch (NoClassDefFoundError | ClassNotFoundException e) { + // cannot load this implementation + problems.put(className, e); + } + return this; + } + + public Builder hiddenImpl(Class targetClass, Class... types) { + // don't do any work if an implementation has been found + if (ctor != null) { + return this; + } + + try { + Constructor hidden = targetClass.getDeclaredConstructor(types); + AccessController.doPrivileged(new MakeAccessible(hidden)); + ctor = new Ctor(hidden, targetClass); + } catch (SecurityException e) { + // unusable + problems.put(methodName(targetClass, types), e); + } catch (NoSuchMethodException e) { + // not the right implementation + problems.put(methodName(targetClass, types), e); + } + return this; + } + + @SuppressWarnings("unchecked") + public Ctor buildChecked() throws NoSuchMethodException { + if (ctor != null) { + return ctor; + } + throw buildCheckedException(baseClass, problems); + } + + @SuppressWarnings("unchecked") + public Ctor build() { + if (ctor != null) { + return ctor; + } + throw buildRuntimeException(baseClass, problems); + } + } + + private static class MakeAccessible implements PrivilegedAction { + private Constructor hidden; + + MakeAccessible(Constructor hidden) { + this.hidden = hidden; + } + + @Override + public Void run() { + hidden.setAccessible(true); + return null; + } + } + + private static NoSuchMethodException buildCheckedException( + Class baseClass, Map problems) { + NoSuchMethodException exc = + new NoSuchMethodException( + "Cannot find constructor for " + baseClass + "\n" + formatProblems(problems)); + problems.values().forEach(exc::addSuppressed); + return exc; + } + + private static RuntimeException buildRuntimeException( + Class baseClass, Map problems) { + RuntimeException exc = + new RuntimeException( + "Cannot find constructor for " + baseClass + "\n" + formatProblems(problems)); + problems.values().forEach(exc::addSuppressed); + return exc; + } + + private static String formatProblems(Map problems) { + StringBuilder sb = new StringBuilder(); + boolean first = true; + for (Map.Entry problem : problems.entrySet()) { + if (first) { + first = false; + } else { + 
sb.append("\n"); + } + sb.append("\tMissing ") + .append(problem.getKey()) + .append(" [") + .append(problem.getValue().getClass().getName()) + .append(": ") + .append(problem.getValue().getMessage()) + .append("]"); + } + return sb.toString(); + } + + private static String methodName(Class targetClass, Class... types) { + StringBuilder sb = new StringBuilder(); + sb.append(targetClass.getName()).append("("); + boolean first = true; + for (Class type : types) { + if (first) { + first = false; + } else { + sb.append(","); + } + sb.append(type.getName()); + } + sb.append(")"); + return sb.toString(); + } +} diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynFields.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynFields.java new file mode 100644 index 0000000000000..80b6af8cb7f93 --- /dev/null +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynFields.java @@ -0,0 +1,428 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.common; + +import org.apache.iceberg.relocated.com.google.common.base.Joiner; +import org.apache.iceberg.relocated.com.google.common.base.MoreObjects; +import org.apache.iceberg.relocated.com.google.common.base.Preconditions; +import org.apache.iceberg.relocated.com.google.common.base.Throwables; +import org.apache.iceberg.relocated.com.google.common.collect.Sets; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Set; + +public class DynFields { + + private DynFields() {} + + /** + * Convenience wrapper class around {@link Field}. + * + *
<p>
Allows callers to invoke the wrapped method with all Exceptions wrapped by RuntimeException, + * or with a single Exception catch block. + */ + public static class UnboundField { + private final Field field; + private final String name; + + private UnboundField(Field field, String name) { + this.field = field; + this.name = name; + } + + @SuppressWarnings("unchecked") + public T get(Object target) { + try { + return (T) field.get(target); + } catch (IllegalAccessException e) { + throw Throwables.propagate(e); + } + } + + public void set(Object target, T value) { + try { + field.set(target, value); + } catch (IllegalAccessException e) { + throw Throwables.propagate(e); + } + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("class", field.getDeclaringClass().toString()) + .add("name", name) + .add("type", field.getType()) + .toString(); + } + + /** + * Returns this method as a BoundMethod for the given receiver. + * + * @param target an Object on which to get or set this field + * @return a {@link BoundField} for this field and the target + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + */ + public BoundField bind(Object target) { + Preconditions.checkState( + !isStatic() || this == AlwaysNull.INSTANCE, "Cannot bind static field %s", name); + Preconditions.checkArgument( + field.getDeclaringClass().isAssignableFrom(target.getClass()), + "Cannot bind field %s to instance of %s", + name, + target.getClass()); + + return new BoundField<>(this, target); + } + + /** + * Returns this field as a StaticField. + * + * @return a {@link StaticField} for this field + * @throws IllegalStateException if the method is not static + */ + public StaticField asStatic() { + Preconditions.checkState(isStatic(), "Field %s is not static", name); + return new StaticField<>(this); + } + + /** Returns whether the field is a static field. */ + public boolean isStatic() { + return Modifier.isStatic(field.getModifiers()); + } + + /** Returns whether the field is always null. 
*/ + public boolean isAlwaysNull() { + return this == AlwaysNull.INSTANCE; + } + } + + private static class AlwaysNull extends UnboundField { + private static final AlwaysNull INSTANCE = new AlwaysNull(); + + private AlwaysNull() { + super(null, "AlwaysNull"); + } + + @Override + public Void get(Object target) { + return null; + } + + @Override + public void set(Object target, Void value) {} + + @Override + public String toString() { + return "Field(AlwaysNull)"; + } + + @Override + public boolean isStatic() { + return true; + } + + @Override + public boolean isAlwaysNull() { + return true; + } + } + + public static class StaticField { + private final UnboundField field; + + private StaticField(UnboundField field) { + this.field = field; + } + + public T get() { + return field.get(null); + } + + public void set(T value) { + field.set(null, value); + } + } + + public static class BoundField { + private final UnboundField field; + private final Object target; + + private BoundField(UnboundField field, Object target) { + this.field = field; + this.target = target; + } + + public T get() { + return field.get(target); + } + + public void set(T value) { + field.set(target, value); + } + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private UnboundField field = null; + private final Set candidates = Sets.newHashSet(); + private boolean defaultAlwaysNull = false; + + private Builder() {} + + /** + * Set the {@link ClassLoader} used to lookup classes by name. + * + *
If not set, the current thread's ClassLoader is used. + * + * @param newLoader a ClassLoader + * @return this Builder for method chaining + */ + public Builder loader(ClassLoader newLoader) { + return this; + } + + /** + * Instructs this builder to return AlwaysNull if no implementation is found. + * + * @return this Builder for method chaining + */ + public Builder defaultAlwaysNull() { + this.defaultAlwaysNull = true; + return this; + } + + /** + * Checks for an implementation, first finding the class by name. + * + * @param className name of a class + * @param fieldName name of the field + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder impl(String className, String fieldName) { + // don't do any work if an implementation has been found + if (field != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + impl(targetClass, fieldName); + } catch (ClassNotFoundException e) { + // not the right implementation + candidates.add(className + "." + fieldName); + } + return this; + } + + /** + * Checks for an implementation. + * + * @param targetClass a class instance + * @param fieldName name of a field (different from constructor) + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder impl(Class targetClass, String fieldName) { + // don't do any work if an implementation has been found + if (field != null || targetClass == null) { + return this; + } + + try { + this.field = new UnboundField<>(targetClass.getField(fieldName), fieldName); + } catch (NoSuchFieldException e) { + // not the right implementation + candidates.add(targetClass.getName() + "." + fieldName); + } + return this; + } + + /** + * Checks for a hidden implementation, first finding the class by name. + * + * @param className name of a class + * @param fieldName name of a field (different from constructor) + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder hiddenImpl(String className, String fieldName) { + // don't do any work if an implementation has been found + if (field != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + hiddenImpl(targetClass, fieldName); + } catch (ClassNotFoundException e) { + // not the right implementation + candidates.add(className + "." + fieldName); + } + return this; + } + + /** + * Checks for a hidden implementation. + * + * @param targetClass a class instance + * @param fieldName name of a field (different from constructor) + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getField(String) + */ + public Builder hiddenImpl(Class targetClass, String fieldName) { + // don't do any work if an implementation has been found + if (field != null || targetClass == null) { + return this; + } + + try { + Field hidden = targetClass.getDeclaredField(fieldName); + AccessController.doPrivileged(new MakeFieldAccessible(hidden)); + this.field = new UnboundField(hidden, fieldName); + } catch (SecurityException | NoSuchFieldException e) { + // unusable + candidates.add(targetClass.getName() + "." + fieldName); + } + return this; + } + + /** + * Returns the first valid implementation as a UnboundField or throws a NoSuchFieldException if + * there is none. 
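For readers new to this helper, the builder above is normally driven as a fluent chain of candidates. The sketch below is illustrative only; the `DynFieldsDemo` and `Example` classes and the shaded class name are made-up stand-ins, and only the `DynFields` API defined in this file is assumed:

```java
import org.apache.iceberg.common.DynFields;

public class DynFieldsDemo {
    // Hypothetical target type, used only for this illustration.
    static class Example {
        private String hidden = "secret";
    }

    public static void main(String[] args) throws NoSuchFieldException {
        // Candidates are tried in order; the first one that resolves wins, and
        // buildChecked() throws NoSuchFieldException listing every failed
        // candidate if none of them matched.
        DynFields.UnboundField<String> field =
                DynFields.builder()
                        .hiddenImpl("com.example.ShadedExample", "hidden") // absent: skipped
                        .hiddenImpl(Example.class, "hidden")               // resolves
                        .buildChecked();

        Example target = new Example();
        System.out.println(field.bind(target).get()); // prints "secret"
        field.bind(target).set("updated");
        System.out.println(field.get(target));        // prints "updated"
    }
}
```

Calling `defaultAlwaysNull()` instead would make the builder fall back to the `AlwaysNull` singleton rather than throwing when no candidate resolves.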
+ * + * @param Java class stored in the field + * @return a {@link UnboundField} with a valid implementation + * @throws NoSuchFieldException if no implementation was found + */ + @SuppressWarnings("unchecked") + public UnboundField buildChecked() throws NoSuchFieldException { + if (field != null) { + return (UnboundField) field; + } else if (defaultAlwaysNull) { + return (UnboundField) AlwaysNull.INSTANCE; + } else { + throw new NoSuchFieldException( + "Cannot find field from candidates: " + Joiner.on(", ").join(candidates)); + } + } + + /** + * Returns the first valid implementation as a BoundMethod or throws a NoSuchMethodException if + * there is none. + * + * @param target an Object on which to get and set the field + * @param Java class stored in the field + * @return a {@link BoundField} with a valid implementation and target + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + * @throws NoSuchFieldException if no implementation was found + */ + public BoundField buildChecked(Object target) throws NoSuchFieldException { + return this.buildChecked().bind(target); + } + + /** + * Returns the first valid implementation as a UnboundField or throws a NoSuchFieldException if + * there is none. + * + * @param Java class stored in the field + * @return a {@link UnboundField} with a valid implementation + * @throws RuntimeException if no implementation was found + */ + @SuppressWarnings("unchecked") + public UnboundField build() { + if (field != null) { + return (UnboundField) field; + } else if (defaultAlwaysNull) { + return (UnboundField) AlwaysNull.INSTANCE; + } else { + throw new RuntimeException( + "Cannot find field from candidates: " + Joiner.on(", ").join(candidates)); + } + } + + /** + * Returns the first valid implementation as a BoundMethod or throws a RuntimeException if there + * is none. + * + * @param target an Object on which to get and set the field + * @param Java class stored in the field + * @return a {@link BoundField} with a valid implementation and target + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + * @throws RuntimeException if no implementation was found + */ + public BoundField build(Object target) { + return this.build().bind(target); + } + + /** + * Returns the first valid implementation as a StaticField or throws a NoSuchFieldException if + * there is none. + * + * @param Java class stored in the field + * @return a {@link StaticField} with a valid implementation + * @throws IllegalStateException if the method is not static + * @throws NoSuchFieldException if no implementation was found + */ + public StaticField buildStaticChecked() throws NoSuchFieldException { + return this.buildChecked().asStatic(); + } + + /** + * Returns the first valid implementation as a StaticField or throws a RuntimeException if there + * is none. 
+ * + * @param Java class stored in the field + * @return a {@link StaticField} with a valid implementation + * @throws IllegalStateException if the method is not static + * @throws RuntimeException if no implementation was found + */ + public StaticField buildStatic() { + return this.build().asStatic(); + } + } + + private static class MakeFieldAccessible implements PrivilegedAction { + private Field hidden; + + MakeFieldAccessible(Field hidden) { + this.hidden = hidden; + } + + @Override + public Void run() { + hidden.setAccessible(true); + return null; + } + } +} diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynMethods.java b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynMethods.java new file mode 100644 index 0000000000000..281c3d34ed304 --- /dev/null +++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/DynMethods.java @@ -0,0 +1,522 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.common; + +import org.apache.iceberg.relocated.com.google.common.base.Preconditions; +import org.apache.iceberg.relocated.com.google.common.base.Throwables; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; + +/** Copied from parquet-common */ +public class DynMethods { + + private DynMethods() {} + + /** + * Convenience wrapper class around {@link Method}. + * + *
Allows callers to invoke the wrapped method with all Exceptions wrapped by RuntimeException, + * or with a single Exception catch block. + */ + public static class UnboundMethod { + + private final Method method; + private final String name; + private final int argLength; + + UnboundMethod(Method method, String name) { + this.method = method; + this.name = name; + this.argLength = + (method == null || method.isVarArgs()) ? -1 : method.getParameterTypes().length; + } + + @SuppressWarnings("unchecked") + public R invokeChecked(Object target, Object... args) throws Exception { + try { + if (argLength < 0) { + return (R) method.invoke(target, args); + } else { + return (R) method.invoke(target, Arrays.copyOfRange(args, 0, argLength)); + } + + } catch (InvocationTargetException e) { + Throwables.propagateIfInstanceOf(e.getCause(), Exception.class); + Throwables.propagateIfInstanceOf(e.getCause(), RuntimeException.class); + throw Throwables.propagate(e.getCause()); + } + } + + public R invoke(Object target, Object... args) { + try { + return this.invokeChecked(target, args); + } catch (Exception e) { + Throwables.propagateIfInstanceOf(e, RuntimeException.class); + throw Throwables.propagate(e); + } + } + + /** + * Returns this method as a BoundMethod for the given receiver. + * + * @param receiver an Object to receive the method invocation + * @return a {@link BoundMethod} for this method and the receiver + * @throws IllegalStateException if the method is static + * @throws IllegalArgumentException if the receiver's class is incompatible + */ + public BoundMethod bind(Object receiver) { + Preconditions.checkState( + !isStatic(), "Cannot bind static method %s", method.toGenericString()); + Preconditions.checkArgument( + method.getDeclaringClass().isAssignableFrom(receiver.getClass()), + "Cannot bind %s to instance of %s", + method.toGenericString(), + receiver.getClass()); + + return new BoundMethod(this, receiver); + } + + /** Returns whether the method is a static method. */ + public boolean isStatic() { + return Modifier.isStatic(method.getModifiers()); + } + + /** Returns whether the method is a noop. */ + public boolean isNoop() { + return this == NOOP; + } + + /** + * Returns this method as a StaticMethod. + * + * @return a {@link StaticMethod} for this method + * @throws IllegalStateException if the method is not static + */ + public StaticMethod asStatic() { + Preconditions.checkState(isStatic(), "Method is not static"); + return new StaticMethod(this); + } + + @Override + public String toString() { + return "DynMethods.UnboundMethod(name=" + name + " method=" + method.toGenericString() + ")"; + } + + /** Singleton {@link UnboundMethod}, performs no operation and returns null. */ + private static final UnboundMethod NOOP = + new UnboundMethod(null, "NOOP") { + @Override + public R invokeChecked(Object target, Object... args) throws Exception { + return null; + } + + @Override + public BoundMethod bind(Object receiver) { + return new BoundMethod(this, receiver); + } + + @Override + public StaticMethod asStatic() { + return new StaticMethod(this); + } + + @Override + public boolean isStatic() { + return true; + } + + @Override + public String toString() { + return "DynMethods.UnboundMethod(NOOP)"; + } + }; + } + + public static class BoundMethod { + private final UnboundMethod method; + private final Object receiver; + + private BoundMethod(UnboundMethod method, Object receiver) { + this.method = method; + this.receiver = receiver; + } + + public R invokeChecked(Object... 
args) throws Exception { + return method.invokeChecked(receiver, args); + } + + public R invoke(Object... args) { + return method.invoke(receiver, args); + } + } + + public static class StaticMethod { + private final UnboundMethod method; + + private StaticMethod(UnboundMethod method) { + this.method = method; + } + + public R invokeChecked(Object... args) throws Exception { + return method.invokeChecked(null, args); + } + + public R invoke(Object... args) { + return method.invoke(null, args); + } + } + + /** + * Constructs a new builder for calling methods dynamically. + * + * @param methodName name of the method the builder will locate + * @return a Builder for finding a method + */ + public static Builder builder(String methodName) { + return new Builder(methodName); + } + + public static class Builder { + private final String name; + private UnboundMethod method = null; + + public Builder(String methodName) { + this.name = methodName; + } + + /** + * Set the {@link ClassLoader} used to lookup classes by name. + * + *
If not set, the current thread's ClassLoader is used. + * + * @param newLoader a ClassLoader + * @return this Builder for method chaining + */ + public Builder loader(ClassLoader newLoader) { + return this; + } + + /** + * If no implementation has been found, adds a NOOP method. + * + *
Note: calls to impl will not match after this method is called! + * + * @return this Builder for method chaining + */ + public Builder orNoop() { + if (method == null) { + this.method = UnboundMethod.NOOP; + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + * @param className name of a class + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(String className, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + Class targetClass = Class.forName(className); + impl(targetClass, methodName, argClasses); + } catch (ClassNotFoundException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + *
The name passed to the constructor is the method name used. + * + * @param className name of a class + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(String className, Class... argClasses) { + impl(className, name, argClasses); + return this; + } + + /** + * Checks for a method implementation. + * + * @param targetClass a class instance + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(Class targetClass, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + this.method = new UnboundMethod(targetClass.getMethod(methodName, argClasses), name); + } catch (NoSuchMethodException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for a method implementation. + * + *
The name passed to the constructor is the method name used. + * + * @param targetClass a class instance + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder impl(Class targetClass, Class... argClasses) { + impl(targetClass, name, argClasses); + return this; + } + + public Builder ctorImpl(Class targetClass, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + this.method = new DynConstructors.Builder().impl(targetClass, argClasses).buildChecked(); + } catch (NoSuchMethodException e) { + // not the right implementation + } + return this; + } + + public Builder ctorImpl(String className, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + this.method = new DynConstructors.Builder().impl(className, argClasses).buildChecked(); + } catch (NoSuchMethodException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + * @param className name of a class + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder hiddenImpl(String className, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + Class targetClass = Class.forName(className ); + hiddenImpl(targetClass, methodName, argClasses); + } catch (ClassNotFoundException e) { + // not the right implementation + } + return this; + } + + /** + * Checks for an implementation, first finding the given class by name. + * + *
The name passed to the constructor is the method name used. + * + * @param className name of a class + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder hiddenImpl(String className, Class... argClasses) { + hiddenImpl(className, name, argClasses); + return this; + } + + /** + * Checks for a method implementation. + * + * @param targetClass a class instance + * @param methodName name of a method (different from constructor) + * @param argClasses argument classes for the method + * @return this Builder for method chaining + * @see Class#forName(String) + * @see Class#getMethod(String, Class[]) + */ + public Builder hiddenImpl(Class targetClass, String methodName, Class... argClasses) { + // don't do any work if an implementation has been found + if (method != null) { + return this; + } + + try { + Method hidden = targetClass.getDeclaredMethod(methodName, argClasses); + AccessController.doPrivileged(new MakeAccessible(hidden)); + this.method = new UnboundMethod(hidden, name); + } catch (SecurityException | NoSuchMethodException e) { + // unusable or not the right implementation + } + return this; + } + + /** + * Checks for a method implementation. + * + *
The name passed to the constructor is the method name used.
+   *
+   * @param targetClass a class instance
+   * @param argClasses argument classes for the method
+   * @return this Builder for method chaining
+   * @see Class#forName(String)
+   * @see Class#getMethod(String, Class[])
+   */
+  public Builder hiddenImpl(Class<?> targetClass, Class<?>... argClasses) {
+    hiddenImpl(targetClass, name, argClasses);
+    return this;
+  }
+
+  /**
+   * Returns the first valid implementation as an UnboundMethod or throws a RuntimeException if
+   * there is none.
+   *
+   * @return a {@link UnboundMethod} with a valid implementation
+   * @throws RuntimeException if no implementation was found
+   */
+  public UnboundMethod build() {
+    if (method != null) {
+      return method;
+    } else {
+      throw new RuntimeException("Cannot find method: " + name);
+    }
+  }
+
+  /**
+   * Returns the first valid implementation as a BoundMethod or throws a RuntimeException if there
+   * is none.
+   *
+   * @param receiver an Object to receive the method invocation
+   * @return a {@link BoundMethod} with a valid implementation and receiver
+   * @throws IllegalStateException if the method is static
+   * @throws IllegalArgumentException if the receiver's class is incompatible
+   * @throws RuntimeException if no implementation was found
+   */
+  public BoundMethod build(Object receiver) {
+    return build().bind(receiver);
+  }
+
+  /**
+   * Returns the first valid implementation as an UnboundMethod or throws a NoSuchMethodException
+   * if there is none.
+   *
+   * @return a {@link UnboundMethod} with a valid implementation
+   * @throws NoSuchMethodException if no implementation was found
+   */
+  public UnboundMethod buildChecked() throws NoSuchMethodException {
+    if (method != null) {
+      return method;
+    } else {
+      throw new NoSuchMethodException("Cannot find method: " + name);
+    }
+  }
+
+  /**
+   * Returns the first valid implementation as a BoundMethod or throws a NoSuchMethodException if
+   * there is none.
+   *
+   * @param receiver an Object to receive the method invocation
+   * @return a {@link BoundMethod} with a valid implementation and receiver
+   * @throws IllegalStateException if the method is static
+   * @throws IllegalArgumentException if the receiver's class is incompatible
+   * @throws NoSuchMethodException if no implementation was found
+   */
+  public BoundMethod buildChecked(Object receiver) throws NoSuchMethodException {
+    return buildChecked().bind(receiver);
+  }
+
+  /**
+   * Returns the first valid implementation as a StaticMethod or throws a NoSuchMethodException if
+   * there is none.
+   *
+   * @return a {@link StaticMethod} with a valid implementation
+   * @throws IllegalStateException if the method is not static
+   * @throws NoSuchMethodException if no implementation was found
+   */
+  public StaticMethod buildStaticChecked() throws NoSuchMethodException {
+    return buildChecked().asStatic();
+  }
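Mirroring the field helper, here is a minimal usage sketch of this builder; `String.concat` is a real JDK method, while the fallback class name and `DynMethodsDemo` are made-up placeholders:

```java
import org.apache.iceberg.common.DynMethods;

public class DynMethodsDemo {
    public static void main(String[] args) {
        // Candidates are tried in order, so the made-up fallback below is never
        // consulted once String.concat(String) resolves; build() would throw a
        // RuntimeException if no candidate matched (or orNoop() could be used
        // to fall back to the NOOP singleton instead).
        DynMethods.UnboundMethod concat =
                DynMethods.builder("concat")
                        .impl(String.class, String.class)                // String.concat(String)
                        .impl("com.example.MissingHelper", String.class) // absent: skipped
                        .build();

        String result = concat.bind("foo").invoke("bar");
        System.out.println(result); // prints "foobar"
    }
}
```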
+
+  /**
+   * Returns the first valid implementation as a StaticMethod or throws a RuntimeException if
+   * there is none.
+   *
+   * @return a {@link StaticMethod} with a valid implementation
+   * @throws IllegalStateException if the method is not static
+   * @throws RuntimeException if no implementation was found
+   */
+  public StaticMethod buildStatic() {
+    return build().asStatic();
+  }
+ }
+
+ private static class MakeAccessible implements PrivilegedAction<Void> {
+  private Method hidden;
+
+  MakeAccessible(Method hidden) {
+    this.hidden = hidden;
+  }
+
+  @Override
+  public Void run() {
+    hidden.setAccessible(true);
+    return null;
+  }
+ }
+}
diff --git a/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/README.md b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/README.md
new file mode 100644
index 0000000000000..e817f66e2d93e
--- /dev/null
+++ b/java/connector-node/risingwave-sink-iceberg/src/main/java/org/apache/iceberg/common/README.md
@@ -0,0 +1,6 @@
+# Why do we need this package?
+
+This package overrides classes from the upstream `iceberg-common` package, because the original `iceberg-common` uses `Thread.getContextClassLoader` to load classes dynamically.
+While this works well in most cases, it fails when invoked through JNI: threads attached via JNI are given the bootstrap class loader by default, and `Thread.getContextClassLoader`
+inherits the parent thread's class loader, so every thread created from JNI ends up with the bootstrap class loader. We could call `Thread.setContextClassLoader` with the system class loader
+manually, but that does not cover all cases, because Iceberg uses thread pools internally whose threads we cannot hook.
\ No newline at end of file
diff --git a/java/connector-node/risingwave-sink-iceberg/src/test/java/com/risingwave/connector/catalog/JniCatalogWrapperTest.java b/java/connector-node/risingwave-sink-iceberg/src/test/java/com/risingwave/connector/catalog/JniCatalogWrapperTest.java
new file mode 100644
index 0000000000000..08fa0fdb85e80
--- /dev/null
+++ b/java/connector-node/risingwave-sink-iceberg/src/test/java/com/risingwave/connector/catalog/JniCatalogWrapperTest.java
@@ -0,0 +1,28 @@
+package com.risingwave.connector.catalog;
+
+import org.junit.Test;
+
+public class JniCatalogWrapperTest {
+    @Test
+    public void testJdbc() throws Exception {
+        System.setProperty("aws.region", "us-east-1");
+        JniCatalogWrapper catalog =
+                JniCatalogWrapper.create(
+                        "demo",
+                        "org.apache.iceberg.jdbc.JdbcCatalog",
+                        new String[] {
+                            "uri", "jdbc:postgresql://172.17.0.3:5432/iceberg",
+                            "jdbc.user", "admin",
+                            "jdbc.password", "123456",
+                            "warehouse", "s3://icebergdata/demo",
+                            "io-impl", "org.apache.iceberg.aws.s3.S3FileIO",
+                            "s3.endpoint", "http://172.17.0.2:9301",
+                            "s3.region", "us-east-1",
+                            "s3.path-style-access", "true",
+                            "s3.access-key-id", "hummockadmin",
+                            "s3.secret-access-key", "hummockadmin",
+                        });
+
+        System.out.println(catalog.loadTable("s1.t1"));
+    }
+}
diff --git a/java/pom.xml b/java/pom.xml
index 5f168c48bd9ef..e11dd4d0f0f04 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -84,6 +84,9 @@
 4.15.0
 1.18.0
 1.17.6
+ 3.45.0.0
+ 2.21.42
+ 3.1.3
@@ -324,6 +327,51 @@
 simpleclient_httpserver
 0.5.0
+
+ org.xerial
+ sqlite-jdbc
+ ${sqlite.version}
+
+
+ software.amazon.awssdk
+ s3
+ ${aws.version}
+
+
+ software.amazon.awssdk
+ sts
+ ${aws.version}
+
+
+ software.amazon.awssdk
+ apache-client
+ ${aws.version}
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.apache.hive
+ hive-metastore
+ ${hive.version}
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+
hadoop-mapreduce-client-common + ${hadoop.version} + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + ${hadoop.version} + org.apache.spark spark-sql_2.12 @@ -381,6 +429,7 @@ /tools/maven/checkstyle.xml true true + **/org/apache/iceberg/common/* @@ -396,6 +445,9 @@ + + **/org/apache/iceberg/common/** + diff --git a/src/connector/src/sink/iceberg/jni_catalog.rs b/src/connector/src/sink/iceberg/jni_catalog.rs index 11de50e69936a..2b5cd324d6d13 100644 --- a/src/connector/src/sink/iceberg/jni_catalog.rs +++ b/src/connector/src/sink/iceberg/jni_catalog.rs @@ -153,8 +153,8 @@ impl JniCatalog { JObject::null(), )?; for (i, (key, value)) in java_catalog_props.iter().enumerate() { - let key_j_str = env.new_string(key).unwrap(); - let value_j_str = env.new_string(value).unwrap(); + let key_j_str = env.new_string(key)?; + let value_j_str = env.new_string(value)?; env.set_object_array_element(&props, i as i32 * 2, key_j_str)?; env.set_object_array_element(&props, i as i32 * 2 + 1, value_j_str)?; } @@ -163,18 +163,16 @@ impl JniCatalog { .call_static_method( "com/risingwave/connector/catalog/JniCatalogWrapper", "create", - "(Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)V", + "(Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;)Lcom/risingwave/connector/catalog/JniCatalogWrapper;", &[ (&env.new_string(name.to_string()).unwrap()).into(), (&env.new_string(catalog_impl.to_string()).unwrap()).into(), (&props).into(), ], - ) - .unwrap(); + )?; let jni_catalog = env - .new_global_ref(jni_catalog_wrapper.l().unwrap()) - .unwrap(); + .new_global_ref(jni_catalog_wrapper.l().unwrap())?; Ok(Arc::new(Self { java_catalog: jni_catalog, @@ -184,3 +182,43 @@ impl JniCatalog { }) } } + +#[cfg(test)] +mod tests { + use icelake::catalog::BaseCatalogConfig; + use icelake::TableIdentifier; + + use crate::sink::iceberg::jni_catalog::JniCatalog; + + #[tokio::test] + async fn test_create_jni_catalog_wrapper() { + let config = BaseCatalogConfig { + name: "demo".to_string(), + ..Default::default() + }; + + let props = [ + ("uri", "thrift://192.168.166.6:9083"), + ("warehouse", "s3://icebergdata/demo"), + ("io-impl", "org.apache.iceberg.aws.s3.S3FileIO"), + ("s3.endpoint", "http://192.168.166.3:9301"), + ("s3.path-style-access", "true"), + ("s3.access-key-id", "hummockadmin"), + ("s3.secret-access-key", "hummockadmin"), + ] + .into_iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect(); + + let catalog = + JniCatalog::build(config, "demo", "org.apache.iceberg.hive.HiveCatalog", props) + .unwrap(); + + let table = catalog + .load_table(&TableIdentifier::new(vec!["s1", "t1"]).unwrap()) + .await + .unwrap(); + + println!("{:?}", table.table_name()) + } +} diff --git a/src/connector/src/sink/iceberg/mod.rs b/src/connector/src/sink/iceberg/mod.rs index 68c5654533a64..a5fb6c6ac3cab 100644 --- a/src/connector/src/sink/iceberg/mod.rs +++ b/src/connector/src/sink/iceberg/mod.rs @@ -66,10 +66,7 @@ use crate::sink::{Result, SinkCommitCoordinator, SinkParam}; /// This iceberg sink is WIP. When it ready, we will change this name to "iceberg". pub const ICEBERG_SINK: &str = "iceberg"; -static RW_CATALOG_NAME: &str = "risingwave"; - -#[derive(Debug, Clone, Deserialize, WithOptions, Default)] -#[serde(deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, WithOptions, Default)] pub struct IcebergConfig { pub connector: String, // Avoid deny unknown field. 
Must be "iceberg" @@ -82,7 +79,10 @@ pub struct IcebergConfig { pub table_name: String, // Full name of table, must include schema name #[serde(rename = "database.name")] - pub database_name: String, // Database name of table + pub database_name: Option, // Database name of table + + #[serde(rename = "catalog.name")] + pub catalog_name: String, // Catalog name. // Catalog type supported by iceberg, such as "storage", "rest". // If not set, we use "storage" as default. @@ -166,11 +166,14 @@ impl IcebergConfig { } } - // All configs starts with "catalog." will be treated as java configs. + // All configs start with "catalog." will be treated as java configs. config.java_catalog_props = values .iter() .filter(|(k, _v)| { - k.starts_with("catalog.") && k != &"catalog.uri" && k != &"catalog.type" + k.starts_with("catalog.") + && k != &"catalog.uri" + && k != &"catalog.type" + && k != &"catalog.name" }) .map(|(k, v)| (k[8..].to_string(), v.to_string())) .collect(); @@ -182,18 +185,28 @@ impl IcebergConfig { self.catalog_type.as_deref().unwrap_or("storage") } + fn full_table_name(&self) -> Result { + let ret = if let Some(database_name) = &self.database_name { + TableIdentifier::new(vec![database_name, &self.table_name]) + } else { + TableIdentifier::new(vec![&self.table_name]) + }; + + ret.map_err(|e| SinkError::Iceberg(anyhow!("Failed to create table identifier: {}", e))) + } + fn build_iceberg_configs(&self) -> Result> { let mut iceberg_configs = HashMap::new(); let catalog_type = self.catalog_type().to_string(); iceberg_configs.insert(CATALOG_TYPE.to_string(), catalog_type.clone()); - iceberg_configs.insert(CATALOG_NAME.to_string(), RW_CATALOG_NAME.to_string()); + iceberg_configs.insert(CATALOG_NAME.to_string(), self.catalog_name.clone()); match catalog_type.as_str() { "storage" => { iceberg_configs.insert( - format!("iceberg.catalog.{}.warehouse", RW_CATALOG_NAME), + format!("iceberg.catalog.{}.warehouse", self.catalog_name.clone()), self.path.clone(), ); } @@ -201,7 +214,10 @@ impl IcebergConfig { let uri = self.uri.clone().ok_or_else(|| { SinkError::Iceberg(anyhow!("`catalog.uri` must be set in rest catalog")) })?; - iceberg_configs.insert(format!("iceberg.catalog.{}.uri", RW_CATALOG_NAME), uri); + iceberg_configs.insert( + format!("iceberg.catalog.{}.uri", self.catalog_name.clone()), + uri, + ); } _ => { return Err(SinkError::Iceberg(anyhow!( @@ -265,7 +281,7 @@ impl IcebergConfig { let catalog_type = self.catalog_type().to_string(); iceberg_configs.insert(CATALOG_TYPE.to_string(), catalog_type.clone()); - iceberg_configs.insert(CATALOG_NAME.to_string(), "risingwave".to_string()); + iceberg_configs.insert(CATALOG_NAME.to_string(), self.catalog_name.clone()); if let Some(region) = &self.region { iceberg_configs.insert( @@ -359,18 +375,19 @@ impl IcebergConfig { .map_err(|e| anyhow!(e))?; Ok(catalog) } - catalog_type if catalog_type == "hive" || catalog_type == "sql" || catalog_type == "glue" || catalog_type == "dynamodb" => { + catalog_type if catalog_type == "hive" || catalog_type == "jdbc" || catalog_type == "glue" || catalog_type == "dynamodb" => { // Create java catalog let (base_catalog_config, java_catalog_props) = self.build_jni_catalog_configs()?; let catalog_impl = match catalog_type { "hive" => "org.apache.iceberg.hive.HiveCatalog", - "sql" => "org.apache.iceberg.jdbc.JdbcCatalog", + "jdbc" => "org.apache.iceberg.jdbc.JdbcCatalog", "glue" => "org.apache.iceberg.aws.glue.GlueCatalog", "dynamodb" => "org.apache.iceberg.aws.dynamodb.DynamoDbCatalog", _ => unreachable!(), }; - 
jni_catalog::JniCatalog::build(base_catalog_config, "risingwave", catalog_impl, java_catalog_props) + jni_catalog::JniCatalog::build(base_catalog_config, &self.config.catalog_name, catalog_impl, + java_catalog_props) } "mock" => Ok(Arc::new(MockCatalog{})), _ => { @@ -394,6 +411,7 @@ impl IcebergConfig { .chain(self.table_name.split('.')), ) .context("Unable to parse table name")?; + let table_id = self.config.full_table_name()?; catalog .load_table(&table_id) @@ -974,6 +992,7 @@ fn try_matches_arrow_schema(rw_schema: &Schema, arrow_schema: &ArrowSchema) -> R mod test { use risingwave_common::catalog::Field; + use crate::sink::iceberg::IcebergConfig; use crate::source::DataType; #[test] @@ -1008,4 +1027,58 @@ mod test { ]); try_matches_arrow_schema(&risingwave_schema, &arrow_schema).unwrap(); } + + #[test] + fn test_parse_iceberg_config() { + let values = [ + ("connector", "iceberg"), + ("type", "upsert"), + ("primary_key", "v1"), + ("warehouse.path", "s3://iceberg"), + ("s3.endpoint", "http://127.0.0.1:9301"), + ("s3.access.key", "hummockadmin"), + ("s3.secret.key", "hummockadmin"), + ("s3.region", "us-east-1"), + ("catalog.type", "jdbc"), + ("catalog.name", "demo"), + ("catalog.uri", "jdbc://postgresql://postgres:5432/iceberg"), + ("catalog.jdbc.user", "admin"), + ("catalog.jdbc.password", "123456"), + ("database.name", "demo_db"), + ("table.name", "demo_table"), + ] + .into_iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect(); + + let iceberg_config = IcebergConfig::from_hashmap(values).unwrap(); + + let expected_iceberg_config = IcebergConfig { + connector: "iceberg".to_string(), + r#type: "upsert".to_string(), + force_append_only: false, + table_name: "demo_table".to_string(), + database_name: Some("demo_db".to_string()), + catalog_name: "demo".to_string(), + catalog_type: Some("jdbc".to_string()), + path: "s3://iceberg".to_string(), + uri: Some("jdbc://postgresql://postgres:5432/iceberg".to_string()), + region: Some("us-east-1".to_string()), + endpoint: Some("http://127.0.0.1:9301".to_string()), + access_key: "hummockadmin".to_string(), + secret_key: "hummockadmin".to_string(), + primary_key: Some(vec!["v1".to_string()]), + java_catalog_props: [("jdbc.user", "admin"), ("jdbc.password", "123456")] + .into_iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect(), + }; + + assert_eq!(iceberg_config, expected_iceberg_config); + + assert_eq!( + &iceberg_config.full_table_name().unwrap().to_string(), + "demo_db.demo_table" + ); + } } diff --git a/src/jni_core/src/jvm_runtime.rs b/src/jni_core/src/jvm_runtime.rs index d0193a7717d2c..9e4280485563c 100644 --- a/src/jni_core/src/jvm_runtime.rs +++ b/src/jni_core/src/jvm_runtime.rs @@ -216,12 +216,14 @@ pub fn execute_with_jni_env( let ret = f(&mut env); match env.exception_check() { - Ok(true) => env - .exception_clear() - .inspect_err(|e| { + Ok(true) => { + env.exception_describe().inspect_err(|e| { + tracing::warn!("Failed to describe jvm exception: {:?}", e); + })?; + env.exception_clear().inspect_err(|e| { tracing::warn!(error = %e.as_report(), "Exception occurred but failed to clear"); - }) - .unwrap(), + })?; + } Ok(false) => { // No exception, do nothing }
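As `JniCatalogWrapperTest` above shows, the Rust side hands catalog properties across JNI as a flat, even-length string array of alternating keys and values. The sketch below illustrates one plausible way such an array can be folded into a property map and fed to Iceberg's `CatalogUtil.loadCatalog`; it is an assumption-laden illustration of the calling convention, not the actual `JniCatalogWrapper` implementation (`FlatPropsDemo` and the connection values are hypothetical), and actually running it against `JdbcCatalog` requires a reachable database:

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.catalog.Catalog;

public class FlatPropsDemo {
    // Folds ["k1", "v1", "k2", "v2", ...] into a property map.
    static Map<String, String> toMap(String[] flatProps) {
        if (flatProps.length % 2 != 0) {
            throw new IllegalArgumentException("expected key/value pairs");
        }
        Map<String, String> props = new HashMap<>();
        for (int i = 0; i < flatProps.length; i += 2) {
            props.put(flatProps[i], flatProps[i + 1]);
        }
        return props;
    }

    public static void main(String[] args) {
        String[] flat = {
            "uri", "jdbc:postgresql://127.0.0.1:5432/iceberg",
            "warehouse", "s3://icebergdata/demo",
        };
        // loadCatalog instantiates the named catalog class and calls
        // initialize(name, props) on it; the last argument is the Hadoop
        // Configuration, which may be null when the catalog does not need one.
        Catalog catalog =
                CatalogUtil.loadCatalog(
                        "org.apache.iceberg.jdbc.JdbcCatalog", "demo", toMap(flat), null);
        System.out.println(catalog.name());
    }
}
```

This also explains why the Rust `JniCatalog::build` call passes `catalog.name` through: the name ends up as the first argument to the catalog's `initialize` call.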