
Commit 20f6958
Bug fix
liurenjie1024 committed Feb 21, 2024
1 parent 9d3a3a5 commit 20f6958
Showing 6 changed files with 16 additions and 11 deletions.
2 changes: 1 addition & 1 deletion docker/docker-compose.yml
@@ -1,7 +1,7 @@
 ---
 version: "3"
 x-image: &image
-  image: ${RW_IMAGE:-ghcr.io/risingwavelabs/risingwave:git-71e013012065bbfa86e3839f5a2b23d06e0e71ef}
+  image: ${RW_IMAGE:-ghcr.io/risingwavelabs/risingwave:git-9d3a3a58a182ed60ed1ff4ecc02a6c6489ace0a3}
 services:
   risingwave-standalone:
     <<: *image
2 changes: 0 additions & 2 deletions integration_tests/iceberg-sink2/docker/jdbc/config.ini
@@ -12,8 +12,6 @@ port=4566
 connector = iceberg
 type=append-only
 force_append_only = true
-catalog.jdbc.user=admin
-catalog.jdbc.password=123456
 warehouse.path = s3://icebergdata/demo
 s3.endpoint=http://minio-0:9301
 s3.access.key = hummockadmin
9 changes: 7 additions & 2 deletions integration_tests/iceberg-sink2/docker/…/docker-compose.yml
@@ -39,7 +39,7 @@ services:
       retries: 1200
     volumes:
       - ./spark-script:/spark-script
-    entrypoint: ["/spark-script/spark-connect-server.sh"]
+    entrypoint: [ "/spark-script/spark-connect-server.sh" ]
 
   risingwave-standalone:
     extends:
@@ -51,6 +51,10 @@ services:
         - bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/4566; exit $$?;'
       interval: 1s
       timeout: 30s
+    environment:
+      - AWS_REGION=us-east-1
+    links:
+      - minio-0:icebergdata.minio-0
     networks:
       iceberg_net:
 
@@ -88,4 +92,5 @@ volumes:
     external: false
 
 networks:
-  iceberg_net:
+  iceberg_net:
+    name: iceberg
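
The added AWS_REGION variable and the minio-0:icebergdata.minio-0 link alias are presumably what makes the S3 client inside the container work against MinIO: most AWS SDKs refuse to build a client without a region, and a client using virtual-hosted-style addressing folds the bucket name into the endpoint hostname, so icebergdata.minio-0 has to resolve on the compose network. A minimal Rust sketch of that addressing rule (illustrative only; virtual_hosted_url and the object key are made up, not code from this repository):

// Sketch: with virtual-hosted-style S3 addressing, the bucket becomes part of
// the endpoint hostname, which is why the compose file aliases minio-0 as
// icebergdata.minio-0 so requests against s3://icebergdata can resolve.
fn virtual_hosted_url(endpoint: &str, bucket: &str, key: &str) -> String {
    let (scheme, host_and_port) = endpoint
        .split_once("://")
        .expect("endpoint should look like scheme://host:port");
    format!("{scheme}://{bucket}.{host_and_port}/{key}")
}

fn main() {
    // s3.endpoint=http://minio-0:9301 and warehouse.path=s3://icebergdata/demo
    // (from config.ini above) combine into a host of icebergdata.minio-0.
    let url = virtual_hosted_url("http://minio-0:9301", "icebergdata", "demo/t1/data.parquet");
    assert_eq!(url, "http://icebergdata.minio-0:9301/demo/t1/data.parquet");
    println!("{url}");
}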
1 change: 0 additions & 1 deletion integration_tests/iceberg-sink2/docker/…/spark-script/spark-connect-server.sh
@@ -7,7 +7,6 @@ JARS=$(find /opt/spark/deps -type f -name "*.jar" | tr '\n' ':')
 /opt/spark/sbin/start-connect-server.sh \
   --master local[3] \
   --driver-class-path $JARS \
-  --conf spark.driver.extraJavaOptions="-Djdbc.drivers=org.postgresql.Driver" \
   --conf spark.driver.bindAddress=0.0.0.0 \
   --conf spark.sql.catalog.demo=org.apache.iceberg.spark.SparkCatalog \
   --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \
7 changes: 3 additions & 4 deletions integration_tests/iceberg-sink2/python/main.py
@@ -112,8 +112,7 @@ def check_spark_table(case_name):
     spark = SparkSession.builder.remote(url).getOrCreate()
 
     sqls = [
-        "USE s1",
-        "SELECT COUNT(*) FROM t1"
+        "SELECT COUNT(*) FROM s1.t1"
     ]
 
     for sql in sqls:
@@ -123,10 +122,10 @@ def check_spark_table(case_name):
 
 
 if __name__ == "__main__":
-    # case_name = "rest"
+    case_name = "rest"
     # case_name = "storage"
     # case_name = "jdbc"
-    case_name = "hive"
+    # case_name = "hive"
     config = configparser.ConfigParser()
     config.read(f"{case_dir(case_name)}/config.ini")
     print({section: dict(config[section]) for section in config.sections()})
6 changes: 5 additions & 1 deletion src/connector/src/sink/iceberg/mod.rs
@@ -263,7 +263,11 @@ impl IcebergConfig {
         };
 
         iceberg_configs.insert("iceberg.table.io.bucket".to_string(), bucket);
-        iceberg_configs.insert("iceberg.table.io.root".to_string(), root);
+
+        // Only storage catalog should set this.
+        if catalog_type == "storage" {
+            iceberg_configs.insert("iceberg.table.io.root".to_string(), root);
+        }
         // #TODO
         // Support load config file
         iceberg_configs.insert(
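
For context, a self-contained sketch of the behavior this hunk introduces (simplified; table_io_configs is an illustrative stand-in, not the real method on IcebergConfig): per the comment in the diff, iceberg.table.io.root is only meaningful for the storage catalog, which presumably derives table locations from a fixed warehouse root, so setting it unconditionally for the other catalog types was the bug being fixed.

use std::collections::HashMap;

// Illustrative stand-in for the logic above: the root path is inserted only
// for the storage catalog, per the "Only storage catalog should set this"
// comment in the diff. Function name and signature are made up for the sketch.
fn table_io_configs(catalog_type: &str, bucket: &str, root: &str) -> HashMap<String, String> {
    let mut configs = HashMap::new();
    configs.insert("iceberg.table.io.bucket".to_string(), bucket.to_string());
    if catalog_type == "storage" {
        configs.insert("iceberg.table.io.root".to_string(), root.to_string());
    }
    configs
}

fn main() {
    let storage = table_io_configs("storage", "icebergdata", "demo");
    let hive = table_io_configs("hive", "icebergdata", "demo");
    assert!(storage.contains_key("iceberg.table.io.root"));
    assert!(!hive.contains_key("iceberg.table.io.root"));
}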
