
Refactor ShardingSphereStatement.metaData (#31515)
* Refactor ShardingSphereStatement.metaData

terrymanu authored Jun 1, 2024
1 parent c27fa6c commit 51d173e
Showing 5 changed files with 80 additions and 80 deletions.
BatchPreparedStatementExecutor.java
@@ -29,9 +29,9 @@
 import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.jdbc.JDBCExecutionUnit;
 import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.jdbc.JDBCExecutor;
 import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.jdbc.JDBCExecutorCallback;
+import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData;
 import org.apache.shardingsphere.infra.metadata.user.Grantee;
 import org.apache.shardingsphere.infra.rule.attribute.datanode.DataNodeRuleAttribute;
-import org.apache.shardingsphere.mode.metadata.MetaDataContexts;
 import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
 
 import java.sql.SQLException;
@@ -50,7 +50,7 @@
  */
 public final class BatchPreparedStatementExecutor {
 
-    private final MetaDataContexts metaDataContexts;
+    private final ShardingSphereMetaData metaData;
 
     private final JDBCExecutor jdbcExecutor;
 
@@ -63,9 +63,9 @@ public final class BatchPreparedStatementExecutor {
 
     private final String databaseName;
 
-    public BatchPreparedStatementExecutor(final MetaDataContexts metaDataContexts, final JDBCExecutor jdbcExecutor, final String databaseName, final String processId) {
+    public BatchPreparedStatementExecutor(final ShardingSphereMetaData metaData, final JDBCExecutor jdbcExecutor, final String databaseName, final String processId) {
         this.databaseName = databaseName;
-        this.metaDataContexts = metaDataContexts;
+        this.metaData = metaData;
         this.jdbcExecutor = jdbcExecutor;
         executionGroupContext = new ExecutionGroupContext<>(new LinkedList<>(), new ExecutionGroupReportContext(processId, databaseName, new Grantee("", "")));
         batchExecutionUnits = new LinkedList<>();
@@ -135,8 +135,8 @@ private void handleNewBatchExecutionUnits(final Collection<BatchExecutionUnit> n
      */
     public int[] executeBatch(final SQLStatementContext sqlStatementContext) throws SQLException {
         boolean isExceptionThrown = SQLExecutorExceptionHandler.isExceptionThrown();
-        JDBCExecutorCallback<int[]> callback = new JDBCExecutorCallback<int[]>(metaDataContexts.getMetaData().getDatabase(databaseName).getProtocolType(),
-                metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData(), sqlStatementContext.getSqlStatement(), isExceptionThrown) {
+        JDBCExecutorCallback<int[]> callback = new JDBCExecutorCallback<int[]>(metaData.getDatabase(databaseName).getProtocolType(),
+                metaData.getDatabase(databaseName).getResourceMetaData(), sqlStatementContext.getSqlStatement(), isExceptionThrown) {
 
             @Override
             protected int[] executeSQL(final String sql, final Statement statement, final ConnectionMode connectionMode, final DatabaseType storageType) throws SQLException {
@@ -157,7 +157,7 @@ protected Optional<int[]> getSaneResult(final SQLStatement sqlStatement, final S
     }
 
     private boolean isNeedAccumulate(final SQLStatementContext sqlStatementContext) {
-        for (DataNodeRuleAttribute each : metaDataContexts.getMetaData().getDatabase(databaseName).getRuleMetaData().getAttributes(DataNodeRuleAttribute.class)) {
+        for (DataNodeRuleAttribute each : metaData.getDatabase(databaseName).getRuleMetaData().getAttributes(DataNodeRuleAttribute.class)) {
             if (each.isNeedAccumulate(sqlStatementContext.getTablesContext().getTableNames())) {
                 return true;
             }
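The net effect in this file is a narrower contract: BatchPreparedStatementExecutor now receives a ShardingSphereMetaData directly instead of the whole MetaDataContexts, which removes the repeated getMetaData() unwrapping at every use site. Below is a minimal caller-side sketch of the migration; it assumes the caller still holds a MetaDataContexts (its getMetaData() accessor is visible in the removed lines above), and the helper method itself is illustrative rather than part of the commit.

    // Illustrative helper, not from the commit: a caller that still holds MetaDataContexts
    // unwraps it once and hands the executor only the ShardingSphereMetaData it needs.
    private BatchPreparedStatementExecutor createBatchExecutor(final MetaDataContexts metaDataContexts, final JDBCExecutor jdbcExecutor,
                                                                final String databaseName, final String processId) {
        // Before this commit, the whole MetaDataContexts was passed in and unwrapped inside the executor.
        return new BatchPreparedStatementExecutor(metaDataContexts.getMetaData(), jdbcExecutor, databaseName, processId);
    }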
AbstractStatementAdapter.java
@@ -26,7 +26,7 @@
 import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData;
 import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
 import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
-import org.apache.shardingsphere.mode.metadata.MetaDataContexts;
+import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData;
 
 import java.sql.SQLException;
 import java.sql.SQLFeatureNotSupportedException;
@@ -53,9 +53,9 @@ public abstract class AbstractStatementAdapter extends WrapperAdapter implements
 
     private boolean closeOnCompletion;
 
-    protected final void handleExceptionInTransaction(final ShardingSphereConnection connection, final MetaDataContexts metaDataContexts) {
+    protected final void handleExceptionInTransaction(final ShardingSphereConnection connection, final ShardingSphereMetaData metaData) {
         if (connection.getDatabaseConnectionManager().getConnectionTransaction().isInTransaction()) {
-            DatabaseType databaseType = metaDataContexts.getMetaData().getDatabase(connection.getDatabaseName()).getProtocolType();
+            DatabaseType databaseType = metaData.getDatabase(connection.getDatabaseName()).getProtocolType();
             DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData();
             if (dialectDatabaseMetaData.getDefaultSchema().isPresent()) {
                 connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().setExceptionOccur(true);
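AbstractStatementAdapter's handleExceptionInTransaction likewise takes the ShardingSphereMetaData now, so statement implementations forward the metadata they hold rather than a MetaDataContexts. A hedged sketch of a call site follows; the try/catch shape and the metaData field are assumptions about the statement classes changed elsewhere in this commit, not lines shown on this page.

    // Illustrative call site in a statement subclass, assuming it keeps a ShardingSphereMetaData field.
    try {
        return statement.executeUpdate(sql);
    } catch (final SQLException ex) {
        // New signature: pass the ShardingSphereMetaData directly instead of MetaDataContexts.
        handleExceptionInTransaction(connection, metaData);
        throw ex;
    }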
(Diffs for the remaining 3 of the 5 changed files were not loaded on this page.)
