
Commit

Merge branch 'apache:master' into master
sayyidhussain authored Oct 8, 2023
2 parents 9179162 + 38405b2 commit 9d8f6c2
Showing 250 changed files with 1,872 additions and 1,799 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/e2e-operation.yml
@@ -51,7 +51,7 @@ jobs:

e2e-operation:
if: ${{ needs.global-environment.outputs.GLOBAL_JOB_ENABLED == 'true' }}
name: E2E - Operation
name: E2E - ${{ matrix.operation }} on ${{ matrix.image.version }}
needs: [ detect-changed-files, global-environment ]
runs-on: ubuntu-latest
timeout-minutes: 40
11 changes: 11 additions & 0 deletions .github/workflows/nightly-build.yml
@@ -237,3 +237,14 @@ jobs:
remote_port: ${{ secrets.NIGHTLIES_RSYNC_PORT }}
remote_user: ${{ secrets.NIGHTLIES_RSYNC_USER }}
remote_key: ${{ secrets.NIGHTLIES_RSYNC_KEY }}

generate-contributors-map-image:
runs-on: ubuntu-latest
permissions: write-all

steps:
- name: Contributor Map
uses: tunaitis/contributor-map@v1
with:
repository: apache/shardingsphere
output: docs/contributor-map.svg
2 changes: 2 additions & 0 deletions README.md
@@ -22,6 +22,8 @@
|:---------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|
| [![Stargazers Over Time](https://starchart.cc/apache/shardingsphere.svg)](https://starchart.cc/apache/shardingsphere) | [![Contributor over time](https://contributor-graph-api.apiseven.com/contributors-svg?chart=contributorOverTime&repo=apache/shardingsphere)](https://www.apiseven.com/en/contributor-graph?chart=contributorOverTime&repo=apache/shardingsphere) |

![Contributors Map](./docs/contributor-map.svg)

### OVERVIEW

<hr>
@@ -53,6 +53,6 @@ public Optional<GaugeMetricFamilyMetricsCollector> export(final String pluginTyp
}

private int getStorageUnitCount(final MetaDataContexts metaDataContexts) {
return metaDataContexts.getMetaData().getDatabases().values().stream().map(each -> each.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size()).reduce(0, Integer::sum);
return metaDataContexts.getMetaData().getDatabases().values().stream().map(each -> each.getResourceMetaData().getStorageUnits().size()).reduce(0, Integer::sum);
}
}
@@ -75,7 +75,7 @@ void assertExportWithContextManager() {

private ContextManager mockContextManager() {
ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("ds_0", mock(StorageUnit.class)));
when(database.getResourceMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("ds_0", mock(StorageUnit.class)));
when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE"));
ShardingSphereMetaData metaData = mock(ShardingSphereMetaData.class);
when(metaData.getDatabases()).thenReturn(Collections.singletonMap("sharding_db", database));
@@ -41,8 +41,8 @@ public abstract class TracingJDBCExecutorCallbackAdvice<T> implements InstanceMe
public final void beforeMethod(final TargetAdviceObject target, final Method method, final Object[] args, final String pluginType) {
JDBCExecutionUnit executionUnit = (JDBCExecutionUnit) args[0];
ResourceMetaData resourceMetaData = AgentReflectionUtils.getFieldValue(target, "resourceMetaData");
ConnectionProperties connectionProps = resourceMetaData.getConnectionProperties(executionUnit.getExecutionUnit().getDataSourceName());
DatabaseType storageType = resourceMetaData.getStorageType(executionUnit.getExecutionUnit().getDataSourceName());
ConnectionProperties connectionProps = resourceMetaData.getStorageUnits().get(executionUnit.getExecutionUnit().getDataSourceName()).getConnectionProperties();
DatabaseType storageType = resourceMetaData.getStorageUnits().get(executionUnit.getExecutionUnit().getDataSourceName()).getStorageType();
recordExecuteInfo(RootSpanContext.get(), target, executionUnit, (boolean) args[1], connectionProps, storageType);
}
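Several hunks in this commit repeat the same migration: per-data-source details that were previously fetched through dedicated `ResourceMetaData` accessors (`getConnectionProperties(...)`, `getStorageType(...)`, or the intermediate `getStorageUnitMetaData()`) are now read from the `StorageUnit` returned by `getStorageUnits()`. A minimal sketch of the new access pattern follows; the import paths and the helper class itself are illustrative assumptions, not code from the repository:

```java
import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;

// Illustrative helper, not part of the commit: shows the post-refactoring access path.
public final class StorageUnitLookupExample {

    public static DatabaseType storageTypeOf(final ResourceMetaData resourceMetaData, final String dataSourceName) {
        // Before: resourceMetaData.getStorageType(dataSourceName)
        StorageUnit storageUnit = resourceMetaData.getStorageUnits().get(dataSourceName);
        return storageUnit.getStorageType();
    }

    public static ConnectionProperties connectionPropertiesOf(final ResourceMetaData resourceMetaData, final String dataSourceName) {
        // Before: resourceMetaData.getConnectionProperties(dataSourceName)
        return resourceMetaData.getStorageUnits().get(dataSourceName).getConnectionProperties();
    }

    public static int storageUnitCountOf(final ResourceMetaData resourceMetaData) {
        // Before: resourceMetaData.getStorageUnitMetaData().getStorageUnits().size()
        return resourceMetaData.getStorageUnits().size();
    }
}
```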

@@ -57,6 +57,7 @@

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@@ -95,9 +96,9 @@ private void prepare() {
when(connection.getMetaData()).thenReturn(databaseMetaData);
when(statement.getConnection()).thenReturn(connection);
executionUnit = new JDBCExecutionUnit(new ExecutionUnit(DATA_SOURCE_NAME, new SQLUnit(SQL, Collections.emptyList())), null, statement);
ResourceMetaData resourceMetaData = mock(ResourceMetaData.class);
when(resourceMetaData.getStorageType(DATA_SOURCE_NAME)).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL"));
when(resourceMetaData.getConnectionProperties(DATA_SOURCE_NAME)).thenReturn(mock(ConnectionProperties.class));
ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS);
when(resourceMetaData.getStorageUnits().get(DATA_SOURCE_NAME).getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL"));
when(resourceMetaData.getStorageUnits().get(DATA_SOURCE_NAME).getConnectionProperties()).thenReturn(mock(ConnectionProperties.class));
JDBCExecutorCallback jdbcExecutorCallback = new JDBCExecutorCallbackFixture(TypedSPILoader.getService(DatabaseType.class, "MySQL"), resourceMetaData, new MySQLSelectStatement(), true);
Plugins.getMemberAccessor().set(JDBCExecutorCallback.class.getDeclaredField("resourceMetaData"), jdbcExecutorCallback, resourceMetaData);
targetObject = (TargetAdviceObject) jdbcExecutorCallback;
@@ -46,8 +46,8 @@ public short[] decodeInt2Array(final byte[] parameterBytes, final boolean isBina
Collection<String> parameterElements = decodeText(parameterValue);
short[] result = new short[parameterElements.size()];
int index = 0;
for (String element : parameterElements) {
result[index++] = Short.parseShort(element);
for (String each : parameterElements) {
result[index++] = Short.parseShort(each);
}
return result;
}
@@ -65,8 +65,8 @@ public int[] decodeInt4Array(final byte[] parameterBytes, final boolean isBinary
Collection<String> parameterElements = decodeText(parameterValue);
int[] result = new int[parameterElements.size()];
int index = 0;
for (String element : parameterElements) {
result[index++] = Integer.parseInt(element);
for (String each : parameterElements) {
result[index++] = Integer.parseInt(each);
}
return result;
}
@@ -84,8 +84,8 @@ public long[] decodeInt8Array(final byte[] parameterBytes, final boolean isBinar
Collection<String> parameterElements = decodeText(parameterValue);
long[] result = new long[parameterElements.size()];
int index = 0;
for (String element : parameterElements) {
result[index++] = Long.parseLong(element);
for (String each : parameterElements) {
result[index++] = Long.parseLong(each);
}
return result;
}
@@ -103,8 +103,8 @@ public float[] decodeFloat4Array(final byte[] parameterBytes, final boolean isBi
Collection<String> parameterElements = decodeText(parameterValue);
float[] result = new float[parameterElements.size()];
int index = 0;
for (String element : parameterElements) {
result[index++] = Float.parseFloat(element);
for (String each : parameterElements) {
result[index++] = Float.parseFloat(each);
}
return result;
}
@@ -122,8 +122,8 @@ public double[] decodeFloat8Array(final byte[] parameterBytes, final boolean isB
Collection<String> parameterElements = decodeText(parameterValue);
double[] result = new double[parameterElements.size()];
int index = 0;
for (String element : parameterElements) {
result[index++] = Double.parseDouble(element);
for (String each : parameterElements) {
result[index++] = Double.parseDouble(each);
}
return result;
}
@@ -141,8 +141,8 @@ public boolean[] decodeBoolArray(final byte[] parameterBytes, final boolean isBi
Collection<String> parameterElements = decodeText(parameterValue);
boolean[] result = new boolean[parameterElements.size()];
int index = 0;
for (String element : parameterElements) {
result[index++] = Boolean.parseBoolean(element);
for (String each : parameterElements) {
result[index++] = Boolean.parseBoolean(each);
}
return result;
}
18 changes: 15 additions & 3 deletions docs/document/content/test-manual/integration-test/_index.cn.md
@@ -90,6 +90,8 @@ SQL cases are defined in `resources/cases/${SQL-TYPE}/${SQL-TYPE}-integration-test-cases.xm
</dataset>
```

> The e2e operation module is an E2E test and does not contain `<dataset>`-style assertions.

### Environment configuration

`${SCENARIO-TYPE}` refers to the scenario name, which is used to identify a unique scenario during the test engine run.
@@ -167,6 +169,16 @@ it.cluster.databases=H2,MySQL,Oracle,SQLServer,PostgreSQL
```

#### Remote debugging of Proxy code in a Docker container
First, modify the it-env.properties configuration file of the module under test: set function.it.env.type to `docker` and set the corresponding database image version, for example `transaction.it.docker.mysql.version=mysql:5.7`.
Then generate the test image with a command such as:

```bash
# for operation, replace ${operation} with transaction, pipeline, or showprocesslist
./mvnw -B clean install -am -pl test/e2e/operation/${operation} -Pit.env.docker -DskipTests

# for e2e sql
./mvnw -B clean install -am -pl test/e2e/sql -Pit.env.docker -DskipTests -Dspotless.apply.skip=true
```

##### Remote debugging of a Proxy started from the Docker image
The Proxy image used by the E2E tests opens port 3308 by default for remote debugging of the instance inside the container.
@@ -177,7 +189,7 @@ IDEA -> Run -> Edit Configurations -> Add New Configuration -> Remote JVM Debug
Edit the corresponding information:
- Name: a descriptive name, such as e2e-debug.
- Host: an IP that can access docker, such as 127.0.0.1.
- Port: debugging port 3308.
- Port: the debugging port (to be set in the next step).
- use module classpath: the project root directory shardingsphere.

After editing the above information, run Run -> Run -> e2e-debug in IDEA to start remote debugging.
@@ -187,9 +199,9 @@ IDEA -> Run -> Edit Configurations -> Add New Configuration -> Remote JVM Debug
A Proxy container started by Testcontainer can be debugged as follows:
- Set a breakpoint after the relevant Testcontainer startup code, for example on the line after `containerComposer.start();` in E2EIT#setUp() of the sql test; at that point the relevant containers are guaranteed to have started.
- Enter breakpoint debugging mode with the shortcut Alt + F8 and check which host port is mapped to port 3308 of the Proxy object under containerComposer (the external port mapped by Testcontainer is random). For example, the expression `((ShardingSphereProxyClusterContainer)((java.util.LinkedList)((ITContainers)((ClusterContainerComposer)containerComposer).containers).dockerContainers).getLast()).getMappedPort(3308)` returned the randomly mapped external port 51837 (it can also be checked with `docker ps`).
- Following the approach in `Remote debugging of a Proxy started from the Docker image`, set Port to the port obtained in the previous step.
- Following the approach in `Remote debugging of a Proxy started from the Docker image`, set the Port of the Remote JVM Debug configuration to the port obtained in the previous step, e.g. 51837.

After editing the above information, run Run -> Run -> e2e-debug in IDEA to start remote debugging.
After editing the above information, run Run -> Run -> e2e-debug -> debug in IDEA to start remote debugging.


#### Notice
18 changes: 15 additions & 3 deletions docs/document/content/test-manual/integration-test/_index.en.md
@@ -92,6 +92,8 @@ The assertion file format is as follows:
</dataset>
```

> The e2e operation module is an E2E test and does not contain `<dataset>`-style assertions.

### Environment configuration

`${SCENARIO-TYPE}` Refers to the scenario name used to identify a unique scenario during the test engine run.
@@ -167,6 +169,16 @@ If you only modify the test code, you can reuse the existing test mirror without
```

#### Remote debug Proxy code in Docker container
First of all, you need to modify the configuration file it-env.properties, set function.it.env.type to `docker`, and then set the corresponding database image version like `transaction.it.docker.mysql.version=mysql:5.7`.
Then generate the test image through the command, for example:

```bash
# for operation, replace ${operation} with transaction, pipeline, or showprocesslist
./mvnw -B clean install -am -pl test/e2e/operation/${operation} -Pit.env.docker -DskipTests

# for e2e sql
./mvnw -B clean install -am -pl test/e2e/sql -Pit.env.docker -DskipTests -Dspotless.apply.skip=true
```

##### Remote debug Proxy started by docker image
E2E Test Proxy image opens the 3308 port by default for remote debugging of the instance in the container.
@@ -177,7 +189,7 @@ IDEA -> Run -> Edit Configurations -> Add New Configuration -> Remote JVM Debug
Edit the corresponding information:
- Name: A descriptive name, such as e2e-debug.
- Host: an IP that can access docker, such as 127.0.0.1.
- Port: debugging port 3308.
- Port: the debugging port (to be set in the next step).
- use module classpath: The root directory of the project shardingsphere.

After editing the above information, run Run -> Run -> e2e-debug in IDEA to start the remote debug of IDEA.
@@ -187,9 +199,9 @@ After editing the above information, run Run -> Run -> e2e-debug in IDEA to star
Debug Testcontainer started Proxy container by the following method:
- Set a breakpoint in the relevant startup class of Testcontainer, for example, after the line `containerComposer.start();` in BaseE2EIT#setUp() in the suite test, at this time, the relevant containers must have been started.
- Enter breakpoint debugging mode with the shortcut Alt + F8 and check which host port is mapped to port 3308 of the Proxy object under containerComposer (the external port mapped by Testcontainer is random). For example, the expression `((ShardingSphereProxyClusterContainer)((java.util.LinkedList)((ITContainers)((ClusterContainerComposer)containerComposer).containers).dockerContainers).getLast()).getMappedPort(3308)` returns the randomly mapped port 51837 (the mapped port can also be found with `docker ps`).
- See the `Remote debug Proxy started by docker image` method, set the Name, Host, Port, and use the port got in previous step.
- Following the `Remote debug Proxy started by docker image` method, set the Name, Host, and Port, using the port obtained in the previous step, e.g. 51837.

After editing the above information, run Run -> Run -> e2e-debug in IDEA to start the remote debug of IDEA.
After editing the above information, run Run -> Run -> e2e-debug -> debug in IDEA to start the remote debug of IDEA.
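The long evaluate-expression snippet above is doing nothing more than the standard Testcontainers API: asking a running container which random host port was bound to container port 3308. A stripped-down sketch of the same idea, assuming Testcontainers is on the classpath; the image name is a placeholder, not the image built by the E2E suite:

```java
import org.testcontainers.containers.GenericContainer;

// Illustrative only: the real E2E suite builds and starts its own Proxy container.
public final class ProxyDebugPortExample {

    public static void main(final String[] args) {
        try (GenericContainer<?> proxy = new GenericContainer<>("example/shardingsphere-proxy-test:latest")
                .withExposedPorts(3308)) {
            proxy.start();
            // Testcontainers binds container port 3308 to a random host port;
            // that value is what goes into the Port field of the Remote JVM Debug configuration.
            int debugPort = proxy.getMappedPort(3308);
            System.out.println("Attach the remote debugger to 127.0.0.1:" + debugPort);
        }
    }
}
```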

#### Notice

@@ -18,6 +18,7 @@
package org.apache.shardingsphere.encrypt.rule;

import lombok.Getter;
import org.apache.commons.collections4.map.CaseInsensitiveMap;
import org.apache.shardingsphere.encrypt.api.config.rule.EncryptColumnRuleConfiguration;
import org.apache.shardingsphere.encrypt.api.config.rule.EncryptTableRuleConfiguration;
import org.apache.shardingsphere.encrypt.api.encrypt.assisted.AssistedEncryptAlgorithm;
@@ -35,29 +36,26 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.TreeMap;

/**
* Encrypt table.
*/
@Getter
public final class EncryptTable {

@Getter
private final String table;

private final Map<String, EncryptColumn> columns;

@SuppressWarnings("rawtypes")
public EncryptTable(final EncryptTableRuleConfiguration config, final Map<String, StandardEncryptAlgorithm> standardEncryptors,
final Map<String, AssistedEncryptAlgorithm> assistedEncryptors, final Map<String, LikeEncryptAlgorithm> likeEncryptors) {
table = config.getName();
columns = createEncryptColumns(config, standardEncryptors, assistedEncryptors, likeEncryptors);
}

@SuppressWarnings("rawtypes")
private Map<String, EncryptColumn> createEncryptColumns(final EncryptTableRuleConfiguration config, final Map<String, StandardEncryptAlgorithm> standardEncryptors,
final Map<String, AssistedEncryptAlgorithm> assistedEncryptors, final Map<String, LikeEncryptAlgorithm> likeEncryptors) {
Map<String, EncryptColumn> result = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
Map<String, EncryptColumn> result = new CaseInsensitiveMap<>();
for (EncryptColumnRuleConfiguration each : config.getColumns()) {
result.put(each.getName(), createEncryptColumn(each, standardEncryptors, assistedEncryptors, likeEncryptors));
}
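Here (and in the MaskTable change further down) `new TreeMap<>(String.CASE_INSENSITIVE_ORDER)` is replaced with commons-collections4's `CaseInsensitiveMap`, which normalizes key case internally instead of relying on a comparator. A small self-contained sketch of the equivalent lookup behavior, assuming commons-collections4 is on the classpath:

```java
import java.util.Map;
import java.util.TreeMap;

import org.apache.commons.collections4.map.CaseInsensitiveMap;

public final class CaseInsensitiveColumnLookup {

    public static void main(final String[] args) {
        // Old approach: ordered map with a case-insensitive comparator.
        Map<String, String> byComparator = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        byComparator.put("User_Name", "cipher_user_name");
        // New approach: keys are case-normalized when stored and looked up.
        Map<String, String> byNormalization = new CaseInsensitiveMap<>();
        byNormalization.put("User_Name", "cipher_user_name");
        // Both resolve the logical column no matter how the SQL statement cases it.
        System.out.println(byComparator.get("USER_NAME"));     // cipher_user_name
        System.out.println(byNormalization.get("user_name"));  // cipher_user_name
    }
}
```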
@@ -154,7 +154,7 @@ private static InsertStatement createInsertSelectStatement(final boolean contain
new IdentifierValue("status")));
projections.getProjections().add(new ColumnProjectionSegment(statusColumn));
selectStatement.setProjections(projections);
result.setInsertSelect(new SubquerySegment(0, 0, selectStatement));
result.setInsertSelect(new SubquerySegment(0, 0, selectStatement, ""));
return result;
}

@@ -114,7 +114,7 @@ private void checkToBeCreatedEncryptors(final CreateEncryptRuleStatement sqlStat
}

private void checkDataSources(final ShardingSphereDatabase database) {
ShardingSpherePreconditions.checkState(!database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().isEmpty(), () -> new EmptyStorageUnitException(database.getName()));
ShardingSpherePreconditions.checkState(!database.getResourceMetaData().getStorageUnits().isEmpty(), () -> new EmptyStorageUnitException(database.getName()));
}

@Override
@@ -17,12 +17,12 @@

package org.apache.shardingsphere.mask.rule;

import org.apache.commons.collections4.map.CaseInsensitiveMap;
import org.apache.shardingsphere.mask.api.config.rule.MaskColumnRuleConfiguration;
import org.apache.shardingsphere.mask.api.config.rule.MaskTableRuleConfiguration;

import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;

/**
* Mask table.
Expand All @@ -32,7 +32,7 @@ public final class MaskTable {
private final Map<String, MaskColumn> columns;

public MaskTable(final MaskTableRuleConfiguration config) {
columns = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
columns = new CaseInsensitiveMap<>();
for (MaskColumnRuleConfiguration each : config.getColumns()) {
columns.put(each.getLogicColumn(), new MaskColumn(each.getLogicColumn(), each.getMaskAlgorithm()));
}
@@ -27,6 +27,7 @@
import org.apache.shardingsphere.readwritesplitting.exception.checker.DuplicateDataSourceException;
import org.apache.shardingsphere.readwritesplitting.exception.checker.InvalidWeightLoadBalancerConfigurationException;
import org.apache.shardingsphere.readwritesplitting.exception.checker.MissingRequiredWriteDataSourceNameException;
import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource;
import org.apache.shardingsphere.test.util.PropertiesBuilder;
import org.apache.shardingsphere.test.util.PropertiesBuilder.Property;
import org.junit.jupiter.api.Test;
@@ -127,10 +128,10 @@ private ReadwriteSplittingDataSourceRuleConfiguration createDataSourceRuleConfig

private Map<String, DataSource> mockDataSources() {
Map<String, DataSource> result = new LinkedHashMap<>(2, 1F);
result.put("read_ds_0", mock(DataSource.class));
result.put("read_ds_1", mock(DataSource.class));
result.put("write_ds_0", mock(DataSource.class));
result.put("write_ds_1", mock(DataSource.class));
result.put("read_ds_0", new MockedDataSource());
result.put("read_ds_1", new MockedDataSource());
result.put("write_ds_0", new MockedDataSource());
result.put("write_ds_1", new MockedDataSource());
return result;
}
}