Merge branch 'main' into role_parameters

slvrtrn authored Nov 5, 2024
2 parents 601be11 + 00af5c2 commit 350b47e
Showing 43 changed files with 2,552 additions and 177 deletions.
19 changes: 19 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,19 @@
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
  - package-ecosystem: 'github-actions'
    directory: '.github/workflows'
    schedule:
      interval: 'weekly'
      day: 'monday'
    groups:
      workflows:
        dependency-type: 'development'
  - package-ecosystem: 'npm'
    directory: '/'
    schedule:
      interval: 'weekly'
      day: 'monday'
    groups:
      dev-dependencies:
        dependency-type: 'development'
29 changes: 0 additions & 29 deletions .github/workflows/release.yml

This file was deleted.

28 changes: 20 additions & 8 deletions .github/workflows/scorecard.yml
@@ -12,7 +12,19 @@ on:
schedule:
- cron: '43 12 * * 6'
push:
branches: [ "main" ]
branches:
- main
paths-ignore:
- '**/*.md'
- 'LICENSE'
- 'benchmarks/**'
- 'examples/**'
pull_request:
paths-ignore:
- '**/*.md'
- 'LICENSE'
- 'benchmarks/**'
- 'examples/**'
workflow_dispatch:

# Declare default permissions as read only.
@@ -32,13 +44,13 @@ jobs:
# actions: read

steps:
- name: "Checkout code"
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: 'Checkout code'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false

- name: "Run analysis"
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
- name: 'Run analysis'
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
with:
results_file: results.sarif
results_format: sarif
@@ -59,16 +71,16 @@ jobs:

# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@97a0fba1372883ab732affbe8f94b823f91727db # v3.pre.node20
- name: 'Upload artifact'
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v3.pre.node20
with:
name: SARIF file
path: results.sarif
retention-days: 5

# Upload the results to GitHub's code scanning dashboard (optional).
# Commenting out will disable upload of results to your repo's Code Scanning dashboard
- name: "Upload to code-scanning"
- name: 'Upload to code-scanning'
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: results.sarif
12 changes: 6 additions & 6 deletions .github/workflows/tests.yml
@@ -1,5 +1,6 @@
name: 'tests'

permissions: {}
on:
workflow_dispatch:
push:
@@ -67,7 +68,7 @@ jobs:
- uses: actions/checkout@main

- name: Start ClickHouse (version - ${{ matrix.clickhouse }}) in Docker
uses: isbang/compose-action@v1.5.1
uses: isbang/compose-action@v2.0.2
env:
CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
with:
@@ -100,7 +101,7 @@ jobs:
- uses: actions/checkout@main

- name: Start ClickHouse (version - ${{ matrix.clickhouse }}) in Docker
uses: isbang/compose-action@v1.5.1
uses: isbang/compose-action@v2.0.2
env:
CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
with:
@@ -141,7 +142,7 @@ jobs:
- uses: actions/checkout@main

- name: Start ClickHouse cluster (version - ${{ matrix.clickhouse }}) in Docker
uses: isbang/compose-action@v1.5.1
uses: isbang/compose-action@v2.0.2
env:
CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
with:
@@ -172,7 +173,7 @@ jobs:
- uses: actions/checkout@main

- name: Start ClickHouse cluster (version - ${{ matrix.clickhouse }}) in Docker
uses: isbang/compose-action@v1.5.1
uses: isbang/compose-action@v2.0.2
env:
CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
with:
@@ -222,7 +223,6 @@ jobs:
web-integration-tests-cloud-smt:
needs: node-unit-tests
runs-on: ubuntu-latest
permissions: write-all
steps:
- uses: actions/checkout@main

@@ -263,7 +263,7 @@ jobs:
fetch-depth: 0

- name: Start ClickHouse (version - ${{ matrix.clickhouse }}) in Docker
uses: isbang/compose-action@v1.5.1
uses: isbang/compose-action@v2.0.2
with:
compose-file: 'docker-compose.yml'
down-flags: '--volumes'
63 changes: 63 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,66 @@
# 1.7.0 (Common, Node.js, Web)

## Bug fixes

- (Web only) Fixed an issue where streaming large datasets could produce corrupted results. See [#333](https://github.com/ClickHouse/clickhouse-js/pull/333) (PR) for more details.

## New features

- Added `JSONEachRowWithProgress` format support, `ProgressRow` interface, and `isProgressRow` type guard. See [this Node.js example](./examples/node/select_json_each_row_with_progress.ts) for more details. It should work similarly with the Web version.
- (Experimental) Exposed the `parseColumnType` function that takes a string representation of a ClickHouse type (e.g., `FixedString(16)`, `Nullable(Int32)`, etc.) and returns an AST-like object that represents the type. For example:

```ts
for (const type of [
  'Int32',
  'Array(Nullable(String))',
  `Map(Int32, DateTime64(9, 'UTC'))`,
]) {
  console.log(`##### Source ClickHouse type: ${type}`)
  console.log(parseColumnType(type))
}
```

The above code will output:

```
##### Source ClickHouse type: Int32
{ type: 'Simple', columnType: 'Int32', sourceType: 'Int32' }
##### Source ClickHouse type: Array(Nullable(String))
{
  type: 'Array',
  value: {
    type: 'Nullable',
    sourceType: 'Nullable(String)',
    value: { type: 'Simple', columnType: 'String', sourceType: 'String' }
  },
  dimensions: 1,
  sourceType: 'Array(Nullable(String))'
}
##### Source ClickHouse type: Map(Int32, DateTime64(9, 'UTC'))
{
  type: 'Map',
  key: { type: 'Simple', columnType: 'Int32', sourceType: 'Int32' },
  value: {
    type: 'DateTime64',
    timezone: 'UTC',
    precision: 9,
    sourceType: "DateTime64(9, 'UTC')"
  },
  sourceType: "Map(Int32, DateTime64(9, 'UTC'))"
}
```

While the original intention was to use this function internally for header parsing of the `Native`/`RowBinaryWithNamesAndTypes` data formats, it can be useful for other purposes as well (e.g., interface generation or custom JSON serializers).
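
For instance, here is a minimal sketch of such a use case: a hypothetical `toTSType` helper (not part of the library) that renders the parsed objects shown above as TypeScript type strings. The `ParsedType` shape below only mirrors the variants from this example and is an assumption rather than the library's own type definition:

```ts
// Structural type mirroring only the variants shown in the output above
// (an assumption for this sketch, not the library's own exported type).
type ParsedType =
  | { type: 'Simple'; columnType: string; sourceType: string }
  | { type: 'Nullable'; value: ParsedType; sourceType: string }
  | { type: 'Array'; value: ParsedType; dimensions: number; sourceType: string }
  | { type: 'Map'; key: ParsedType; value: ParsedType; sourceType: string }
  | { type: 'DateTime64'; timezone: string; precision: number; sourceType: string }

// Hypothetical helper: render a parsed ClickHouse type as a TypeScript type string.
function toTSType(parsed: ParsedType): string {
  switch (parsed.type) {
    case 'Simple':
      // A real mapping would cover every simple column type; two cases are enough for a sketch.
      return parsed.columnType === 'String' ? 'string' : 'number'
    case 'Nullable':
      return `${toTSType(parsed.value)} | null`
    case 'Array': {
      // `dimensions` folds nested arrays, e.g. Array(Array(T)) is reported with dimensions: 2.
      let result = toTSType(parsed.value)
      for (let i = 0; i < parsed.dimensions; i++) {
        result = `Array<${result}>`
      }
      return result
    }
    case 'Map':
      return `Map<${toTSType(parsed.key)}, ${toTSType(parsed.value)}>`
    case 'DateTime64':
      // DateTime64 values arrive as strings in JSON-based formats.
      return 'string'
    default:
      return 'unknown'
  }
}

// Using the Array(Nullable(String)) object from the output above:
console.log(
  toTSType({
    type: 'Array',
    value: {
      type: 'Nullable',
      sourceType: 'Nullable(String)',
      value: { type: 'Simple', columnType: 'String', sourceType: 'String' },
    },
    dimensions: 1,
    sourceType: 'Array(Nullable(String))',
  }),
) // -> Array<string | null>
```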

NB: the following source types cannot be parsed yet:

- Geo
- (Simple)AggregateFunction
- Nested
- Old/new experimental JSON
- Dynamic
- Variant

# 1.6.0 (Common, Node.js, Web)

## New features
Expand Down
1 change: 1 addition & 0 deletions examples/README.md
@@ -57,6 +57,7 @@ If something is missing, or you found a mistake in one of these examples, please
- [select_streaming_json_each_row.ts](node/select_streaming_json_each_row.ts) - (Node.js only) streaming JSON\* formats from ClickHouse and processing the data with the `on('data')` event.
- [select_streaming_json_each_row_for_await.ts](node/select_streaming_json_each_row_for_await.ts) - (Node.js only) similar to [select_streaming_json_each_row.ts](node/select_streaming_json_each_row.ts), but using the `for await` loop syntax.
- [select_streaming_text_line_by_line.ts](node/select_streaming_text_line_by_line.ts) - (Node.js only) streaming text formats from ClickHouse and processing the data line by line. In this example, the CSV format is used.
- [select_json_each_row_with_progress.ts](node/select_json_each_row_with_progress.ts) - streaming with the `JSONEachRowWithProgress` format, checking for progress rows in the stream.

#### Data types

39 changes: 39 additions & 0 deletions examples/node/select_json_each_row_with_progress.ts
@@ -0,0 +1,39 @@
import { createClient } from '@clickhouse/client'
import { isProgressRow } from '@clickhouse/client-common'

/** See the format spec - https://clickhouse.com/docs/en/interfaces/formats#jsoneachrowwithprogress
 * When JSONEachRowWithProgress format is used in TypeScript,
 * the ResultSet should infer the final row type as `{ row: Data } | ProgressRow`. */
type Data = { number: string }

void (async () => {
  const client = createClient()
  const rs = await client.query({
    query: 'SELECT number FROM system.numbers LIMIT 100',
    format: 'JSONEachRowWithProgress',
  })

  let totalRows = 0
  let totalProgressRows = 0

  const stream = rs.stream<Data>()
  for await (const rows of stream) {
    for (const row of rows) {
      const decodedRow = row.json()
      if (isProgressRow(decodedRow)) {
        console.log('Got a progress row:', decodedRow)
        totalProgressRows++
      } else {
        totalRows++
        if (totalRows % 100 === 0) {
          console.log('Sample row:', decodedRow)
        }
      }
    }
  }

  console.log('Total rows:', totalRows)
  console.log('Total progress rows:', totalProgressRows)

  await client.close()
})()
46 changes: 23 additions & 23 deletions package.json
@@ -43,50 +43,50 @@
"prepare": "husky"
},
"devDependencies": {
"@faker-js/faker": "^8.4.1",
"@faker-js/faker": "^9.0.2",
"@istanbuljs/nyc-config-typescript": "^1.0.2",
"@types/jasmine": "^5.1.4",
"@types/node": "^20.11.30",
"@types/node": "^22.7.0",
"@types/sinon": "^17.0.3",
"@types/split2": "^4.2.3",
"@types/uuid": "^9.0.8",
"@typescript-eslint/eslint-plugin": "^7.3.1",
"@typescript-eslint/parser": "^7.3.1",
"apache-arrow": "^15.0.2",
"eslint": "^8.57.0",
"@types/uuid": "^10.0.0",
"@typescript-eslint/eslint-plugin": "^8.7.0",
"@typescript-eslint/parser": "^8.7.0",
"apache-arrow": "^18.0.0",
"eslint": "^8.57.1",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-expect-type": "^0.3.0",
"eslint-plugin-prettier": "^5.1.3",
"husky": "^9.0.11",
"jasmine": "^5.1.0",
"jasmine-core": "^5.1.2",
"eslint-plugin-expect-type": "^0.4.3",
"eslint-plugin-prettier": "^5.2.1",
"husky": "^9.1.6",
"jasmine": "^5.3.0",
"jasmine-core": "^5.3.0",
"jasmine-expect": "^5.0.0",
"karma": "^6.4.3",
"karma": "^6.4.4",
"karma-chrome-launcher": "^3.2.0",
"karma-firefox-launcher": "^2.1.3",
"karma-jasmine": "^5.1.0",
"karma-mocha-reporter": "^2.2.5",
"karma-sourcemap-loader": "^0.4.0",
"karma-typescript": "^5.5.4",
"karma-webpack": "^5.0.1",
"lint-staged": "^15.2.2",
"nyc": "^15.1.0",
"parquet-wasm": "0.6.0-beta.2",
"prettier": "3.2.5",
"sinon": "^17.0.1",
"lint-staged": "^15.2.10",
"nyc": "^17.1.0",
"parquet-wasm": "0.6.1",
"prettier": "3.3.3",
"sinon": "^19.0.2",
"source-map-support": "^0.5.21",
"split2": "^4.2.0",
"terser-webpack-plugin": "^5.3.10",
"ts-jest": "^29.1.2",
"ts-jest": "^29.2.5",
"ts-loader": "^9.5.1",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0",
"tsconfig-paths-webpack-plugin": "^4.1.0",
"typescript": "^5.4.3",
"uuid": "^9.0.1",
"webpack": "^5.91.0",
"typescript": "^5.6.2",
"uuid": "^11.0.1",
"webpack": "^5.95.0",
"webpack-cli": "^5.1.4",
"webpack-merge": "^5.10.0"
"webpack-merge": "^6.0.1"
},
"workspaces": [
"./packages/*"
@@ -7,7 +7,6 @@ describe('abort request', () => {
beforeEach(() => {
client = createTestClient()
})

afterEach(async () => {
await client.close()
})
@@ -14,9 +14,10 @@ describe('ClickHouse server errors parsing', () => {
// Possible error messages here:
// (since 24.3+, Cloud SMT): Unknown expression identifier 'number' in scope SELECT number AS FR
// (since 23.8+, Cloud RMT): Missing columns: 'number' while processing query: 'SELECT number AS FR', required columns: 'number'
// (since 24.9+): Unknown expression identifier `number` in scope SELECT number AS FR
const errorMessagePattern =
`((?:Missing columns: 'number' while processing query: 'SELECT number AS FR', required columns: 'number')|` +
`(?:Unknown expression identifier 'number' in scope SELECT number AS FR))`
`(?:Unknown expression identifier ('|\`)number('|\`) in scope SELECT number AS FR))`
await expectAsync(
client.query({
query: 'SELECT number FR',
@@ -37,7 +38,7 @@ describe('ClickHouse server errors parsing', () => {
const dbName = getTestDatabaseName()
const errorMessagePattern =
`((?:^Table ${dbName}.unknown_table does not exist.*)|` +
`(?:Unknown table expression identifier 'unknown_table' in scope))`
`(?:Unknown table expression identifier ('|\`)unknown_table('|\`) in scope))`
await expectAsync(
client.query({
query: 'SELECT * FROM unknown_table',