Update CI tools & fix typos (bitsandbytes-foundation#1386)
* Update pre-commit tools

* Fix typos
akx authored and matthewdouglas committed Oct 28, 2024
1 parent 6610c52 commit 59883ac
Showing 5 changed files with 11 additions and 10 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -1,13 +1,13 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.2
+    rev: v0.6.9
     hooks:
       - id: ruff
        args:
         - --fix
       - id: ruff-format
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v5.0.0
     hooks:
       - id: check-merge-conflict
       - id: check-yaml
@@ -18,6 +18,6 @@ repos:
        args:
         - --fix=lf
   - repo: https://github.com/crate-ci/typos
-    rev: v1.18.2
+    rev: v1.26.0
     hooks:
       - id: typos
6 changes: 4 additions & 2 deletions _typos.toml
@@ -4,8 +4,10 @@
 extend-ignore-re = [
     "@Ther-nul", # valid Github user
 ]
-
-[default.extend-identifiers]
+extend-ignore-identifiers-re = [
+    ".*arange.*",
+    ".*ARANGE.*",
+]
 
 [type.py.extend-words]
 "BA" = "BA" # used as a commented-out variable in tests
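Note: the extend-ignore-identifiers-re entries added above tell the updated typos checker to skip any identifier containing "arange" (for example, torch.arange), which it would otherwise report as a misspelling of "arrange". A small, hypothetical Python approximation of how such ignore patterns behave (the real matching is performed inside the typos tool itself):

import re

# The two regexes added above; identifiers matching either are exempt from spell checking.
IGNORE_PATTERNS = [re.compile(p) for p in (".*arange.*", ".*ARANGE.*")]

def is_ignored(identifier: str) -> bool:
    """Return True if the identifier should be skipped by the typo check."""
    return any(p.fullmatch(identifier) for p in IGNORE_PATTERNS)

print(is_ignored("arange_indices"))   # True  -> not flagged as a typo of "arrange"
print(is_ignored("ARANGE_KERNEL"))    # True
print(is_ignored("arrange_rows"))     # False -> still goes through the normal check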
2 changes: 1 addition & 1 deletion bitsandbytes/functional.py
@@ -1875,7 +1875,7 @@ def percentile_clipping(grad: Tensor, gnorm_vec: Tensor, step: int, percentile:
     gnorm_vec: torch.Tensor
         Vector of gradient norms. 100 elements expected.
     step: int
-        The current optimiation steps (number of past gradient norms).
+        The current optimization steps (number of past gradient norms).
     """
     prev_device = pre_call(grad.device)
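For context on the docstring fixed here: percentile_clipping tracks the gradient norms of the last 100 optimization steps in gnorm_vec and rescales the current gradient when its norm exceeds a chosen percentile of that history. A minimal plain-PyTorch sketch of the idea, with hypothetical names and none of the library's CUDA kernels:

import torch

def percentile_clipping_sketch(grad: torch.Tensor, gnorm_vec: torch.Tensor,
                               step: int, percentile: int = 5):
    # Ring buffer: overwrite this step's slot with the current squared gradient norm.
    gnorm_vec[step % 100] = grad.float().norm() ** 2
    current_gnorm = gnorm_vec[step % 100].sqrt()
    # Clip threshold: the percentile-th smallest norm observed in the window.
    clip_value = torch.sort(gnorm_vec).values[percentile].sqrt()
    gnorm_scale = torch.tensor(1.0)
    if step >= 100 and current_gnorm > clip_value:
        # Rescale so the gradient norm equals the clip threshold.
        gnorm_scale = clip_value / current_gnorm
    grad.mul_(gnorm_scale)
    return current_gnorm, clip_value, gnorm_scale

Here gnorm_vec would be a persistent torch.zeros(100) buffer kept alongside the optimizer state; the step >= 100 guard simply skips clipping until the window is full.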
6 changes: 3 additions & 3 deletions csrc/kernels.cu
@@ -2616,7 +2616,7 @@ template <int THREADS, int ITEMS_PER_THREAD, int TILE_ROWS, int TILE_COLS, int T
 //const int global_col = base_row; // block offset for col
 if((base_col + subrow_loop_row + jrow + warp_id < outRows) && (base_row+warp_lane < rows))
 {
-  // each row hae 32 columns and is offset by 1 to prevent bank conflict during storage into smem
+  // each row has 32 columns and is offset by 1 to prevent bank conflict during storage into smem
   char data = smem_data[(subrow_loop_row + jrow + warp_id)*33 + warp_lane];
 
   // each 32 columns we have new tile
@@ -2655,7 +2655,7 @@ template <int THREADS, int ITEMS_PER_THREAD, int TILE_ROWS, int TILE_COLS, int T
 //const int global_col = base_row; // block offset for col
 if((base_col + subrow_loop_row + jrow + warp_id < outRows) && (base_row+warp_lane < rows))
 {
-  // each row hae 32 columns and is offset by 1 to prevent bank conflict during storage into smem
+  // each row has 32 columns and is offset by 1 to prevent bank conflict during storage into smem
   char data = smem_data[(subrow_loop_row + jrow + warp_id)*33 + warp_lane];
 
   // each 32 columns we have new tile
@@ -2732,7 +2732,7 @@ template <int THREADS, int ITEMS_PER_THREAD, int TILE_ROWS, int TILE_COLS, int T
 //const int global_col = base_row; // block offset for col
 if((base_col + subrow_loop_row + jrow + warp_id < outRows) && (base_row+warp_lane < rows))
 {
-  // each row hae 32 columns and is offset by 1 to prevent bank conflict during storage into smem
+  // each row has 32 columns and is offset by 1 to prevent bank conflict during storage into smem
   char data = smem_data[(subrow_loop_row + jrow + warp_id)*33 + warp_lane];
 
   // each 32 columns we have new tile
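The comment corrected in these three hunks refers to a standard shared-memory layout trick: each 32-column tile row is stored with a stride of 33 (index row*33 + lane) so that the elements of a single column land in different banks. A rough, hypothetical illustration of the padding idea in plain Python, using the usual model of 32 banks addressed by element index modulo 32 (the kernel's byte-sized tiles only approximate this model):

def banks_touched(stride: int, col: int, num_rows: int = 32) -> int:
    # Number of distinct banks hit when 32 rows of one column are accessed,
    # under the simple "element index mod 32" bank model.
    return len({(row * stride + col) % 32 for row in range(num_rows)})

print(banks_touched(stride=32, col=5))  # 1  -> every row of the column hits the same bank (32-way conflict)
print(banks_touched(stride=33, col=5))  # 32 -> the +1 padding spreads the column across all banks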
1 change: 0 additions & 1 deletion pyproject.toml
@@ -31,7 +31,6 @@ ignore = [
     "F841", # Local assigned but not used (TODO: enable, these are likely bugs)
     "RUF012", # Mutable class attribute annotations
 ]
-ignore-init-module-imports = true # allow to expose in __init__.py via imports
 
 [tool.ruff.lint.extend-per-file-ignores]
 "**/__init__.py" = ["F401"] # allow unused imports in __init__.py