Skip to content

Commit ca90076

Browse files
authored
Replace flake8/isort with ruff (#6441)
There are a few advantages of using ruff over these tools:

* It's way faster.
* It's easier to configure.
* It includes support for a bunch of other linters (for example, bugbear) right out of the box, which catches some things and makes our code more consistent.

Ruff works great with black, which I'd recommend we continue using. Fixed a few minor issues that the new linter combo picked up.
1 parent c97afeb commit ca90076

15 files changed

+41
-55
lines changed

.github/workflows/ci.yml

+2-3
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,9 @@ jobs:
1616
- uses: actions/setup-python@v4
1717
with:
1818
python-version: '3.8'
19-
- run: sudo pip install flake8==6.1.0 black==23.1.0 isort==5.12.0
20-
- run: flake8 .
19+
- run: sudo pip install black==23.1.0 ruff==0.0.287
20+
- run: ruff check .
2121
- run: black --check .
22-
- run: isort --check-only --diff .
2322

2423
backend-unit-tests:
2524
runs-on: ubuntu-22.04

.pre-commit-config.yaml

+3-8
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,10 @@
11
repos:
2-
- repo: https://github.com/PyCQA/isort
3-
rev: 5.12.0
4-
hooks:
5-
- id: isort
62
- repo: https://github.com/psf/black
73
rev: 23.1.0
84
hooks:
95
- id: black
106
language_version: python3
11-
- repo: https://github.com/pycqa/flake8
12-
rev: 6.1.0
7+
- repo: https://github.com/charliermarsh/ruff-pre-commit
8+
rev: "v0.0.287"
139
hooks:
14-
- id: flake8
15-
exclude: "migration/.*|.git|viz-lib|node_modules|migrations|bin/upgrade"
10+
- id: ruff

Makefile

+6-5
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,8 @@ tests:
3434
docker-compose run server tests
3535

3636
lint:
37-
./bin/flake8_tests.sh
37+
ruff check .
38+
black --check . --diff
3839

3940
backend-unit-tests: up test_db
4041
docker-compose run --rm --name tests server tests
@@ -43,15 +44,15 @@ frontend-unit-tests:
4344
CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 yarn --frozen-lockfile
4445
yarn test
4546

46-
test: lint backend-unit-tests frontend-unit-tests
47+
test: backend-unit-tests frontend-unit-tests lint
4748

48-
build:
49+
build:
4950
yarn build
5051

51-
watch:
52+
watch:
5253
yarn watch
5354

54-
start:
55+
start:
5556
yarn start
5657

5758
redis-cli:

bin/flake8_tests.sh

-9
This file was deleted.

bin/upgrade

+1-1
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ def restart_services():
117117
green("Restarting...")
118118
try:
119119
run("sudo /etc/init.d/redash_supervisord restart")
120-
except subprocess.CalledProcessError as e:
120+
except subprocess.CalledProcessError:
121121
run("sudo service supervisor restart")
122122

123123

pyproject.toml

+12-8
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,6 @@ force-exclude = '''
1010
)/
1111
'''
1212

13-
[tool.isort]
14-
py_version = 38
15-
multi_line_output = 3
16-
include_trailing_comma = true
17-
use_parentheses = true
18-
skip = "migrations"
19-
2013
[tool.poetry]
2114
name = "redash"
2215
version = "23.09.0-dev"
@@ -123,7 +116,7 @@ pyexasol = "0.12.0"
123116
pygridgain = "1.4.0"
124117
pyhive = "0.6.1"
125118
pyignite = "0.6.1"
126-
pymongo = {version = "4.3.3", extras = ["srv", "tls"]}
119+
pymongo = { version = "4.3.3", extras = ["srv", "tls"] }
127120
pymssql = "2.2.8"
128121
pyodbc = "4.0.28"
129122
python-arango = "6.1.0"
@@ -164,3 +157,14 @@ watchdog = "3.0.0"
164157
[build-system]
165158
requires = ["poetry-core"]
166159
build-backend = "poetry.core.masonry.api"
160+
161+
[tool.ruff]
162+
exclude = [".git", "viz-lib", "node_modules", "migrations"]
163+
ignore = ["E501"]
164+
select = ["C9", "E", "F", "W", "I001"]
165+
166+
[tool.ruff.mccabe]
167+
max-complexity = 15
168+
169+
[tool.ruff.per-file-ignores]
170+
"__init__.py" = ["F401"]

redash/handlers/dashboards.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,9 @@
88
BaseResource,
99
filter_by_tags,
1010
get_object_or_404,
11+
paginate,
1112
)
1213
from redash.handlers.base import order_results as _order_results
13-
from redash.handlers.base import paginate
1414
from redash.permissions import (
1515
can_modify,
1616
require_admin_or_owner,

redash/handlers/queries.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,11 @@
1111
BaseResource,
1212
filter_by_tags,
1313
get_object_or_404,
14+
org_scoped_rule,
15+
paginate,
16+
routes,
1417
)
1518
from redash.handlers.base import order_results as _order_results
16-
from redash.handlers.base import org_scoped_rule, paginate, routes
1719
from redash.handlers.query_results import run_query
1820
from redash.models.parameterized_query import ParameterizedQuery
1921
from redash.permissions import (

redash/handlers/users.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -13,9 +13,13 @@
1313
send_password_reset_email,
1414
send_verify_email,
1515
)
16-
from redash.handlers.base import BaseResource, get_object_or_404
16+
from redash.handlers.base import (
17+
BaseResource,
18+
get_object_or_404,
19+
paginate,
20+
require_fields,
21+
)
1722
from redash.handlers.base import order_results as _order_results
18-
from redash.handlers.base import paginate, require_fields
1923
from redash.permissions import (
2024
is_admin_or_owner,
2125
require_admin,

redash/query_runner/dgraph.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -14,12 +14,12 @@
1414
def reduce_item(reduced_item, key, value):
1515
"""From https://github.com/vinay20045/json-to-csv"""
1616
# Reduction Condition 1
17-
if type(value) is list:
17+
if isinstance(value, list):
1818
for i, sub_item in enumerate(value):
1919
reduce_item(reduced_item, "{}.{}".format(key, i), sub_item)
2020

2121
# Reduction Condition 2
22-
elif type(value) is dict:
22+
elif isinstance(value, dict):
2323
sub_keys = value.keys()
2424
for sub_key in sub_keys:
2525
reduce_item(reduced_item, "{}.{}".format(key, sub_key), value[sub_key])

redash/query_runner/google_spreadsheets.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -133,12 +133,12 @@ def parse_worksheet(worksheet):
133133

134134
def parse_spreadsheet(spreadsheet, worksheet_num_or_title):
135135
worksheet = None
136-
if type(worksheet_num_or_title) is int:
136+
if isinstance(worksheet_num_or_title, int):
137137
worksheet = spreadsheet.get_worksheet_by_index(worksheet_num_or_title)
138138
if worksheet is None:
139139
worksheet_count = len(spreadsheet.worksheets())
140140
raise WorksheetNotFoundError(worksheet_num_or_title, worksheet_count)
141-
elif type(worksheet_num_or_title) is str:
141+
elif isinstance(worksheet_num_or_title, str):
142142
worksheet = spreadsheet.get_worksheet_by_title(worksheet_num_or_title)
143143
if worksheet is None:
144144
raise WorksheetNotFoundByTitleError(worksheet_num_or_title)

redash/query_runner/ignite.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ def _get_tables(self, schema):
102102

103103
def normalise_column(self, col):
104104
# if it's a datetime, just return the milliseconds
105-
if type(col) is tuple and len(col) == 2 and type(col[0]) is datetime.datetime and type(col[1]) is int:
105+
if type(col) is tuple and len(col) == 2 and type(col[0]) is datetime.datetime and isinstance(col[1], int):
106106
return col[0]
107107
else:
108108
return col

redash/query_runner/prometheus.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def convert_query_range(payload):
5555
continue
5656
value = payload[key][0]
5757

58-
if type(value) is str:
58+
if isinstance(value, str):
5959
# Don't convert timestamp string
6060
try:
6161
int(value)

redash/tasks/worker.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,8 @@
1010
from rq.utils import utcnow
1111
from rq.worker import (
1212
HerokuWorker, # HerokuWorker implements graceful shutdown on SIGTERM
13+
Worker,
1314
)
14-
from rq.worker import Worker
1515

1616
from redash import statsd_client
1717

setup.cfg

-10
This file was deleted.

0 commit comments

Comments (0)