style(ruff): clear ruff check violations (#175)
IndexSeek authored Dec 1, 2024
1 parent c32d604 commit 7d2d021
Showing 13 changed files with 2,114 additions and 2,121 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -4,7 +4,7 @@ repos:
    hooks:
      - id: prettier
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.2
+    rev: v0.8.0
    hooks:
      - id: ruff
        args: [--fix]
6 changes: 3 additions & 3 deletions examples/Preprocess your data with recipes.ipynb
@@ -490,7 +490,7 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 11,
+  "execution_count": null,
"id": "dc04f24e-c8cb-4580-b502-a9410c64a126",
"metadata": {},
"outputs": [],
@@ -510,7 +510,7 @@
    " from skorch import NeuralNetClassifier\n",
    "\n",
    " class MyModule(nn.Module):\n",
-   " def __init__(self, num_units=10, nonlin=nn.ReLU()):\n",
+   " def __init__(self, num_units=10, nonlin=nn.ReLU()): # noqa: B008\n",
" super().__init__()\n",
"\n",
" self.dense0 = nn.Linear(10, num_units)\n",
@@ -525,7 +525,7 @@
    " X = self.dropout(X)\n",
    " X = self.nonlin(self.dense1(X))\n",
    " X = self.softmax(self.output(X))\n",
-   " return X\n",
+   " return X # noqa: RET504\n",
"\n",
" mod = NeuralNetClassifier(\n",
" MyModule,\n",
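
Note: the two `# noqa` comments added above suppress ruff findings rather than change behavior. B008 (flake8-bugbear) flags function calls in argument defaults, since the call runs once at definition time and the result is shared across calls; RET504 flags a value that is assigned and then immediately returned. A minimal, illustrative sketch of the B008 pattern and the usual warning-free alternative (not part of this diff; class names are made up, and torch is assumed to be installed, as the notebook assumes):

import torch.nn as nn


class WithDefaultCall(nn.Module):
    # Flagged by B008: nn.ReLU() is evaluated once, when the def is parsed.
    # Suppressing it is reasonable here because nn.ReLU() is stateless.
    def __init__(self, num_units=10, nonlin=nn.ReLU()):  # noqa: B008
        super().__init__()
        self.dense0 = nn.Linear(10, num_units)
        self.nonlin = nonlin


class WithoutDefaultCall(nn.Module):
    # Warning-free alternative: default to None and construct inside __init__.
    def __init__(self, num_units=10, nonlin=None):
        super().__init__()
        self.dense0 = nn.Linear(10, num_units)
        self.nonlin = nonlin if nonlin is not None else nn.ReLU()
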
4,195 changes: 2,094 additions & 2,101 deletions examples/Using IbisML and DuckDB for a Kaggle competition.ipynb

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion ibis_ml/core.py
@@ -478,7 +478,7 @@ def _name_estimators(estimators):
    for i in reversed(range(len(estimators))):
        name = names[i]
        if name in namecount:
-            names[i] += "-%d" % namecount[name]
+            names[i] += f"-{namecount[name]}"
            namecount[name] -= 1

    return list(zip(names, estimators))
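
Note: swapping printf-style `%` formatting for an f-string matches what ruff's pyupgrade rule UP031 asks for; the same cleanup appears in ibis_ml/utils/_pprint.py below. A tiny illustrative before/after (values are made up, not from the repo):

namecount = {"mutateat": 2}
name = "mutateat"

old_style = name + "-%d" % namecount[name]  # printf-style, flagged by UP031
new_style = name + f"-{namecount[name]}"    # f-string form used in the diff

assert old_style == new_style == "mutateat-2"
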
2 changes: 1 addition & 1 deletion ibis_ml/select.py
@@ -30,7 +30,7 @@ def __repr__(self):
        return f"{name}({args})"

    def __eq__(self, other):
-        return type(self) == type(other) and all(
+        return isinstance(other, type(self)) and all(
            getattr(self, name) == getattr(other, name) for name in self._fields
        )

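Note: ruff's E721 discourages comparing types with `==`. The `isinstance(other, type(self))` form satisfies the rule but is not strictly identical: `isinstance` also accepts subclasses, while `type(...) == type(...)` requires an exact match. A small illustrative sketch (class names are hypothetical):

class Selector:
    pass


class ColumnSelector(Selector):
    pass


a, b = Selector(), ColumnSelector()

print(type(a) == type(b))      # False: exact-type comparison (what E721 flags)
print(isinstance(b, type(a)))  # True: subclasses of Selector are accepted
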
2 changes: 1 addition & 1 deletion ibis_ml/steps/_discretize.py
@@ -30,7 +30,7 @@ class DiscretizeKBins(Step):
    Raises
-    ----------
+    ------
    ValueError
        If `n_bins` is less than or equal to 1 or if an unsupported
        `strategy` is provided.
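
Note: the docstring change only shortens the underline so it matches the length of the section title, which the numpydoc convention (ruff's pydocstyle rule D409) expects. An illustrative, self-contained sketch (function and logic are made up, not taken from the step itself):

def discretize(values, n_bins):
    """Bucket values into equal-width bins.

    Raises
    ------
    ValueError
        If ``n_bins`` is less than or equal to 1.
    """
    if n_bins <= 1:
        raise ValueError("n_bins must be greater than 1")
    lo, hi = min(values), max(values)
    width = (hi - lo) / n_bins or 1
    return [min(int((v - lo) / width), n_bins - 1) for v in values]
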
2 changes: 1 addition & 1 deletion ibis_ml/utils/_pprint.py
@@ -111,7 +111,7 @@ def _safe_repr(self, object, context, maxlevels, level):
if krecur or vrecur:
recursive = True
del context[objid]
-        return "{%s}" % ", ".join(components), readable, recursive
+        return f"{{{', '.join(components)}}}", readable, recursive

if (issubclass(typ, list) and r is list.__repr__) or (
issubclass(typ, tuple) and r is tuple.__repr__
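
Note: this is the same `%`-to-f-string cleanup as in ibis_ml/core.py, with one wrinkle: literal braces inside an f-string must be doubled, which is why the dict repr becomes f"{{{', '.join(components)}}}". A quick illustrative check (values are made up):

components = ["'a': 1", "'b': 2"]

old = "{%s}" % ", ".join(components)
new = f"{{{', '.join(components)}}}"

assert old == new == "{'a': 1, 'b': 2}"
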
1 change: 1 addition & 0 deletions pyproject.toml
@@ -86,6 +86,7 @@ ignore = [
[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401", "F403"]
"docs/**/*.py" = ["INP001"]
+"examples/*.ipynb" = ["E501", "ERA001", "I001", "T201"]

[tool.ruff.lint.isort]
split-on-trailing-comma = false
11 changes: 5 additions & 6 deletions tests/test_core.py
@@ -22,7 +22,7 @@ def transform_table(self, table):
return table.order_by(ibis.random())


-@pytest.fixture()
+@pytest.fixture
def df():
return pd.DataFrame(
{
@@ -34,7 +34,7 @@ def df():
)


-@pytest.fixture()
+@pytest.fixture
def table(df):
return ibis.memtable(df)

@@ -410,12 +410,11 @@ def test_set_params_passes_all_parameters():
def test_set_params_updates_valid_params():
# Check that set_params tries to set `replacement_mutateat.inputs`, not
# `original_mutateat.inputs`.
-    original_mutateat = ml.MutateAt("dep_time", ibis._.hour() * 60 + ibis._.minute())  # noqa: SLF001
+    original_mutateat = ml.MutateAt("dep_time", ibis._.hour() * 60 + ibis._.minute())
    rec = ml.Recipe(
-        original_mutateat,
-        ml.MutateAt(ml.timestamp(), ibis._.epoch_seconds()),  # noqa: SLF001
+        original_mutateat, ml.MutateAt(ml.timestamp(), ibis._.epoch_seconds())
    )
-    replacement_mutateat = ml.MutateAt("arr_time", ibis._.hour() * 60 + ibis._.minute())  # noqa: SLF001
+    replacement_mutateat = ml.MutateAt("arr_time", ibis._.hour() * 60 + ibis._.minute())
rec.set_params(
**{"mutateat-1": replacement_mutateat, "mutateat-1__inputs": ml.cols("arrival")}
)
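
Note: the `@pytest.fixture()` -> `@pytest.fixture` change here, repeated in the test files below, follows ruff's flake8-pytest-style check (PT001), whose current default prefers the bare decorator; both spellings define the same fixture. A minimal illustrative test module (fixture and test names are made up):

import pytest


@pytest.fixture  # preferred by the current ruff default
def sample_rows():
    return [1, 2, 3]


def test_row_count(sample_rows):
    assert len(sample_rows) == 3
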
4 changes: 2 additions & 2 deletions tests/test_encode.py
@@ -8,7 +8,7 @@
import ibis_ml as ml


-@pytest.fixture()
+@pytest.fixture
def t_train():
return ibis.memtable(
{
@@ -27,7 +27,7 @@ def t_train():
)


-@pytest.fixture()
+@pytest.fixture
def t_test():
return ibis.memtable(
{
2 changes: 1 addition & 1 deletion tests/test_generate_features.py
@@ -5,7 +5,7 @@
import ibis_ml as ml


-@pytest.fixture()
+@pytest.fixture
def train_table():
N = 100
return ibis.memtable({"x": list(range(N)), "y": [10] * N, "z": ["s"] * N})
2 changes: 1 addition & 1 deletion tests/test_impute.py
@@ -9,7 +9,7 @@
import ibis_ml as ml


-@pytest.fixture()
+@pytest.fixture
def train_table():
return ibis.memtable(
{
4 changes: 2 additions & 2 deletions tests/test_pprint.py
@@ -3,15 +3,15 @@
import ibis_ml as ml


-@pytest.fixture()
+@pytest.fixture
def rec():
imputer = ml.ImputeMean(ml.numeric())
scaler = ml.ScaleStandard(ml.numeric())
encoder = ml.OneHotEncode(ml.string(), min_frequency=20, max_categories=10)
return ml.Recipe(imputer, scaler, encoder)


-@pytest.fixture()
+@pytest.fixture
def pipe(rec):
pytest.importorskip("sklearn")
from sklearn.pipeline import Pipeline
