MNT add isort to ruff's rules (#26649)
adrinjalali authored Jun 21, 2023
1 parent 4a8b4f9 commit 42173fd
Showing 771 changed files with 5,515 additions and 5,563 deletions.
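The bulk of the diff is mechanical: enabling ruff's isort rules (the "I" rule group) makes every module group its imports into standard-library, third-party, and first-party/local blocks, alphabetized within each block and separated by single blank lines. As an illustrative sketch only (not a file from this commit), a module header that follows this convention looks like:

# Illustrative sketch -- not part of the commit; it only shows the ordering
# that ruff's isort ("I") rules enforce.
import json  # standard-library imports first, alphabetized
import os

import numpy as np  # third-party packages next, after a blank line

from sklearn.linear_model import LogisticRegression  # first-party/local imports last

Most hunks below therefore only move import lines and add or remove the blank lines between these groups.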
5 changes: 3 additions & 2 deletions .github/scripts/label_title_regex.py
@@ -1,10 +1,11 @@
"""Labels PRs based on title. Must be run in a github action with the
pull_request_target event."""
from github import Github
import os
import json
import os
import re

from github import Github

context_dict = json.loads(os.getenv("CONTEXT_GITHUB"))

repo = context_dict["repository"]
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -14,6 +14,7 @@ repos:
     rev: v0.0.272
     hooks:
       - id: ruff
+        args: ["--fix", "--show-source"]
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.3.0
     hooks:
2 changes: 1 addition & 1 deletion asv_benchmarks/benchmarks/cluster.py
@@ -1,7 +1,7 @@
 from sklearn.cluster import KMeans, MiniBatchKMeans
 
 from .common import Benchmark, Estimator, Predictor, Transformer
-from .datasets import _blobs_dataset, _20newsgroups_highdim_dataset
+from .datasets import _20newsgroups_highdim_dataset, _blobs_dataset
 from .utils import neg_mean_inertia
 
 
8 changes: 4 additions & 4 deletions asv_benchmarks/benchmarks/common.py
@@ -1,11 +1,11 @@
-import os
 import itertools
 import json
-import timeit
+import os
 import pickle
-import itertools
+import timeit
 from abc import ABC, abstractmethod
-from pathlib import Path
 from multiprocessing import cpu_count
+from pathlib import Path
 
 import numpy as np
 
13 changes: 7 additions & 6 deletions asv_benchmarks/benchmarks/datasets.py
@@ -1,21 +1,22 @@
+from pathlib import Path
+
 import numpy as np
 import scipy.sparse as sp
 from joblib import Memory
-from pathlib import Path
 
-from sklearn.decomposition import TruncatedSVD
 from sklearn.datasets import (
-    make_blobs,
     fetch_20newsgroups,
+    fetch_olivetti_faces,
     fetch_openml,
     load_digits,
-    make_regression,
+    make_blobs,
     make_classification,
-    fetch_olivetti_faces,
+    make_regression,
 )
-from sklearn.preprocessing import MaxAbsScaler, StandardScaler
+from sklearn.decomposition import TruncatedSVD
 from sklearn.feature_extraction.text import TfidfVectorizer
 from sklearn.model_selection import train_test_split
+from sklearn.preprocessing import MaxAbsScaler, StandardScaler
 
 # memory location for caching datasets
 M = Memory(location=str(Path(__file__).resolve().parent / "cache"))
4 changes: 2 additions & 2 deletions asv_benchmarks/benchmarks/decomposition.py
@@ -1,8 +1,8 @@
 from sklearn.decomposition import PCA, DictionaryLearning, MiniBatchDictionaryLearning
 
 from .common import Benchmark, Estimator, Transformer
-from .datasets import _olivetti_faces_dataset, _mnist_dataset
-from .utils import make_pca_scorers, make_dict_learning_scorers
+from .datasets import _mnist_dataset, _olivetti_faces_dataset
+from .utils import make_dict_learning_scorers, make_pca_scorers
 
 
 class PCABenchmark(Transformer, Estimator, Benchmark):
2 changes: 1 addition & 1 deletion asv_benchmarks/benchmarks/ensemble.py
@@ -1,7 +1,7 @@
 from sklearn.ensemble import (
-    RandomForestClassifier,
     GradientBoostingClassifier,
     HistGradientBoostingClassifier,
+    RandomForestClassifier,
 )
 
 from .common import Benchmark, Estimator, Predictor
4 changes: 2 additions & 2 deletions asv_benchmarks/benchmarks/linear_model.py
@@ -1,9 +1,9 @@
 from sklearn.linear_model import (
-    LogisticRegression,
-    Ridge,
     ElasticNet,
     Lasso,
     LinearRegression,
+    LogisticRegression,
+    Ridge,
     SGDRegressor,
 )
 
19 changes: 10 additions & 9 deletions benchmarks/bench_20newsgroups.py
@@ -1,18 +1,19 @@
-from time import time
 import argparse
-import numpy as np
+from time import time
 
-from sklearn.dummy import DummyClassifier
+import numpy as np
 
 from sklearn.datasets import fetch_20newsgroups_vectorized
-from sklearn.metrics import accuracy_score
-from sklearn.utils.validation import check_array
-
-from sklearn.ensemble import RandomForestClassifier
-from sklearn.ensemble import ExtraTreesClassifier
-from sklearn.ensemble import AdaBoostClassifier
+from sklearn.dummy import DummyClassifier
+from sklearn.ensemble import (
+    AdaBoostClassifier,
+    ExtraTreesClassifier,
+    RandomForestClassifier,
+)
 from sklearn.linear_model import LogisticRegression
+from sklearn.metrics import accuracy_score
 from sklearn.naive_bayes import MultinomialNB
+from sklearn.utils.validation import check_array
 
 ESTIMATORS = {
     "dummy": DummyClassifier(),
16 changes: 10 additions & 6 deletions benchmarks/bench_covertype.py
@@ -45,20 +45,24 @@
 # Arnaud Joly <[email protected]>
 # License: BSD 3 clause
 
+import argparse
 import os
 from time import time
-import argparse
+
 import numpy as np
 from joblib import Memory
 
 from sklearn.datasets import fetch_covtype, get_data_home
-from sklearn.svm import LinearSVC
-from sklearn.linear_model import SGDClassifier, LogisticRegression
+from sklearn.ensemble import (
+    ExtraTreesClassifier,
+    GradientBoostingClassifier,
+    RandomForestClassifier,
+)
+from sklearn.linear_model import LogisticRegression, SGDClassifier
+from sklearn.metrics import zero_one_loss
 from sklearn.naive_bayes import GaussianNB
+from sklearn.svm import LinearSVC
 from sklearn.tree import DecisionTreeClassifier
-from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier
-from sklearn.ensemble import GradientBoostingClassifier
-from sklearn.metrics import zero_one_loss
 from sklearn.utils import check_array
 
 # Memoize the data extraction and memory map the resulting
4 changes: 3 additions & 1 deletion benchmarks/bench_feature_expansions.py
@@ -1,8 +1,10 @@
+from time import time
+
 import matplotlib.pyplot as plt
 import numpy as np
 import scipy.sparse as sparse
+
 from sklearn.preprocessing import PolynomialFeatures
-from time import time
 
 degree = 2
 trials = 3
3 changes: 2 additions & 1 deletion benchmarks/bench_glm.py
@@ -5,9 +5,10 @@
 """
 from datetime import datetime
 
 import numpy as np
-from sklearn import linear_model
+
+from sklearn import linear_model
 
 
 if __name__ == "__main__":
     import matplotlib.pyplot as plt
10 changes: 6 additions & 4 deletions benchmarks/bench_glmnet.py
@@ -16,9 +16,11 @@
 In both cases, only 10% of the features are informative.
 """
-import numpy as np
 import gc
 from time import time
 
+import numpy as np
+
 from sklearn.datasets import make_regression
 
 alpha = 0.1
@@ -45,11 +47,11 @@ def bench(factory, X, Y, X_test, Y_test, ref_coef):
 
 
 if __name__ == "__main__":
-    from glmnet.elastic_net import Lasso as GlmnetLasso
-    from sklearn.linear_model import Lasso as ScikitLasso
-
     # Delayed import of matplotlib.pyplot
     import matplotlib.pyplot as plt
+    from glmnet.elastic_net import Lasso as GlmnetLasso
+
+    from sklearn.linear_model import Lasso as ScikitLasso
 
     scikit_results = []
     glmnet_results = []
15 changes: 8 additions & 7 deletions benchmarks/bench_hist_gradient_boosting.py
@@ -1,15 +1,16 @@
-from time import time
 import argparse
+from time import time
 
 import matplotlib.pyplot as plt
 import numpy as np
-from sklearn.model_selection import train_test_split
-from sklearn.ensemble import HistGradientBoostingRegressor
-from sklearn.ensemble import HistGradientBoostingClassifier
-from sklearn.datasets import make_classification
-from sklearn.datasets import make_regression
-from sklearn.ensemble._hist_gradient_boosting.utils import get_equivalent_estimator
 
+from sklearn.datasets import make_classification, make_regression
+from sklearn.ensemble import (
+    HistGradientBoostingClassifier,
+    HistGradientBoostingRegressor,
+)
+from sklearn.ensemble._hist_gradient_boosting.utils import get_equivalent_estimator
+from sklearn.model_selection import train_test_split
 
 parser = argparse.ArgumentParser()
 parser.add_argument("--n-leaf-nodes", type=int, default=31)
7 changes: 3 additions & 4 deletions benchmarks/bench_hist_gradient_boosting_adult.py
@@ -4,15 +4,14 @@
 import numpy as np
 import pandas as pd
 
-from sklearn.model_selection import train_test_split
-from sklearn.compose import make_column_transformer, make_column_selector
+from sklearn.compose import make_column_selector, make_column_transformer
 from sklearn.datasets import fetch_openml
-from sklearn.metrics import accuracy_score, roc_auc_score
 from sklearn.ensemble import HistGradientBoostingClassifier
 from sklearn.ensemble._hist_gradient_boosting.utils import get_equivalent_estimator
+from sklearn.metrics import accuracy_score, roc_auc_score
+from sklearn.model_selection import train_test_split
 from sklearn.preprocessing import OrdinalEncoder
 
-
 parser = argparse.ArgumentParser()
 parser.add_argument("--n-leaf-nodes", type=int, default=31)
 parser.add_argument("--n-trees", type=int, default=100)
3 changes: 1 addition & 2 deletions benchmarks/bench_hist_gradient_boosting_categorical_only.py
@@ -1,11 +1,10 @@
 import argparse
 from time import time
 
-from sklearn.preprocessing import KBinsDiscretizer
 from sklearn.datasets import make_classification
 from sklearn.ensemble import HistGradientBoostingClassifier
 from sklearn.ensemble._hist_gradient_boosting.utils import get_equivalent_estimator
-
+from sklearn.preprocessing import KBinsDiscretizer
 
 parser = argparse.ArgumentParser()
 parser.add_argument("--n-leaf-nodes", type=int, default=31)
10 changes: 5 additions & 5 deletions benchmarks/bench_hist_gradient_boosting_higgsboson.py
@@ -1,17 +1,17 @@
-from urllib.request import urlretrieve
+import argparse
 import os
 from gzip import GzipFile
 from time import time
-import argparse
+from urllib.request import urlretrieve
 
 import numpy as np
 import pandas as pd
 from joblib import Memory
-from sklearn.model_selection import train_test_split
-from sklearn.metrics import accuracy_score, roc_auc_score
 
 from sklearn.ensemble import HistGradientBoostingClassifier
 from sklearn.ensemble._hist_gradient_boosting.utils import get_equivalent_estimator
-
+from sklearn.metrics import accuracy_score, roc_auc_score
+from sklearn.model_selection import train_test_split
+
 parser = argparse.ArgumentParser()
 parser.add_argument("--n-leaf-nodes", type=int, default=31)
17 changes: 9 additions & 8 deletions benchmarks/bench_hist_gradient_boosting_threading.py
@@ -1,18 +1,19 @@
-from time import time
 import argparse
 import os
 from pprint import pprint
+from time import time
 
 import numpy as np
 from threadpoolctl import threadpool_limits
 
 import sklearn
-from sklearn.model_selection import train_test_split
-from sklearn.ensemble import HistGradientBoostingRegressor
-from sklearn.ensemble import HistGradientBoostingClassifier
-from sklearn.datasets import make_classification
-from sklearn.datasets import make_regression
+from sklearn.datasets import make_classification, make_regression
+from sklearn.ensemble import (
+    HistGradientBoostingClassifier,
+    HistGradientBoostingRegressor,
+)
 from sklearn.ensemble._hist_gradient_boosting.utils import get_equivalent_estimator
-
+from sklearn.model_selection import train_test_split
+
 parser = argparse.ArgumentParser()
 parser.add_argument("--n-leaf-nodes", type=int, default=31)
@@ -290,8 +291,8 @@ def one_run(n_threads, n_samples):
 
 
 if args.plot or args.plot_filename:
-    import matplotlib.pyplot as plt
     import matplotlib
+    import matplotlib.pyplot as plt
 
     fig, axs = plt.subplots(2, figsize=(12, 12))
 
7 changes: 4 additions & 3 deletions benchmarks/bench_isolation_forest.py
@@ -17,12 +17,13 @@
 """
 
 from time import time
-import numpy as np
 
 import matplotlib.pyplot as plt
+import numpy as np
 
+from sklearn.datasets import fetch_covtype, fetch_kddcup99, fetch_openml
 from sklearn.ensemble import IsolationForest
-from sklearn.metrics import roc_curve, auc
-from sklearn.datasets import fetch_kddcup99, fetch_covtype, fetch_openml
+from sklearn.metrics import auc, roc_curve
 from sklearn.preprocessing import LabelBinarizer
 from sklearn.utils import shuffle as sh
+
10 changes: 6 additions & 4 deletions benchmarks/bench_isotonic.py
@@ -10,13 +10,15 @@
 This allows the scaling of the algorithm with the problem size to be
 visualized and understood.
 """
-import numpy as np
+import argparse
 import gc
 from datetime import datetime
-from sklearn.isotonic import isotonic_regression
-from scipy.special import expit
+
 import matplotlib.pyplot as plt
-import argparse
+import numpy as np
+from scipy.special import expit
+
+from sklearn.isotonic import isotonic_regression
 
 
 def generate_perturbed_logarithm_dataset(size):
7 changes: 3 additions & 4 deletions benchmarks/bench_kernel_pca_solvers_time_vs_n_components.py
@@ -39,13 +39,12 @@
 
 import time
 
-import numpy as np
 import matplotlib.pyplot as plt
-
+import numpy as np
 from numpy.testing import assert_array_almost_equal
-from sklearn.decomposition import KernelPCA
-from sklearn.datasets import make_circles
 
+from sklearn.datasets import make_circles
+from sklearn.decomposition import KernelPCA
 
 print(__doc__)
 