MNT upgrade black version (#28802)
adrinjalali authored Apr 10, 2024
1 parent d9e932e commit c4c5463
Showing 156 changed files with 311 additions and 224 deletions.
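The recurring one-line hunks below reflect the most common mechanical change between Black 23.3.0 and 24.3.0: as the repeated +1/-0 hunks show, the upgraded formatter inserts a blank line between a module docstring and the first statement that follows it. A minimal sketch of the effect on a hypothetical benchmark-style module (the module and its imports are invented for illustration):

"""Toy benchmark module, hypothetical and only used to illustrate the rule."""

# Black 24.x enforces the blank line above; under Black 23.3.0 the first
# import could sit directly below the closing quotes of the module docstring.
import gc
from datetime import datetime

# Keep a reference so linters do not flag the illustrative imports as unused.
_ = (gc, datetime)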
1 change: 1 addition & 0 deletions .github/scripts/label_title_regex.py
@@ -1,5 +1,6 @@
"""Labels PRs based on title. Must be run in a github action with the
pull_request_target event."""

import json
import os
import re
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -12,7 +12,7 @@ repos:
- id: ruff
args: ["--fix", "--output-format=full"]
- repo: https://github.com/psf/black
rev: 23.3.0
rev: 24.3.0
hooks:
- id: black
- repo: https://github.com/pre-commit/mirrors-mypy
1 change: 1 addition & 0 deletions benchmarks/bench_glm.py
@@ -4,6 +4,7 @@
Data comes from a random square matrix.
"""

from datetime import datetime

import numpy as np
1 change: 1 addition & 0 deletions benchmarks/bench_glmnet.py
@@ -16,6 +16,7 @@
In both cases, only 10% of the features are informative.
"""

import gc
from time import time

1 change: 1 addition & 0 deletions benchmarks/bench_isotonic.py
@@ -10,6 +10,7 @@
This allows the scaling of the algorithm with the problem size to be
visualized and understood.
"""

import argparse
import gc
from datetime import datetime
1 change: 1 addition & 0 deletions benchmarks/bench_kernel_pca_solvers_time_vs_n_components.py
@@ -35,6 +35,7 @@
You can also set `arpack_all=True` to activate arpack solver for large number
of components (this takes more time).
"""

# Authors: Sylvain MARIE, Schneider Electric

import time
1 change: 1 addition & 0 deletions benchmarks/bench_kernel_pca_solvers_time_vs_n_samples.py
@@ -37,6 +37,7 @@
Solvers comparison benchmark: time vs n_components", where this time the number
of examples is fixed, and the desired number of components varies.
"""

# Author: Sylvain MARIE, Schneider Electric

import time
1 change: 1 addition & 0 deletions benchmarks/bench_lasso.py
@@ -11,6 +11,7 @@
In both cases, only 10% of the features are informative.
"""

import gc
from time import time

1 change: 1 addition & 0 deletions benchmarks/bench_plot_lasso_path.py
@@ -2,6 +2,7 @@
The input data is mostly low rank but is a fat infinite tail.
"""

import gc
import sys
from collections import defaultdict
1 change: 1 addition & 0 deletions benchmarks/bench_plot_neighbors.py
@@ -1,6 +1,7 @@
"""
Plot the scaling of the nearest neighbors algorithms with k, D, and N
"""

from time import time

import matplotlib.pyplot as plt
7 changes: 3 additions & 4 deletions benchmarks/bench_plot_nmf.py
@@ -1,6 +1,7 @@
"""
Benchmarks of Non-Negative Matrix Factorization
"""

# Authors: Tom Dupre la Tour (benchmark)
# Chih-Jen Linn (original projected gradient NMF implementation)
# Anthony Di Franco (projected gradient, Python and NumPy port)
@@ -258,8 +259,7 @@ def _fit_transform(self, X, y=None, W=None, H=None, update_H=True):
if not isinstance(self.max_iter, numbers.Integral) or self.max_iter < 0:
raise ValueError(
"Maximum number of iterations must be a positive "
"integer; got (max_iter=%r)"
% self.max_iter
"integer; got (max_iter=%r)" % self.max_iter
)
if not isinstance(self.tol, numbers.Number) or self.tol < 0:
raise ValueError(
@@ -305,8 +305,7 @@ def _fit_transform(self, X, y=None, W=None, H=None, update_H=True):
if n_iter == self.max_iter and self.tol > 0:
warnings.warn(
"Maximum number of iteration %d reached. Increase it"
" to improve convergence."
% self.max_iter,
" to improve convergence." % self.max_iter,
ConvergenceWarning,
)
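The two bench_plot_nmf.py hunks above show the other recurring rewrite in this commit: when a %-formatted message fits within the line length, the upgraded formatter keeps the % operand on the same line as the final string fragment instead of breaking before the operator. A small standalone sketch of the same pattern (the warning text mirrors the hunk; max_iter is a made-up value, and RuntimeWarning stands in for scikit-learn's ConvergenceWarning):

import warnings

max_iter = 200  # hypothetical value, only so the snippet runs on its own

# Black 23.3.0 left the operand on its own continuation line:
#     "Maximum number of iteration %d reached. Increase it"
#     " to improve convergence."
#     % max_iter,
# Black 24.3.0 pulls the operand up to the last string fragment:
warnings.warn(
    "Maximum number of iteration %d reached. Increase it"
    " to improve convergence." % max_iter,
    RuntimeWarning,  # stand-in for scikit-learn's ConvergenceWarning
)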

1 change: 1 addition & 0 deletions benchmarks/bench_plot_omp_lars.py
@@ -3,6 +3,7 @@
The input data is mostly low rank but is a fat infinite tail.
"""

import gc
import sys
from time import time
1 change: 1 addition & 0 deletions benchmarks/bench_plot_polynomial_kernel_approximation.py
@@ -38,6 +38,7 @@
(https://people.cs.rutgers.edu/~farach/pubs/FrequentStream.pdf)
"""

# Author: Daniel Lopez-Sanchez <[email protected]>
# License: BSD 3 clause

1 change: 1 addition & 0 deletions benchmarks/bench_plot_svd.py
@@ -2,6 +2,7 @@
The data is mostly low rank but is a fat infinite tail.
"""

import gc
from collections import defaultdict
from time import time
1 change: 1 addition & 0 deletions benchmarks/bench_random_projections.py
@@ -6,6 +6,7 @@
Benchmarks for random projections.
"""

import collections
import gc
import optparse
5 changes: 2 additions & 3 deletions benchmarks/bench_saga.py
@@ -3,6 +3,7 @@
Benchmarks of sklearn SAGA vs lightning SAGA vs Liblinear. Shows the gain
in using multinomial logistic regression in term of learning time.
"""

import json
import os
import time
@@ -118,9 +119,7 @@ def fit_single(
# Lightning predict_proba is not implemented for n_classes > 2
y_pred = _predict_proba(lr, X)
score = log_loss(y, y_pred, normalize=False) / n_samples
score += 0.5 * alpha * np.sum(lr.coef_**2) + beta * np.sum(
np.abs(lr.coef_)
)
score += 0.5 * alpha * np.sum(lr.coef_**2) + beta * np.sum(np.abs(lr.coef_))
scores.append(score)
train_score, test_score = tuple(scores)

1 change: 1 addition & 0 deletions benchmarks/bench_sample_without_replacement.py
@@ -2,6 +2,7 @@
Benchmarks for sampling without replacement of integer.
"""

import gc
import operator
import optparse
1 change: 1 addition & 0 deletions benchmarks/bench_text_vectorizers.py
@@ -8,6 +8,7 @@
* psutil (optional, but recommended)
"""

import itertools
import timeit

1 change: 1 addition & 0 deletions benchmarks/bench_tree.py
@@ -13,6 +13,7 @@
training set, classify a sample and plot the time taken as a function
of the number of dimensions.
"""

import gc
from datetime import datetime

6 changes: 4 additions & 2 deletions benchmarks/bench_tsne_mnist.py
@@ -130,7 +130,8 @@ def sanitize(filename):
try:
from bhtsne.bhtsne import run_bh_tsne
except ImportError as e:
raise ImportError("""\
raise ImportError(
"""\
If you want comparison with the reference implementation, build the
binary from source (https://github.com/lvdmaaten/bhtsne) in the folder
benchmarks/bhtsne and add an empty `__init__.py` file in the folder:
@@ -140,7 +141,8 @@ def sanitize(filename):
$ g++ sptree.cpp tsne.cpp tsne_main.cpp -o bh_tsne -O2
$ touch __init__.py
$ cd ..
""") from e
"""
) from e

def bhtsne(X):
"""Wrapper for the reference lvdmaaten/bhtsne implementation."""
1 change: 1 addition & 0 deletions build_tools/generate_authors_table.py
@@ -6,6 +6,7 @@
The table should be updated for each new inclusion in the teams.
Generating the table requires admin rights.
"""

import getpass
import sys
import time
3 changes: 1 addition & 2 deletions build_tools/get_comment.py
@@ -88,8 +88,7 @@ def get_message(log_file, repo, pr_number, sha, run_id, details, versions):
"https://scikit-learn.org/dev/developers/contributing.html"
"#how-to-contribute)) and push the changes. If you already have done "
"that, please send an empty commit with `git commit --allow-empty` "
"and push the changes to trigger the CI.\n\n"
+ sub_text
"and push the changes to trigger the CI.\n\n" + sub_text
)

message = ""
1 change: 1 addition & 0 deletions build_tools/github/check_wheels.py
@@ -1,5 +1,6 @@
"""Checks that dist/* contains the number of wheels built from the
.github/workflows/wheels.yml config."""

import sys
from pathlib import Path

1 change: 0 additions & 1 deletion build_tools/github/vendor.py
@@ -1,6 +1,5 @@
"""Embed vcomp140.dll and msvcp140.dll."""


import os
import os.path as op
import shutil
33 changes: 22 additions & 11 deletions build_tools/update_environments_and_lock_files.py
@@ -102,7 +102,8 @@ def remove_from(alist, to_remove):
"folder": "build_tools/azure",
"platform": "linux-64",
"channel": "conda-forge",
"conda_dependencies": common_dependencies + [
"conda_dependencies": common_dependencies
+ [
"ccache",
"pytorch",
"pytorch-cpu",
@@ -123,7 +124,8 @@ def remove_from(alist, to_remove):
"folder": "build_tools/azure",
"platform": "osx-64",
"channel": "conda-forge",
"conda_dependencies": common_dependencies + [
"conda_dependencies": common_dependencies
+ [
"ccache",
"compilers",
"llvm-openmp",
@@ -160,7 +162,8 @@ def remove_from(alist, to_remove):
"channel": "defaults",
"conda_dependencies": remove_from(
common_dependencies, ["pandas", "cython", "pip", "ninja", "meson-python"]
) + ["ccache"],
)
+ ["ccache"],
"package_constraints": {
"python": "3.9",
"blas": "[build=openblas]",
@@ -268,7 +271,8 @@ def remove_from(alist, to_remove):
"folder": "build_tools/azure",
"platform": "win-64",
"channel": "conda-forge",
"conda_dependencies": remove_from(common_dependencies, ["pandas", "pyamg"]) + [
"conda_dependencies": remove_from(common_dependencies, ["pandas", "pyamg"])
+ [
"wheel",
"pip",
],
@@ -284,7 +288,8 @@ def remove_from(alist, to_remove):
"folder": "build_tools/circle",
"platform": "linux-64",
"channel": "conda-forge",
"conda_dependencies": common_dependencies_without_coverage + [
"conda_dependencies": common_dependencies_without_coverage
+ [
"scikit-image",
"seaborn",
"memory_profiler",
@@ -324,7 +329,8 @@ def remove_from(alist, to_remove):
"folder": "build_tools/circle",
"platform": "linux-64",
"channel": "conda-forge",
"conda_dependencies": common_dependencies_without_coverage + [
"conda_dependencies": common_dependencies_without_coverage
+ [
"scikit-image",
"seaborn",
"memory_profiler",
@@ -353,7 +359,8 @@ def remove_from(alist, to_remove):
"channel": "conda-forge",
"conda_dependencies": remove_from(
common_dependencies_without_coverage, ["pandas", "pyamg"]
) + ["pip", "ccache"],
)
+ ["pip", "ccache"],
"package_constraints": {
"python": "3.9",
},
@@ -460,7 +467,8 @@ def get_package_with_constraint(package_name, build_metadata, uses_pip=False):


def get_conda_environment_content(build_metadata):
template = environment.from_string("""
template = environment.from_string(
"""
# DO NOT EDIT: this file is generated from the specification found in the
# following script to centralize the configuration for CI builds:
# build_tools/update_environments_and_lock_files.py
@@ -476,7 +484,8 @@ def get_conda_environment_content(build_metadata):
{% for pip_dep in build_metadata.get('pip_dependencies', []) %}
- {{ pip_dep | get_package_with_constraint(build_metadata, uses_pip=True) }}
{% endfor %}
{% endif %}""".strip())
{% endif %}""".strip()
)
return template.render(build_metadata=build_metadata)


@@ -532,13 +541,15 @@ def write_all_conda_lock_files(build_metadata_list):


def get_pip_requirements_content(build_metadata):
template = environment.from_string("""
template = environment.from_string(
"""
# DO NOT EDIT: this file is generated from the specification found in the
# following script to centralize the configuration for CI builds:
# build_tools/update_environments_and_lock_files.py
{% for pip_dep in build_metadata['pip_dependencies'] %}
{{ pip_dep | get_package_with_constraint(build_metadata, uses_pip=True) }}
{% endfor %}""".strip())
{% endfor %}""".strip()
)
return template.render(build_metadata=build_metadata)
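The update_environments_and_lock_files.py hunks above bundle two more rewrites introduced by the upgrade: a list concatenation that has to wrap is now split with the + operator starting the continuation line, and a triple-quoted template passed as a call argument is moved onto its own line inside the parentheses. A hedged, self-contained sketch of both patterns (the dependency names and template text are invented; textwrap.dedent stands in for the Jinja environment.from_string call):

import textwrap

base_deps = ["numpy", "scipy"]  # hypothetical stand-in for common_dependencies

build_metadata = {
    # Black 24.3.0 starts the continuation line with the operator instead of
    # letting the opening bracket hang after "base_deps + [".
    "conda_dependencies": base_deps
    + [
        "ccache",
        "pip",
    ],
}

# The multiline template argument now gets its own line inside the call.
header = textwrap.dedent(
    """
    # DO NOT EDIT: illustrative header text only
    """
).strip()

print(build_metadata["conda_dependencies"], header)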


2 changes: 1 addition & 1 deletion doc/developers/contributing.rst
@@ -257,7 +257,7 @@ how to set up your git repository:

.. prompt:: bash $

pip install pytest pytest-cov ruff mypy numpydoc black==23.3.0
pip install pytest pytest-cov ruff mypy numpydoc black==24.3.0

.. _upstream:

26 changes: 13 additions & 13 deletions doc/sphinxext/doi_role.py
@@ -1,17 +1,17 @@
"""
doilinks
~~~~~~~~
Extension to add links to DOIs. With this extension you can use e.g.
:doi:`10.1016/S0022-2836(05)80360-2` in your documents. This will
create a link to a DOI resolver
(``https://doi.org/10.1016/S0022-2836(05)80360-2``).
The link caption will be the raw DOI.
You can also give an explicit caption, e.g.
:doi:`Basic local alignment search tool <10.1016/S0022-2836(05)80360-2>`.
:copyright: Copyright 2015 Jon Lund Steffensen. Based on extlinks by
the Sphinx team.
:license: BSD.
doilinks
~~~~~~~~
Extension to add links to DOIs. With this extension you can use e.g.
:doi:`10.1016/S0022-2836(05)80360-2` in your documents. This will
create a link to a DOI resolver
(``https://doi.org/10.1016/S0022-2836(05)80360-2``).
The link caption will be the raw DOI.
You can also give an explicit caption, e.g.
:doi:`Basic local alignment search tool <10.1016/S0022-2836(05)80360-2>`.
:copyright: Copyright 2015 Jon Lund Steffensen. Based on extlinks by
the Sphinx team.
:license: BSD.
"""

from docutils import nodes, utils
1 change: 1 addition & 0 deletions doc/sphinxext/sphinx_issues.py
@@ -18,6 +18,7 @@
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""

import re

from docutils import nodes, utils
1 change: 1 addition & 0 deletions examples/applications/plot_face_recognition.py
@@ -11,6 +11,7 @@
.. _LFW: http://vis-www.cs.umass.edu/lfw/
"""

# %%
from time import time
