4 changes: 3 additions & 1 deletion Makefile
@@ -4,6 +4,7 @@ DATEANDTIME=$(shell date +%Y-%m-%dT%T%z)

# External commands
BLACK ?= black
ISORT ?= isort
CTAGS ?= ctags
FIND ?= find
PYTHON ?= python3
@@ -147,5 +148,6 @@ clean::
.PHONY: clean

format::
$(BLACK) --skip-string-normalization --target-version py310 $(PYTHON_DIRS)
$(BLACK) $(PYTHON_DIRS)
$(ISORT) --profile=black $(PYTHON_DIRS)
.PHONY: format
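For context: dropping `--skip-string-normalization` lets black apply its default quote style, and the new isort pass (run with the black-compatible profile) keeps import ordering consistent. A minimal illustration of what `make format` now does, using a line from this very PR:

```python
# Before: string normalization was skipped, so single quotes survived.
collect_ignore = ['contrib', 'examples', 'build']

# After: black rewrites string literals to double quotes, and
# `isort --profile=black` sorts/groups imports without fighting black.
collect_ignore = ["contrib", "examples", "build"]
```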
8 changes: 4 additions & 4 deletions conftest.py
@@ -1,7 +1,7 @@
import sys
from pathlib import Path

collect_ignore = ['contrib', 'examples', 'build']
collect_ignore = ["contrib", "examples", "build"]


def pytest_ignore_collect(collection_path, config):
@@ -17,7 +17,7 @@ def pytest_ignore_collect(collection_path, config):

def pytest_addoption(parser):
parser.addoption(
'--repeat', action='store', help='Number of times to repeat each test'
"--repeat", action="store", help="Number of times to repeat each test"
)


@@ -28,9 +28,9 @@ def pytest_generate_tests(metafunc):
# We're going to duplicate these tests by parametrizing them,
# which requires that each test has a fixture to accept the parameter.
# We can add a new fixture like so:
metafunc.fixturenames.append('tmp_ct')
metafunc.fixturenames.append("tmp_ct")

# Now we parametrize. This is what happens when we do e.g.,
# @pytest.mark.parametrize('tmp_ct', range(count))
# def test_foo(): pass
metafunc.parametrize('tmp_ct', range(count))
metafunc.parametrize("tmp_ct", range(count))
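The comments above describe the dynamic equivalent of `@pytest.mark.parametrize`. As a self-contained sketch of the whole `--repeat` pattern (the line that reads the option is elided from this diff, so it is an assumption here):

```python
def pytest_generate_tests(metafunc):
    # Assumed: fetch the value registered by pytest_addoption("--repeat").
    count = metafunc.config.getoption("--repeat")
    if count is None:
        return
    # Each test needs a fixture to accept the parameter...
    metafunc.fixturenames.append("tmp_ct")
    # ...and parametrizing over range(count) runs it `count` times.
    metafunc.parametrize("tmp_ct", range(int(count)))
```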
6 changes: 3 additions & 3 deletions contrib/pyqt-reduce-handler.py
@@ -12,7 +12,7 @@
import jsonpickle
from jsonpickle import handlers

text_type = eval('unicode') if str is bytes else str
text_type = eval("unicode") if str is bytes else str


class QReduceHandler(handlers.BaseHandler):
@@ -21,13 +21,13 @@ def flatten(self, obj, data):
if not pickler.unpicklable:
return text_type(obj)
flatten = pickler.flatten
data['__reduce__'] = [flatten(i, reset=False) for i in obj.__reduce__()[1]]
data["__reduce__"] = [flatten(i, reset=False) for i in obj.__reduce__()[1]]
return data

def restore(self, data):
unpickler = self.context
restore = unpickler.restore
reduced = [restore(i, reset=False) for i in data['__reduce__']]
reduced = [restore(i, reset=False) for i in data["__reduce__"]]
modulename = reduced[0]
classname = reduced[1]
args = reduced[2]
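The handler above round-trips sip-wrapped Qt objects through their `__reduce__()` tuple; the actual registration calls sit in the portion of the file not shown in this diff. A hypothetical registration, assuming PyQt5 is installed and the class pickles via `__reduce__`:

```python
from PyQt5.QtCore import QPoint  # illustrative Qt value type, not from this PR

from jsonpickle import handlers

# Route flatten/restore for QPoint through the reduce-based handler above.
handlers.register(QPoint, QReduceHandler)
```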
48 changes: 24 additions & 24 deletions docs/conf.py
@@ -14,42 +14,42 @@
sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.todo",
"sphinx.ext.coverage",
]

project = 'jsonpickle'
master_doc = 'index'
project = "jsonpickle"
master_doc = "index"

if furo is not None:
html_theme = 'furo'
html_theme = "furo"

# Link dates and other references in the changelog
if rst_linker is not None:
extensions += ['rst.linker']
extensions += ["rst.linker"]

package_url = 'https://github.com/jsonpickle/jsonpickle'
package_url = "https://github.com/jsonpickle/jsonpickle"
link_files = {
'../CHANGES.rst': dict(
using=dict(GH='https://github.com'),
"../CHANGES.rst": dict(
using=dict(GH="https://github.com"),
replace=[
dict(
pattern=r'(Issue #|\B#)(?P<issue>\d+)',
url=package_url + '/issues/{issue}',
pattern=r"(Issue #|\B#)(?P<issue>\d+)",
url=package_url + "/issues/{issue}",
),
dict(
pattern=r'\B\+(?P<pull>\d+)',
url=package_url + '/pull/{pull}',
pattern=r"\B\+(?P<pull>\d+)",
url=package_url + "/pull/{pull}",
),
dict(
pattern=r'(?m:^((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n)',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
pattern=r"(?m:^((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n)",
with_scm="{text}\n{rev[timestamp]:%d %b %Y}\n",
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
pattern=r"PEP[- ](?P<pep_number>\d+)",
url="https://www.python.org/dev/peps/pep-{pep_number:0>4}/",
),
],
)
@@ -58,12 +58,12 @@
# Be strict about any broken references
nitpicky = True

sphinx_disable = os.environ.get('JSONPICKLE_SPHINX_DISABLE', '')
if 'intersphinx' not in sphinx_disable:
extensions += ['sphinx.ext.intersphinx']
sphinx_disable = os.environ.get("JSONPICKLE_SPHINX_DISABLE", "")
if "intersphinx" not in sphinx_disable:
extensions += ["sphinx.ext.intersphinx"]
intersphinx_mapping = {
'python': ('https://docs.python.org/3', None),
'sphinx': ('https://www.sphinx-doc.org/en/stable/', None),
"python": ("https://docs.python.org/3", None),
"sphinx": ("https://www.sphinx-doc.org/en/stable/", None),
}

# Preserve authored syntax for defaults
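The `rst.linker` entries above are ordinary regex substitutions applied to `CHANGES.rst` at docs build time. A small standalone sketch of how the issue pattern behaves (plain `re`, outside Sphinx; the sample changelog text is invented):

```python
import re

# Same pattern as the first replacement above: "#123" or "Issue #123"
# becomes a link into the GitHub issue tracker.
pattern = r"(Issue #|\B#)(?P<issue>\d+)"
replacement = r"https://github.com/jsonpickle/jsonpickle/issues/\g<issue>"
print(re.sub(pattern, replacement, "Example entry: fix reported in #123"))
# -> Example entry: fix reported in https://github.com/jsonpickle/jsonpickle/issues/123
```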
2 changes: 1 addition & 1 deletion garden.yaml
@@ -41,7 +41,7 @@ trees:
${activate} python3 -m sphinx docs pages
fmt: |
${activate}
black --skip-string-normalization --target-version py39 "$@" jsonpickle tests fuzzing/fuzz-targets
black "$@" jsonpickle tests fuzzing/fuzz-targets
isort --profile=black "$@" jsonpickle tests fuzzing/fuzz-targets
setup: |
test -d env3 || python3 -m venv env3
4 changes: 2 additions & 2 deletions jsonpickle/__init__.py
@@ -76,10 +76,10 @@ def __init__(self, name):
from .unpickler import decode
from .version import __version__ # noqa: F401

__all__ = ('encode', 'decode')
__all__ = ("encode", "decode")

# register built-in handlers
__import__('jsonpickle.handlers', level=0)
__import__("jsonpickle.handlers", level=0)

# Export specific JSONPluginMgr methods into the jsonpickle namespace
set_preferred_backend = json.set_preferred_backend
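Since `__all__` pins the public surface to `encode` and `decode`, a quick round trip shows the typical call pattern (a self-contained sketch, not code from the repository):

```python
import jsonpickle


class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y


# encode() flattens the object to a JSON string; decode() rebuilds it using
# the handlers registered by the `__import__("jsonpickle.handlers", ...)` call above.
frozen = jsonpickle.encode(Point(1, 2))
thawed = jsonpickle.decode(frozen)
assert (thawed.x, thawed.y) == (1, 2)
```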
28 changes: 14 additions & 14 deletions jsonpickle/backend.py
@@ -17,7 +17,7 @@ def _verify(self) -> None:
"""Ensures that we've loaded at least one JSON backend."""
if self._verified:
return
raise AssertionError('jsonpickle could not load any json modules')
raise AssertionError("jsonpickle could not load any json modules")

def encode(
self, obj: Any, indent: Optional[int] = None, separators: Optional[Any] = None
@@ -95,17 +95,17 @@ def __init__(self, fallthrough: bool = True) -> None:
# Whether we've loaded any backends successfully
self._verified = False

self.load_backend('simplejson')
self.load_backend('json')
self.load_backend('ujson')
self.load_backend("simplejson")
self.load_backend("json")
self.load_backend("ujson")

# Defaults for various encoders
json_opts = ((), {'sort_keys': False})
json_opts = ((), {"sort_keys": False})
self._encoder_options = {
'ujson': ((), {'sort_keys': False, 'escape_forward_slashes': False}),
'json': json_opts,
'simplejson': json_opts,
'django.util.simplejson': json_opts,
"ujson": ((), {"sort_keys": False, "escape_forward_slashes": False}),
"json": json_opts,
"simplejson": json_opts,
"django.util.simplejson": json_opts,
}

def enable_fallthrough(self, enable: bool) -> None:
@@ -138,8 +138,8 @@ def _store(
def load_backend(
self,
name: str,
dumps: str = 'dumps',
loads: str = 'loads',
dumps: str = "dumps",
loads: str = "loads",
loads_exc: Union[str, Type[Exception]] = ValueError,
) -> bool:
"""Load a JSON backend by name.
@@ -170,7 +170,7 @@ def load_backend(

# Handle submodules, e.g. django.utils.simplejson
try:
for attr in name.split('.')[1:]:
for attr in name.split(".")[1:]:
mod = getattr(mod, attr)
except AttributeError:
return False
@@ -220,9 +220,9 @@ def backend_encode(
optargs, optkwargs = self._encoder_options.get(name, ([], {}))
encoder_kwargs = optkwargs.copy()
if indent is not None:
encoder_kwargs['indent'] = indent # type: ignore[assignment]
encoder_kwargs["indent"] = indent # type: ignore[assignment]
if separators is not None:
encoder_kwargs['separators'] = separators # type: ignore[assignment]
encoder_kwargs["separators"] = separators # type: ignore[assignment]
encoder_args = (obj,) + tuple(optargs)
return self._encoders[name](*encoder_args, **encoder_kwargs) # type: ignore[no-any-return]

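For reference, the backend machinery touched above is driven through the package-level `load_backend()` and `set_preferred_backend()` wrappers re-exported from `JSONPluginMgr`; a short usage sketch, assuming `ujson` is installed:

```python
import jsonpickle

# Any module exposing dumps()/loads() can be registered by name; ujson is
# auto-loaded in JSONPluginMgr.__init__ when importable, shown here explicitly.
jsonpickle.load_backend("ujson", dumps="dumps", loads="loads", loads_exc=ValueError)
jsonpickle.set_preferred_backend("ujson")
print(jsonpickle.encode({"answer": 42}))
```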
2 changes: 1 addition & 1 deletion jsonpickle/compat.py
@@ -7,7 +7,7 @@
PY_MAJOR = sys.version_info[0]

class_types = (type,)
iterator_types = (type(iter('')),)
iterator_types = (type(iter("")),)

string_types = (str,)
numeric_types = (int, float)
6 changes: 3 additions & 3 deletions jsonpickle/ext/gmpy.py
@@ -7,16 +7,16 @@

from ..handlers import BaseHandler, HandlerReturn, register, unregister

__all__ = ['register_handlers', 'unregister_handlers']
__all__ = ["register_handlers", "unregister_handlers"]


class GmpyMPZHandler(BaseHandler):
def flatten(self, obj: gmpy.mpz, data: Dict[str, Any]) -> HandlerReturn:
data['int'] = int(obj)
data["int"] = int(obj)
return data

def restore(self, data: Dict[str, Any]) -> gmpy.mpz:
return gmpy.mpz(data['int'])
return gmpy.mpz(data["int"])


def register_handlers() -> None:
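The gmpy extension is opt-in; a short usage sketch, assuming the legacy `gmpy` package (not `gmpy2`) is available:

```python
import gmpy  # assumption: the legacy gmpy package is installed

import jsonpickle
import jsonpickle.ext.gmpy as gmpy_ext

# register_handlers() hooks GmpyMPZHandler into jsonpickle, so mpz values
# survive a round trip via the {"int": ...} payload shown above.
gmpy_ext.register_handlers()
assert jsonpickle.decode(jsonpickle.encode(gmpy.mpz(123))) == gmpy.mpz(123)
```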