From 1092b884225d6100f423693b27c62ce655584bbe Mon Sep 17 00:00:00 2001 From: Nineteendo Date: Fri, 19 Jul 2024 20:10:18 +0200 Subject: [PATCH] Allow missing comma's --- src/jsonyx/__init__.py | 54 ++++++------ src/jsonyx/_accelerator.c | 126 ++++++++++++++++----------- src/jsonyx/_accelerator.pyi | 5 +- src/jsonyx/_decoder.py | 83 ++++++++++-------- src/jsonyx/_encoder.py | 4 +- src/jsonyx/test_jsonyx/test_loads.py | 17 ++-- 6 files changed, 164 insertions(+), 125 deletions(-) diff --git a/src/jsonyx/__init__.py b/src/jsonyx/__init__.py index 5e4d1c5..8f08204 100644 --- a/src/jsonyx/__init__.py +++ b/src/jsonyx/__init__.py @@ -6,7 +6,8 @@ "COMMENTS", "DUPLICATE_KEYS", "EVERYTHING", - "NAN", + "MISSING_COMMAS", + "NAN_AND_INFINITY", "NOTHING", "TRAILING_COMMA", "DuplicateKey", @@ -36,6 +37,11 @@ from _typeshed import SupportsRead, SupportsWrite + _AllowList = Container[Literal[ + "comments", "duplicate_keys", "missing_commas", "nan_and_infinity", + "trailing_comma", + ] | str] + try: # pylint: disable-next=C0412 from jsonyx._accelerator import make_encoder @@ -45,9 +51,13 @@ NOTHING: frozenset[str] = frozenset() COMMENTS: frozenset[str] = frozenset({"comments"}) DUPLICATE_KEYS: frozenset[str] = frozenset({"duplicate_keys"}) -NAN: frozenset[str] = frozenset({"nan"}) +MISSING_COMMAS: frozenset[str] = frozenset({"missing_commas"}) +NAN_AND_INFINITY: frozenset[str] = frozenset({"nan_and_infinity"}) TRAILING_COMMA: frozenset[str] = frozenset({"trailing_comma"}) -EVERYTHING: frozenset[str] = COMMENTS | DUPLICATE_KEYS | NAN | TRAILING_COMMA +EVERYTHING: frozenset[str] = ( + COMMENTS | DUPLICATE_KEYS | MISSING_COMMAS | NAN_AND_INFINITY + | TRAILING_COMMA +) def _decode_bytes(b: bytearray | bytes) -> str: @@ -83,24 +93,16 @@ def _decode_bytes(b: bytearray | bytes) -> str: class JSONDecoder: """JSON decoder.""" - def __init__( - self, - *, - allow: Container[Literal[ - "comments", "duplicate_keys", "nan", "trailing_comma", - ] | str] = NOTHING, - ) -> None: + def __init__(self, *, allow: _AllowList = NOTHING) -> None: """Create new JSON decoder.""" self._scanner: Callable[[str, str], tuple[Any]] = make_scanner( - "comments" in allow, "duplicate_keys" in allow, "nan" in allow, + "comments" in allow, "duplicate_keys" in allow, + "missing_commas" in allow, "nan_and_infinity" in allow, "trailing_comma" in allow, ) def load( - self, - fp: SupportsRead[bytearray | bytes | str], - *, - filename: str = "", + self, fp: SupportsRead[bytes | str], *, filename: str = "", ) -> Any: """Deserialize a JSON file to a Python object.""" return self.loads(fp.read(), filename=getattr(fp, "name", filename)) @@ -128,7 +130,7 @@ class JSONEncoder: def __init__( # noqa: PLR0913 self, *, - allow: Container[Literal["nan"] | str] = NOTHING, + allow: _AllowList = NOTHING, ensure_ascii: bool = False, indent: int | str | None = None, item_separator: str = ", ", @@ -146,12 +148,12 @@ def __init__( # noqa: PLR0913 else: self._encoder = make_encoder( indent, key_separator, item_separator, sort_keys, - "nan" in allow, ensure_ascii, + "nan_and_infinity" in allow, ensure_ascii, ) self._writer: Callable[[Any, SupportsWrite[str]], None] = make_writer( - indent, key_separator, item_separator, sort_keys, "nan" in allow, - ensure_ascii, + indent, key_separator, item_separator, sort_keys, + "nan_and_infinity" in allow, ensure_ascii, ) def dump(self, obj: Any, fp: SupportsWrite[str]) -> None: @@ -188,7 +190,7 @@ def dump( # noqa: PLR0913 obj: Any, fp: SupportsWrite[str], *, - allow: Container[Literal["nan"] | str] = NOTHING, + allow: 
_AllowList = NOTHING, ensure_ascii: bool = False, indent: int | str | None = None, item_separator: str = ", ", @@ -208,7 +210,7 @@ def dump( # noqa: PLR0913 def dumps( # noqa: PLR0913 obj: Any, *, - allow: Container[Literal["nan"] | str] = NOTHING, + allow: _AllowList = NOTHING, ensure_ascii: bool = False, indent: int | str | None = None, item_separator: str = ", ", @@ -227,11 +229,9 @@ def dumps( # noqa: PLR0913 def load( - fp: SupportsRead[bytearray | bytes | str], + fp: SupportsRead[bytes | str], *, - allow: Container[ - Literal["comments", "duplicate_keys", "nan", "trailing_comma"] | str - ] = NOTHING, + allow: _AllowList = NOTHING, filename: str = "", ) -> Any: """Deserialize a JSON file to a Python object.""" @@ -241,9 +241,7 @@ def load( def loads( s: bytearray | bytes | str, *, - allow: Container[ - Literal["comments", "duplicate_keys", "nan", "trailing_comma"] | str - ] = NOTHING, + allow: _AllowList = NOTHING, filename: str = "", ) -> Any: """Deserialize a JSON string to a Python object.""" diff --git a/src/jsonyx/_accelerator.c b/src/jsonyx/_accelerator.c index 3fd6560..89be3bb 100644 --- a/src/jsonyx/_accelerator.c +++ b/src/jsonyx/_accelerator.c @@ -22,7 +22,8 @@ typedef struct _PyScannerObject { PyObject_HEAD int allow_comments; int allow_duplicate_keys; - int allow_nan; + int allow_missing_commas; + int allow_nan_and_infinity; int allow_trailing_comma; } PyScannerObject; @@ -32,7 +33,7 @@ typedef struct _PyEncoderObject { PyObject *key_separator; PyObject *item_separator; int sort_keys; - int allow_nan; + int allow_nan_and_infinity; int ensure_ascii; } PyEncoderObject; @@ -118,7 +119,9 @@ _skip_comments(PyScannerObject *s, PyObject *pyfilename, PyObject *pystr, Py_ssi if (s->allow_comments) { raise_errmsg("Unterminated comment", pyfilename, pystr, comment_idx, idx); } - raise_errmsg("Comments are not allowed", pyfilename, pystr, comment_idx, idx); + else { + raise_errmsg("Comments are not allowed", pyfilename, pystr, comment_idx, idx); + } return -1; } if (PyUnicode_READ(kind,str, idx) == '*' && @@ -462,14 +465,14 @@ scanstring_unicode(PyObject *pyfilename, PyObject *pystr, Py_ssize_t end, Py_ssi case 'n': c = '\n'; break; case 'r': c = '\r'; break; case 't': c = '\t'; break; - case '\n': - raise_errmsg("Expecting escaped character", pyfilename, pystr, end - 1, -1); + default: + if (c == '\n') { + raise_errmsg("Expecting escaped character", pyfilename, pystr, end - 1, -1); + } + else { + raise_errmsg("Invalid backslash escape", pyfilename, pystr, end - 2, end); + } goto bail; - default: c = 0; - } - if (c == 0) { - raise_errmsg("Invalid backslash escape", pyfilename, pystr, end - 2, end); - goto bail; } } else { @@ -666,7 +669,7 @@ _parse_object_unicode(PyScannerObject *s, PyObject *memo, PyObject *pyfilename, if (!PyDict_Contains(rval, key)) { new_key = PyDict_SetDefault(memo, key, key); } - else if (!s->allow_duplicate_keys) { + else if (!s->allow_duplicate_keys) { raise_errmsg("Duplicate keys are not allowed", pyfilename, pystr, idx, next_idx); goto bail; } @@ -684,7 +687,7 @@ _parse_object_unicode(PyScannerObject *s, PyObject *memo, PyObject *pyfilename, goto bail; } if (idx > end_idx || PyUnicode_READ(kind, str, idx) != ':') { - raise_errmsg("Expecting ':' delimiter", pyfilename, pystr, colon_idx, -1); + raise_errmsg("Expecting colon", pyfilename, pystr, colon_idx, -1); goto bail; } idx++; @@ -709,26 +712,38 @@ _parse_object_unicode(PyScannerObject *s, PyObject *memo, PyObject *pyfilename, } /* bail if the object is closed or we didn't get the , delimiter */ - if (idx 
<= end_idx && PyUnicode_READ(kind, str, idx) == '}') + if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == ',') { + comma_idx = idx; + idx++; + + /* skip comments after , delimiter */ + if (_skip_comments(s, pyfilename, pystr, &idx)) { + goto bail; + } + } + else if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == '}') { break; - if (idx > end_idx || PyUnicode_READ(kind, str, idx) != ',') { - raise_errmsg("Expecting ',' delimiter", pyfilename, pystr, comma_idx, -1); + } + else if (idx == comma_idx) { + if (!s->allow_missing_commas){ + raise_errmsg("Expecting comma or whitespace", pyfilename, pystr, comma_idx, -1); + } + else { + raise_errmsg("Expecting comma", pyfilename, pystr, comma_idx, -1); + } goto bail; } - comma_idx = idx; - idx++; - - /* skip comments after , delimiter */ - if (_skip_comments(s, pyfilename, pystr, &idx)) { + else if (!s->allow_missing_commas) { + raise_errmsg("Missing comma's are not allowed", pyfilename, pystr, comma_idx, -1); goto bail; } if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == '}') { - if (s->allow_trailing_comma) { - break; + if (!s->allow_trailing_comma) { + raise_errmsg("Trailing comma is not allowed", pyfilename, pystr, comma_idx, -1); + goto bail; } - raise_errmsg("Trailing comma is not allowed", pyfilename, pystr, comma_idx, -1); - goto bail; + break; } } } @@ -793,26 +808,38 @@ _parse_array_unicode(PyScannerObject *s, PyObject *memo, PyObject *pyfilename, P } /* bail if the array is closed or we didn't get the , delimiter */ - if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == ']') + if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == ',') { + comma_idx = idx; + idx++; + + /* skip comments after , */ + if (_skip_comments(s, pyfilename, pystr, &idx)) { + goto bail; + } + } + else if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == ']') { break; - if (idx > end_idx || PyUnicode_READ(kind, str, idx) != ',') { - raise_errmsg("Expecting ',' delimiter", pyfilename, pystr, comma_idx, -1); + } + else if (idx == comma_idx) { + if (!s->allow_missing_commas){ + raise_errmsg("Expecting comma or whitespace", pyfilename, pystr, comma_idx, -1); + } + else { + raise_errmsg("Expecting comma", pyfilename, pystr, comma_idx, -1); + } goto bail; } - comma_idx = idx; - idx++; - - /* skip comments after , */ - if (_skip_comments(s, pyfilename, pystr, &idx)) { + else if (!s->allow_missing_commas) { + raise_errmsg("Missing comma's are not allowed", pyfilename, pystr, comma_idx, -1); goto bail; } if (idx <= end_idx && PyUnicode_READ(kind, str, idx) == ']') { - if (s->allow_trailing_comma) { - break; + if (!s->allow_trailing_comma) { + raise_errmsg("Trailing comma is not allowed", pyfilename, pystr, comma_idx, -1); + goto bail; } - raise_errmsg("Trailing comma is not allowed", pyfilename, pystr, comma_idx, -1); - goto bail; + break; } } } @@ -1006,7 +1033,7 @@ scan_once_unicode(PyScannerObject *s, PyObject *memo, PyObject *pyfilename, PyOb /* NaN */ if ((idx + 2 < length) && PyUnicode_READ(kind, str, idx + 1) == 'a' && PyUnicode_READ(kind, str, idx + 2) == 'N') { - if (!s->allow_nan) { + if (!s->allow_nan_and_infinity) { raise_errmsg("NaN is not allowed", pyfilename, pystr, idx, idx + 3); return NULL; } @@ -1023,7 +1050,7 @@ scan_once_unicode(PyScannerObject *s, PyObject *memo, PyObject *pyfilename, PyOb PyUnicode_READ(kind, str, idx + 5) == 'i' && PyUnicode_READ(kind, str, idx + 6) == 't' && PyUnicode_READ(kind, str, idx + 7) == 'y') { - if (!s->allow_nan) { + if (!s->allow_nan_and_infinity) { raise_errmsg("Infinity is not allowed", pyfilename, 
pystr, idx, idx + 8); return NULL; } @@ -1042,7 +1069,7 @@ scan_once_unicode(PyScannerObject *s, PyObject *memo, PyObject *pyfilename, PyOb PyUnicode_READ(kind, str, idx + 7) == 't' && PyUnicode_READ(kind, str, idx + 8) == 'y') { *next_idx_ptr = idx + 9; - if (!s->allow_nan) { + if (!s->allow_nan_and_infinity) { raise_errmsg("-Infinity is not allowed", pyfilename, pystr, idx, idx + 9); return NULL; } @@ -1092,13 +1119,13 @@ scanner_call(PyScannerObject *self, PyObject *args, PyObject *kwds) static PyObject * scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"allow_comments", "allow_duplicate_keys", "allow_nan", "allow_trailing_comma", NULL}; + static char *kwlist[] = {"allow_comments", "allow_duplicate_keys", "allow_missing_commas", "allow_nan_and_infinity", "allow_trailing_comma", NULL}; PyScannerObject *s; - int allow_comments, allow_duplicate_keys, allow_nan, allow_trailing_comma; + int allow_comments, allow_duplicate_keys, allow_missing_commas, allow_nan_and_infinity, allow_trailing_comma; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "pppp:make_scanner", kwlist, - &allow_comments, &allow_duplicate_keys, &allow_nan, &allow_trailing_comma)) + if (!PyArg_ParseTupleAndKeywords(args, kwds, "ppppp:make_scanner", kwlist, + &allow_comments, &allow_duplicate_keys, &allow_missing_commas, &allow_nan_and_infinity, &allow_trailing_comma)) return NULL; s = (PyScannerObject *)type->tp_alloc(type, 0); @@ -1108,7 +1135,8 @@ scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds) s->allow_comments = allow_comments; s->allow_duplicate_keys = allow_duplicate_keys; - s->allow_nan = allow_nan; + s->allow_missing_commas = allow_missing_commas; + s->allow_nan_and_infinity = allow_nan_and_infinity; s->allow_trailing_comma = allow_trailing_comma; return (PyObject *)s; } @@ -1136,17 +1164,17 @@ static PyType_Spec PyScannerType_spec = { static PyObject * encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"indent", "key_separator", "item_separator", "sort_keys", "allow_nan", "ensure_ascii", NULL}; + static char *kwlist[] = {"indent", "key_separator", "item_separator", "sort_keys", "allow_nan_and_infinity", "ensure_ascii", NULL}; PyEncoderObject *s; PyObject *indent, *key_separator; PyObject *item_separator; - int sort_keys, allow_nan, ensure_ascii; + int sort_keys, allow_nan_and_infinity, ensure_ascii; if (!PyArg_ParseTupleAndKeywords(args, kwds, "OUUppp:make_encoder", kwlist, &indent, &key_separator, &item_separator, - &sort_keys, &allow_nan, &ensure_ascii)) + &sort_keys, &allow_nan_and_infinity, &ensure_ascii)) return NULL; s = (PyEncoderObject *)type->tp_alloc(type, 0); @@ -1157,7 +1185,7 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) s->key_separator = Py_NewRef(key_separator); s->item_separator = Py_NewRef(item_separator); s->sort_keys = sort_keys; - s->allow_nan = allow_nan; + s->allow_nan_and_infinity = allow_nan_and_infinity; s->ensure_ascii = ensure_ascii; return (PyObject *)s; } @@ -1209,7 +1237,7 @@ encoder_encode_float(PyEncoderObject *s, PyObject *obj) /* Return the JSON representation of a PyFloat. 
*/ double i = PyFloat_AS_DOUBLE(obj); if (!isfinite(i)) { - if (!s->allow_nan) { + if (!s->allow_nan_and_infinity) { PyErr_Format( PyExc_ValueError, "%R is not allowed", diff --git a/src/jsonyx/_accelerator.pyi b/src/jsonyx/_accelerator.pyi index 97e9ede..aaabd6c 100644 --- a/src/jsonyx/_accelerator.pyi +++ b/src/jsonyx/_accelerator.pyi @@ -30,7 +30,7 @@ def make_encoder( # noqa: PLR0917, PLR0913 key_separator: str, item_separator: str, sort_keys: bool, # noqa: FBT001 - allow_nan: bool, # noqa: FBT001 + allow_nan_and_infinity: bool, # noqa: FBT001 ensure_ascii: bool, # noqa: FBT001 ) -> Callable[[Any], str]: """Make JSON encoder.""" @@ -39,7 +39,8 @@ def make_encoder( # noqa: PLR0917, PLR0913 def make_scanner( allow_comments: bool, # noqa: FBT001 allow_duplicate_keys: bool, # noqa: FBT001 - allow_nan: bool, # noqa: FBT001 + allow_missing_commas: bool, # noqa: FBT001 + allow_nan_and_infinity: bool, # noqa: FBT001 allow_trailing_comma: bool, # noqa: FBT001 ) -> Callable[[str, str], Any]: """Make JSON scanner.""" diff --git a/src/jsonyx/_decoder.py b/src/jsonyx/_decoder.py index bac5335..0636b7e 100644 --- a/src/jsonyx/_decoder.py +++ b/src/jsonyx/_decoder.py @@ -38,6 +38,7 @@ ).match +# TODO(Nice Zombies): refactor def _get_err_context(doc: str, start: int, end: int) -> ( tuple[int, str, int] ): @@ -194,7 +195,8 @@ def __str__(self) -> str: def make_scanner( # noqa: C901, PLR0915 allow_comments: bool, # noqa: FBT001 allow_duplicate_keys: bool, # noqa: FBT001 - allow_nan: bool, # noqa: FBT001 + allow_missing_commas: bool, # noqa: FBT001 + allow_nan_and_infinity: bool, # noqa: FBT001 allow_trailing_comma: bool, # noqa: FBT001 ) -> Callable[[str, str], Any]: """Make JSON scanner.""" @@ -257,27 +259,32 @@ def scan_object( # noqa: C901, PLR0912 else: key = DuplicateKey(key) + colon_idx: int = end + end = skip_comments(filename, s, end) if s[end:end + 1] != ":": - colon_idx: int = end - end = skip_comments(filename, s, end) - if s[end:end + 1] != ":": - msg = "Expecting ':' delimiter" - raise JSONSyntaxError(msg, filename, s, colon_idx) + msg = "Expecting colon" + raise JSONSyntaxError(msg, filename, s, colon_idx) end = skip_comments(filename, s, end + 1) result[key], end = scan_value(filename, s, end) - if s[end:end + 1] != ",": - comma_idx: int = end - end = skip_comments(filename, s, end) - if (nextchar := s[end:end + 1]) != ",": - if nextchar != "}": - msg = "Expecting ',' delimiter" - raise JSONSyntaxError(msg, filename, s, comma_idx) + comma_idx: int = end + end = skip_comments(filename, s, end) + if (nextchar := s[end:end + 1]) == ",": + comma_idx = end + end = skip_comments(filename, s, end + 1) + elif nextchar == "}": + return result, end + 1 + elif end == comma_idx: + if allow_missing_commas: + msg = "Expecting comma or whitespace" + else: + msg = "Expecting comma" - return result, end + 1 + raise JSONSyntaxError(msg, filename, s, comma_idx) + elif not allow_missing_commas: + msg = "Missing comma's are not allowed" + raise JSONSyntaxError(msg, filename, s, comma_idx) - comma_idx = end - end = skip_comments(filename, s, end + 1) if (nextchar := s[end:end + 1]) != '"': if nextchar != "}": msg = "Expecting string" @@ -301,24 +308,30 @@ def scan_array(filename: str, s: str, end: int) -> ( while True: value, end = scan_value(filename, s, end) append_value(value) - if s[end:end + 1] != ",": - comma_idx: int = end - end = skip_comments(filename, s, end) - if (nextchar := s[end:end + 1]) != ",": - if nextchar == "]": - return values, end + 1 - - msg: str = "Expecting ',' delimiter" - raise 
JSONSyntaxError(msg, filename, s, comma_idx) + comma_idx: int = end + end = skip_comments(filename, s, end) + if (nextchar := s[end:end + 1]) == ",": + comma_idx = end + end = skip_comments(filename, s, end + 1) + elif nextchar == "]": + return values, end + 1 + elif end == comma_idx: + if allow_missing_commas: + msg = "Expecting comma or whitespace" + else: + msg = "Expecting comma" + + raise JSONSyntaxError(msg, filename, s, comma_idx) + elif not allow_missing_commas: + msg = "Missing comma's are not allowed" + raise JSONSyntaxError(msg, filename, s, comma_idx) - comma_idx = end - end = skip_comments(filename, s, end + 1) if s[end:end + 1] == "]": - if allow_trailing_comma: - return values, end + 1 + if not allow_trailing_comma: + msg = "Trailing comma is not allowed" + raise JSONSyntaxError(msg, filename, s, comma_idx) - msg = "Trailing comma is not allowed" - raise JSONSyntaxError(msg, filename, s, comma_idx) + return values, end + 1 # pylint: disable-next=R0912 def scan_value( # noqa: C901, PLR0912 @@ -354,19 +367,19 @@ def scan_value( # noqa: C901, PLR0912 else: result = int(integer) elif nextchar == "N" and s[idx:idx + 3] == "NaN": - if not allow_nan: - msg: str = "NaN is not allowed" + if not allow_nan_and_infinity: + msg = "NaN is not allowed" raise JSONSyntaxError(msg, filename, s, idx, idx + 3) result, end = nan, idx + 3 elif nextchar == "I" and s[idx:idx + 8] == "Infinity": - if not allow_nan: + if not allow_nan_and_infinity: msg = "Infinity is not allowed" raise JSONSyntaxError(msg, filename, s, idx, idx + 8) result, end = inf, idx + 8 elif nextchar == "-" and s[idx:idx + 9] == "-Infinity": - if not allow_nan: + if not allow_nan_and_infinity: msg = "-Infinity is not allowed" raise JSONSyntaxError(msg, filename, s, idx, idx + 9) diff --git a/src/jsonyx/_encoder.py b/src/jsonyx/_encoder.py index d3537d4..227bf63 100644 --- a/src/jsonyx/_encoder.py +++ b/src/jsonyx/_encoder.py @@ -66,7 +66,7 @@ def make_writer( # noqa: C901, PLR0915, PLR0917, PLR0913 key_separator: str, item_separator: str, sort_keys: bool, # noqa: FBT001 - allow_nan: bool, # noqa: FBT001 + allow_nan_and_infinity: bool, # noqa: FBT001 ensure_ascii: bool, # noqa: FBT001 ) -> Callable[[Any, SupportsWrite[str]], None]: """Make JSON interencode.""" @@ -90,7 +90,7 @@ def floatstr(num: float) -> str: else: return float_repr(num) - if not allow_nan: + if not allow_nan_and_infinity: msg: str = f"{num!r} is not allowed" raise ValueError(msg) diff --git a/src/jsonyx/test_jsonyx/test_loads.py b/src/jsonyx/test_jsonyx/test_loads.py index 2fb242b..52b0485 100644 --- a/src/jsonyx/test_jsonyx/test_loads.py +++ b/src/jsonyx/test_jsonyx/test_loads.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import pytest -from jsonyx import NAN, TRAILING_COMMA +from jsonyx import NAN_AND_INFINITY, TRAILING_COMMA # pylint: disable-next=W0611 from jsonyx.test_jsonyx import get_json # type: ignore # noqa: F401 from typing_extensions import Any # type: ignore @@ -56,9 +56,11 @@ def test_keywords(json: ModuleType, string: str, expected: Any) -> None: ("Infinity", inf), ("-Infinity", -inf), ]) -def test_nan_allowed(json: ModuleType, string: str, expected: Any) -> None: - """Test NaN if allowed.""" - obj: Any = json.loads(string, allow=NAN) +def test_nan_and_infinity_allowed( + json: ModuleType, string: str, expected: Any, +) -> None: + """Test NaN and infinity if allowed.""" + obj: Any = json.loads(string, allow=NAN_AND_INFINITY) if isnan(expected): assert isnan(obj) else: @@ -66,8 +68,8 @@ def test_nan_allowed(json: ModuleType, string: str, 
expected: Any) -> None: @pytest.mark.parametrize("string", ["NaN", "Infinity", "-Infinity"]) -def test_nan_not_allowed(json: ModuleType, string: str) -> None: - """Test NaN if not allowed.""" +def test_nan_and_infinity_not_allowed(json: ModuleType, string: str) -> None: + """Test NaN and infinity if not allowed.""" with pytest.raises(json.JSONSyntaxError) as exc_info: json.loads(string) @@ -239,9 +241,6 @@ def test_invalid_string( # Multiple values ("[1, 2, 3]", [1, 2, 3]), - - # Space before delimiter - ("[1 ,2]", [1, 2]), ]) # type: ignore def test_array(json: ModuleType, string: str, expected: Any) -> None: """Test JSON array."""
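
Usage sketch (illustrative, not part of the diff): the snippet below shows how the new decode-side allow flags introduced by this patch are meant to be used, assuming only the public API visible above (loads, MISSING_COMMAS, NAN_AND_INFINITY, EVERYTHING, JSONSyntaxError); the input documents are made up for the example.

    import jsonyx
    from jsonyx import EVERYTHING, MISSING_COMMAS, NAN_AND_INFINITY

    # Values separated only by whitespace are accepted when
    # "missing_commas" is allowed.
    jsonyx.loads("[1 2 3]", allow=MISSING_COMMAS)          # [1, 2, 3]
    jsonyx.loads('{"a": 1 "b": 2}', allow=MISSING_COMMAS)  # {'a': 1, 'b': 2}

    # The old NAN flag is now NAN_AND_INFINITY; the flags are frozensets,
    # so they can be combined with "|", and EVERYTHING enables all of them.
    jsonyx.loads("[NaN Infinity]", allow=MISSING_COMMAS | NAN_AND_INFINITY)
    jsonyx.loads("[1 2, NaN,]", allow=EVERYTHING)

    # With the default (NOTHING) the decoder still rejects such input.
    try:
        jsonyx.loads("[1 2 3]")
    except jsonyx.JSONSyntaxError as exc:
        print(exc)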
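
A second minimal sketch for the encode side, where the flag was renamed from allow_nan to allow_nan_and_infinity: it assumes dumps() keeps the behaviour shown in _encoder.py and _accelerator.c, i.e. non-finite floats raise ValueError unless "nan_and_infinity" is allowed.

    import jsonyx
    from jsonyx import NAN_AND_INFINITY

    # Non-finite floats are written as NaN / Infinity literals when allowed.
    jsonyx.dumps([float("nan")], allow=NAN_AND_INFINITY)
    jsonyx.dumps([float("inf")], allow=NAN_AND_INFINITY)

    # With the default NOTHING the encoder refuses them.
    try:
        jsonyx.dumps([float("nan")])
    except ValueError as exc:
        print(exc)  # "nan is not allowed"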