From 8c173aca187598828a0fc9ebeb56f582ba306f06 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Wed, 13 Aug 2025 23:42:07 +0900 Subject: [PATCH 01/27] change: add workarounds for ruff:COM812 --- pyproject.toml | 1 + src/anyconfig/__init__.py | 6 +-- src/anyconfig/api/__init__.py | 14 ++--- src/anyconfig/api/_dump.py | 4 +- src/anyconfig/api/_load.py | 54 ++++++++++--------- src/anyconfig/api/_open.py | 2 +- src/anyconfig/backend/__init__.py | 10 ++-- src/anyconfig/backend/base/__init__.py | 8 +-- src/anyconfig/backend/base/compat.py | 6 +-- src/anyconfig/backend/base/datatypes.py | 6 +-- src/anyconfig/backend/base/dumpers.py | 20 ++++---- src/anyconfig/backend/base/loaders.py | 22 ++++---- src/anyconfig/backend/base/parsers.py | 35 +++++++------ src/anyconfig/backend/base/utils.py | 8 ++- src/anyconfig/backend/ini/configparser.py | 16 +++--- src/anyconfig/backend/json/__init__.py | 4 +- src/anyconfig/backend/json/common.py | 6 +-- src/anyconfig/backend/json/simplejson.py | 8 ++- src/anyconfig/backend/json/stdlib.py | 4 +- src/anyconfig/backend/properties/builtin.py | 13 ++--- src/anyconfig/backend/python/builtin.py | 4 +- src/anyconfig/backend/python/dumper.py | 4 +- src/anyconfig/backend/python/loader.py | 14 +++-- src/anyconfig/backend/python/utils.py | 10 ++-- src/anyconfig/backend/sh/variables.py | 17 +++--- src/anyconfig/backend/xml/etree.py | 57 +++++++++++---------- src/anyconfig/backend/yaml/common.py | 4 +- src/anyconfig/backend/yaml/pyyaml.py | 32 ++++++------ src/anyconfig/backend/yaml/ruamel.py | 12 ++--- src/anyconfig/cli/actions.py | 4 +- src/anyconfig/cli/constants.py | 4 +- src/anyconfig/cli/detectors.py | 14 ++--- 32 files changed, 215 insertions(+), 208 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ef4d4558..820bae35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -149,6 +149,7 @@ select = [ "ARG", # flake8-unused-arguments "B", # flake8-bugbear "BLE", # flake8-blind-except +# "COM", # flake8-commas (It's not ready yet.) 
"C4", # flake8-comprehensions "C90", # mccabe "D", # pydocstyle diff --git a/src/anyconfig/__init__.py b/src/anyconfig/__init__.py index 84f1c887..d035ae25 100644 --- a/src/anyconfig/__init__.py +++ b/src/anyconfig/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2021 Satoru SATOH +# Copyright (C) 2012 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # For 'anyconfig.open': @@ -31,7 +31,7 @@ load_plugins, list_types, list_by_cid, list_by_type, list_by_extension, findall, find, try_query, - validate, is_valid, gen_schema + validate, is_valid, gen_schema, ) @@ -67,7 +67,7 @@ "try_query", # anyconfig.validate - "validate", "is_valid", "gen_schema" # validation APIs. + "validate", "is_valid", "gen_schema", # validation APIs. ] # vim:sw=4:ts=4:et: diff --git a/src/anyconfig/api/__init__.py b/src/anyconfig/api/__init__.py index c2be7987..bba92884 100644 --- a/src/anyconfig/api/__init__.py +++ b/src/anyconfig/api/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-import,import-error,invalid-name @@ -87,10 +87,10 @@ from .datatypes import MaybeDataT from ._dump import ( - dump, dumps + dump, dumps, ) from ._load import ( - single_load, multi_load, load, loads + single_load, multi_load, load, loads, ) from ._open import open # pylint: disable=redefined-builtin @@ -99,7 +99,7 @@ from ..common import ( InDataT, InDataExT, UnknownFileTypeError, UnknownParserTypeError, - UnknownProcessorTypeError, ValidationError + UnknownProcessorTypeError, ValidationError, ) from ..dicts import ( MS_REPLACE, MS_NO_REPLACE, MS_DICTS, MS_DICTS_AND_LISTS, MERGE_STRATEGIES, @@ -110,11 +110,11 @@ ) from ..parsers import ( load_plugins, list_types, list_by_cid, list_by_type, list_by_extension, - findall, find, MaybeParserT + findall, find, MaybeParserT, ) from ..query import try_query from ..schema import ( - validate, is_valid, gen_schema + validate, is_valid, 
gen_schema, ) @@ -160,5 +160,5 @@ def version() -> list[str]: "try_query", # anyconfig.validate - "validate", "is_valid", "gen_schema" + "validate", "is_valid", "gen_schema", ] diff --git a/src/anyconfig/api/_dump.py b/src/anyconfig/api/_dump.py index 2463d8d9..a5719cb6 100644 --- a/src/anyconfig/api/_dump.py +++ b/src/anyconfig/api/_dump.py @@ -14,7 +14,7 @@ def dump( data: common.InDataExT, out: ioinfo.PathOrIOInfoT, - ac_parser: parsers.MaybeParserT = None, **options + ac_parser: parsers.MaybeParserT = None, **options, ) -> None: """Save ``data`` to ``out`` in specified or detected format. @@ -37,7 +37,7 @@ def dump( def dumps( data: common.InDataExT, ac_parser: parsers.MaybeParserT = None, - **options + **options, ) -> str: """Return a str representation of ``data`` in specified format. diff --git a/src/anyconfig/api/_load.py b/src/anyconfig/api/_load.py index 04d5be70..2d260f8d 100644 --- a/src/anyconfig/api/_load.py +++ b/src/anyconfig/api/_load.py @@ -12,7 +12,7 @@ from .. import ioinfo from ..dicts import ( convert_to as dicts_convert_to, - merge as dicts_merge + merge as dicts_merge, ) from ..parsers import find as parsers_find from ..query import try_query @@ -20,7 +20,7 @@ from ..template import try_render from ..utils import is_dict_like from .datatypes import ( - ParserT + ParserT, ) from .utils import are_same_file_types @@ -28,7 +28,7 @@ import collections.abc from ..common import ( - InDataT, InDataExT + InDataT, InDataExT, ) @@ -67,7 +67,7 @@ def _single_load( ioi: ioinfo.IOInfo, *, ac_parser: MaybeParserOrIdOrTypeT = None, ac_template: bool = False, ac_context: typing.Optional[MappingT] = None, - **options + **options, ) -> InDataExT: """Load data from a given ``ioi``. @@ -102,7 +102,7 @@ def single_load( input_: ioinfo.PathOrIOInfoT, ac_parser: MaybeParserOrIdOrTypeT = None, *, ac_template: bool = False, ac_context: typing.Optional[MappingT] = None, - **options + **options, ) -> InDataExT: r"""Load from single input ``input\_``. 
@@ -161,7 +161,7 @@ def single_load( cnf = _single_load(ioi, ac_parser=ac_parser, ac_template=ac_template, ac_context=ac_context, **options) schema = try_to_load_schema( - ac_template=ac_template, ac_context=ac_context, **options + ac_template=ac_template, ac_context=ac_context, **options, ) if schema and not is_valid(cnf, schema, **options): return None @@ -171,11 +171,12 @@ def single_load( def multi_load( inputs: typing.Union[ - collections.abc.Iterable[ioinfo.PathOrIOInfoT], ioinfo.PathOrIOInfoT + collections.abc.Iterable[ioinfo.PathOrIOInfoT], + ioinfo.PathOrIOInfoT, ], ac_parser: MaybeParserOrIdOrTypeT = None, *, ac_template: bool = False, ac_context: typing.Optional[MappingT] = None, - **options + **options, ) -> InDataExT: r"""Load data from multiple inputs ``inputs``. @@ -230,7 +231,7 @@ def multi_load( :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ schema = try_to_load_schema( - ac_template=ac_template, ac_context=ac_context, **options + ac_template=ac_template, ac_context=ac_context, **options, ) options["ac_schema"] = None # Avoid to load schema more than twice. 
@@ -246,7 +247,7 @@ def multi_load( for ioi in iois: cups = _single_load( ioi, ac_parser=ac_parser, ac_template=ac_template, - ac_context=ctx, **options + ac_context=ctx, **options, ) if cups: if cnf is None: @@ -256,7 +257,7 @@ def multi_load( dicts_merge( typing.cast("MappingT", cnf), typing.cast("MappingT", cups), - **options + **options, ) dicts_merge(ctx, typing.cast("MappingT", cups), **options) elif len(iois) > 1: @@ -278,12 +279,13 @@ def multi_load( def load( path_specs: typing.Union[ - collections.abc.Iterable[ioinfo.PathOrIOInfoT], ioinfo.PathOrIOInfoT + collections.abc.Iterable[ioinfo.PathOrIOInfoT], + ioinfo.PathOrIOInfoT, ], ac_parser: typing.Optional[str] = None, *, ac_dict: typing.Optional[collections.abc.Callable] = None, ac_template: bool = False, ac_context: typing.Optional[MappingT] = None, - **options + **options, ) -> InDataExT: r"""Load from a file or files specified as ``path_specs``. @@ -320,13 +322,17 @@ def load( raise ValueError(msg) if len(iois) == 1: - return single_load(iois[0], ac_parser=ac_parser, ac_dict=ac_dict, - ac_template=ac_template, ac_context=ac_context, - **options) + return single_load( + iois[0], ac_parser=ac_parser, ac_dict=ac_dict, + ac_template=ac_template, ac_context=ac_context, + **options, + ) - return multi_load(iois, ac_parser=ac_parser, ac_dict=ac_dict, - ac_template=ac_template, ac_context=ac_context, - **options) + return multi_load( + iois, ac_parser=ac_parser, ac_dict=ac_dict, + ac_template=ac_template, ac_context=ac_context, + **options, + ) def loads( @@ -334,7 +340,7 @@ def loads( ac_dict: typing.Optional[collections.abc.Callable] = None, ac_template: typing.Union[str, bool] = False, ac_context: typing.Optional[MappingT] = None, - **options + **options, ) -> InDataExT: """Load data from a str, ``content``. 
@@ -367,9 +373,11 @@ def loads( ac_schema = options.get("ac_schema") if ac_schema is not None: options["ac_schema"] = None - schema = loads(ac_schema, ac_parser=psr, ac_dict=ac_dict, - ac_template=ac_template, ac_context=ac_context, - **options) + schema = loads( + ac_schema, ac_parser=psr, ac_dict=ac_dict, + ac_template=ac_template, ac_context=ac_context, + **options, + ) if ac_template: compiled = try_render(content=content, ctx=ac_context, **options) diff --git a/src/anyconfig/api/_open.py b/src/anyconfig/api/_open.py index ea11a1fb..75e237bb 100644 --- a/src/anyconfig/api/_open.py +++ b/src/anyconfig/api/_open.py @@ -18,7 +18,7 @@ def open( path: ioinfo.PathOrIOInfoT, mode: typing.Optional[str] = None, ac_parser: parsers.MaybeParserT = None, - **options + **options, ) -> typing.IO: """Open given file ``path`` with appropriate open flag. diff --git a/src/anyconfig/backend/__init__.py b/src/anyconfig/backend/__init__.py index d7046d05..7dfb9965 100644 --- a/src/anyconfig/backend/__init__.py +++ b/src/anyconfig/backend/__init__.py @@ -16,10 +16,10 @@ sh, toml, yaml, - xml + xml, ) from .base import ( - ParserT, ParsersT, ParserClssT + ParserT, ParsersT, ParserClssT, ) @@ -33,7 +33,7 @@ def warn(name: str, feature: str) -> None: """Wraper for warnings.warn.""" warnings.warn( f"'{name}' module is not available. 
Disabled {feature} support.", - category=ImportWarning, stacklevel=2 + category=ImportWarning, stacklevel=2, ) @@ -49,6 +49,8 @@ def warn(name: str, feature: str) -> None: __all__ = [ - "ParserT", "ParsersT", "ParserClssT", "PARSERS", + "ParserClssT", + "ParserT", + "ParsersT", ] diff --git a/src/anyconfig/backend/base/__init__.py b/src/anyconfig/backend/base/__init__.py index 05359463..cd306d8d 100644 --- a/src/anyconfig/backend/base/__init__.py +++ b/src/anyconfig/backend/base/__init__.py @@ -10,17 +10,17 @@ from .compat import BinaryFilesMixin from .datatypes import ( GenContainerT, OptionsT, InDataT, InDataExT, OutDataExT, IoiT, - PathOrStrT + PathOrStrT, ) from .dumpers import ( - ToStringDumperMixin, ToStreamDumperMixin, BinaryDumperMixin + ToStringDumperMixin, ToStreamDumperMixin, BinaryDumperMixin, ) from .loaders import ( LoaderMixin, FromStringLoaderMixin, FromStreamLoaderMixin, - BinaryLoaderMixin + BinaryLoaderMixin, ) from .utils import ( - ensure_outdir_exists, to_method + ensure_outdir_exists, to_method, ) from .parsers import ( Parser, diff --git a/src/anyconfig/backend/base/compat.py b/src/anyconfig/backend/base/compat.py index bff0ec04..d3447564 100644 --- a/src/anyconfig/backend/base/compat.py +++ b/src/anyconfig/backend/base/compat.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with @@ -25,7 +25,7 @@ def ropen(cls, filepath: PathOrStrT, **options) -> typing.IO: :param filepath: Path to file to open to read data """ return pathlib.Path(filepath).open( - cls._open_flags[0], **options + cls._open_flags[0], **options, ) @classmethod @@ -35,5 +35,5 @@ def wopen(cls, filepath: PathOrStrT, **options) -> typing.IO: :param filepath: Path to file to open to write data to """ return pathlib.Path(filepath).open( - cls._open_flags[1], **options + cls._open_flags[1], **options, ) diff --git a/src/anyconfig/backend/base/datatypes.py 
b/src/anyconfig/backend/base/datatypes.py index 5e516386..84e8be6e 100644 --- a/src/anyconfig/backend/base/datatypes.py +++ b/src/anyconfig/backend/base/datatypes.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Utility functions in anyconfig.backend.base.""" @@ -10,10 +10,10 @@ import typing from ...common import ( - InDataT, InDataExT + InDataT, InDataExT, ) from ...ioinfo import ( - IOInfo, PathOrIOInfoT + IOInfo, PathOrIOInfoT, ) diff --git a/src/anyconfig/backend/base/dumpers.py b/src/anyconfig/backend/base/dumpers.py index 4963ae58..88aa3e05 100644 --- a/src/anyconfig/backend/base/dumpers.py +++ b/src/anyconfig/backend/base/dumpers.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with, unspecified-encoding @@ -12,12 +12,12 @@ from ... import ioinfo, utils from .utils import ( - ensure_outdir_exists, not_implemented + ensure_outdir_exists, not_implemented, ) if typing.TYPE_CHECKING: from .datatypes import ( - InDataExT, IoiT, PathOrStrT + InDataExT, IoiT, PathOrStrT, ) @@ -48,7 +48,7 @@ def wopen(self, filepath: PathOrStrT, **options) -> typing.IO: options["encoding"] = _ENCODING return pathlib.Path(filepath).open( - self._open_write_mode, **options + self._open_write_mode, **options, ) def dump_to_string(self, cnf: InDataExT, **options) -> str: @@ -63,7 +63,7 @@ def dump_to_string(self, cnf: InDataExT, **options) -> str: return "" def dump_to_path( - self, cnf: InDataExT, filepath: PathOrStrT, **options + self, cnf: InDataExT, filepath: PathOrStrT, **options, ) -> None: """Dump config 'cnf' to a file 'filepath'. 
@@ -74,7 +74,7 @@ def dump_to_path( not_implemented(self, cnf, filepath, **options) def dump_to_stream( - self, cnf: InDataExT, stream: typing.IO, **options + self, cnf: InDataExT, stream: typing.IO, **options, ) -> None: """Dump config 'cnf' to a file-like object 'stream'. @@ -112,7 +112,7 @@ def dump(self, cnf: InDataExT, ioi: IoiT, **options) -> None: if ioinfo.is_stream(ioi): self.dump_to_stream( - cnf, typing.cast("typing.IO", ioi.src), **options + cnf, typing.cast("typing.IO", ioi.src), **options, ) else: ensure_outdir_exists(ioi.path) @@ -137,7 +137,7 @@ class ToStringDumperMixin(DumperMixin): """ def dump_to_path( - self, cnf: InDataExT, filepath: PathOrStrT, **options + self, cnf: InDataExT, filepath: PathOrStrT, **options, ) -> None: """Dump config 'cnf' to a file 'filepath'. @@ -149,7 +149,7 @@ def dump_to_path( out.write(self.dump_to_string(cnf, **options)) def dump_to_stream( - self, cnf: InDataExT, stream: typing.IO, **options + self, cnf: InDataExT, stream: typing.IO, **options, ) -> None: """Dump config 'cnf' to a file-like object 'stream'. @@ -185,7 +185,7 @@ def dump_to_string(self, cnf: InDataExT, **options) -> str: return stream.getvalue() def dump_to_path( - self, cnf: InDataExT, filepath: PathOrStrT, **options + self, cnf: InDataExT, filepath: PathOrStrT, **options, ) -> None: """Dump config 'cnf' to a file 'filepath`. 
diff --git a/src/anyconfig/backend/base/loaders.py b/src/anyconfig/backend/base/loaders.py index 151bf21a..f47693c2 100644 --- a/src/anyconfig/backend/base/loaders.py +++ b/src/anyconfig/backend/base/loaders.py @@ -16,7 +16,7 @@ if typing.TYPE_CHECKING: from .datatypes import ( - InDataExT, IoiT, GenContainerT, OptionsT, PathOrStrT + InDataExT, IoiT, GenContainerT, OptionsT, PathOrStrT, ) @@ -76,7 +76,7 @@ def ropen(self, filepath: PathOrStrT, **options) -> typing.IO: options["encoding"] = _ENCODING return pathlib.Path(filepath).open( - self._open_read_mode, **options + self._open_read_mode, **options, ) def _container_factory(self, **options) -> GenContainerT: @@ -102,7 +102,7 @@ def _container_factory(self, **options) -> GenContainerT: return dict def _load_options( - self, container: GenContainerT, **options + self, container: GenContainerT, **options, ) -> OptionsT: """Select backend specific loading options.""" # Force set dict option if available in backend. For example, @@ -114,7 +114,7 @@ def _load_options( return utils.filter_options(self._load_opts, options) def load_from_string( - self, content: str, container: GenContainerT, **options + self, content: str, container: GenContainerT, **options, ) -> InDataExT: """Load config from given string 'content'. @@ -128,7 +128,7 @@ def load_from_string( return DATA_DEFAULT def load_from_path( - self, filepath: PathOrStrT, container: GenContainerT, **options + self, filepath: PathOrStrT, container: GenContainerT, **options, ) -> InDataExT: """Load config from given file path 'filepath`. @@ -142,7 +142,7 @@ def load_from_path( return DATA_DEFAULT def load_from_stream( - self, stream: typing.IO, container: GenContainerT, **options + self, stream: typing.IO, container: GenContainerT, **options, ) -> InDataExT: """Load config from given file like object 'stream`. 
@@ -175,7 +175,7 @@ def loads(self, content: str, **options) -> InDataExT: return self.load_from_string(content, container, **options) def load( - self, ioi: IoiT, *, ac_ignore_missing: bool = False, **options + self, ioi: IoiT, *, ac_ignore_missing: bool = False, **options, ) -> InDataExT: """Load config from ``ioi``. @@ -230,7 +230,7 @@ class FromStringLoaderMixin(LoaderMixin): """ def load_from_stream( - self, stream: typing.IO, container: GenContainerT, **options + self, stream: typing.IO, container: GenContainerT, **options, ) -> InDataExT: """Load config from given stream 'stream'. @@ -243,7 +243,7 @@ def load_from_stream( return self.load_from_string(stream.read(), container, **options) def load_from_path( - self, filepath: PathOrStrT, container: GenContainerT, **options + self, filepath: PathOrStrT, container: GenContainerT, **options, ) -> InDataExT: """Load config from given file path 'filepath'. @@ -268,7 +268,7 @@ class FromStreamLoaderMixin(LoaderMixin): """ def load_from_string( - self, content: str, container: GenContainerT, **options + self, content: str, container: GenContainerT, **options, ) -> InDataExT: """Load config from given string 'cnf_content'. @@ -282,7 +282,7 @@ def load_from_string( return self.load_from_stream(iof(content), container, **options) def load_from_path( - self, filepath: PathOrStrT, container: GenContainerT, **options + self, filepath: PathOrStrT, container: GenContainerT, **options, ) -> InDataExT: """Load config from given file path 'filepath'. 
diff --git a/src/anyconfig/backend/base/parsers.py b/src/anyconfig/backend/base/parsers.py index 5e29fc83..71c34183 100644 --- a/src/anyconfig/backend/base/parsers.py +++ b/src/anyconfig/backend/base/parsers.py @@ -23,13 +23,13 @@ from ...models import processor from ...utils import is_dict_like from .datatypes import ( - InDataExT, GenContainerT + InDataExT, GenContainerT, ) from .dumpers import ( - DumperMixin, ToStringDumperMixin, ToStreamDumperMixin + DumperMixin, ToStringDumperMixin, ToStreamDumperMixin, ) from .loaders import ( - LoaderMixin, FromStringLoaderMixin, FromStreamLoaderMixin + LoaderMixin, FromStringLoaderMixin, FromStreamLoaderMixin, ) @@ -78,7 +78,7 @@ def load_with_fn( content_or_strm: typing.Union[typing.AnyStr, typing.IO], container: GenContainerT, *, allow_primitives: bool = False, - **options + **options, ) -> InDataExT: """Load data from given string or stream 'content_or_strm'. @@ -106,7 +106,7 @@ def load_with_fn( def dump_with_fn( dump_fn: typing.Optional[DumpFnT], data: InDataExT, stream: typing.Optional[typing.IO], - **options + **options, ) -> str: """Dump 'data' to a string. @@ -155,7 +155,7 @@ class StringStreamFnParser(Parser, FromStreamLoaderMixin, ToStreamDumperMixin): _dump_to_stream_fn: typing.Optional[DumpFnT] = None def load_from_string( - self, content: typing.AnyStr, container: GenContainerT, **options + self, content: typing.AnyStr, container: GenContainerT, **options, ) -> InDataExT: """Load configuration data from given string 'content'. 
@@ -165,12 +165,14 @@ def load_from_string( :return: container object holding the configuration data """ - return load_with_fn(self._load_from_string_fn, content, container, - allow_primitives=self.allow_primitives(), - **options) + return load_with_fn( + self._load_from_string_fn, content, container, + allow_primitives=self.allow_primitives(), + **options, + ) def load_from_stream( - self, stream: typing.IO, container: GenContainerT, **options + self, stream: typing.IO, container: GenContainerT, **options, ) -> InDataExT: """Load data from given stream 'stream'. @@ -180,9 +182,11 @@ def load_from_stream( :return: container object holding the configuration data """ - return load_with_fn(self._load_from_stream_fn, stream, container, - allow_primitives=self.allow_primitives(), - **options) + return load_with_fn( + self._load_from_stream_fn, stream, container, + allow_primitives=self.allow_primitives(), + **options, + ) def dump_to_string(self, cnf: InDataExT, **options) -> str: """Dump config 'cnf' to a string. @@ -196,7 +200,7 @@ def dump_to_string(self, cnf: InDataExT, **options) -> str: **options) def dump_to_stream( - self, cnf: InDataExT, stream: typing.IO, **options + self, cnf: InDataExT, stream: typing.IO, **options, ) -> None: """Dump config 'cnf' to a file-like object 'stream'. 
@@ -206,5 +210,4 @@ def dump_to_stream( :param stream: Config file or file like object :param options: optional keyword parameters to be sanitized :: dict """ - dump_with_fn(self._dump_to_stream_fn, cnf, stream, - **options) + dump_with_fn(self._dump_to_stream_fn, cnf, stream, **options) diff --git a/src/anyconfig/backend/base/utils.py b/src/anyconfig/backend/base/utils.py index 96a358b1..33c3b8d6 100644 --- a/src/anyconfig/backend/base/utils.py +++ b/src/anyconfig/backend/base/utils.py @@ -13,9 +13,7 @@ import collections.abc -def not_implemented( - *_args, **_options -) -> None: +def not_implemented(*_args, **_options) -> None: """Raise NotImplementedError.""" raise NotImplementedError @@ -29,7 +27,7 @@ def ensure_outdir_exists(filepath: typing.Union[str, pathlib.Path]) -> None: def to_method( - func: collections.abc.Callable[..., typing.Any] + func: collections.abc.Callable[..., typing.Any], ) -> collections.abc.Callable[..., typing.Any]: """Lift :func:`func` to a method. @@ -39,7 +37,7 @@ def to_method( """ @functools.wraps(func) def wrapper( - *args, **kwargs + *args, **kwargs, ) -> collections.abc.Callable[..., typing.Any]: """Original function decorated.""" return func(*args[1:], **kwargs) diff --git a/src/anyconfig/backend/ini/configparser.py b/src/anyconfig/backend/ini/configparser.py index 7122fd8a..ffa5af3b 100644 --- a/src/anyconfig/backend/ini/configparser.py +++ b/src/anyconfig/backend/ini/configparser.py @@ -57,12 +57,12 @@ r'".*"' r"|" r"'.*'" - r")$" + r")$", ) def parse( - val_s: str, sep: str = _SEP, quoted_re: re.Pattern = _QUOTED_RE + val_s: str, sep: str = _SEP, quoted_re: re.Pattern = _QUOTED_RE, ) -> typing.Any: """Parse expression. @@ -97,7 +97,7 @@ def _to_s(val: typing.Any, sep: str = ", ") -> str: def parsed_items( items: collections.abc.Iterable[tuple[str, typing.Any]], - sep: str = _SEP, **options + sep: str = _SEP, **options, ) -> collections.abc.Iterator[tuple[str, typing.Any]]: """Parse an iterable of items. 
@@ -111,12 +111,12 @@ def parsed_items( def _make_parser( - **kwargs + **kwargs, ) -> tuple[dict[str, typing.Any], configparser.ConfigParser]: """Make an instance of configparser.ConfigParser.""" # Optional arguments for configparser.ConfigParser{,readfp} kwargs_0 = utils.filter_options( - ("defaults", "dict_type", "allow_no_value", "strict"), kwargs + ("defaults", "dict_type", "allow_no_value", "strict"), kwargs, ) kwargs_1 = utils.filter_options(("filename", ), kwargs) @@ -134,7 +134,7 @@ def _make_parser( def _load( stream: typing.IO, container: base.GenContainerT, - sep: str = _SEP, dkey: str = DEFAULTSECT, **kwargs + sep: str = _SEP, dkey: str = DEFAULTSECT, **kwargs, ) -> base.InDataT: """Load data from ``stream`` of which file should be in INI format. @@ -162,7 +162,7 @@ def _load( def _dumps_itr( - cnf: dict[str, typing.Any], dkey: str = DEFAULTSECT + cnf: dict[str, typing.Any], dkey: str = DEFAULTSECT, ) -> collections.abc.Iterator[str]: """Dump data iterably. @@ -201,7 +201,7 @@ class Parser(base.Parser, base.FromStreamLoaderMixin, _extensions: tuple[str, ...] = ("ini", ) _load_opts: tuple[str, ...] = ( "defaults", "dict_type", "allow_no_value", "filename", - "ac_parse_value", "strict" + "ac_parse_value", "strict", ) _dict_opts: tuple[str, ...] = ("dict_type", ) diff --git a/src/anyconfig/backend/json/__init__.py b/src/anyconfig/backend/json/__init__.py index 14a75b5c..4b3e1b9e 100644 --- a/src/anyconfig/backend/json/__init__.py +++ b/src/anyconfig/backend/json/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Backend modules to load and dump JSON data. 
@@ -25,5 +25,3 @@ PARSERS.append(SimpleJsonParser) except ImportError: pass - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/json/common.py b/src/anyconfig/backend/json/common.py index 89d98073..36f3af6d 100644 --- a/src/anyconfig/backend/json/common.py +++ b/src/anyconfig/backend/json/common.py @@ -17,14 +17,14 @@ JSON_LOAD_OPTS: tuple[str, ...] = ( "cls", "object_hook", "parse_float", "parse_int", - "parse_constant", "object_pairs_hook" + "parse_constant", "object_pairs_hook", ) JSON_DUMP_OPTS: tuple[str, ...] = ( "skipkeys", "ensure_ascii", "check_circular", "allow_nan", - "cls", "indent", "separators", "default", "sort_keys" + "cls", "indent", "separators", "default", "sort_keys", ) JSON_DICT_OPTS: tuple[str, ...] = ( - "object_pairs_hook", "object_hook" + "object_pairs_hook", "object_hook", ) diff --git a/src/anyconfig/backend/json/simplejson.py b/src/anyconfig/backend/json/simplejson.py index c9449913..28b99fa2 100644 --- a/src/anyconfig/backend/json/simplejson.py +++ b/src/anyconfig/backend/json/simplejson.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=import-error @@ -30,7 +30,7 @@ from .. 
import base from .common import ( - JSON_LOAD_OPTS, JSON_DUMP_OPTS, Parser as BaseParser + JSON_LOAD_OPTS, JSON_DUMP_OPTS, Parser as BaseParser, ) @@ -40,7 +40,7 @@ "use_decimal", "namedtuple_as_object", "tuple_as_array", "bigint_as_string", "item_sort_key", "for_json", "ignore_nan", "int_as_string_bitcount", - "iterable_as_array" + "iterable_as_array", ) @@ -55,5 +55,3 @@ class Parser(BaseParser): _load_from_stream_fn = base.to_method(json.load) _dump_to_string_fn = base.to_method(json.dumps) _dump_to_stream_fn = base.to_method(json.dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/json/stdlib.py b/src/anyconfig/backend/json/stdlib.py index 17f042f3..2d642431 100644 --- a/src/anyconfig/backend/json/stdlib.py +++ b/src/anyconfig/backend/json/stdlib.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # Ref. python -c "import json; help(json)" @@ -51,5 +51,3 @@ class Parser(BaseParser): _load_from_stream_fn = base.to_method(json.load) _dump_to_string_fn = base.to_method(json.dumps) _dump_to_stream_fn = base.to_method(json.dump) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/backend/properties/builtin.py b/src/anyconfig/backend/properties/builtin.py index bf98411c..0ff7a8db 100644 --- a/src/anyconfig/backend/properties/builtin.py +++ b/src/anyconfig/backend/properties/builtin.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load and dump (Java) properties files. 
@@ -54,7 +54,8 @@ def parseline(line: str) -> tuple[typing.Optional[str], str]: if len(pair) < _MIN_LEN_PAIR: warnings.warn( - f"Invalid line found: {line}", category=SyntaxWarning, stacklevel=2 + f"Invalid line found: {line}", category=SyntaxWarning, + stacklevel=2, ) return (key or None, "") @@ -63,7 +64,7 @@ def parseline(line: str) -> tuple[typing.Optional[str], str]: def _pre_process_line( line: str, - cmarkers: tuple[str, ...] = _COMMENT_MARKERS + cmarkers: tuple[str, ...] = _COMMENT_MARKERS, ) -> typing.Optional[str]: """Preprocess a line in properties; strip comments, etc. @@ -97,7 +98,7 @@ def escape(in_s: str) -> str: def load( - stream: typing.IO, container: base.GenContainerT = dict, **kwargs + stream: typing.IO, container: base.GenContainerT = dict, **kwargs, ) -> base.InDataT: """Load data from a java properties files given as ``stream``. @@ -148,7 +149,7 @@ class Parser(base.StreamParser): _dict_opts: tuple[str, ...] = ("ac_dict", ) def load_from_stream( - self, stream: typing.IO, container: base.GenContainerT, **kwargs + self, stream: typing.IO, container: base.GenContainerT, **kwargs, ) -> base.InDataT: """Load config from given file like object 'stream'. @@ -161,7 +162,7 @@ def load_from_stream( return load(stream, container=container, **kwargs) def dump_to_stream( - self, cnf: base.InDataExT, stream: typing.IO, **_kwargs + self, cnf: base.InDataExT, stream: typing.IO, **_kwargs, ) -> None: """Dump config 'cnf' to a file or file-like object 'stream'. diff --git a/src/anyconfig/backend/python/builtin.py b/src/anyconfig/backend/python/builtin.py index 0ceefc2f..38f7d93d 100644 --- a/src/anyconfig/backend/python/builtin.py +++ b/src/anyconfig/backend/python/builtin.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023, 2024 Satoru SATOH +# Copyright (C) 2023 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load and dump python code conntains data. @@ -35,7 +35,7 @@ from .. import base from . 
import ( - loader, dumper + loader, dumper, ) diff --git a/src/anyconfig/backend/python/dumper.py b/src/anyconfig/backend/python/dumper.py index 5b00c718..8a504cb4 100644 --- a/src/anyconfig/backend/python/dumper.py +++ b/src/anyconfig/backend/python/dumper.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2024 Satoru SATOH +# Copyright (C) 2024, 2025 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to dump python code conntains data. @@ -23,7 +23,7 @@ from __future__ import annotations from ..base import ( - InDataExT, ToStringDumperMixin + InDataExT, ToStringDumperMixin, ) diff --git a/src/anyconfig/backend/python/loader.py b/src/anyconfig/backend/python/loader.py index 03689c9a..fcfbf1d6 100644 --- a/src/anyconfig/backend/python/loader.py +++ b/src/anyconfig/backend/python/loader.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023, 2024 Satoru SATOH +# Copyright (C) 2023 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load python code conntains data. @@ -36,15 +36,13 @@ from ... import ioinfo from ..base import ( - IoiT, InDataExT, LoaderMixin + IoiT, InDataExT, LoaderMixin, ) from . import utils -def load_from_temp_file( - content: str, **opts -) -> InDataExT: +def load_from_temp_file(content: str, **opts) -> InDataExT: """Dump `content` to tempoary file and load from it. 
:param content: A str to load data from @@ -54,7 +52,7 @@ def load_from_temp_file( path.write_text(content, encoding="utf-8") return utils.load_from_path( - path, allow_exec=opts.get("allow_exec", False) + path, allow_exec=opts.get("allow_exec", False), ) @@ -105,9 +103,9 @@ def load(self, ioi: IoiT, **options) -> InDataExT: if ioinfo.is_stream(ioi): return load_from_temp_file( typing.cast("typing.IO", ioi.src).read(), - allow_exec=allow_exec + allow_exec=allow_exec, ) return utils.load_from_path( - pathlib.Path(ioi.path), allow_exec=allow_exec + pathlib.Path(ioi.path), allow_exec=allow_exec, ) diff --git a/src/anyconfig/backend/python/utils.py b/src/anyconfig/backend/python/utils.py index 9441597b..a8f807c7 100644 --- a/src/anyconfig/backend/python/utils.py +++ b/src/anyconfig/backend/python/utils.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023, 2024 Satoru SATOH +# Copyright (C) 2023 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring @@ -47,7 +47,7 @@ def load_literal_data_from_path(path: pathlib.Path) -> typing.Any: def load_data_from_py( path: pathlib.Path, *, data_name: typing.Optional[str] = None, - fallback: bool = False + fallback: bool = False, ) -> typing.Any: """Load test data from .py files by evaluating it. @@ -65,7 +65,7 @@ def load_data_from_py( except (TypeError, ValueError, AttributeError): warnings.warn( f"No valid data '{data_name}' was found in {mod!r}.", - stacklevel=2 + stacklevel=2, ) if fallback: @@ -79,7 +79,7 @@ def load_from_path( path: pathlib.Path, *, allow_exec: bool = False, data_name: typing.Optional[str] = None, - fallback: bool = False + fallback: bool = False, ) -> typing.Any: """Load data from given path `path`. 
@@ -90,7 +90,7 @@ def load_from_path( """ if allow_exec and (data_name or DATA_VAR_NAME) in path.read_text(): return load_data_from_py( - path, data_name=data_name, fallback=fallback + path, data_name=data_name, fallback=fallback, ) return load_literal_data_from_path(path) diff --git a/src/anyconfig/backend/sh/variables.py b/src/anyconfig/backend/sh/variables.py index 152e064d..c7fd4493 100644 --- a/src/anyconfig/backend/sh/variables.py +++ b/src/anyconfig/backend/sh/variables.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2016 - 2024 Satoru SATOH +# Copyright (C) 2016 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """A simple backend module to load and dump files contain shell variables. @@ -30,7 +30,7 @@ def _parseline( - line: str + line: str, ) -> tuple[typing.Optional[str], typing.Optional[str]]: """Parse a line contains shell variable definition. @@ -41,11 +41,12 @@ def _parseline( r"^\s*(export)?\s*(\S+)=(?:(?:" r"(?:\"(.*[^\\])\")|(?:'(.*[^\\])')|" r"(?:([^\"'#\s]+)))?)\s*#*", - line + line, ) if not match: warnings.warn( - f"Invalid line found: {line}", category=SyntaxWarning, stacklevel=2 + f"Invalid line found: {line}", category=SyntaxWarning, + stacklevel=2, ) return (None, None) @@ -55,7 +56,7 @@ def _parseline( def load( - stream: typing.IO, container: base.GenContainerT = dict, **_kwargs + stream: typing.IO, container: base.GenContainerT = dict, **_kwargs, ) -> base.InDataT: """Load shell variable definitions data from ``stream``. @@ -75,7 +76,7 @@ def load( if key is None: warnings.warn( f"Empty val in the line: {line}", - category=SyntaxWarning, stacklevel=2 + category=SyntaxWarning, stacklevel=2, ) continue @@ -94,7 +95,7 @@ class Parser(base.StreamParser): _dict_opts: tuple[str, ...] = ("ac_dict", ) def load_from_stream( - self, stream: typing.IO, container: base.GenContainerT, **kwargs + self, stream: typing.IO, container: base.GenContainerT, **kwargs, ) -> base.InDataT: """Load config from given file like object ``stream``. 
@@ -108,7 +109,7 @@ def load_from_stream( return load(stream, container=container, **kwargs) def dump_to_stream( - self, cnf: base.InDataExT, stream: typing.IO, **_kwargs + self, cnf: base.InDataExT, stream: typing.IO, **_kwargs, ) -> None: """Dump config dat ``cnf`` to a file or file-like object ``stream``. diff --git a/src/anyconfig/backend/xml/etree.py b/src/anyconfig/backend/xml/etree.py index c01b59bb..7e40be99 100644 --- a/src/anyconfig/backend/xml/etree.py +++ b/src/anyconfig/backend/xml/etree.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # Some XML modules may be missing and Base.{load,dumps}_impl are not @@ -72,7 +72,7 @@ from ...parser import parse_single from ...utils import ( filter_options, get_path_from_stream, - is_dict_like, is_iterable, noop + is_dict_like, is_iterable, noop, ) if typing.TYPE_CHECKING: @@ -92,7 +92,7 @@ def _namespaces_from_file( - xmlfile: typing.Union[base.PathOrStrT, typing.IO] + xmlfile: typing.Union[base.PathOrStrT, typing.IO], ) -> dict[str, tuple[str, str]]: """Get the namespace str from file. @@ -171,7 +171,7 @@ def _parse_text(val: str, **options) -> typing.Any: def _process_elem_text( elem: ElementTree.Element, dic: DicType, subdic: DicType, - text: str = "@text", **options + text: str = "@text", **options, ) -> None: """Process the text in the element ``elem``. @@ -197,7 +197,7 @@ def _process_elem_text( def _parse_attrs( elem: ElementTree.Element, container: GenDicType = dict, - **options + **options, ) -> DicType: """Parse the attributes of the element ``elem``. @@ -215,7 +215,7 @@ def _parse_attrs( def _process_elem_attrs( elem: ElementTree.Element, dic: DicType, subdic: DicType, container: GenDicType = dict, attrs: str = "@attrs", - **options + **options, ) -> None: """Process attributes in the element ``elem``. 
@@ -238,7 +238,7 @@ def _process_elem_attrs( def _process_children_elems( elem: ElementTree.Element, dic: DicType, subdic: DicType, container: GenDicType = dict, children: str = "@children", - **options + **options, ) -> None: """Process children of the element ``elem``. @@ -276,7 +276,7 @@ def _process_children_elems( def elem_to_container( elem: typing.Optional[ElementTree.Element], container: GenDicType = dict, - **options + **options, ) -> DicType: """Convert XML ElementTree Element to a collection of container objects. @@ -342,7 +342,7 @@ def _complement_tag_options(options: DicType) -> DicType: def root_to_container( root: ElementTree.Element, container: GenDicType = dict, nspaces: typing.Optional[DicType] = None, - **options + **options, ) -> DicType: """Convert XML ElementTree Root Element to container objects. @@ -380,7 +380,7 @@ def _to_str_fn(**options: DicType) -> collections.abc.Callable[..., str]: def _elem_set_attrs( obj: DicType, parent: ElementTree.Element, - to_str: collections.abc.Callable[..., str] + to_str: collections.abc.Callable[..., str], ) -> None: """Set attributes of the element ``parent``. @@ -396,7 +396,7 @@ def _elem_set_attrs( def _elem_from_descendants( - children_nodes: collections.abc.Iterable[DicType], **options + children_nodes: collections.abc.Iterable[DicType], **options, ) -> collections.abc.Iterator[ElementTree.Element]: """Get the elements from the descendants ``children_nodes``. @@ -413,7 +413,7 @@ def _elem_from_descendants( def _get_or_update_parent( key: str, val: typing.Any, to_str: collections.abc.Callable[..., str], parent: typing.Optional[ElementTree.Element] = None, - **options + **options, ) -> ElementTree.Element: """Get or update the parent element ``parent``. 
@@ -454,7 +454,7 @@ def _assert_if_invalid_node( def container_to_elem( obj: typing.Any, parent: typing.Optional[ElementTree.Element] = None, to_str: typing.Optional[collections.abc.Callable[..., str]] = None, - **options + **options, ) -> ElementTree.Element: """Convert a dict-like object to XML ElementTree. @@ -483,7 +483,7 @@ def container_to_elem( for key, val in obj.items(): if parent is None: parent = _get_or_update_parent( - key, val, to_str, parent=parent, **options + key, val, to_str, parent=parent, **options, ) continue @@ -496,14 +496,14 @@ def container_to_elem( parent.append(celem) else: parent = _get_or_update_parent( - key, val, to_str, parent=parent, **options + key, val, to_str, parent=parent, **options, ) return parent # type: ignore[return-value] def etree_write( - elem: ElementTree.Element, stream: typing.IO, **options + elem: ElementTree.Element, stream: typing.IO, **options, ) -> None: """Write XML ElementTree 'root' content into 'stream'. @@ -515,11 +515,11 @@ def etree_write( **filter_options( ("method", "xml_declaration", "default_namespace", "short_empty_elements"), - options + options, ) ) content: bytes = ElementTree.tostring( # type: ignore[call-overload] - elem, **opts + elem, **opts, ).encode("utf-8") stream.write(content) @@ -531,13 +531,13 @@ class Parser(base.Parser, base.ToStreamDumperMixin): _type: typing.ClassVar[str] = "xml" _extensions: tuple[str, ...] = ("xml", ) _load_opts: tuple[str, ...] = ( - "tags", "merge_attrs", "ac_parse_value" + "tags", "merge_attrs", "ac_parse_value", ) # .. 
seealso:: xml.etree.ElementTree.tostring _dump_opts = ( *_load_opts, "encoding", "method", "xml_declaration", "default_namespace", - "short_empty_elements" + "short_empty_elements", ) _ordered: typing.ClassVar[bool] = True @@ -546,7 +546,7 @@ class Parser(base.Parser, base.ToStreamDumperMixin): _open_write_mode: typing.ClassVar[str] = "wb" def load_from_string( - self, content: typing.AnyStr, container: GenDicType, **opts + self, content: typing.AnyStr, container: GenDicType, **opts, ) -> DicType: """Load config from XML snippet (a string 'content'). @@ -571,7 +571,7 @@ def load_from_string( def load_from_path( self, filepath: base.PathOrStrT, container: GenDicType, - **opts + **opts, ) -> DicType: """Load data from path ``filepath``. @@ -584,11 +584,11 @@ def load_from_path( elem = ElementTree.parse(filepath).getroot() nspaces = _namespaces_from_file(filepath) return root_to_container( - elem, container=container, nspaces=nspaces, **opts + elem, container=container, nspaces=nspaces, **opts, ) def load_from_stream( - self, stream: typing.IO, container: GenDicType, **opts + self, stream: typing.IO, container: GenDicType, **opts, ) -> DicType: """Load data from IO stream ``stream``. @@ -601,11 +601,12 @@ def load_from_stream( elem = ElementTree.parse(stream).getroot() path = get_path_from_stream(stream) nspaces = _namespaces_from_file(path) - return root_to_container(elem, container=container, - nspaces=nspaces, **opts) + return root_to_container( + elem, container=container, nspaces=nspaces, **opts, + ) def dump_to_string( # type: ignore[override] - self, cnf: base.InDataExT, **opts + self, cnf: base.InDataExT, **opts, ) -> bytes: """Dump data ``cnf`` as a str. @@ -623,7 +624,7 @@ def dump_to_string( # type: ignore[override] return bio.getvalue() def dump_to_stream( - self, cnf: base.InDataExT, stream: typing.IO, **opts + self, cnf: base.InDataExT, stream: typing.IO, **opts, ) -> None: """Dump data ``cnf`` to the IO stream ``stream``. 
diff --git a/src/anyconfig/backend/yaml/common.py b/src/anyconfig/backend/yaml/common.py index b024f107..043470cd 100644 --- a/src/anyconfig/backend/yaml/common.py +++ b/src/anyconfig/backend/yaml/common.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Common library for YAML backend modules.""" @@ -12,7 +12,7 @@ def filter_from_options( - key: str, options: dict[str, typing.Any] + key: str, options: dict[str, typing.Any], ) -> dict[str, typing.Any]: """Filter a key ``key`` in ``options. diff --git a/src/anyconfig/backend/yaml/pyyaml.py b/src/anyconfig/backend/yaml/pyyaml.py index 6c27b137..5e539792 100644 --- a/src/anyconfig/backend/yaml/pyyaml.py +++ b/src/anyconfig/backend/yaml/pyyaml.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # # type() is used to exactly match check instead of isinstance here. @@ -65,7 +65,7 @@ def _customized_loader( container: collections.abc.Callable[..., dict[str, typing.Any]], - loader: type[Loader] = Loader, mapping_tag: str = _MAPPING_TAG + loader: type[Loader] = Loader, mapping_tag: str = _MAPPING_TAG, ) -> type[Loader]: """Get the customized loader. @@ -76,7 +76,7 @@ def _customized_loader( :param container: Set container used internally """ def construct_mapping( - loader: Loader, node: typing.Any, *, deep: bool = False + loader: Loader, node: typing.Any, *, deep: bool = False, ) -> dict[str, typing.Any]: """Construct python object from yaml mapping node. 
@@ -87,7 +87,7 @@ def construct_mapping( if not isinstance(node, yaml.MappingNode): raise yaml.constructor.ConstructorError( None, None, f"expected a mapping node, but found {node.id}", - node.start_mark + node.start_mark, ) mapping = container() for key_node, value_node in node.value: @@ -95,10 +95,12 @@ def construct_mapping( try: hash(key) except TypeError as exc: - eargs = ("while constructing a mapping", - node.start_mark, - f"found unacceptable key ({exc!s})", - key_node.start_mark) + eargs = ( + "while constructing a mapping", + node.start_mark, + f"found unacceptable key ({exc!s})", + key_node.start_mark, + ) raise yaml.constructor.ConstructorError(*eargs) from exc value = loader.construct_object(value_node, deep=deep) mapping[key] = value @@ -108,7 +110,7 @@ def construct_mapping( tag = "tag:yaml.org,2002:python/unicode" def construct_ustr( - loader: Loader, node: typing.Any + loader: Loader, node: typing.Any, ) -> typing.Union[str, int, float, None]: """Unicode string constructor.""" return loader.construct_scalar(node) @@ -122,11 +124,11 @@ def construct_ustr( def _customized_dumper( - container: typing.Any, dumper: type[Dumper] = Dumper + container: typing.Any, dumper: type[Dumper] = Dumper, ) -> type[Dumper]: """Counterpart of :func:`_customized_loader` for dumpers.""" def container_representer( - dumper: Dumper, data: typing.Any, mapping_tag: str = _MAPPING_TAG + dumper: Dumper, data: typing.Any, mapping_tag: str = _MAPPING_TAG, ) -> typing.Any: """Container representer.""" return dumper.represent_mapping(mapping_tag, data.items()) @@ -137,7 +139,7 @@ def container_representer( def yml_fnc_by_name( - fname: str, **options + fname: str, **options, ) -> collections.abc.Callable[..., typing.Any]: """Get yaml loading/dumping function by name. 
@@ -165,7 +167,7 @@ def yml_fnc_(fname: str, *args, **options) -> typing.Any: def yml_load( stream: typing.IO, container: base.GenContainerT, yml_fnc: collections.abc.Callable[..., typing.Any] = yml_fnc_, - **options + **options, ) -> dict[str, typing.Any]: """Call yaml.safe_load and yaml.load. @@ -196,7 +198,7 @@ def yml_load( def yml_dump( data: typing.Any, stream: typing.IO, yml_fnc: collections.abc.Callable[..., typing.Any] = yml_fnc_, - **options + **options, ) -> None: """Call yaml.safe_dump and yaml.dump. @@ -231,7 +233,7 @@ class Parser(common.Parser): "stream", "ac_safe", "Dumper", "default_style", "default_flow_style", "canonical", "indent", "width", "allow_unicode", "line_break", "encoding", "explicit_start", - "explicit_end", "version", "tags" + "explicit_end", "version", "tags", ) load_from_stream = base.to_method(yml_load) diff --git a/src/anyconfig/backend/yaml/ruamel.py b/src/anyconfig/backend/yaml/ruamel.py index 6cd9610a..4304ddac 100644 --- a/src/anyconfig/backend/yaml/ruamel.py +++ b/src/anyconfig/backend/yaml/ruamel.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """A backend module to load and dump YAML data files using rumael.yaml. @@ -53,7 +53,7 @@ raise ImportError(msg) from exc _YAML_INIT_KWARGS: tuple[str, ...] = ( # kwargs for ruamel.yaml.YAML - "typ", "pure", "plug_ins" + "typ", "pure", "plug_ins", ) _YAML_INSTANCE_MEMBERS: tuple[str, ...] 
= ( "allow_duplicate_keys", "allow_unicode", @@ -70,13 +70,13 @@ "resolver", "scanner", "seq", "sequence_dash_offset", "sequence_indent", "serializer", "stream", "tags", "top_level_block_style_scalar_no_indent_error_1_1", - "top_level_colon_align", "version", "width" + "top_level_colon_align", "version", "width", ) _YAML_OPTS = (*_YAML_INIT_KWARGS, *_YAML_INSTANCE_MEMBERS) def yml_fnc( - fname: str, *args, **options + fname: str, *args, **options, ) -> typing.Optional[base.InDataExT]: """Call loading functions for yaml data. @@ -102,7 +102,7 @@ def yml_fnc( def yml_load( - stream: typing.IO, container: base.GenContainerT, **options + stream: typing.IO, container: base.GenContainerT, **options, ) -> base.InDataExT: """See :func:`anyconfig.backend.yaml.pyyaml.yml_load`.""" ret = yml_fnc("load", stream, **options) @@ -113,7 +113,7 @@ def yml_load( def yml_dump( - data: base.InDataExT, stream: typing.IO, **options + data: base.InDataExT, stream: typing.IO, **options, ) -> None: """See :func:`anyconfig.backend.yaml.pyyaml.yml_dump`.""" # .. 
todo:: diff --git a/src/anyconfig/cli/actions.py b/src/anyconfig/cli/actions.py index 1a5c7867..47f738ad 100644 --- a/src/anyconfig/cli/actions.py +++ b/src/anyconfig/cli/actions.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Actions for anyconfig.cli.*.""" @@ -20,5 +20,5 @@ def try_output_result( """Try to output result.""" api.dump( cnf, args.output, args.otype, - **(args.extra_opts if args.extra_opts else {}) + **(args.extra_opts if args.extra_opts else {}), ) diff --git a/src/anyconfig/cli/constants.py b/src/anyconfig/cli/constants.py index f435dd35..e3bd088f 100644 --- a/src/anyconfig/cli/constants.py +++ b/src/anyconfig/cli/constants.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Provides constants for anyconfig.cli.""" @@ -51,5 +51,3 @@ SET_HELP = ("Specify key path to set (update) part of config, for " "example, '--set a.b.c=1' to a config {'a': {'b': {'c': 0, " "'d': 1}}} gives {'a': {'b': {'c': 1, 'd': 1}}}.") - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/cli/detectors.py b/src/anyconfig/cli/detectors.py index 699cf798..755b95b9 100644 --- a/src/anyconfig/cli/detectors.py +++ b/src/anyconfig/cli/detectors.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2025 Satoru SATOH # SPDX-License-Identifier: MIT # """Detect file type and parser from inputs and/or output.""" @@ -57,7 +57,7 @@ def find_by_the_type(io_type: str) -> typing.Optional[str]: def find_by_the_paths( - paths: list[str], *, ignore_errors: bool = True + paths: list[str], *, ignore_errors: bool = True, ) -> typing.Optional[str]: """Try to detect file (parser) type from given file paths ``paths``.""" default = None @@ -75,7 +75,7 @@ def find_by_the_paths( "Failed to detect a file type because given file paths " "may contain files with multiple types: " 
f"{paths_s}{os.linesep}{msg}", - 1 + 1, ) if constants.STD_IN_OR_OUT not in paths: @@ -87,14 +87,14 @@ def find_by_the_paths( utils.exit_with_output( "Failed to detect the file type because it is/those are " f"unknown file type[s]: {paths_s}{os.linesep}{msg}", - 1 + 1, ) return default def try_detecting_input_type( - args: argparse.Namespace, *, ignore_errors: bool = True + args: argparse.Namespace, *, ignore_errors: bool = True, ) -> typing.Optional[str]: """Try to resolve a file type and parser of inputs.""" # First, try the type given by users. @@ -111,7 +111,7 @@ def try_detecting_input_type( def try_detecting_output_type( - args: argparse.Namespace + args: argparse.Namespace, ) -> typing.Optional[str]: """Try to resolve a file type and parser of outputs (``args.output``).""" # First, try the type given by users. @@ -133,7 +133,7 @@ def try_detecting_output_type( "Failed to find or detect the file type: " f"itype={args.itype}, otype={args.otype}, " f"output={args.output}, inputs={', '.join(args.inputs)}", - 1 + 1, ) return itype From ce05f2f3d07c73cca29e08e23567e71b9f9bf3da Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Tue, 3 Feb 2026 01:08:37 +0900 Subject: [PATCH 02/27] fix: enable all rules by ruff linter by default --- pyproject.toml | 85 +++++++++++--------------------------------------- 1 file changed, 19 insertions(+), 66 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 820bae35..ad4f1a35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,7 +118,11 @@ disable = [ # .. 
seealso:: https://docs.astral.sh/ruff/settings/#top-level [tool.ruff] exclude = [ - "tests/res/**/*.py", + "tests/res/*/*/*/*.py", + "tests/res/*/*/*/*/*.py", + "tests/res/*/*/*/*/*/*.py", + "tests/res/*/*/*/*/*/*/*.py", + "tests/res/*/*/*/*/*/*/*/*.py", ] src = [ "src", @@ -126,72 +130,8 @@ src = [ ] [tool.ruff.lint] -ignore = [ - "ANN002", # https://docs.astral.sh/ruff/rules/missing-type-args/ - "ANN003", # https://docs.astral.sh/ruff/rules/missing-type-kwargs/ - "ANN401", # https://docs.astral.sh/ruff/rules/any-type/ - "D203", # https://docs.astral.sh/ruff/rules/incorrect-blank-line-before-class/ - "I001", # https://docs.astral.sh/ruff/rules/unsorted-imports/ - # https://github.com/PyCQA/isort/issues/2146 - - "ICN001", # https://docs.astral.sh/ruff/rules/unconventional-import-alias/ - "TID252", # https://docs.astral.sh/ruff/rules/relative-imports/ - - "RUF022", # https://docs.astral.sh/ruff/rules/unsorted-dunder-all/ - - # .. note:: These are disabled until py39 support is dropped. - "UP007", # https://docs.astral.sh/ruff/rules/non-pep604-annotation-union/ - "UP045", # https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional/ -] select = [ - "A", # flake8-builtins - "ANN", # flake8-annotations - "ARG", # flake8-unused-arguments - "B", # flake8-bugbear - "BLE", # flake8-blind-except -# "COM", # flake8-commas (It's not ready yet.) 
- "C4", # flake8-comprehensions - "C90", # mccabe - "D", # pydocstyle - "DTZ", # flake8-datetimez - "E", # pycodestyle - "EM", # flake8-errmsg - "ERA", # flake8-eradicate - "EXE", # flake8-executable - "F", # pyflakes - "FA", # flake8-future-annotations - "FBT", # flake8-boolean-trap - "G", # flake8-logging-format - "I", # isort - "ICN", # flake8-import-conventions - "INP", # flake8-no-pep420 - "ISC", # flake8-implicit-str-concat - "N", # pep8-naming - "PD", # flake8-vet - "PERF", # Perflint - "PGH", # pygrep-hooks - "PIE", # flake8-pie - "PLC", # Pylint - Convention - "PLE", # Pylint - Error - "PLR", # Pylint - Refactor - "PLW", # Pylint - Warning - "PTH", # flake8-use-pathlib - "PYI", # flake8-pyi - "Q", # flake8-quotes - "RET", # flake8-return - "RUF", # Ruff-specific rules - "RSE", # flake8-raise - "S", # flake8-bandit - "SLF", # flake8-self - "SLOT", # flake8-slots - "SIM", # flake8-simplify - "T20", # flake8-print - "TCH", # flake8-type-checking - "TID", # flake8-tidy-imports - "TRY", # tryceratops - "UP", # pyupgrade - "W", # pycodestyle - "YTT", # flake8-2020 + "ALL", ] [tool.ruff.lint.extend-per-file-ignores] @@ -204,6 +144,19 @@ select = [ # https://docs.astral.sh/ruff/rules/suspicious-xml-element-tree-usage/ "src/anyconfig/backend/xml/etree.py" = ["S314"] +"tests/*.py" = [ + "S101", # https://docs.astral.sh/ruff/rules/assert/ +] +"tests/*/*.py" = [ + "S101", +] +"tests/*/*/*.py" = [ + "S101", +] +"tests/*/*/*/*.py" = [ + "S101", +] + #[tool.ruff.lint.flake8-annotations] #[tool.ruff.lint.flake8-bandit] #[tool.ruff.lint.flake8-bugbear] From 0fce3d5f734158c4ecc80ee33a5159db213c0cfc Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Tue, 3 Feb 2026 01:18:42 +0900 Subject: [PATCH 03/27] fix: rename tests.api.dump.test_basics.test_data to make its purpose clearer --- tests/api/dump/test_basics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/api/dump/test_basics.py b/tests/api/dump/test_basics.py index 91655af6..ac181b84 100644 --- 
a/tests/api/dump/test_basics.py +++ b/tests/api/dump/test_basics.py @@ -32,7 +32,7 @@ ] -def test_data() -> None: +def test_data_is_defined_and_not_empty() -> None: assert DATA From e018e66f1be05e9b9ae1fbb936bb48b48c27fa4c Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Tue, 3 Feb 2026 02:43:52 +0900 Subject: [PATCH 04/27] fix: add configurations as workarounds to avoid some errors by ruff in tests/**/*.py --- pyproject.toml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index ad4f1a35..038a1052 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,6 +118,9 @@ disable = [ # .. seealso:: https://docs.astral.sh/ruff/settings/#top-level [tool.ruff] exclude = [ + "tests/__init__.py", + "tests/*/__init__.py", + "tests/*/*/__init__.py", "tests/res/*/*/*/*.py", "tests/res/*/*/*/*/*.py", "tests/res/*/*/*/*/*/*.py", @@ -145,16 +148,38 @@ select = [ "src/anyconfig/backend/xml/etree.py" = ["S314"] "tests/*.py" = [ + # https://docs.astral.sh/ruff/rules/missing-type-function-argument/ + "ANN001", + "D103", # https://docs.astral.sh/ruff/rules/undocumented-public-function/ + "I001", # https://docs.astral.sh/ruff/rules/unsorted-imports/ + # https://docs.astral.sh/ruff/rules/lowercase-imported-as-non-lowercase/ + "N812", "S101", # https://docs.astral.sh/ruff/rules/assert/ + "TID252", # https://docs.astral.sh/ruff/rules/relative-imports/ ] "tests/*/*.py" = [ + "ANN001", + "D103", + "I001", + "N812", "S101", + "TID252", ] "tests/*/*/*.py" = [ + "ANN001", + "D103", + "I001", + "N812", "S101", + "TID252", ] "tests/*/*/*/*.py" = [ + "ANN001", + "D103", + "I001", + "N812", "S101", + "TID252", ] #[tool.ruff.lint.flake8-annotations] From 6bb131580fd6800a5fc99057d4b349235356085d Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Tue, 3 Feb 2026 02:45:14 +0900 Subject: [PATCH 05/27] fix: corrections and workarounds for some errors by ruff linter Corrections and workarounds for following errors in some test codes, 
tests.api.{dumps.*,load.{common,multi_load.*}} and tests.parsers.* by ruff linter. - COM812: https://docs.astral.sh/ruff/rules/missing-trailing-comma/ - D101: https://docs.astral.sh/ruff/rules/undocumented-public-class/ - PT011: https://docs.astral.sh/ruff/rules/pytest-raises-too-broad/ - RUF015: https://docs.astral.sh/ruff/rules/unnecessary-iterable-allocation-for-first-element/ --- tests/api/dump/test_basics.py | 12 ++++++--- tests/api/dumps/test_basics.py | 12 ++++++--- tests/api/load/common.py | 2 +- tests/api/load/multi_load/common.py | 2 +- tests/api/load/multi_load/test_basics.py | 26 +++++++++---------- tests/api/load/multi_load/test_multi_types.py | 4 +-- tests/api/load/multi_load/test_query.py | 10 +++---- tests/api/load/multi_load/test_schema.py | 10 +++---- tests/api/load/multi_load/test_template.py | 6 ++--- tests/parsers/test_parsers.py | 3 ++- tests/parsers/test_utils.py | 3 ++- 11 files changed, 50 insertions(+), 40 deletions(-) diff --git a/tests/api/dump/test_basics.py b/tests/api/dump/test_basics.py index ac181b84..d6c374fb 100644 --- a/tests/api/dump/test_basics.py +++ b/tests/api/dump/test_basics.py @@ -13,7 +13,7 @@ import anyconfig.api._dump as TT from anyconfig.api import ( - UnknownFileTypeError, UnknownProcessorTypeError + UnknownFileTypeError, UnknownProcessorTypeError, ) from ... 
import common @@ -37,14 +37,18 @@ def test_data_is_defined_and_not_empty() -> None: @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_dump(obj, opts: dict, exp, tmp_path: pathlib.Path) -> None: +def test_dump( + obj, opts: dict, exp, tmp_path: pathlib.Path, +) -> None: out = tmp_path / "out.json" TT.dump(obj, out, **opts) assert out.read_text() == exp @pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) -def test_dump_without_ac_parser_option(obj, opts: dict, exp) -> None: +def test_dump_without_ac_parser_option( + obj, opts: dict, exp, +) -> None: assert opts or exp with pytest.raises(UnknownFileTypeError): TT.dump(obj, "out.txt") @@ -52,7 +56,7 @@ def test_dump_without_ac_parser_option(obj, opts: dict, exp) -> None: @pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) def test_dump_with_invalid_ac_parser_option( - obj, opts: dict, exp + obj, opts: dict, exp, ) -> None: assert opts or exp with pytest.raises(UnknownProcessorTypeError): diff --git a/tests/api/dumps/test_basics.py b/tests/api/dumps/test_basics.py index e4b44f35..6274594a 100644 --- a/tests/api/dumps/test_basics.py +++ b/tests/api/dumps/test_basics.py @@ -32,20 +32,24 @@ def test_data() -> None: @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_dumps(obj, opts: dict, exp) -> None: +def test_dumps( + obj, opts: dict, exp, +) -> None: assert TT.dumps(obj, **opts) == exp @pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) -def test_dumps_without_ac_parser_option(obj, opts: dict, exp) -> None: +def test_dumps_without_ac_parser_option( + obj, opts: dict, exp, +) -> None: assert opts or exp - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="The first argument"): TT.dumps(obj) @pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) def test_dumps_with_invalid_ac_parser_option( - obj, opts: dict, exp + obj, opts: dict, exp, ) -> None: assert opts or exp with pytest.raises(UnknownProcessorTypeError): diff --git a/tests/api/load/common.py 
b/tests/api/load/common.py index 3be48618..940f1181 100644 --- a/tests/api/load/common.py +++ b/tests/api/load/common.py @@ -8,6 +8,6 @@ from __future__ import annotations from ...common import ( # noqa: F401 - get_test_ids, load_data_for_testfile + get_test_ids, load_data_for_testfile, ) from ..single_load.constants import LOADER_TYPES # noqa: F401 diff --git a/tests/api/load/multi_load/common.py b/tests/api/load/multi_load/common.py index ca1bde07..5d6ed664 100644 --- a/tests/api/load/multi_load/common.py +++ b/tests/api/load/multi_load/common.py @@ -9,5 +9,5 @@ from ...multi_load.common import ( # noqa: F401 NAMES, GLOB_PATTERN, - load_data_for_testfile, get_test_ids + load_data_for_testfile, get_test_ids, ) diff --git a/tests/api/load/multi_load/test_basics.py b/tests/api/load/multi_load/test_basics.py index 33996a1f..71aabf7e 100644 --- a/tests/api/load/multi_load/test_basics.py +++ b/tests/api/load/multi_load/test_basics.py @@ -14,7 +14,7 @@ import anyconfig.api._load as TT from .common import ( - NAMES, GLOB_PATTERN, load_data_for_testfile, get_test_ids + NAMES, GLOB_PATTERN, load_data_for_testfile, get_test_ids, ) if typing.TYPE_CHECKING: @@ -35,13 +35,13 @@ def test_data() -> None: def test_load_with_empty_list() -> None: - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Maybe invalid input"): TT.load([]) @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load_for_a_list_of_path_objects( - inputs: list[pathlib.Path], opts: dict, exp + inputs: list[pathlib.Path], opts: dict, exp, ) -> None: assert TT.load(inputs, **opts) == exp assert TT.load((i for i in inputs), **opts) == exp @@ -49,35 +49,35 @@ def test_load_for_a_list_of_path_objects( @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load_for_a_list_of_path_strings( - inputs: list[pathlib.Path], opts: dict, exp + inputs: list[pathlib.Path], opts: dict, exp, ) -> None: assert TT.load([str(i) for i in inputs], **opts) == exp assert TT.load((str(i) for i in 
inputs), **opts) == exp @pytest.mark.parametrize( - NAMES, DATA_W_GLOB, ids=get_test_ids(DATA_W_GLOB) + NAMES, DATA_W_GLOB, ids=get_test_ids(DATA_W_GLOB), ) def test_load_for_glob_patterns( - inputs: list[pathlib.Path], opts: dict, exp + inputs: list[pathlib.Path], opts: dict, exp, ) -> None: assert TT.load(inputs, **opts) == exp @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load_for_a_list_of_streams( - inputs: list[pathlib.Path], opts: dict, exp + inputs: list[pathlib.Path], opts: dict, exp, ) -> None: assert TT.load([i.open() for i in inputs], **opts) == exp class MyDict(collections.OrderedDict): - pass + """Custom dict-like object.""" @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load_with_ac_dict_option( - inputs: list[pathlib.Path], opts: dict, exp + inputs: list[pathlib.Path], opts: dict, exp, ) -> None: res = TT.load(inputs, ac_dict=MyDict, **opts) assert res == exp @@ -86,20 +86,20 @@ def test_load_with_ac_dict_option( @pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) def test_load_with_wrong_merge_strategy( - inputs: list[pathlib.Path], opts: dict, exp + inputs: list[pathlib.Path], opts: dict, exp, ) -> None: assert exp # dummy to avoid an error of unused argument. 
- with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Wrong merge strategy"): TT.load(inputs, ac_merge="wrong_merge_strategy", **opts) -def test_load_with_ignore_missing_option(): +def test_load_with_ignore_missing_option() -> None: paths = [ "/path/to/file_not_exist_0.json", "/path/to/file_not_exist_1.json", "/path/to/file_not_exist_2.json", ] - with pytest.raises(FileNotFoundError): + with pytest.raises(FileNotFoundError, match="No such file or directory"): TT.load(paths) assert TT.load(paths, ac_ignore_missing=True) == {} diff --git a/tests/api/load/multi_load/test_multi_types.py b/tests/api/load/multi_load/test_multi_types.py index b2851759..453e3342 100644 --- a/tests/api/load/multi_load/test_multi_types.py +++ b/tests/api/load/multi_load/test_multi_types.py @@ -13,7 +13,7 @@ import anyconfig.api._load as TT from .common import ( - NAMES, load_data_for_testfile, get_test_ids + NAMES, load_data_for_testfile, get_test_ids, ) if typing.TYPE_CHECKING: @@ -30,6 +30,6 @@ def test_data() -> None: @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load( - inputs: list[pathlib.Path], opts: dict, exp + inputs: list[pathlib.Path], opts: dict, exp, ) -> None: assert TT.load(inputs, **opts) == exp diff --git a/tests/api/load/multi_load/test_query.py b/tests/api/load/multi_load/test_query.py index 8d7ffc3e..fb3cb98d 100644 --- a/tests/api/load/multi_load/test_query.py +++ b/tests/api/load/multi_load/test_query.py @@ -14,7 +14,7 @@ import anyconfig.query from .common import ( - load_data_for_testfile, get_test_ids + load_data_for_testfile, get_test_ids, ) if typing.TYPE_CHECKING: @@ -24,7 +24,7 @@ if not anyconfig.query.SUPPORTED: pytest.skip( "jmespath lib to neede for query is not available.", - allow_module_level=True + allow_module_level=True, ) NAMES: tuple[str, ...] 
= ("inputs", "query", "exp") @@ -38,16 +38,16 @@ def test_data() -> None: @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load( - inputs: list[pathlib.Path], query: str, exp + inputs: list[pathlib.Path], query: str, exp, ) -> None: assert TT.load(inputs, ac_query=query) == exp @pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) def test_load_with_invalid_query( - inputs: list[pathlib.Path], query: str, exp + inputs: list[pathlib.Path], query: str, exp, ) -> None: assert query or exp # To avoid an error not using them. assert TT.load( - inputs, ac_query="" + inputs, ac_query="", ) == TT.load(inputs) diff --git a/tests/api/load/multi_load/test_schema.py b/tests/api/load/multi_load/test_schema.py index 8b951a84..bcdec417 100644 --- a/tests/api/load/multi_load/test_schema.py +++ b/tests/api/load/multi_load/test_schema.py @@ -24,14 +24,14 @@ if "jsonschema" not in anyconfig.schema.VALIDATORS: pytest.skip( "jsonschema lib is not available.", - allow_module_level=True + allow_module_level=True, ) def scm_path_from_inputs(inputs: list[pathlib.Path]) -> pathlib.Path: path = inputs[0] name = path.name[:-len(path.suffix)] - return list((path.parent / "s").glob(f"{name}.*"))[0] + return next((path.parent / "s").glob(f"{name}.*")) NAMES: tuple[str, ...] 
= (*common.NAMES, "scm") @@ -48,7 +48,7 @@ def test_data() -> None: @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load( - inputs: list[pathlib.Path], opts: dict, exp, scm: pathlib.Path + inputs: list[pathlib.Path], opts: dict, exp, scm: pathlib.Path, ) -> None: assert TT.load(inputs, ac_schema=scm, **opts) == exp @@ -59,7 +59,7 @@ def test_load( @pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) def test_load_with_validation_failure( inputs: list[pathlib.Path], opts: dict, exp, scm: pathlib.Path, - tmp_path: pathlib.Path + tmp_path: pathlib.Path, ) -> None: assert exp or scm # dummy @@ -68,5 +68,5 @@ def test_load_with_validation_failure( with pytest.raises(ValidationError): TT.load( - inputs, ac_schema=scm, ac_schema_safe=False, **opts + inputs, ac_schema=scm, ac_schema_safe=False, **opts, ) diff --git a/tests/api/load/multi_load/test_template.py b/tests/api/load/multi_load/test_template.py index 9aa566ff..2a24aa05 100644 --- a/tests/api/load/multi_load/test_template.py +++ b/tests/api/load/multi_load/test_template.py @@ -21,13 +21,13 @@ if not anyconfig.template.SUPPORTED: pytest.skip( "jinja2 lib neede for template option is not available", - allow_module_level=True + allow_module_level=True, ) NAMES: tuple[str, ...] 
= (*common.NAMES, "ctx") DATA: list = common.load_data_for_testfile( - __file__, values=(("o", {}), ("e", None), ("c", {})) + __file__, values=(("o", {}), ("e", None), ("c", {})), ) DATA_IDS: list[str] = common.get_test_ids(DATA) @@ -38,6 +38,6 @@ def test_data() -> None: @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load( - inputs: list[pathlib.Path], opts: dict, exp, ctx: dict + inputs: list[pathlib.Path], opts: dict, exp, ctx: dict, ) -> None: assert TT.load(inputs, ac_context=ctx, **opts) == exp diff --git a/tests/parsers/test_parsers.py b/tests/parsers/test_parsers.py index 6d32a30c..f0a7b799 100644 --- a/tests/parsers/test_parsers.py +++ b/tests/parsers/test_parsers.py @@ -3,7 +3,8 @@ # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"Test cases for anyconfig.parsers.""" +"""Test cases for anyconfig.parsers. +""" from __future__ import annotations import pathlib diff --git a/tests/parsers/test_utils.py b/tests/parsers/test_utils.py index e80c8dee..398e1431 100644 --- a/tests/parsers/test_utils.py +++ b/tests/parsers/test_utils.py @@ -3,7 +3,8 @@ # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring -"Test cases for anyconfig.parsers.utils.""" +"""Test cases for anyconfig.parsers.utils. +""" from __future__ import annotations import operator From f482a1de430c5a3b1fdae1bd5ceb59dc49cd795f Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Tue, 3 Feb 2026 03:04:33 +0900 Subject: [PATCH 06/27] fix: disable specific ruff rules to avoid conflicts --- pyproject.toml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 038a1052..a1f8af13 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -134,7 +134,13 @@ src = [ [tool.ruff.lint] select = [ - "ALL", + "ALL", +] +ignore = [ + "D203", # Because it looks conflict with D211. + # https://docs.astral.sh/ruff/rules/incorrect-blank-line-before-class/ + "D213", # Because it looks conflict with D212. 
+ # https://docs.astral.sh/ruff/rules/multi-line-summary-second-line/ ] [tool.ruff.lint.extend-per-file-ignores] From cf0540e7688ff9f90b978b49c6709c11ac27d3cd Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Tue, 3 Feb 2026 03:05:02 +0900 Subject: [PATCH 07/27] fix: cleanups and workarounds for ruff --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index af607bf1..28d31ac6 100644 --- a/tox.ini +++ b/tox.ini @@ -30,7 +30,6 @@ deps = -r{toxinidir}/requirements.txt -r{toxinidir}/tests/requirements.txt -r {toxinidir}/tests/requirements.d/base.txt - lint: ruff commands = pytest setenv = @@ -42,7 +41,8 @@ deps = commands = flake8 --doctests src tests - pylint --init-hook 'import os,sys; sys.path.insert(0, os.curdir)' src - ruff check src + - ruff check src + - ruff check src --statistics [testenv:type-check] deps = From a2a2a8fa690872a9a676060b665aef62eb2da9 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Tue, 3 Feb 2026 03:59:22 +0900 Subject: [PATCH 08/27] fix: corrections for ruff errors in tests.api.load.single_load.* Corrections for the following errors in some test codes, tests.api.load.single_load.* pointed by ruff linter. 
- ANN201: https://docs.astral.sh/ruff/rules/missing-return-type-undocumented-public-function/ - COM812: https://docs.astral.sh/ruff/rules/missing-trailing-comma/ - PT006: https://docs.astral.sh/ruff/rules/pytest-parametrize-names-wrong-type/ - PT011: https://docs.astral.sh/ruff/rules/pytest-raises-too-broad/ - Q001: https://docs.astral.sh/ruff/rules/bad-quotes-multiline-string/ --- tests/api/load/single_load/test_ac_parser.py | 2 +- tests/api/load/single_load/test_basics.py | 44 ++++++++++--------- .../api/load/single_load/test_multi_types.py | 6 +-- tests/api/load/single_load/test_primitives.py | 2 +- tests/api/load/single_load/test_query.py | 14 +++--- tests/api/load/single_load/test_schema.py | 28 ++++++------ tests/api/load/single_load/test_template.py | 22 ++++++---- tests/api/load/single_load/test_toml.py | 6 +-- tests/api/load/single_load/test_yaml.py | 6 +-- 9 files changed, 70 insertions(+), 60 deletions(-) diff --git a/tests/api/load/single_load/test_ac_parser.py b/tests/api/load/single_load/test_ac_parser.py index d88b63f1..a4c20bf2 100644 --- a/tests/api/load/single_load/test_ac_parser.py +++ b/tests/api/load/single_load/test_ac_parser.py @@ -23,7 +23,7 @@ DATA_IDS: list[str] = common.get_test_ids(DATA) -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA diff --git a/tests/api/load/single_load/test_basics.py b/tests/api/load/single_load/test_basics.py index cfd4eba0..f98370ca 100644 --- a/tests/api/load/single_load/test_basics.py +++ b/tests/api/load/single_load/test_basics.py @@ -15,12 +15,12 @@ import anyconfig.parsers from anyconfig.api import ( - UnknownFileTypeError, UnknownProcessorTypeError + UnknownFileTypeError, UnknownProcessorTypeError, ) from .. import common -JSON_PARSER = anyconfig.parsers.find(None, 'json') +JSON_PARSER = anyconfig.parsers.find(None, "json") NAMES: tuple[str, ...] 
= ("ipath", "opts", "exp") DATA: list = common.load_data_for_testfile(__file__) @@ -30,7 +30,7 @@ DATA_2: list = [(ipath, exp) for ipath, _, exp in DATA] -def test_data() -> None: +def test_data_is_not_empty() -> None: assert DATA @@ -39,66 +39,70 @@ class MyDict(collections.OrderedDict): @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_load_from_stream(ipath, opts, exp): +def test_load_from_stream(ipath, opts: dict, exp) -> None: assert TT.load(ipath.open(), **opts) == exp @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_load_from_path_str(ipath, opts, exp): +def test_load_from_path_str(ipath, opts: dict, exp) -> None: assert TT.load(str(ipath), **opts) == exp @pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) -def test_load_with_ac_parser_by_instance(ipath, exp): +def test_load_with_ac_parser_by_instance(ipath, exp) -> None: assert TT.load(ipath, ac_parser=JSON_PARSER) == exp @pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) -def test_load_with_ac_parser_by_id(ipath, exp): - assert TT.load(ipath, ac_parser=JSON_PARSER.cid()) == exp +def test_load_with_ac_parser_by_id(ipath, exp) -> None: + assert TT.load( + ipath, ac_parser=JSON_PARSER.cid(), + ) == exp @pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) -def test_load_with_ac_ordered(ipath, exp): +def test_load_with_ac_ordered(ipath, exp) -> None: assert TT.load( - ipath, ac_ordered=True + ipath, ac_ordered=True, ) == collections.OrderedDict(exp) @pytest.mark.parametrize(NAMES_2, DATA_2, ids=DATA_IDS) -def test_load_with_ac_dict(ipath, exp): +def test_load_with_ac_dict(ipath, exp) -> None: res = TT.load(ipath, ac_dict=MyDict) assert isinstance(res, MyDict) assert res == MyDict(**exp) -def test_load_missing_file_failures(): +def test_load_missing_file_failures() -> None: with pytest.raises(FileNotFoundError): TT.load("not_exist.json") -def test_load_unknown_file_type_failures(): +def test_load_unknown_file_type_failures() -> None: with pytest.raises(UnknownFileTypeError): 
TT.load("dummy.txt") -def test_load_invalid_parser_object_failures(): - with pytest.raises(ValueError): +def test_load_invalid_parser_object_failures() -> None: + with pytest.raises(ValueError, match="Wrong processor class"): TT.load("dummy.txt", ac_parser=object()) @pytest.mark.parametrize( - ("ipath", ), [(ipath, ) for ipath, _, _ in DATA], ids=DATA_IDS + "ipath", + [(ipath, ) for ipath, _, _ in DATA], + ids=DATA_IDS, ) -def test_load_unknown_processor_type_failures(ipath): +def test_load_unknown_processor_type_failures(ipath) -> None: with pytest.raises(UnknownProcessorTypeError): TT.load(ipath, ac_parser="proc_does_not_exist") -def test_load_ignore_missing(): - ipath = pathlib.Path() / 'conf_file_not_exist.json' +def test_load_ignore_missing() -> None: + ipath = pathlib.Path() / "conf_file_not_exist.json" assert not ipath.exists() assert TT.load( - ipath, ac_parser='json', ac_ignore_missing=True + ipath, ac_parser="json", ac_ignore_missing=True, ) == {} diff --git a/tests/api/load/single_load/test_multi_types.py b/tests/api/load/single_load/test_multi_types.py index ed20b9e1..8f8869e3 100644 --- a/tests/api/load/single_load/test_multi_types.py +++ b/tests/api/load/single_load/test_multi_types.py @@ -23,12 +23,12 @@ DATA_IDS: list[str] = common.get_test_ids(DATA) -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) def test_load( - ipath: pathlib.Path, opts: dict, exp -): + ipath: pathlib.Path, opts: dict, exp, +) -> None: assert TT.load(ipath, **opts) == exp diff --git a/tests/api/load/single_load/test_primitives.py b/tests/api/load/single_load/test_primitives.py index 739315d6..dc33dcaa 100644 --- a/tests/api/load/single_load/test_primitives.py +++ b/tests/api/load/single_load/test_primitives.py @@ -23,7 +23,7 @@ DATA_IDS: list[str] = common.get_test_ids(DATA) -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA diff --git 
a/tests/api/load/single_load/test_query.py b/tests/api/load/single_load/test_query.py index 9813a41b..2a622675 100644 --- a/tests/api/load/single_load/test_query.py +++ b/tests/api/load/single_load/test_query.py @@ -17,7 +17,7 @@ except ImportError: pytest.skip( "Required query module is not available", - allow_module_level=True + allow_module_level=True, ) from .. import common @@ -28,26 +28,26 @@ NAMES: tuple[str, ...] = ("ipath", "exp", "query", "opts") DATA: list = common.load_data_for_testfile( - __file__, (("e", None), ("q", ""), ("o", {})) + __file__, (("e", None), ("q", ""), ("o", {})), ) DATA_IDS: list[str] = common.get_test_ids(DATA) DATA_2 = [(i, o) for i, _, _, o in DATA] -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_load(ipath: pathlib.Path, exp, query, opts): +def test_load(ipath: pathlib.Path, exp, query, opts) -> None: assert TT.load(ipath, ac_query=query.strip(), **opts) == exp @pytest.mark.parametrize(("ipath", "opts"), DATA_2, ids=DATA_IDS) def test_load_with_invalid_query_string( - ipath: pathlib.Path, opts -): + ipath: pathlib.Path, opts, +) -> None: assert TT.load( - ipath, ac_query=None, **opts + ipath, ac_query=None, **opts, ) == TT.load(ipath, **opts) diff --git a/tests/api/load/single_load/test_schema.py b/tests/api/load/single_load/test_schema.py index 57db43ba..fcee40a2 100644 --- a/tests/api/load/single_load/test_schema.py +++ b/tests/api/load/single_load/test_schema.py @@ -6,7 +6,6 @@ """Test cases for anyconfig.api.load with schema options.""" from __future__ import annotations -import pathlib import typing import warnings @@ -23,17 +22,20 @@ except ImportError: pytest.skip( "Required jsonschema lib is not available.", - allow_module_level=True + allow_module_level=True, ) +if typing.TYPE_CHECKING: + import pathlib -SCM_NG_0 = '''{ + +SCM_NG_0 = """{ "type": "object", "properties": {"key_never_exist": {"type": "string", "required": true}} 
-}''' +}""" -def ipath_to_scm_path(ipath: pathlib.Path) -> typing.Optional[pathlib.Path]: +def ipath_to_scm_path(ipath: pathlib.Path) -> pathlib.Path | None: basename: str = ipath.name.replace(ipath.suffix, "") candidates = list((ipath.parent / "s").glob(f"{basename}.*")) if candidates: @@ -50,12 +52,12 @@ def ipath_to_scm_path(ipath: pathlib.Path) -> typing.Optional[pathlib.Path]: DATA_IDS: list[str] = common.get_test_ids(DATA) -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_load(ipath, exp, opts, scm): +def test_load(ipath, exp, opts, scm) -> None: assert scm, f"Not found: {scm!s} [{ipath!s}" assert TT.load(ipath, ac_schema=scm, **opts) == exp @@ -63,23 +65,23 @@ def test_load(ipath, exp, opts, scm): @pytest.mark.parametrize( ("ipath", "opts"), [(ipath, opts) for ipath, _, opts, _ in DATA[:1]], - ids=DATA_IDS[:1] + ids=DATA_IDS[:1], ) def test_load_failures( - ipath, opts, tmp_path: pathlib.Path + ipath, opts, tmp_path: pathlib.Path, ) -> None: - scm = tmp_path / 'scm.json' + scm = tmp_path / "scm.json" scm.write_text(SCM_NG_0) with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') + warnings.simplefilter("always") res = TT.load( - ipath, ac_schema=scm, ac_schema_safe=True, **opts + ipath, ac_schema=scm, ac_schema_safe=True, **opts, ) assert res is None assert len(warns) > 0 assert issubclass(warns[-1].category, UserWarning) - assert 'scm=' in str(warns[-1].message) + assert "scm=" in str(warns[-1].message) with pytest.raises(ValidationError): TT.load(ipath, ac_schema=scm, ac_schema_safe=False) diff --git a/tests/api/load/single_load/test_template.py b/tests/api/load/single_load/test_template.py index 1f8265c5..09e009ec 100644 --- a/tests/api/load/single_load/test_template.py +++ b/tests/api/load/single_load/test_template.py @@ -6,6 +6,7 @@ """Test cases for anyconfig.api.load with template args.""" from __future__ import annotations +import 
typing import warnings import pytest @@ -16,37 +17,40 @@ except ImportError: pytest.skip( "Requried jinja2 lib is not available.", - allow_module_level=True + allow_module_level=True, ) from .. import common +if typing.TYPE_CHECKING: + import pathlib + NAMES: tuple[str, ...] = ("ipath", "ctx", "exp", "opts") DATA: list = common.load_data_for_testfile( - __file__, (("c", {}), ("e", None), ("o", {})) + __file__, (("c", {}), ("e", None), ("o", {})), ) DATA_IDS: list[str] = common.get_test_ids(DATA) -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_load(ipath, ctx, exp, opts): +def test_load(ipath, ctx, exp, opts) -> None: assert TT.load(ipath, ac_context=ctx, **opts) == exp -def test_load_from_invalid_template(tmp_path): +def test_load_from_invalid_template(tmp_path: pathlib.Path) -> None: ipath = tmp_path / "test.json" - ipath.write_text('{"a": "{{ a"}') # broken template string. + ipath.write_text("""{"a": "{{ a"}""") # broken template string. with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always') + warnings.simplefilter("always") res = TT.load(ipath, ac_template=True, ac_context={"a": 1}) - assert res == {"a": '{{ a'} + assert res == {"a": "{{ a"} assert len(warns) > 0 assert issubclass(warns[-1].category, UserWarning) - assert 'ailed to compile ' in str(warns[-1].message) + assert "ailed to compile " in str(warns[-1].message) diff --git a/tests/api/load/single_load/test_toml.py b/tests/api/load/single_load/test_toml.py index e8339e22..ca960a42 100644 --- a/tests/api/load/single_load/test_toml.py +++ b/tests/api/load/single_load/test_toml.py @@ -18,14 +18,14 @@ DATA_IDS: list[str] = common.get_test_ids(DATA) -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA @pytest.mark.skipif( "toml" not in common.LOADER_TYPES, - reason="toml lib is not availabla." 
+ reason="toml lib is not availabla.", ) @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_load_for_toml_files(ipath, exp): +def test_load_for_toml_files(ipath, exp) -> None: assert TT.load(ipath) == exp diff --git a/tests/api/load/single_load/test_yaml.py b/tests/api/load/single_load/test_yaml.py index 973fcd2b..1ff1f664 100644 --- a/tests/api/load/single_load/test_yaml.py +++ b/tests/api/load/single_load/test_yaml.py @@ -18,14 +18,14 @@ DATA_IDS: list[str] = common.get_test_ids(DATA) -def test_data() -> None: +def test_data_is_non_empty() -> None: assert DATA @pytest.mark.skipif( "yaml" not in common.LOADER_TYPES, - reason="yaml loader is not availabla." + reason="yaml loader is not availabla.", ) @pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) -def test_load_for_yaml_files(ipath, exp): +def test_load_for_yaml_files(ipath, exp) -> None: assert TT.load(ipath) == exp From b020278d0ec2877a9bf5216c8f3212f72f85d57e Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Wed, 11 Feb 2026 23:38:14 +0900 Subject: [PATCH 09/27] change: enable all ruff rules with minimal exceptions --- pyproject.toml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index a1f8af13..6d4f7439 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -136,11 +136,18 @@ src = [ select = [ "ALL", ] +# This setting is intentional, although it breaks the rules. ignore = [ + "ANN401", # https://docs.astral.sh/ruff/rules/any-type/ "D203", # Because it looks conflict with D211. # https://docs.astral.sh/ruff/rules/incorrect-blank-line-before-class/ "D213", # Because it looks conflict with D212. 
# https://docs.astral.sh/ruff/rules/multi-line-summary-second-line/ + "FIX002", # https://docs.astral.sh/ruff/rules/line-contains-todo/ + "I001", # https://docs.astral.sh/ruff/rules/unsorted-imports/ + "ICN001", # https://docs.astral.sh/ruff/rules/unconventional-import-alias/ + "RUF022", # https://docs.astral.sh/ruff/rules/unsorted-dunder-all/ + "TID252", # https://docs.astral.sh/ruff/rules/relative-imports/ ] [tool.ruff.lint.extend-per-file-ignores] @@ -153,6 +160,26 @@ ignore = [ # https://docs.astral.sh/ruff/rules/suspicious-xml-element-tree-usage/ "src/anyconfig/backend/xml/etree.py" = ["S314"] +"docs/*.py" = [ + "INP001", +] + +# TODO: +"src/anyconfig/backend/yaml/pyyaml.py" = [ + "TD003", # https://docs.astral.sh/ruff/rules/missing-todo-link/ + "FIX002", # https://docs.astral.sh/ruff/rules/line-contains-todo/ +] + +"src/anyconfig/parser.py" = [ + # https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional/ + "UP045", +] + +"src/anyconfig/schema/datatypes.py" = [ + "UP045", +] + +# Ignored: "tests/*.py" = [ # https://docs.astral.sh/ruff/rules/missing-type-function-argument/ "ANN001", From 96822bf93f527e0948220460b1060a3c5c11929a Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Wed, 11 Feb 2026 23:41:38 +0900 Subject: [PATCH 10/27] fix: fix several ruff errors for setup.py fix or add workarounds for several ruff errors like the following in setup.py. - ANN001: missing-type-function-argument - ANN202: missing-return-type-private-function - C408: unnecessary-collection-call - DTZ005: call-datetime-now-without-tzinfo - I001: unsorted-imports - N801: invalid-class-name - Q000: bad-quotes-inline-string --- setup.py | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/setup.py b/setup.py index dee047f1..054194fe 100644 --- a/setup.py +++ b/setup.py @@ -7,10 +7,10 @@ # It might throw IndexError and so on. 
-VERSION = '0.1.0' +VERSION = "0.1.0" VER_REG = re.compile(r"^__version__ = '([^']+)'") -for fpath in pathlib.Path('src').glob('**/__init__.py'): +for fpath in pathlib.Path("src").glob("**/__init__.py"): for line in fpath.open(): match = VER_REG.match(line) if match: @@ -21,11 +21,13 @@ RELEASE = "1%{?dist}" if os.environ.get("_SNAPSHOT_BUILD", None) is not None: import datetime - RELEASE = RELEASE.replace('1', - datetime.datetime.now().strftime("%Y%m%d")) + RELEASE = RELEASE.replace( + "1", + datetime.datetime.now(tz=datetime.timezone.utc).strftime("%Y%m%d"), + ) -def _replace(line): +def _replace(line: str) -> str: """Replace some strings in the RPM SPEC template.""" if "@VERSION@" in line: return line.replace("@VERSION@", VERSION) @@ -39,17 +41,18 @@ def _replace(line): return line -class bdist_rpm(setuptools.command.bdist_rpm.bdist_rpm): +class bdist_rpm(setuptools.command.bdist_rpm.bdist_rpm): # noqa: N801 """Override the default content of the RPM SPEC.""" - spec_tmpl = pathlib.Path('pkg/package.spec.in').resolve() + spec_tmpl = pathlib.Path("pkg/package.spec.in").resolve() - def _make_spec_file(self): + def _make_spec_file(self) -> list[str]: """Generate the RPM SPEC file.""" return [_replace(line.rstrip()) for line in self.spec_tmpl.open()] -setuptools.setup(version=VERSION, cmdclass=dict(bdist_rpm=bdist_rpm), - data_files=[("share/man/man1", ["docs/anyconfig_cli.1"])]) - -# vim:sw=4:ts=4:et: +setuptools.setup( + version=VERSION, + cmdclass={"bdist_rpm": bdist_rpm}, + data_files=[("share/man/man1", ["docs/anyconfig_cli.1"])], +) From 7829fc1512acc23bbcdde7abbda46f3e930f4112 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Wed, 11 Feb 2026 23:53:12 +0900 Subject: [PATCH 11/27] fix: fix several ruff errors in docs/conf.py fix or add workarouds for several ruff errors like the followings in docs/conf.py. 
- A001: builtin-variable-shadowing - I001: unsorted-imports - Q000: bad-quotes-inline-string - UP025: unicode-kind-prefix --- docs/conf.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f5c10c85..a026cbaf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,26 +1,24 @@ -# -*- coding: utf-8 -*- # # pylint:disable=invalid-name """conf.py for sphinx.""" -import sys import pathlib +import sys -sys.path.insert(0, str(pathlib.Path(__file__).parent.resolve() / 'src')) +sys.path.insert(0, str(pathlib.Path(__file__).parent.resolve() / "src")) extensions = [ - 'sphinx.ext.autodoc', - 'sphinx_autodoc_typehints' + "sphinx.ext.autodoc", + "sphinx_autodoc_typehints", ] -source_suffix = '.rst' -master_doc = 'index' +source_suffix = ".rst" +master_doc = "index" -project = u'python-anyconfig' -copyright = u'2021, Satoru SATOH ' -version = '0.10.0' +project = "python-anyconfig" +version = "3.13.11" release = version exclude_patterns = [] -html_theme = 'default' +html_theme = "default" -autodoc_member_order = 'bysource' +autodoc_member_order = "bysource" From 4a09cf80a675ee2972b76d770a9a55efe1c0f08a Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 00:05:22 +0900 Subject: [PATCH 12/27] change: update the copyright header --- src/anyconfig/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/anyconfig/__init__.py b/src/anyconfig/__init__.py index d035ae25..0a69aa79 100644 --- a/src/anyconfig/__init__.py +++ b/src/anyconfig/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2025 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # For 'anyconfig.open': From 387bdc482fad7be4e5d37f2dc6de971b30fea914 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 00:12:02 +0900 Subject: [PATCH 13/27] fix: fix several ruff errors in anyconfig.api.* fix or add workarouds for several ruff errors like the followings in 
anyconfig.api.*. ANN003: missing-type-kwargs I001: unsorted-imports RUF022: unsorted-dunder-all TID252: relative-imports UP007: non-pep604-annotation-union UP045: non-pep604-annotation-optional --- src/anyconfig/api/__init__.py | 2 +- src/anyconfig/api/_dump.py | 14 +++++---- src/anyconfig/api/_load.py | 53 ++++++++++++++++++++-------------- src/anyconfig/api/_open.py | 7 +++-- src/anyconfig/api/datatypes.py | 1 - 5 files changed, 44 insertions(+), 33 deletions(-) diff --git a/src/anyconfig/api/__init__.py b/src/anyconfig/api/__init__.py index bba92884..e0e2d8c0 100644 --- a/src/anyconfig/api/__init__.py +++ b/src/anyconfig/api/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2025 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-import,import-error,invalid-name diff --git a/src/anyconfig/api/_dump.py b/src/anyconfig/api/_dump.py index a5719cb6..76753fd8 100644 --- a/src/anyconfig/api/_dump.py +++ b/src/anyconfig/api/_dump.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Provides the API to dump (serialize) objects.""" @@ -8,13 +8,15 @@ import typing from .. import common, ioinfo, parsers + if typing.TYPE_CHECKING: - from . import datatypes + from .datatypes import ParserT def dump( data: common.InDataExT, out: ioinfo.PathOrIOInfoT, - ac_parser: parsers.MaybeParserT = None, **options, + ac_parser: parsers.MaybeParserT = None, + **options: typing.Any, ) -> None: """Save ``data`` to ``out`` in specified or detected format. 
@@ -31,13 +33,13 @@ def dump( :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ ioi = ioinfo.make(out) - psr: datatypes.ParserT = parsers.find(ioi, forced_type=ac_parser) + psr: ParserT = parsers.find(ioi, forced_type=ac_parser) psr.dump(data, ioi, **options) def dumps( data: common.InDataExT, ac_parser: parsers.MaybeParserT = None, - **options, + **options: typing.Any, ) -> str: """Return a str representation of ``data`` in specified format. @@ -48,5 +50,5 @@ def dumps( :return: Backend-specific string representation for the given data :raises: ValueError, UnknownProcessorTypeError """ - psr: datatypes.ParserT = parsers.find(None, forced_type=ac_parser) + psr: ParserT = parsers.find(None, forced_type=ac_parser) return psr.dumps(data, **options) diff --git a/src/anyconfig/api/_load.py b/src/anyconfig/api/_load.py index 2d260f8d..d05046ba 100644 --- a/src/anyconfig/api/_load.py +++ b/src/anyconfig/api/_load.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-import,import-error,invalid-name @@ -36,7 +36,7 @@ MaybeParserOrIdOrTypeT = typing.Optional[typing.Union[str, ParserT]] -def try_to_load_schema(**options) -> typing.Optional[InDataT]: +def try_to_load_schema(**options: typing.Any) -> InDataT | None: """Try to load a schema object for validation. :param options: Optional keyword arguments such as @@ -65,9 +65,10 @@ def try_to_load_schema(**options) -> typing.Optional[InDataT]: def _single_load( ioi: ioinfo.IOInfo, *, - ac_parser: MaybeParserOrIdOrTypeT = None, ac_template: bool = False, - ac_context: typing.Optional[MappingT] = None, - **options, + ac_parser: MaybeParserOrIdOrTypeT = None, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, ) -> InDataExT: """Load data from a given ``ioi``. 
@@ -99,10 +100,12 @@ def _single_load( def single_load( - input_: ioinfo.PathOrIOInfoT, ac_parser: MaybeParserOrIdOrTypeT = None, + input_: ioinfo.PathOrIOInfoT, + ac_parser: MaybeParserOrIdOrTypeT = None, *, - ac_template: bool = False, ac_context: typing.Optional[MappingT] = None, - **options, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, ) -> InDataExT: r"""Load from single input ``input\_``. @@ -170,13 +173,15 @@ def single_load( def multi_load( - inputs: typing.Union[ + inputs: typing.Union[ # noqa: UP007 collections.abc.Iterable[ioinfo.PathOrIOInfoT], ioinfo.PathOrIOInfoT, - ], ac_parser: MaybeParserOrIdOrTypeT = None, + ], + ac_parser: MaybeParserOrIdOrTypeT = None, *, - ac_template: bool = False, ac_context: typing.Optional[MappingT] = None, - **options, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, ) -> InDataExT: r"""Load data from multiple inputs ``inputs``. @@ -278,14 +283,16 @@ def multi_load( def load( - path_specs: typing.Union[ + path_specs: typing.Union[ # noqa: UP007 collections.abc.Iterable[ioinfo.PathOrIOInfoT], ioinfo.PathOrIOInfoT, ], - ac_parser: typing.Optional[str] = None, *, - ac_dict: typing.Optional[collections.abc.Callable] = None, - ac_template: bool = False, ac_context: typing.Optional[MappingT] = None, - **options, + ac_parser: str | None = None, + *, + ac_dict: collections.abc.Callable | None = None, + ac_template: bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, ) -> InDataExT: r"""Load from a file or files specified as ``path_specs``. 
@@ -336,11 +343,13 @@ def load( def loads( - content: str, ac_parser: MaybeParserOrIdOrTypeT = None, *, - ac_dict: typing.Optional[collections.abc.Callable] = None, - ac_template: typing.Union[str, bool] = False, - ac_context: typing.Optional[MappingT] = None, - **options, + content: str, + ac_parser: MaybeParserOrIdOrTypeT = None, + *, + ac_dict: collections.abc.Callable | None = None, + ac_template: str | bool = False, + ac_context: MappingT | None = None, + **options: typing.Any, ) -> InDataExT: """Load data from a str, ``content``. diff --git a/src/anyconfig/api/_open.py b/src/anyconfig/api/_open.py index 75e237bb..d439df21 100644 --- a/src/anyconfig/api/_open.py +++ b/src/anyconfig/api/_open.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """A API to open files by detecting those type automatically.""" @@ -9,6 +9,7 @@ import warnings from .. import ioinfo, parsers + if typing.TYPE_CHECKING: from .datatypes import ParserT @@ -16,9 +17,9 @@ # pylint: disable=redefined-builtin def open( path: ioinfo.PathOrIOInfoT, - mode: typing.Optional[str] = None, + mode: str | None = None, ac_parser: parsers.MaybeParserT = None, - **options, + **options: dict[str, typing.Any], ) -> typing.IO: """Open given file ``path`` with appropriate open flag. diff --git a/src/anyconfig/api/datatypes.py b/src/anyconfig/api/datatypes.py index 8f9469f9..f82fe643 100644 --- a/src/anyconfig/api/datatypes.py +++ b/src/anyconfig/api/datatypes.py @@ -9,6 +9,5 @@ from ..backend import base from ..common import InDataT - MaybeDataT = typing.Optional[InDataT] ParserT = base.Parser From 5c7b54d629275899009958adaff89c540bb4f8b9 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 00:36:08 +0900 Subject: [PATCH 14/27] fix: fix several ruff errors in anyconfig.backend.* fix or add workarounds for several ruff errors like the following in anyconfig.backend.*.
ANN002: missing-type-args ANN003: missing-type-kwargs ANN401: any-type COM812: missing-trailing-comma FIX002: line-contains-todo FURB122: for-loop-writes I001: unsorted-imports RUF022: unsorted-dunder-all TID252: relative-imports UP007: non-pep604-annotation-union UP045: non-pep604-annotation-optional --- src/anyconfig/backend/base/__init__.py | 6 +- src/anyconfig/backend/base/compat.py | 10 ++- src/anyconfig/backend/base/dumpers.py | 37 +++++++---- src/anyconfig/backend/base/loaders.py | 42 ++++++++----- src/anyconfig/backend/base/parsers.py | 35 ++++++----- src/anyconfig/backend/base/utils.py | 12 ++-- src/anyconfig/backend/ini/configparser.py | 13 ++-- src/anyconfig/backend/properties/builtin.py | 26 +++++--- src/anyconfig/backend/python/dumper.py | 8 ++- src/anyconfig/backend/python/loader.py | 14 +++-- src/anyconfig/backend/python/utils.py | 6 +- src/anyconfig/backend/sh/variables.py | 20 +++--- src/anyconfig/backend/xml/etree.py | 69 ++++++++++++--------- src/anyconfig/backend/yaml/pyyaml.py | 19 +++--- src/anyconfig/backend/yaml/ruamel.py | 12 ++-- 15 files changed, 207 insertions(+), 122 deletions(-) diff --git a/src/anyconfig/backend/base/__init__.py b/src/anyconfig/backend/base/__init__.py index cd306d8d..d973729c 100644 --- a/src/anyconfig/backend/base/__init__.py +++ b/src/anyconfig/backend/base/__init__.py @@ -9,7 +9,8 @@ from .compat import BinaryFilesMixin from .datatypes import ( - GenContainerT, OptionsT, InDataT, InDataExT, OutDataExT, IoiT, + GenContainerT, OptionsT, + InDataT, InDataExT, OutDataExT, IoiT, PathOrStrT, ) from .dumpers import ( @@ -35,7 +36,8 @@ __all__ = [ "BinaryFilesMixin", - "GenContainerT", "OptionsT", "InDataT", "InDataExT", "OutDataExT", "IoiT", + "GenContainerT", "OptionsT", + "InDataT", "InDataExT", "OutDataExT", "IoiT", "PathOrStrT", "ToStringDumperMixin", "ToStreamDumperMixin", "BinaryDumperMixin", "LoaderMixin", diff --git a/src/anyconfig/backend/base/compat.py b/src/anyconfig/backend/base/compat.py index 
d3447564..a460a6fa 100644 --- a/src/anyconfig/backend/base/compat.py +++ b/src/anyconfig/backend/base/compat.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2025 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with @@ -19,7 +19,9 @@ class BinaryFilesMixin: _open_flags: tuple[str, str] = ("rb", "wb") @classmethod - def ropen(cls, filepath: PathOrStrT, **options) -> typing.IO: + def ropen( + cls, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open ``filepath`` with read only mode. :param filepath: Path to file to open to read data @@ -29,7 +31,9 @@ def ropen(cls, filepath: PathOrStrT, **options) -> typing.IO: ) @classmethod - def wopen(cls, filepath: PathOrStrT, **options) -> typing.IO: + def wopen( + cls, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open ``filepath`` with write mode. :param filepath: Path to file to open to write data to diff --git a/src/anyconfig/backend/base/dumpers.py b/src/anyconfig/backend/base/dumpers.py index 88aa3e05..afdc5136 100644 --- a/src/anyconfig/backend/base/dumpers.py +++ b/src/anyconfig/backend/base/dumpers.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2025 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with, unspecified-encoding @@ -42,7 +42,9 @@ class DumperMixin: _dump_opts: tuple[str, ...] 
= () _open_write_mode: typing.ClassVar[str] = "w" - def wopen(self, filepath: PathOrStrT, **options) -> typing.IO: + def wopen( + self, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open file ``filepath`` with the write mode ``_open_write_mode``.""" if "encoding" not in options and self._open_write_mode == "w": options["encoding"] = _ENCODING @@ -51,7 +53,9 @@ def wopen(self, filepath: PathOrStrT, **options) -> typing.IO: self._open_write_mode, **options, ) - def dump_to_string(self, cnf: InDataExT, **options) -> str: + def dump_to_string( + self, cnf: InDataExT, **options: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump @@ -63,7 +67,8 @@ def dump_to_string(self, cnf: InDataExT, **options) -> str: return "" def dump_to_path( - self, cnf: InDataExT, filepath: PathOrStrT, **options, + self, cnf: InDataExT, filepath: PathOrStrT, + **options: typing.Any, ) -> None: """Dump config 'cnf' to a file 'filepath'. @@ -74,7 +79,8 @@ def dump_to_path( not_implemented(self, cnf, filepath, **options) def dump_to_stream( - self, cnf: InDataExT, stream: typing.IO, **options, + self, cnf: InDataExT, stream: typing.IO, + **options: typing.Any, ) -> None: """Dump config 'cnf' to a file-like object 'stream'. @@ -86,7 +92,9 @@ def dump_to_stream( """ not_implemented(self, cnf, stream, **options) - def dumps(self, cnf: InDataExT, **options) -> str: + def dumps( + self, cnf: InDataExT, **options: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump @@ -97,7 +105,9 @@ def dumps(self, cnf: InDataExT, **options) -> str: options = utils.filter_options(self._dump_opts, options) return self.dump_to_string(cnf, **options) - def dump(self, cnf: InDataExT, ioi: IoiT, **options) -> None: + def dump( + self, cnf: InDataExT, ioi: IoiT, **options: typing.Any, + ) -> None: """Dump config 'cnf' to output object of which 'ioi' referring. 
:param cnf: Configuration data to dump @@ -137,7 +147,8 @@ class ToStringDumperMixin(DumperMixin): """ def dump_to_path( - self, cnf: InDataExT, filepath: PathOrStrT, **options, + self, cnf: InDataExT, filepath: PathOrStrT, + **options: typing.Any, ) -> None: """Dump config 'cnf' to a file 'filepath'. @@ -149,7 +160,8 @@ def dump_to_path( out.write(self.dump_to_string(cnf, **options)) def dump_to_stream( - self, cnf: InDataExT, stream: typing.IO, **options, + self, cnf: InDataExT, stream: typing.IO, + **options: typing.Any, ) -> None: """Dump config 'cnf' to a file-like object 'stream'. @@ -172,7 +184,9 @@ class ToStreamDumperMixin(DumperMixin): :meth:`dump_to_stream` at least. """ - def dump_to_string(self, cnf: InDataExT, **options) -> str: + def dump_to_string( + self, cnf: InDataExT, **options: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump @@ -185,7 +199,8 @@ def dump_to_string(self, cnf: InDataExT, **options) -> str: return stream.getvalue() def dump_to_path( - self, cnf: InDataExT, filepath: PathOrStrT, **options, + self, cnf: InDataExT, filepath: PathOrStrT, + **options: typing.Any, ) -> None: """Dump config 'cnf' to a file 'filepath`. 
diff --git a/src/anyconfig/backend/base/loaders.py b/src/anyconfig/backend/base/loaders.py index f47693c2..a23faafe 100644 --- a/src/anyconfig/backend/base/loaders.py +++ b/src/anyconfig/backend/base/loaders.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=consider-using-with, unspecified-encoding @@ -70,7 +70,9 @@ def dict_options(cls) -> tuple[str, ...]: """Get the list of dict factory options.""" return cls._dict_opts - def ropen(self, filepath: PathOrStrT, **options) -> typing.IO: + def ropen( + self, filepath: PathOrStrT, **options: typing.Any, + ) -> typing.IO: """Open files with read only mode.""" if "encoding" not in options and self._open_read_mode == "r": options["encoding"] = _ENCODING @@ -79,7 +81,9 @@ def ropen(self, filepath: PathOrStrT, **options) -> typing.IO: self._open_read_mode, **options, ) - def _container_factory(self, **options) -> GenContainerT: + def _container_factory( + self, **options: typing.Any, + ) -> GenContainerT: """Get the factory to make container objects. The order of prirorities are ac_dict, backend specific dict class @@ -102,7 +106,7 @@ def _container_factory(self, **options) -> GenContainerT: return dict def _load_options( - self, container: GenContainerT, **options, + self, container: GenContainerT, **options: typing.Any, ) -> OptionsT: """Select backend specific loading options.""" # Force set dict option if available in backend. For example, @@ -114,7 +118,8 @@ def _load_options( return utils.filter_options(self._load_opts, options) def load_from_string( - self, content: str, container: GenContainerT, **options, + self, content: str, container: GenContainerT, + **options: typing.Any, ) -> InDataExT: """Load config from given string 'content'. 
@@ -128,7 +133,8 @@ def load_from_string( return DATA_DEFAULT def load_from_path( - self, filepath: PathOrStrT, container: GenContainerT, **options, + self, filepath: PathOrStrT, container: GenContainerT, + **options: typing.Any, ) -> InDataExT: """Load config from given file path 'filepath`. @@ -142,7 +148,8 @@ def load_from_path( return DATA_DEFAULT def load_from_stream( - self, stream: typing.IO, container: GenContainerT, **options, + self, stream: typing.IO, container: GenContainerT, + **options: typing.Any, ) -> InDataExT: """Load config from given file like object 'stream`. @@ -155,7 +162,9 @@ def load_from_stream( not_implemented(self, stream, container, **options) return DATA_DEFAULT - def loads(self, content: str, **options) -> InDataExT: + def loads( + self, content: str, **options: typing.Any, + ) -> InDataExT: """Load config from given string 'content' after some checks. :param content: Config file content @@ -175,7 +184,8 @@ def loads(self, content: str, **options) -> InDataExT: return self.load_from_string(content, container, **options) def load( - self, ioi: IoiT, *, ac_ignore_missing: bool = False, **options, + self, ioi: IoiT, *, ac_ignore_missing: bool = False, + **options: typing.Any, ) -> InDataExT: """Load config from ``ioi``. @@ -202,7 +212,7 @@ def load( if ioinfo.is_stream(ioi): cnf = self.load_from_stream( - typing.cast("typing.IO", ioi.src), container, **options + typing.cast("typing.IO", ioi.src), container, **options, ) else: if ac_ignore_missing and not pathlib.Path(ioi.path).exists(): @@ -230,7 +240,8 @@ class FromStringLoaderMixin(LoaderMixin): """ def load_from_stream( - self, stream: typing.IO, container: GenContainerT, **options, + self, stream: typing.IO, container: GenContainerT, + **options: typing.Any, ) -> InDataExT: """Load config from given stream 'stream'. 
@@ -243,7 +254,8 @@ def load_from_stream( return self.load_from_string(stream.read(), container, **options) def load_from_path( - self, filepath: PathOrStrT, container: GenContainerT, **options, + self, filepath: PathOrStrT, container: GenContainerT, + **options: typing.Any, ) -> InDataExT: """Load config from given file path 'filepath'. @@ -268,7 +280,8 @@ class FromStreamLoaderMixin(LoaderMixin): """ def load_from_string( - self, content: str, container: GenContainerT, **options, + self, content: str, container: GenContainerT, + **options: typing.Any, ) -> InDataExT: """Load config from given string 'cnf_content'. @@ -282,7 +295,8 @@ def load_from_string( return self.load_from_stream(iof(content), container, **options) def load_from_path( - self, filepath: PathOrStrT, container: GenContainerT, **options, + self, filepath: PathOrStrT, container: GenContainerT, + **options: typing.Any, ) -> InDataExT: """Load config from given file path 'filepath'. diff --git a/src/anyconfig/backend/base/parsers.py b/src/anyconfig/backend/base/parsers.py index 71c34183..9e5aa918 100644 --- a/src/anyconfig/backend/base/parsers.py +++ b/src/anyconfig/backend/base/parsers.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""Abstract implementation of backend modules. @@ -74,11 +74,11 @@ class StreamParser(Parser, FromStreamLoaderMixin, ToStreamDumperMixin): def load_with_fn( - load_fn: typing.Optional[LoadFnT], - content_or_strm: typing.Union[typing.AnyStr, typing.IO], + load_fn: LoadFnT | None, + content_or_strm: str | bytes | typing.IO, container: GenContainerT, *, allow_primitives: bool = False, - **options, + **options: dict[str, typing.Any], ) -> InDataExT: """Load data from given string or stream 'content_or_strm'. 
@@ -104,9 +104,9 @@ def load_with_fn( def dump_with_fn( - dump_fn: typing.Optional[DumpFnT], - data: InDataExT, stream: typing.Optional[typing.IO], - **options, + dump_fn: DumpFnT | None, + data: InDataExT, stream: typing.IO | None, + **options: dict[str, typing.Any], ) -> str: """Dump 'data' to a string. @@ -149,13 +149,14 @@ class StringStreamFnParser(Parser, FromStreamLoaderMixin, ToStreamDumperMixin): :seealso: :class:`anyconfig.backend.json.Parser` """ - _load_from_string_fn: typing.Optional[LoadFnT] = None - _load_from_stream_fn: typing.Optional[LoadFnT] = None - _dump_to_string_fn: typing.Optional[DumpFnT] = None - _dump_to_stream_fn: typing.Optional[DumpFnT] = None + _load_from_string_fn: LoadFnT | None = None + _load_from_stream_fn: LoadFnT | None = None + _dump_to_string_fn: DumpFnT | None = None + _dump_to_stream_fn: DumpFnT | None = None def load_from_string( - self, content: typing.AnyStr, container: GenContainerT, **options, + self, content: str | bytes, container: GenContainerT, + **options: dict[str, typing.Any], ) -> InDataExT: """Load configuration data from given string 'content'. @@ -172,7 +173,8 @@ def load_from_string( ) def load_from_stream( - self, stream: typing.IO, container: GenContainerT, **options, + self, stream: typing.IO, container: GenContainerT, + **options: dict[str, typing.Any], ) -> InDataExT: """Load data from given stream 'stream'. @@ -188,7 +190,9 @@ def load_from_stream( **options, ) - def dump_to_string(self, cnf: InDataExT, **options) -> str: + def dump_to_string( + self, cnf: InDataExT, **options: dict[str, typing.Any], + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump @@ -200,7 +204,8 @@ def dump_to_string(self, cnf: InDataExT, **options) -> str: **options) def dump_to_stream( - self, cnf: InDataExT, stream: typing.IO, **options, + self, cnf: InDataExT, stream: typing.IO, + **options: dict[str, typing.Any], ) -> None: """Dump config 'cnf' to a file-like object 'stream'. 
diff --git a/src/anyconfig/backend/base/utils.py b/src/anyconfig/backend/base/utils.py index 33c3b8d6..1fb6e1a5 100644 --- a/src/anyconfig/backend/base/utils.py +++ b/src/anyconfig/backend/base/utils.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Provides utility functions in anyconfig.backend.base.""" @@ -13,12 +13,16 @@ import collections.abc -def not_implemented(*_args, **_options) -> None: +def not_implemented( + *_args: typing.Any, **_options: typing.Any, +) -> None: """Raise NotImplementedError.""" raise NotImplementedError -def ensure_outdir_exists(filepath: typing.Union[str, pathlib.Path]) -> None: +def ensure_outdir_exists( + filepath: str | pathlib.Path, +) -> None: """Make dir to dump 'filepath' if that dir does not exist. :param filepath: path of file to dump @@ -37,7 +41,7 @@ def to_method( """ @functools.wraps(func) def wrapper( - *args, **kwargs, + *args: typing.Any, **kwargs: typing.Any, ) -> collections.abc.Callable[..., typing.Any]: """Original function decorated.""" return func(*args[1:], **kwargs) diff --git a/src/anyconfig/backend/ini/configparser.py b/src/anyconfig/backend/ini/configparser.py index ffa5af3b..c9224a70 100644 --- a/src/anyconfig/backend/ini/configparser.py +++ b/src/anyconfig/backend/ini/configparser.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=deprecated-method @@ -97,7 +97,7 @@ def _to_s(val: typing.Any, sep: str = ", ") -> str: def parsed_items( items: collections.abc.Iterable[tuple[str, typing.Any]], - sep: str = _SEP, **options, + sep: str = _SEP, **options: typing.Any, ) -> collections.abc.Iterator[tuple[str, typing.Any]]: """Parse an iterable of items. 
@@ -111,7 +111,7 @@ def parsed_items( def _make_parser( - **kwargs, + **kwargs: typing.Any, ) -> tuple[dict[str, typing.Any], configparser.ConfigParser]: """Make an instance of configparser.ConfigParser.""" # Optional arguments for configparser.ConfigParser{,readfp} @@ -134,7 +134,8 @@ def _make_parser( def _load( stream: typing.IO, container: base.GenContainerT, - sep: str = _SEP, dkey: str = DEFAULTSECT, **kwargs, + sep: str = _SEP, dkey: str = DEFAULTSECT, + **kwargs: typing.Any, ) -> base.InDataT: """Load data from ``stream`` of which file should be in INI format. @@ -180,7 +181,9 @@ def _dumps_itr( yield "" # it will be a separator between each sections. -def _dumps(cnf: dict[str, typing.Any], **_kwargs) -> str: +def _dumps( + cnf: dict[str, typing.Any], **_kwargs: typing.Any, +) -> str: """Dump data as a str. :param cnf: Configuration data to dump diff --git a/src/anyconfig/backend/properties/builtin.py b/src/anyconfig/backend/properties/builtin.py index 0ff7a8db..0a718065 100644 --- a/src/anyconfig/backend/properties/builtin.py +++ b/src/anyconfig/backend/properties/builtin.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2025 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load and dump (Java) properties files. @@ -30,6 +30,7 @@ """ from __future__ import annotations +import os import re import typing import warnings @@ -42,7 +43,7 @@ _MIN_LEN_PAIR: int = 2 -def parseline(line: str) -> tuple[typing.Optional[str], str]: +def parseline(line: str) -> tuple[str | None, str]: """Parse a line of Java properties file. :param line: @@ -65,7 +66,7 @@ def parseline(line: str) -> tuple[typing.Optional[str], str]: def _pre_process_line( line: str, cmarkers: tuple[str, ...] = _COMMENT_MARKERS, -) -> typing.Optional[str]: +) -> str | None: """Preprocess a line in properties; strip comments, etc. 
:param line: @@ -98,7 +99,8 @@ def escape(in_s: str) -> str: def load( - stream: typing.IO, container: base.GenContainerT = dict, **kwargs, + stream: typing.IO, container: base.GenContainerT = dict, + **kwargs: typing.Any, ) -> base.InDataT: """Load data from a java properties files given as ``stream``. @@ -114,7 +116,7 @@ def load( for line_ in stream: line = _pre_process_line( - prev + line_.strip().rstrip(), comment_markers + prev + line_.strip().rstrip(), comment_markers, ) # I don't think later case may happen but just in case. if line is None or not line: @@ -130,7 +132,7 @@ def load( if key is None: warnings.warn( f"Failed to parse the line: {line}", - category=SyntaxWarning, stacklevel=2 + category=SyntaxWarning, stacklevel=2, ) continue @@ -149,7 +151,8 @@ class Parser(base.StreamParser): _dict_opts: tuple[str, ...] = ("ac_dict", ) def load_from_stream( - self, stream: typing.IO, container: base.GenContainerT, **kwargs, + self, stream: typing.IO, container: base.GenContainerT, + **kwargs: typing.Any, ) -> base.InDataT: """Load config from given file like object 'stream'. @@ -162,7 +165,8 @@ def load_from_stream( return load(stream, container=container, **kwargs) def dump_to_stream( - self, cnf: base.InDataExT, stream: typing.IO, **_kwargs, + self, cnf: base.InDataExT, stream: typing.IO, + **_kwargs: typing.Any, ) -> None: """Dump config 'cnf' to a file or file-like object 'stream'. 
@@ -171,5 +175,7 @@ def dump_to_stream( :param kwargs: backend-specific optional keyword parameters :: dict """ if utils.is_dict_like(cnf): - for key, val in cnf.items(): - stream.write(f"{key} = {escape(val)}\n") + stream.writelines( + f"{key} = {escape(val)}{os.linesep}" + for key, val in cnf.items() + ) diff --git a/src/anyconfig/backend/python/dumper.py b/src/anyconfig/backend/python/dumper.py index 8a504cb4..1372c072 100644 --- a/src/anyconfig/backend/python/dumper.py +++ b/src/anyconfig/backend/python/dumper.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2024, 2025 Satoru SATOH +# Copyright (C) 2024 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to dump python code conntains data. @@ -22,6 +22,8 @@ """ from __future__ import annotations +import typing + from ..base import ( InDataExT, ToStringDumperMixin, ) @@ -30,7 +32,9 @@ class Dumper(ToStringDumperMixin): """Dumper for objects as python code.""" - def dump_to_string(self, cnf: InDataExT, **_kwargs) -> str: + def dump_to_string( + self, cnf: InDataExT, **_kwargs: typing.Any, + ) -> str: """Dump config 'cnf' to a string. :param cnf: Configuration data to dump diff --git a/src/anyconfig/backend/python/loader.py b/src/anyconfig/backend/python/loader.py index fcfbf1d6..2c757e2b 100644 --- a/src/anyconfig/backend/python/loader.py +++ b/src/anyconfig/backend/python/loader.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023 - 2025 Satoru SATOH +# Copyright (C) 2023 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""A backend module to load python code conntains data. @@ -42,7 +42,9 @@ from . import utils -def load_from_temp_file(content: str, **opts) -> InDataExT: +def load_from_temp_file( + content: str, **opts: typing.Any, +) -> InDataExT: """Dump `content` to tempoary file and load from it. :param content: A str to load data from @@ -62,7 +64,9 @@ class Loader(LoaderMixin): _allow_primitives: typing.ClassVar[bool] = True _load_opts: tuple[str, ...] 
= ("allow_exec", ) - def loads(self, content: str, **options) -> InDataExT: + def loads( + self, content: str, **options: typing.Any, + ) -> InDataExT: """Load config from given string 'content' after some checks. :param content: Config file content @@ -80,7 +84,9 @@ def loads(self, content: str, **options) -> InDataExT: return utils.load_literal_data_from_string(content) - def load(self, ioi: IoiT, **options) -> InDataExT: + def load( + self, ioi: IoiT, **options: typing.Any, + ) -> InDataExT: """Load config from ``ioi``. :param ioi: diff --git a/src/anyconfig/backend/python/utils.py b/src/anyconfig/backend/python/utils.py index a8f807c7..e1a6669a 100644 --- a/src/anyconfig/backend/python/utils.py +++ b/src/anyconfig/backend/python/utils.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2023 - 2025 Satoru SATOH +# Copyright (C) 2023 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=missing-docstring @@ -46,7 +46,7 @@ def load_literal_data_from_path(path: pathlib.Path) -> typing.Any: def load_data_from_py( path: pathlib.Path, *, - data_name: typing.Optional[str] = None, + data_name: str | None = None, fallback: bool = False, ) -> typing.Any: """Load test data from .py files by evaluating it. @@ -78,7 +78,7 @@ def load_data_from_py( def load_from_path( path: pathlib.Path, *, allow_exec: bool = False, - data_name: typing.Optional[str] = None, + data_name: str | None = None, fallback: bool = False, ) -> typing.Any: """Load data from given path `path`. diff --git a/src/anyconfig/backend/sh/variables.py b/src/anyconfig/backend/sh/variables.py index c7fd4493..7bb98ff0 100644 --- a/src/anyconfig/backend/sh/variables.py +++ b/src/anyconfig/backend/sh/variables.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2016 - 2025 Satoru SATOH +# Copyright (C) 2016 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """A simple backend module to load and dump files contain shell variables. 
@@ -21,6 +21,7 @@ from __future__ import annotations import itertools +import os import re import typing import warnings @@ -31,7 +32,7 @@ def _parseline( line: str, -) -> tuple[typing.Optional[str], typing.Optional[str]]: +) -> tuple[str | None, str | None]: """Parse a line contains shell variable definition. :param line: A string to parse, must not start with '#' (comment) @@ -56,7 +57,8 @@ def _parseline( def load( - stream: typing.IO, container: base.GenContainerT = dict, **_kwargs, + stream: typing.IO, container: base.GenContainerT = dict, + **_kwargs: typing.Any, ) -> base.InDataT: """Load shell variable definitions data from ``stream``. @@ -95,7 +97,8 @@ class Parser(base.StreamParser): _dict_opts: tuple[str, ...] = ("ac_dict", ) def load_from_stream( - self, stream: typing.IO, container: base.GenContainerT, **kwargs, + self, stream: typing.IO, container: base.GenContainerT, + **kwargs: typing.Any, ) -> base.InDataT: """Load config from given file like object ``stream``. @@ -109,7 +112,8 @@ def load_from_stream( return load(stream, container=container, **kwargs) def dump_to_stream( - self, cnf: base.InDataExT, stream: typing.IO, **_kwargs, + self, cnf: base.InDataExT, stream: typing.IO, + **_kwargs: typing.Any, ) -> None: """Dump config dat ``cnf`` to a file or file-like object ``stream``. 
@@ -118,5 +122,7 @@ def dump_to_stream( :param kwargs: backend-specific optional keyword parameters :: dict """ if utils.is_dict_like(cnf): - for key, val in cnf.items(): - stream.write(f"{key}='{val}'\n") + stream.writelines( + f"{key}='{val}'{os.linesep}" + for key, val in cnf.items() + ) diff --git a/src/anyconfig/backend/xml/etree.py b/src/anyconfig/backend/xml/etree.py index 7e40be99..e2565a01 100644 --- a/src/anyconfig/backend/xml/etree.py +++ b/src/anyconfig/backend/xml/etree.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2025 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # Some XML modules may be missing and Base.{load,dumps}_impl are not @@ -92,7 +92,7 @@ def _namespaces_from_file( - xmlfile: typing.Union[base.PathOrStrT, typing.IO], + xmlfile: base.PathOrStrT | typing.IO, ) -> dict[str, tuple[str, str]]: """Get the namespace str from file. @@ -158,7 +158,7 @@ def _dicts_have_unique_keys(dics: DicsType) -> bool: return len(set(key_itr)) == sum(len(d) for d in dics) -def _parse_text(val: str, **options) -> typing.Any: +def _parse_text(val: str, **options: typing.Any) -> typing.Any: """Parse ``val`` and interpret its data to some value. :return: Parsed value or value itself depends on 'ac_parse_value' @@ -171,7 +171,7 @@ def _parse_text(val: str, **options) -> typing.Any: def _process_elem_text( elem: ElementTree.Element, dic: DicType, subdic: DicType, - text: str = "@text", **options, + text: str = "@text", **options: typing.Any, ) -> None: """Process the text in the element ``elem``. @@ -197,7 +197,7 @@ def _process_elem_text( def _parse_attrs( elem: ElementTree.Element, container: GenDicType = dict, - **options, + **options: typing.Any, ) -> DicType: """Parse the attributes of the element ``elem``. 
@@ -215,7 +215,7 @@ def _parse_attrs( def _process_elem_attrs( elem: ElementTree.Element, dic: DicType, subdic: DicType, container: GenDicType = dict, attrs: str = "@attrs", - **options, + **options: typing.Any, ) -> None: """Process attributes in the element ``elem``. @@ -238,7 +238,7 @@ def _process_elem_attrs( def _process_children_elems( elem: ElementTree.Element, dic: DicType, subdic: DicType, container: GenDicType = dict, children: str = "@children", - **options, + **options: typing.Any, ) -> None: """Process children of the element ``elem``. @@ -275,8 +275,9 @@ def _process_children_elems( def elem_to_container( - elem: typing.Optional[ElementTree.Element], container: GenDicType = dict, - **options, + elem: ElementTree.Element | None, + container: GenDicType = dict, + **options: typing.Any, ) -> DicType: """Convert XML ElementTree Element to a collection of container objects. @@ -340,9 +341,10 @@ def _complement_tag_options(options: DicType) -> DicType: def root_to_container( - root: ElementTree.Element, container: GenDicType = dict, - nspaces: typing.Optional[DicType] = None, - **options, + root: ElementTree.Element, + container: GenDicType = dict, + nspaces: DicType | None = None, + **options: typing.Any, ) -> DicType: """Convert XML ElementTree Root Element to container objects. @@ -366,7 +368,9 @@ def root_to_container( **_complement_tag_options(options)) -def _to_str_fn(**options: DicType) -> collections.abc.Callable[..., str]: +def _to_str_fn( + **options: typing.Any, +) -> collections.abc.Callable[..., str]: """Convert any objects to a str. :param options: Keyword options might have 'ac_parse_value' key @@ -396,7 +400,8 @@ def _elem_set_attrs( def _elem_from_descendants( - children_nodes: collections.abc.Iterable[DicType], **options, + children_nodes: collections.abc.Iterable[DicType], + **options: typing.Any, ) -> collections.abc.Iterator[ElementTree.Element]: """Get the elements from the descendants ``children_nodes``. 
@@ -411,9 +416,10 @@ def _elem_from_descendants( def _get_or_update_parent( - key: str, val: typing.Any, to_str: collections.abc.Callable[..., str], - parent: typing.Optional[ElementTree.Element] = None, - **options, + key: str, val: typing.Any, + to_str: collections.abc.Callable[..., str], + parent: ElementTree.Element | None = None, + **options: typing.Any, ) -> ElementTree.Element: """Get or update the parent element ``parent``. @@ -441,7 +447,7 @@ def _get_or_update_parent( def _assert_if_invalid_node( obj: typing.Any, - parent: typing.Optional[ElementTree.Element] = None, + parent: ElementTree.Element | None = None, ) -> None: """Make sure the ``obj`` or ``parent`` is not invalid.""" if parent is None and (obj is None or not obj): @@ -452,9 +458,10 @@ def _assert_if_invalid_node( def container_to_elem( - obj: typing.Any, parent: typing.Optional[ElementTree.Element] = None, - to_str: typing.Optional[collections.abc.Callable[..., str]] = None, - **options, + obj: typing.Any, + parent: ElementTree.Element | None = None, + to_str: collections.abc.Callable[..., str] | None = None, + **options: typing.Any, ) -> ElementTree.Element: """Convert a dict-like object to XML ElementTree. @@ -503,7 +510,8 @@ def container_to_elem( def etree_write( - elem: ElementTree.Element, stream: typing.IO, **options, + elem: ElementTree.Element, stream: typing.IO, + **options: typing.Any, ) -> None: """Write XML ElementTree 'root' content into 'stream'. 
@@ -516,7 +524,7 @@ def etree_write( ("method", "xml_declaration", "default_namespace", "short_empty_elements"), options, - ) + ), ) content: bytes = ElementTree.tostring( # type: ignore[call-overload] elem, **opts, @@ -546,7 +554,8 @@ class Parser(base.Parser, base.ToStreamDumperMixin): _open_write_mode: typing.ClassVar[str] = "wb" def load_from_string( - self, content: typing.AnyStr, container: GenDicType, **opts, + self, content: typing.AnyStr, container: GenDicType, + **opts: typing.Any, ) -> DicType: """Load config from XML snippet (a string 'content'). @@ -566,12 +575,12 @@ def load_from_string( nspaces = _namespaces_from_file(stream) return root_to_container( - elem, container=container, nspaces=nspaces, **opts + elem, container=container, nspaces=nspaces, **opts, ) def load_from_path( self, filepath: base.PathOrStrT, container: GenDicType, - **opts, + **opts: typing.Any, ) -> DicType: """Load data from path ``filepath``. @@ -588,7 +597,8 @@ def load_from_path( ) def load_from_stream( - self, stream: typing.IO, container: GenDicType, **opts, + self, stream: typing.IO, container: GenDicType, + **opts: typing.Any, ) -> DicType: """Load data from IO stream ``stream``. @@ -606,7 +616,7 @@ def load_from_stream( ) def dump_to_string( # type: ignore[override] - self, cnf: base.InDataExT, **opts, + self, cnf: base.InDataExT, **opts: typing.Any, ) -> bytes: """Dump data ``cnf`` as a str. @@ -624,7 +634,8 @@ def dump_to_string( # type: ignore[override] return bio.getvalue() def dump_to_stream( - self, cnf: base.InDataExT, stream: typing.IO, **opts, + self, cnf: base.InDataExT, stream: typing.IO, + **opts: typing.Any, ) -> None: """Dump data ``cnf`` to the IO stream ``stream``. 
diff --git a/src/anyconfig/backend/yaml/pyyaml.py b/src/anyconfig/backend/yaml/pyyaml.py index 5e539792..cd990a3c 100644 --- a/src/anyconfig/backend/yaml/pyyaml.py +++ b/src/anyconfig/backend/yaml/pyyaml.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2025 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # type() is used to exactly match check instead of isinstance here. @@ -65,7 +65,8 @@ def _customized_loader( container: collections.abc.Callable[..., dict[str, typing.Any]], - loader: type[Loader] = Loader, mapping_tag: str = _MAPPING_TAG, + loader: type[Loader] = Loader, + mapping_tag: str = _MAPPING_TAG, ) -> type[Loader]: """Get the customized loader. @@ -111,7 +112,7 @@ def construct_mapping( def construct_ustr( loader: Loader, node: typing.Any, - ) -> typing.Union[str, int, float, None]: + ) -> str | int | float | None: """Unicode string constructor.""" return loader.construct_scalar(node) @@ -139,7 +140,7 @@ def container_representer( def yml_fnc_by_name( - fname: str, **options, + fname: str, **options: typing.Any, ) -> collections.abc.Callable[..., typing.Any]: """Get yaml loading/dumping function by name. @@ -151,7 +152,9 @@ def yml_fnc_by_name( return getattr(yaml, f"safe_{fname}" if options.get("ac_safe") else fname) -def yml_fnc_(fname: str, *args, **options) -> typing.Any: +def yml_fnc_( + fname: str, *args: typing.Any, **options: typing.Any, +) -> typing.Any: """Call yaml.safe_load, yaml.load, yaml.safe_dump and yaml.dump. :param fname: @@ -167,7 +170,7 @@ def yml_fnc_(fname: str, *args, **options) -> typing.Any: def yml_load( stream: typing.IO, container: base.GenContainerT, yml_fnc: collections.abc.Callable[..., typing.Any] = yml_fnc_, - **options, + **options: typing.Any, ) -> dict[str, typing.Any]: """Call yaml.safe_load and yaml.load. 
@@ -198,7 +201,7 @@ def yml_load( def yml_dump( data: typing.Any, stream: typing.IO, yml_fnc: collections.abc.Callable[..., typing.Any] = yml_fnc_, - **options, + **options: typing.Any, ) -> None: """Call yaml.safe_dump and yaml.dump. @@ -211,7 +214,7 @@ def yml_dump( options = {"ac_safe": True} # Same as yml_load. elif not options.get("Dumper", False) and _is_dict: - # TODO: Any other way to get its constructor? + # TODO(ssato): Any other way to get its constructor? maybe_container = options.get("ac_dict", type(data)) options["Dumper"] = _customized_dumper(maybe_container) diff --git a/src/anyconfig/backend/yaml/ruamel.py b/src/anyconfig/backend/yaml/ruamel.py index 4304ddac..44dbd1cb 100644 --- a/src/anyconfig/backend/yaml/ruamel.py +++ b/src/anyconfig/backend/yaml/ruamel.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2025 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """A backend module to load and dump YAML data files using rumael.yaml. @@ -76,8 +76,8 @@ def yml_fnc( - fname: str, *args, **options, -) -> typing.Optional[base.InDataExT]: + fname: str, *args: typing.Any, **options: typing.Any, +) -> base.InDataExT | None: """Call loading functions for yaml data. :param fname: @@ -102,7 +102,8 @@ def yml_fnc( def yml_load( - stream: typing.IO, container: base.GenContainerT, **options, + stream: typing.IO, container: base.GenContainerT, + **options: typing.Any, ) -> base.InDataExT: """See :func:`anyconfig.backend.yaml.pyyaml.yml_load`.""" ret = yml_fnc("load", stream, **options) @@ -113,7 +114,8 @@ def yml_load( def yml_dump( - data: base.InDataExT, stream: typing.IO, **options, + data: base.InDataExT, stream: typing.IO, + **options: typing.Any, ) -> None: """See :func:`anyconfig.backend.yaml.pyyaml.yml_dump`.""" # .. 
todo:: From 8f1df2e34cee98e46ff8f71a819c476a5baec651 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 00:39:56 +0900 Subject: [PATCH 15/27] fix: fix several ruff errors in anyconfig.cli.* fix or add workarounds for several ruff errors like the following in anyconfig.cli.*. ANN401: any-type COM812: missing-trailing-comma FIX001: line-contains-fixme I001: unsorted-imports TD001: invalid-todo-tag TD002: missing-todo-author TD003: missing-todo-link TD004: missing-todo-colon TID252: relative-imports UP045: non-pep604-annotation-optional --- src/anyconfig/cli/_main.py | 20 ++++++++++---------- src/anyconfig/cli/actions.py | 7 +++---- src/anyconfig/cli/detectors.py | 12 ++++++------ src/anyconfig/cli/filters.py | 4 ++-- src/anyconfig/cli/io.py | 10 +++++----- src/anyconfig/cli/parse_args.py | 13 ++++++------- src/anyconfig/cli/utils.py | 10 +++++----- 7 files changed, 37 insertions(+), 39 deletions(-) diff --git a/src/anyconfig/cli/_main.py b/src/anyconfig/cli/_main.py index 73c26de0..d716b4ac 100644 --- a/src/anyconfig/cli/_main.py +++ b/src/anyconfig/cli/_main.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """CLI frontend module for anyconfig.""" @@ -12,7 +12,7 @@ from .. import api, parser from . import ( - actions, constants, detectors, filters, parse_args, utils + actions, constants, detectors, filters, parse_args, utils, ) if typing.TYPE_CHECKING: @@ -36,8 +36,9 @@ def try_special_command_if_no_inputs(args: argparse.Namespace) -> None: sys.exit(0) -def process_args_or_run_command(args: argparse.Namespace - ) -> argparse.Namespace: +def process_args_or_run_command( + args: argparse.Namespace, +) -> argparse.Namespace: """Process ``args`` and/or run commands.
Process ``args``, that is, validate and update it, and raise SystemExit if @@ -49,7 +50,7 @@ def process_args_or_run_command(args: argparse.Namespace and args.inputs[0] == constants.STD_IN_OR_OUT): utils.exit_with_output( "No input type was given but required for the input '-'", - 1 + 1, ) else: try_special_command_if_no_inputs(args) @@ -57,7 +58,7 @@ def process_args_or_run_command(args: argparse.Namespace if args.validate and not args.schema: utils.exit_with_output( "--validate and --schema options must be used together", - 1 + 1, ) # Update args: @@ -96,18 +97,18 @@ def try_validate(cnf: api.InDataExT, args: argparse.Namespace) -> None: msg_code = ( "Validation failed:" f"{(os.linesep + ' ').join(errors)}", - 1 + 1, ) utils.exit_with_output(*msg_code) -def main(argv: typing.Optional[list[str]] = None) -> None: +def main(argv: list[str] | None = None) -> None: """Provide the entrypoint to run the CLI. :param argv: Argument list to parse or None (sys.argv will be set). """ - (_psr, args) = parse_args.parse((argv if argv else sys.argv)[1:]) + (_psr, args) = parse_args.parse((argv or sys.argv)[1:]) args = process_args_or_run_command(args) cnf = os.environ.copy() if args.env else {} @@ -129,7 +130,6 @@ def main(argv: typing.Optional[list[str]] = None) -> None: api.merge(cnf, diff) # type: ignore[arg-type] cnf = ( - # fixme. 
api.gen_schema(cnf) if args.gen_schema # type: ignore[assignment] else filters.do_filter(cnf, args) ) diff --git a/src/anyconfig/cli/actions.py b/src/anyconfig/cli/actions.py index 47f738ad..d46d0fd6 100644 --- a/src/anyconfig/cli/actions.py +++ b/src/anyconfig/cli/actions.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2025 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Actions for anyconfig.cli.*.""" @@ -15,10 +15,9 @@ def show_parsers_and_exit() -> None: def try_output_result( - cnf: api.InDataExT, args: argparse.Namespace + cnf: api.InDataExT, args: argparse.Namespace, ) -> None: """Try to output result.""" api.dump( - cnf, args.output, args.otype, - **(args.extra_opts if args.extra_opts else {}), + cnf, args.output, args.otype, **(args.extra_opts or {}), ) diff --git a/src/anyconfig/cli/detectors.py b/src/anyconfig/cli/detectors.py index 755b95b9..61f3bba5 100644 --- a/src/anyconfig/cli/detectors.py +++ b/src/anyconfig/cli/detectors.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2025 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Detect file type and parser from inputs and/or output.""" @@ -35,7 +35,7 @@ def are_same_file_types(paths: list[str]) -> bool: return all(x and exts[0] == x for x in exts[1:]) -def find_by_the_type(io_type: str) -> typing.Optional[str]: +def find_by_the_type(io_type: str) -> str | None: """Check the type given by users.""" default = None @@ -50,7 +50,7 @@ def find_by_the_type(io_type: str) -> typing.Optional[str]: warnings.warn( "Ignored the given type because it looks wrong or " "is not supported by installed parser backends: " - f"{io_type}", stacklevel=2 + f"{io_type}", stacklevel=2, ) return default @@ -58,7 +58,7 @@ def find_by_the_type(io_type: str) -> typing.Optional[str]: def find_by_the_paths( paths: list[str], *, ignore_errors: bool = True, -) -> typing.Optional[str]: +) -> str | None: """Try to detect file (parser) type from given file paths 
``paths``.""" default = None msg = ( @@ -95,7 +95,7 @@ def find_by_the_paths( def try_detecting_input_type( args: argparse.Namespace, *, ignore_errors: bool = True, -) -> typing.Optional[str]: +) -> str | None: """Try to resolve a file type and parser of inputs.""" # First, try the type given by users. if args.itype: @@ -112,7 +112,7 @@ def try_detecting_input_type( def try_detecting_output_type( args: argparse.Namespace, -) -> typing.Optional[str]: +) -> str | None: """Try to resolve a file type and parser of outputs (``args.output``).""" # First, try the type given by users. if args.otype: diff --git a/src/anyconfig/cli/filters.py b/src/anyconfig/cli/filters.py index 25d19657..48b8c4b5 100644 --- a/src/anyconfig/cli/filters.py +++ b/src/anyconfig/cli/filters.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=broad-except @@ -17,7 +17,7 @@ def do_filter( - cnf: dict[str, typing.Any], args: argparse.Namespace + cnf: dict[str, typing.Any], args: argparse.Namespace, ) -> InDataExT: """Filter ``cnf`` by query/get/set and return filtered result.""" if args.query: diff --git a/src/anyconfig/cli/io.py b/src/anyconfig/cli/io.py index 24a7aa26..9345b8ef 100644 --- a/src/anyconfig/cli/io.py +++ b/src/anyconfig/cli/io.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Initialize sys.std{out,err}.""" @@ -12,16 +12,16 @@ from .. import ioinfo -def make() -> typing.Optional[tuple[typing.IO, typing.IO]]: +def make() -> tuple[typing.IO, typing.IO] | None: """Initialize sys.std{out,err} and returns them.""" encoding = ioinfo.get_encoding() - # TODO: What should be done for an error, "AttributeError: '_io.StringIO' - # object has no attribute 'buffer'"? + # TODO(ssato): #188 What should be done for an error, "AttributeError: + # '_io.StringIO' object has no attribute 'buffer'"? 
try: return ( io.TextIOWrapper(sys.stdout.buffer, encoding=encoding), - io.TextIOWrapper(sys.stderr.buffer, encoding=encoding) + io.TextIOWrapper(sys.stderr.buffer, encoding=encoding), ) except AttributeError: pass diff --git a/src/anyconfig/cli/parse_args.py b/src/anyconfig/cli/parse_args.py index aa4f1f91..369b6504 100644 --- a/src/anyconfig/cli/parse_args.py +++ b/src/anyconfig/cli/parse_args.py @@ -1,12 +1,11 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Argument parser.""" from __future__ import annotations import argparse -import typing from .. import api from . import constants, utils @@ -16,7 +15,7 @@ "loglevel": 0, "list": False, "output": None, "itype": None, "otype": None, "atype": None, "merge": api.MS_DICTS, "ignore_missing": False, "template": False, "env": False, "schema": None, "validate": False, - "gen_schema": False, "extra_opts": None + "gen_schema": False, "extra_opts": None, } @@ -29,8 +28,8 @@ def gen_type_help_txt(types: str, target: str = "Input") -> str: def make_parser( - defaults: typing.Optional[dict] = None, - prog: typing.Optional[str] = None + defaults: dict | None = None, + prog: str | None = None, ) -> argparse.ArgumentParser: """Make an instance of argparse.ArgumentParser to parse arguments.""" if defaults is None: @@ -45,7 +44,7 @@ def make_parser( apsr.add_argument("inputs", type=str, nargs="*", help="Input files") apsr.add_argument( "--version", action="version", - version=f"%%(prog)s {'.'.join(api.version())}" + version=f"%%(prog)s {'.'.join(api.version())}", ) apsr.add_argument("-o", "--output", help="Output file path") @@ -106,7 +105,7 @@ def make_parser( def parse( argv: list[str], - prog: typing.Optional[str] = None + prog: str | None = None, ) -> tuple[argparse.ArgumentParser, argparse.Namespace]: """Parse given arguments ``argv`` and return it with the parser.""" psr = make_parser(prog=prog) diff --git a/src/anyconfig/cli/utils.py 
b/src/anyconfig/cli/utils.py index f4400fe1..c2dd9a9a 100644 --- a/src/anyconfig/cli/utils.py +++ b/src/anyconfig/cli/utils.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Utilities for anyconfig.cli.*.""" @@ -39,7 +39,7 @@ def make_parsers_txt() -> str: f"{indent}{parser_types}", "Supported file extensions [extension: parsers]:", f"{file_ext_vs_parsers}", - ] + ], ) @@ -64,7 +64,7 @@ def exit_if_load_failure(cnf: api.InDataExT, msg: str) -> None: def load_diff( - args: argparse.Namespace, extra_opts: dict[str, typing.Any] + args: argparse.Namespace, extra_opts: dict[str, typing.Any], ) -> api.InDataExT: """Load update data. @@ -84,12 +84,12 @@ def load_diff( exit_with_output( "No appropriate backend was found for given file " f"type=n{args.itype}', inputs={', '.join(args.inputs)}", - 1 + 1, ) if diff is None: exit_with_output( - f"Failed to load: args={', '.join(args.inputs)}", 1 + f"Failed to load: args={', '.join(args.inputs)}", 1, ) return diff From 347cd8f6fee5dc4ac2976413441914ec1a0d2190 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 01:05:40 +0900 Subject: [PATCH 16/27] fix: fix several ruff errors in anyconfig.common.* fix or add workarounds for several ruff errors like the following in anyconfig.common.*.
ANN401: any-type COM812: missing-trailing-comma I001: unsorted-imports RUF022: unsorted-dunder-all UP045: non-pep604-annotation-optional --- src/anyconfig/common/__init__.py | 8 +++----- src/anyconfig/common/errors.py | 4 ++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/anyconfig/common/__init__.py b/src/anyconfig/common/__init__.py index d191e28e..e48189cd 100644 --- a/src/anyconfig/common/__init__.py +++ b/src/anyconfig/common/__init__.py @@ -1,14 +1,14 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc global constants, variables, classes and so on.""" from .datatypes import ( - InDataT, InDataExT, PrimitiveT + InDataT, InDataExT, PrimitiveT, ) from .errors import ( UnknownParserTypeError, UnknownProcessorTypeError, UnknownFileTypeError, - ValidationError + ValidationError, ) @@ -17,5 +17,3 @@ "UnknownParserTypeError", "UnknownProcessorTypeError", "UnknownFileTypeError", "ValidationError", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/common/errors.py b/src/anyconfig/common/errors.py index e420eceb..7acda633 100644 --- a/src/anyconfig/common/errors.py +++ b/src/anyconfig/common/errors.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=too-few-public-methods @@ -14,7 +14,7 @@ class BaseError(RuntimeError): _msg_fmt: str = "forced_type: {!s}" - def __init__(self, arg: typing.Optional[typing.Any] = None) -> None: + def __init__(self, arg: typing.Any | None = None) -> None: """Initialize the format.""" super().__init__(self._msg_fmt.format(str(arg))) From 942694789d1da520d4b889e59372e0702646e6f6 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 01:08:29 +0900 Subject: [PATCH 17/27] fix: fix several ruff errors in anyconfig.pa*.* fix or add workarounds for several ruff errors like the following in anyconfig.pa*.*.
COM812: missing-trailing-comma FIX001: line-contains-fixme FURB167: regex-flag-alias I001: unsorted-imports RUF022: unsorted-dunder-all TD001: invalid-todo-tag TD002: missing-todo-author TD003: missing-todo-link TD005: missing-todo-description TID252: relative-imports UP045: non-pep604-annotation-optional --- src/anyconfig/parser.py | 25 +++++++++---------------- src/anyconfig/parsers/__init__.py | 8 +++----- src/anyconfig/parsers/parsers.py | 9 ++------- src/anyconfig/parsers/utils.py | 12 +++++------- 4 files changed, 19 insertions(+), 35 deletions(-) diff --git a/src/anyconfig/parser.py b/src/anyconfig/parser.py index 1b36ccfe..c073d7bf 100644 --- a/src/anyconfig/parser.py +++ b/src/anyconfig/parser.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2024 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc simple parsers.""" @@ -15,8 +15,8 @@ INT_PATTERN: re.Pattern = re.compile(r"^(\d|([1-9]\d+))$") FLOAT_PATTERN: re.Pattern = re.compile(r"^\d+[\.]\d+$") -BOOL_TRUE_PATTERN: re.Pattern = re.compile(r"^true$", re.I) -BOOL_FALSE_PATTERN: re.Pattern = re.compile(r"^false$", re.I) +BOOL_TRUE_PATTERN: re.Pattern = re.compile(r"^true$", re.IGNORECASE) +BOOL_FALSE_PATTERN: re.Pattern = re.compile(r"^false$", re.IGNORECASE) STR_PATTERN: re.Pattern = re.compile(r"^['\"](.*)['\"]$") PrimitiveT = typing.Union[str, int, float, bool] @@ -24,7 +24,7 @@ def parse_single( # noqa: PLR0911 - str_: typing.Optional[str] + str_: str | None, ) -> PrimitiveT: """Parse an expression gives a primitive value.""" if str_ is None: @@ -66,7 +66,7 @@ def parse_list(str_: str, sep: str = ",") -> PrimitivesT: def attr_val_itr( - str_: str, avs_sep: str = ":", vs_sep: str = ",", as_sep: str = ";" + str_: str, avs_sep: str = ":", vs_sep: str = ",", as_sep: str = ";", ) -> collections.abc.Iterator[AttrValsT]: """Parse a list of atrribute and value pairs. 
@@ -88,7 +88,7 @@ def attr_val_itr( warnings.warn( f"Extra strings {_rest!s} in {rel!s}" f"It should be in the form of attr{avs_sep}value.", - stacklevel=2 + stacklevel=2, ) _attr = typing.cast("str", _attr) @@ -100,7 +100,7 @@ def attr_val_itr( def parse_attrlist_0( - str_: str, avs_sep: str = ":", vs_sep: str = ",", as_sep: str = ";" + str_: str, avs_sep: str = ":", vs_sep: str = ",", as_sep: str = ";", ) -> list[AttrValsT]: """Parse a list of atrribute and value pairs. @@ -140,17 +140,10 @@ def parse_attrlist(str_: str, avs_sep: str = ":", vs_sep: str = ",", return dict(parse_attrlist_0(str_, avs_sep, vs_sep, as_sep)) -ResultsT = typing.Union[ - PrimitiveT, - PrimitivesT, - AttrValsDictT -] - - def parse( str_: typing.Optional[str], lsep: str = ",", avsep: str = ":", - vssep: str = ",", avssep: str = ";" -) -> ResultsT: + vssep: str = ",", avssep: str = ";", +) -> PrimitiveT | PrimitivesT | AttrValsDictT: """Very simple generic parser.""" if str_ is None or not str_: return parse_single(str_) diff --git a/src/anyconfig/parsers/__init__.py b/src/anyconfig/parsers/__init__.py index a0513226..70353e6c 100644 --- a/src/anyconfig/parsers/__init__.py +++ b/src/anyconfig/parsers/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Utilities to list and find appropriate parser class objects and instances. 
@@ -14,12 +14,10 @@ """ from .utils import ( load_plugins, list_types, list_by_cid, list_by_type, list_by_extension, - findall, find, MaybeParserT + findall, find, MaybeParserT, ) __all__ = [ "load_plugins", "list_types", "list_by_cid", "list_by_type", - "list_by_extension", "findall", "find", "MaybeParserT" + "list_by_extension", "findall", "find", "MaybeParserT", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/parsers/parsers.py b/src/anyconfig/parsers/parsers.py index 12c75136..7f3f88fc 100644 --- a/src/anyconfig/parsers/parsers.py +++ b/src/anyconfig/parsers/parsers.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2011 - 2021 Satoru SATOH +# Copyright (C) 2011 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # Suppress import positions after some global variables are defined @@ -7,8 +7,6 @@ """Provide config parser objects aggregated.""" from __future__ import annotations -import typing - from ..backend import ParserClssT, PARSERS from ..processors import Processors from ..singleton import Singleton @@ -19,12 +17,9 @@ class Parsers(Processors, Singleton): _pgroup: str = "anyconfig_backends" - def __init__(self, prcs: typing.Optional[ParserClssT] = None - ) -> None: + def __init__(self, prcs: ParserClssT | None = None) -> None: """Initialize with PARSERS.""" if prcs is None: prcs = PARSERS super().__init__(prcs) - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/parsers/utils.py b/src/anyconfig/parsers/utils.py index 0def3037..7eef19ed 100644 --- a/src/anyconfig/parsers/utils.py +++ b/src/anyconfig/parsers/utils.py @@ -1,9 +1,7 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # -# FIXME: -# mypy: disable-error-code=type-var """Internal APIs to load, list and find parser class objects.""" from __future__ import annotations @@ -47,8 +45,8 @@ def list_by_extension() -> list[tuple[str, ParsersT]]: def findall( - obj: typing.Optional[ioinfo.PathOrIOInfoT] = None, - forced_type: typing.Optional[str] = 
None + obj: ioinfo.PathOrIOInfoT | None = None, + forced_type: str | None = None, ) -> list[ParserT]: """Find out processor objects can process data from given ``obj``. @@ -67,8 +65,8 @@ def findall( def find( - obj: typing.Optional[ioinfo.PathOrIOInfoT] = None, - forced_type: MaybeParserT = None + obj: ioinfo.PathOrIOInfoT | None = None, + forced_type: str | ParserT | type[ParserT] | None = None, ) -> ParserT: """Very similar to the above :func:`findall`. From c26c219c7bd6f5a352377b1135cdde73090cdd44 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 01:15:59 +0900 Subject: [PATCH 18/27] fix: fix several ruff errors in anyconfig.dicts fix or add workarounds for several ruff errors like the following in anyconfig.dicts. ANN003: missing-type-kwargs ANN401: any-type COM812: missing-trailing-comma I001: unsorted-imports UP045: non-pep604-annotation-optional --- src/anyconfig/dicts.py | 75 +++++++++++++++++++++++------------------- 1 file changed, 41 insertions(+), 34 deletions(-) diff --git a/src/anyconfig/dicts.py b/src/anyconfig/dicts.py index 79d0135f..d8ccab3b 100644 --- a/src/anyconfig/dicts.py +++ b/src/anyconfig/dicts.py @@ -2,7 +2,7 @@ # Forked from m9dicts.{api,dicts}. # # Copyright (C) 2011 - 2021 Red Hat, Inc. -# Copyright (C) 2018 - 2024 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # r"""Utility functions to operate on mapping objects such as get, set and merge. @@ -29,7 +29,7 @@ MS_DICTS: str = "merge_dicts" MS_DICTS_AND_LISTS: str = "merge_dicts_and_lists" MERGE_STRATEGIES: tuple[str, ...] = ( - MS_REPLACE, MS_NO_REPLACE, MS_DICTS, MS_DICTS_AND_LISTS + MS_REPLACE, MS_NO_REPLACE, MS_DICTS, MS_DICTS_AND_LISTS, ) PATH_SEPS: tuple[str, ...] = ("/", ".") @@ -51,7 +51,7 @@ def _jsnp_unescape(jsn_s: str) -> str: def _split_path( - path: str, seps: tuple[str, ...] = PATH_SEPS + path: str, seps: tuple[str, ...] = PATH_SEPS, ) -> list[str]: """Parse a path expression and return a list of path items.
@@ -72,7 +72,7 @@ def _split_path( def mk_nested_dic( - path: str, val: typing.Any, seps: tuple[str, ...] = PATH_SEPS + path: str, val: typing.Any, seps: tuple[str, ...] = PATH_SEPS, ) -> DictT: """Make a nested dict iteratively. @@ -89,7 +89,7 @@ def mk_nested_dic( def get( dic: DictT, path: str, seps: tuple[str, ...] = PATH_SEPS, - idx_reg: re.Pattern = _JSNP_GET_ARRAY_IDX_REG + idx_reg: re.Pattern = _JSNP_GET_ARRAY_IDX_REG, ) -> tuple[typing.Any, str]: """Getter for nested dicts. @@ -117,7 +117,7 @@ def get( def set_( dic: DictT, path: str, val: typing.Any, - seps: tuple[str, ...] = PATH_SEPS + seps: tuple[str, ...] = PATH_SEPS, ) -> None: """Setter for nested dicts. @@ -133,8 +133,10 @@ def _are_list_like(*objs: typing.Any) -> bool: return all(utils.is_list_like(obj) for obj in objs) -def _update_with_replace(self: DictT, other: DictT, key: str, - default: typing.Any = None, **_options) -> None: +def _update_with_replace( + self: DictT, other: DictT, key: str, + default: typing.Any = None, **_options: typing.Any, +) -> None: """Update ``self`` by replacements using ``other``. Replace value of a mapping object 'self' with 'other' has if both have same @@ -154,8 +156,10 @@ def _update_with_replace(self: DictT, other: DictT, key: str, self[key] = default -def _update_wo_replace(self: DictT, other: DictT, key: str, - val: typing.Any = None, **_options) -> None: +def _update_wo_replace( + self: DictT, other: DictT, key: str, + val: typing.Any = None, **_options: typing.Any, +) -> None: """Update ``self`` without any replacements using ``other``. Never update (replace) the value of 'self' with 'other''s, that is, only @@ -173,7 +177,7 @@ def _update_wo_replace(self: DictT, other: DictT, key: str, def _merge_list( - self: DictT, key: str, lst: collections.abc.Iterable[typing.Any] + self: DictT, key: str, lst: collections.abc.Iterable[typing.Any], ) -> None: """Update a dict ``self`` using an iterable ``lst``. 
@@ -192,9 +196,11 @@ def _merge_other(self: DictT, key: str, val: typing.Any) -> None: self[key] = val # Just overwrite it by default implementation. -def _update_with_merge(self: DictT, other: DictT, key: str, *, - val: typing.Any = None, - merge_lists: bool = False, **options) -> None: +def _update_with_merge( + self: DictT, other: DictT, key: str, *, + val: typing.Any = None, + merge_lists: bool = False, **options: typing.Any, +) -> None: """Update a dict ``self`` using ``other`` and optional arguments. Merge the value of self with other's recursively. Behavior of merge will be @@ -229,8 +235,10 @@ def _update_with_merge(self: DictT, other: DictT, key: str, *, self[key] = val -def _update_with_merge_lists(self: DictT, other: DictT, key: str, - val: typing.Any = None, **options) -> None: +def _update_with_merge_lists( + self: DictT, other: DictT, key: str, + val: typing.Any = None, **options: typing.Any, +) -> None: """Similar to _update_with_merge but merge lists always. :param self: mapping object to update with 'other' @@ -259,7 +267,8 @@ def _get_update_fn(strategy: str) -> collections.abc.Callable[..., None]: strategy = MS_DICTS try: return typing.cast( - "collections.abc.Callable[..., None]", _MERGE_FNS[strategy] + "collections.abc.Callable[..., None]", + _MERGE_FNS[strategy], ) except KeyError as exc: if callable(strategy): @@ -269,14 +278,12 @@ def _get_update_fn(strategy: str) -> collections.abc.Callable[..., None]: raise ValueError(msg) from exc -UpdatesT = typing.Union[ - collections.abc.Iterable[tuple[str, typing.Any]], - DictT -] - - -def merge(self: DictT, other: UpdatesT, ac_merge: str = MS_DICTS, - **options) -> None: +def merge( + self: DictT, + other: collections.abc.Iterable[tuple[str, typing.Any]] | DictT, + ac_merge: str = MS_DICTS, + **options: typing.Any, +) -> None: """Update (merge) a mapping object ``self`` with ``other``. 
``other`` may be a mapping object or an iterable yields (key, value) tuples @@ -295,7 +302,7 @@ def merge(self: DictT, other: UpdatesT, ac_merge: str = MS_DICTS, try: iother = typing.cast( "collections.abc.Iterable[tuple[str, typing.Any]]", - other + other, ) for key, val in iother: _update_fn(self, dict(other), key, val=val, **options) @@ -307,8 +314,8 @@ def merge(self: DictT, other: UpdatesT, ac_merge: str = MS_DICTS, def _make_recur( obj: typing.Any, make_fn: collections.abc.Callable, *, ac_ordered: bool = False, - ac_dict: typing.Optional[collections.abc.Callable] = None, - **options + ac_dict: collections.abc.Callable | None = None, + **options: typing.Any, ) -> DictT: """Apply ``make_fn`` to ``obj`` recursively. @@ -327,8 +334,10 @@ def _make_recur( for k, v in obj.items()) -def _make_iter(obj: typing.Any, make_fn: collections.abc.Callable, **options - ) -> DictT: +def _make_iter( + obj: typing.Any, make_fn: collections.abc.Callable, + **options: typing.Any, +) -> DictT: """Apply ``make_fn`` to ``obj`` iteratively. :param obj: A mapping objects or other primitive object @@ -342,8 +351,8 @@ def _make_iter(obj: typing.Any, make_fn: collections.abc.Callable, **options def convert_to( obj: typing.Any, *, ac_ordered: bool = False, - ac_dict: typing.Optional[collections.abc.Callable] = None, - **options + ac_dict: collections.abc.Callable | None = None, + **options: typing.Any, ) -> DictT: """Convert a mapping objects to a dict or object of 'to_type' recursively. @@ -364,5 +373,3 @@ def convert_to( return _make_iter(obj, convert_to, **options) return obj - -# vim:sw=4:ts=4:et: From bd3d7fd7920080371734e5e6778a59cb10158296 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 01:19:59 +0900 Subject: [PATCH 19/27] fix: fix several ruff errors in anyconfig.ioinfo.utils fix or add workarouds for several ruff errors like the followings in anyconfig.ioinfo.utils. 
ANN401: any-type COM812: missing-trailing-comma I001: unsorted-imports RUF022: unsorted-dunder-all UP007: non-pep604-annotation-union --- src/anyconfig/ioinfo/utils.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/anyconfig/ioinfo/utils.py b/src/anyconfig/ioinfo/utils.py index a9e2bad0..b1ae8929 100644 --- a/src/anyconfig/ioinfo/utils.py +++ b/src/anyconfig/ioinfo/utils.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Utility funtions for anyconfig.ionfo.""" @@ -34,19 +34,21 @@ def get_path_and_ext(path: pathlib.Path) -> tuple[pathlib.Path, str]: return ( abs_path, - file_ext[1:] if file_ext.startswith(".") else "" + file_ext[1:] if file_ext.startswith(".") else "", ) def expand_from_path( - path: pathlib.Path, marker: str = GLOB_MARKER + path: pathlib.Path, marker: str = GLOB_MARKER, ) -> collections.abc.Iterator[pathlib.Path]: """Expand ``path`` contains '*' in its path str.""" if not path.is_absolute(): path = path.resolve() idx_part = list( - enumerate(itertools.takewhile(lambda p: marker not in p, path.parts)) + enumerate( + itertools.takewhile(lambda p: marker not in p, path.parts), + ), )[-1] if not idx_part: From 7b3193757463f8d0142a4f9d37d35a4f699debec Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 01:27:15 +0900 Subject: [PATCH 20/27] fix: fix several ruff errors in anyconfig.ioinfo.* fix or add workarouds for several ruff errors like the followings in anyconfig.ioinfo.*. 
ANN401: any-type COM812: missing-trailing-comma I001: unsorted-imports RUF022: unsorted-dunder-all UP007: non-pep604-annotation-union --- src/anyconfig/ioinfo/datatypes.py | 4 ++-- src/anyconfig/ioinfo/factory.py | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/anyconfig/ioinfo/datatypes.py b/src/anyconfig/ioinfo/datatypes.py index 7e8210ba..66fa9d3e 100644 --- a/src/anyconfig/ioinfo/datatypes.py +++ b/src/anyconfig/ioinfo/datatypes.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=inherit-non-class,too-few-public-methods @@ -17,7 +17,7 @@ class IOInfo(typing.NamedTuple): """Equivalent to collections.namedtuple.""" - src: typing.Union[pathlib.Path, typing.IO] + src: pathlib.Path | typing.IO type: str path: str extension: str diff --git a/src/anyconfig/ioinfo/factory.py b/src/anyconfig/ioinfo/factory.py index fd19f208..40669ea5 100644 --- a/src/anyconfig/ioinfo/factory.py +++ b/src/anyconfig/ioinfo/factory.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2018 - 2024 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=invalid-name @@ -20,7 +20,7 @@ def from_path_object(path: pathlib.Path) -> datatypes.IOInfo: (abs_path, file_ext) = utils.get_path_and_ext(path) return datatypes.IOInfo( - abs_path, datatypes.IOI_PATH_OBJ, str(abs_path), file_ext + abs_path, datatypes.IOI_PATH_OBJ, str(abs_path), file_ext, ) @@ -39,7 +39,7 @@ def from_io_stream(strm: typing.IO) -> datatypes.IOInfo: (abs_path, file_ext) = (path, "") return datatypes.IOInfo( - strm, datatypes.IOI_STREAM, abs_path, file_ext + strm, datatypes.IOI_STREAM, abs_path, file_ext, ) @@ -62,7 +62,7 @@ def make(obj: typing.Any) -> datatypes.IOInfo: def make_itr( - obj: typing.Any, marker: str = constants.GLOB_MARKER + obj: typing.Any, marker: str = constants.GLOB_MARKER, ) -> collections.abc.Iterator[datatypes.IOInfo]: """Make and yield 
a series of :class:`datatypes.IOInfo` objects."""
     if isinstance(obj, datatypes.IOInfo):
@@ -85,7 +85,7 @@ def make_itr(


 def makes(
-    obj: typing.Any, marker: str = constants.GLOB_MARKER
+    obj: typing.Any, marker: str = constants.GLOB_MARKER,
 ) -> list[datatypes.IOInfo]:
     """Make and return a list of :class:`datatypes.IOInfo` objects."""
     return list(make_itr(obj, marker=marker))

From 2d170db10d406f762b5e58cb341df0f66bc77a4e Mon Sep 17 00:00:00 2001
From: Satoru Sato
Date: Thu, 12 Feb 2026 01:29:59 +0900
Subject: [PATCH 21/27] fix: fix several ruff errors in anyconfig.processors.*

fix or add workarounds for several ruff errors like the following in
anyconfig.processors.*.

COM812: missing-trailing-comma
FIX001: line-contains-fixme
I001: unsorted-imports
RUF022: unsorted-dunder-all
TD001: invalid-todo-tag
TD002: missing-todo-author
TD003: missing-todo-link
TD005: missing-todo-description
TID252: relative-imports
UP007: non-pep604-annotation-union
UP045: non-pep604-annotation-optional
---
 src/anyconfig/processors/__init__.py   |  8 ++----
 src/anyconfig/processors/processors.py | 26 +++++++++--------
 src/anyconfig/processors/utils.py      | 39 ++++++++++++++------------
 3 files changed, 38 insertions(+), 35 deletions(-)

diff --git a/src/anyconfig/processors/__init__.py b/src/anyconfig/processors/__init__.py
index 2e46d989..1c59ec5d 100644
--- a/src/anyconfig/processors/__init__.py
+++ b/src/anyconfig/processors/__init__.py
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2021 Satoru SATOH
+# Copyright (C) 2021 - 2026 Satoru SATOH
 # SPDX-License-Identifier: MIT
 #
 """Provide a list of a :class:`anyconfig.models.processor` and so on.
@@ -13,11 +13,11 @@
 - Add to abstract processors such like Parsers (loaders and dumpers).
""" from .datatypes import ( - ProcT, ProcClsT, ProcClssT, MaybeProcT + ProcT, ProcClsT, ProcClssT, MaybeProcT, ) from .processors import Processors from .utils import ( - list_by_x, load_plugins + list_by_x, load_plugins, ) __all__ = [ @@ -25,5 +25,3 @@ "Processors", "list_by_x", "load_plugins", ] - -# vim:sw=4:ts=4:et: diff --git a/src/anyconfig/processors/processors.py b/src/anyconfig/processors/processors.py index 730cb4bc..62b23f0e 100644 --- a/src/anyconfig/processors/processors.py +++ b/src/anyconfig/processors/processors.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2018 - 2024 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # mypy: disable-error-code=type-var @@ -14,7 +14,7 @@ if typing.TYPE_CHECKING: import builtins from .datatypes import ( - ProcT, ProcsT, ProcClsT, ProcClssT, MaybeProcT + ProcT, ProcsT, ProcClsT, ProcClssT, MaybeProcT, ) from .. import ioinfo @@ -24,7 +24,7 @@ class Processors: _pgroup: str = "" # processor group name to load plugins - def __init__(self, processors: typing.Optional[ProcClssT] = None) -> None: + def __init__(self, processors: ProcClssT | None = None) -> None: """Initialize with ``processors``. 
:param processors: @@ -33,7 +33,7 @@ def __init__(self, processors: typing.Optional[ProcClssT] = None) -> None: """ # {: } self._processors: dict[ # type: ignore[valid-type] - str, ProcT + str, ProcT, ] = {} # type: ignore[valid-type] if processors is not None: for pcls in processors: @@ -74,7 +74,7 @@ def list_by_cid(self) -> builtins.list[tuple[str, ProcsT]]: prs = self._processors return sorted( ((cid, [prs[cid]]) for cid in sorted(prs.keys())), - key=operator.itemgetter(0) + key=operator.itemgetter(0), ) def list_by_type(self) -> builtins.list[tuple[str, ProcsT]]: @@ -87,7 +87,7 @@ def list_by_type(self) -> builtins.list[tuple[str, ProcsT]]: return utils.list_by_x(self.list(), "type") def list_by_x( - self, item: typing.Optional[str] = None + self, item: str | None = None, ) -> builtins.list[tuple[str, ProcsT]]: """List processors by those factor 'x'. @@ -113,7 +113,7 @@ def list_by_x( return res - def list_x(self, key: typing.Optional[str] = None) -> builtins.list[str]: + def list_x(self, key: str | None = None) -> builtins.list[str]: """List the factor 'x' of processors. :param key: Which of key to return from 'cid', 'type', and 'extention' @@ -122,7 +122,7 @@ def list_x(self, key: typing.Optional[str] = None) -> builtins.list[str]: if key in ("cid", "type"): return sorted( {operator.methodcaller(key)(p) - for p in self._processors.values()} + for p in self._processors.values()}, ) if key == "extension": return sorted(k for k, _v in self.list_by_x("extensions")) @@ -135,8 +135,8 @@ def list_x(self, key: typing.Optional[str] = None) -> builtins.list[str]: raise ValueError(msg) def findall( - self, obj: typing.Optional[ioinfo.PathOrIOInfoT], - forced_type: typing.Optional[str] = None + self, obj: ioinfo.PathOrIOInfoT | None, + forced_type: str | None = None, ) -> builtins.list[ProcT]: """Find all of the processors match with tthe given conditions. 
@@ -150,8 +150,10 @@ def findall( """ return utils.findall(obj, self.list(), forced_type=forced_type) - def find(self, obj: typing.Optional[ioinfo.PathOrIOInfoT], - forced_type: MaybeProcT = None) -> ProcT: + def find( + self, obj: ioinfo.PathOrIOInfoT | None, + forced_type: MaybeProcT = None, + ) -> ProcT: """Find the processor best match with tthe given conditions. :param obj: diff --git a/src/anyconfig/processors/utils.py b/src/anyconfig/processors/utils.py index ce5fdfba..0f5cad46 100644 --- a/src/anyconfig/processors/utils.py +++ b/src/anyconfig/processors/utils.py @@ -1,10 +1,10 @@ # -# Copyright (C) 2018 - 2024 Satoru SATOH +# Copyright (C) 2018 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unidiomatic-typecheck # -# FIXME: +# todo(ssato): #189 fix the mypy error, type-var. # mypy: disable-error-code=type-var """Utility functions for anyconfig.processors.""" from __future__ import annotations @@ -16,13 +16,15 @@ import importlib.metadata -from .. import common, ioinfo, models, utils +from .. import ( + common, ioinfo, models, utils, +) if typing.TYPE_CHECKING: import collections.abc from .datatypes import ( - ProcT, ProcsT, ProcClsT, MaybeProcT + ProcT, ProcsT, ProcClsT, MaybeProcT, ) @@ -39,7 +41,7 @@ def select_by_key( items: collections.abc.Iterable[ tuple[tuple[str, ...], typing.Any] ], - sort_fn: collections.abc.Callable[..., typing.Any] = sorted + sort_fn: collections.abc.Callable[..., typing.Any] = sorted, ) -> list[tuple[str, list[typing.Any]]]: """Select items from ``items`` by key. @@ -57,7 +59,7 @@ def select_by_key( def list_by_x( - prs: collections.abc.Iterable[ProcT], key: str + prs: collections.abc.Iterable[ProcT], key: str, ) -> list[tuple[str, ProcsT]]: """List items by the factor 'x'. 
@@ -74,7 +76,7 @@ def list_by_x( elif key == "extensions": res = select_by_key( - ((p.extensions(), p) for p in prs), sort_fn=sort_by_prio + ((p.extensions(), p) for p in prs), sort_fn=sort_by_prio, ) else: msg = f"Argument 'key' must be 'type' or 'extensions' [{key}]" @@ -84,7 +86,7 @@ def list_by_x( def findall_with_pred( - predicate: collections.abc.Callable[..., bool], prs: ProcsT + predicate: collections.abc.Callable[..., bool], prs: ProcsT, ) -> ProcsT: """Find all of the items match with given predicates. @@ -97,9 +99,9 @@ def findall_with_pred( def maybe_processor( - type_or_id: typing.Union[ProcT, ProcClsT], - cls: ProcClsT = models.processor.Processor -) -> typing.Optional[ProcT]: + type_or_id: ProcT | ProcClsT, + cls: ProcClsT = models.processor.Processor, +) -> ProcT | None: """Try to get the processor. :param type_or_id: @@ -176,8 +178,8 @@ def find_by_maybe_file(obj: ioinfo.PathOrIOInfoT, prs: ProcsT) -> ProcsT: def findall( - obj: typing.Optional[ioinfo.PathOrIOInfoT], prs: ProcsT, - forced_type: typing.Optional[str] = None, + obj: ioinfo.PathOrIOInfoT | None, prs: ProcsT, + forced_type: str | None = None, ) -> ProcsT: """Find all of the processors match with the conditions. @@ -203,7 +205,7 @@ class or None if forced_type is None: pclss = find_by_maybe_file( - typing.cast("ioinfo.PathOrIOInfoT", obj), prs + typing.cast("ioinfo.PathOrIOInfoT", obj), prs, ) # :: [Processor], never [] else: pclss = find_by_type_or_id(forced_type, prs) # Do. @@ -211,9 +213,10 @@ class or None return pclss -def find(obj: typing.Optional[ioinfo.PathOrIOInfoT], prs: ProcsT, - forced_type: MaybeProcT = None, - ) -> ProcT: +def find( + obj: ioinfo.PathOrIOInfoT | None, prs: ProcsT, + forced_type: MaybeProcT = None, +) -> ProcT: """Find the processors best match with the conditions. 
    :param obj:
@@ -233,7 +236,7 @@ class or
 :class:`anyconfig.models.processor.Processor` class object or
     """
     if forced_type is not None and not isinstance(forced_type, str):
         proc = maybe_processor(
-            typing.cast("typing.Union[ProcT, ProcClsT]", forced_type)
+            typing.cast("ProcT | ProcClsT", forced_type),
         )
         if proc is None:
             msg = (

From 2e233ce5306cf6e03bafae23b064cccb9ccc39a6 Mon Sep 17 00:00:00 2001
From: Satoru Sato
Date: Thu, 12 Feb 2026 01:36:04 +0900
Subject: [PATCH 22/27] fix: fix several ruff errors in tests.*

fix or add workarounds for several ruff errors like the following in
tests.*.

- ANN003, ANN201, ANN202, ANN401: rules derived from flake8-annotations
- ARG001: rules derived from flake8-unused-arguments
- COM812, COM819: rules derived from flake8-commas
- D100, D101, D200, D204, D205, D400, D415: rules derived from pydocstyle
- EM101, EM102: rules derived from flake8-errmsg
- ERA001: rules derived from eradicate
- FA100: rules derived from flake8-future-annotations
- FBT001, FBT002: rules derived from flake8-boolean-trap
- FIX001, FIX002: rules derived from flake8-fixme
- I001: unsorted-imports derived from isort
- PGH004: blanket-noqa derived from pygrep-hooks
- PLR0913, PLW2901: rules derived from Pylint
- PT006, PT007, PT011, PT012, PT014, PT030: rules derived from
  flake8-pytest-style
- PTH100, PTH118, PTH120, PTH123: rules derived from flake8-use-pathlib
- PYI024: collections-named-tuple derived from flake8-pyi
- Q000: bad-quotes-inline-string from flake8-quotes
- RUF012, RUF015, RUF022: builtin rules
- S603: subprocess-without-shell-equals-true derived from flake8-bandit
- SIM115, SIM117: rules derived from flake8-simplify
- SLF001: private-member-access derived from flake8-self
- TD001, TD002, TD003, TD004, TD005: rules derived from flake8-todos
- TID252: relative-imports rules derived from flake8-tidy-imports
- TRY003: raise-vanilla-args derived from tryceratops
- UP045: non-pep604-annotation-optional derived from pyupgrade
---
 tests/requirements.d/type-check.txt 
| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/requirements.d/type-check.txt b/tests/requirements.d/type-check.txt
index 92a0c08b..acd70680 100644
--- a/tests/requirements.d/type-check.txt
+++ b/tests/requirements.d/type-check.txt
@@ -7,4 +7,4 @@ types-jmespath
 types-jsonschema
 types-simplejson
 types-toml
-typing-extensions; python_version < "3.10"
+typing-extensions; python_version < "3.12"

From 53a090e5a9da8c6a7a31b92d149b4ef8c3b1ca02 Mon Sep 17 00:00:00 2001
From: Satoru Sato
Date: Thu, 12 Feb 2026 16:49:42 +0900
Subject: [PATCH 23/27] fix: fix several ruff errors in anyconfig.schema.*

fix or add workarounds for several ruff errors like the following in
anyconfig.schema.*.

- ANN003: missing-type-kwargs
- ANN401: any-type
- COM812: missing-trailing-comma
- I001: unsorted-imports
- TID252: relative-imports rules
- UP045: non-pep604-annotation-optional
---
 src/anyconfig/schema/__init__.py             |  8 +++----
 src/anyconfig/schema/datatypes.py            |  4 ++--
 src/anyconfig/schema/default.py              | 10 ++++----
 src/anyconfig/schema/jsonschema/generator.py | 24 ++++++++++----------
 src/anyconfig/schema/jsonschema/validator.py | 16 +++++++------
 5 files changed, 32 insertions(+), 30 deletions(-)

diff --git a/src/anyconfig/schema/__init__.py b/src/anyconfig/schema/__init__.py
index 840f7910..dca6434b 100644
--- a/src/anyconfig/schema/__init__.py
+++ b/src/anyconfig/schema/__init__.py
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2021 - 2024 Satoru SATOH
+# Copyright (C) 2021 - 2026 Satoru SATOH
 # SPDX-License-Identifier: MIT
 #
 """Schema generation and validation."""
@@ -11,7 +11,7 @@
     from .jsonschema.validator import validate, is_valid

     VALIDATORS = {
-        "jsonschema": validate
+        "jsonschema": validate,
     }
     SUPPORTED: bool = True
 except ImportError:
@@ -21,10 +21,10 @@


 GENERATORS = {
-    "jsonschema": gen_schema
+    "jsonschema": gen_schema,
 }

 _all__ = [
     "validate", "is_valid", "gen_schema",
-    "VALIDATORS", "GENERATORS", "SUPPORTED"
+    "VALIDATORS", "GENERATORS", "SUPPORTED",
 ]

diff --git 
a/src/anyconfig/schema/datatypes.py b/src/anyconfig/schema/datatypes.py index 392ae1c4..c51ab65c 100644 --- a/src/anyconfig/schema/datatypes.py +++ b/src/anyconfig/schema/datatypes.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-import @@ -9,7 +9,7 @@ import typing from ..common import ( # noqa: F401 - ValidationError, InDataT, InDataExT + ValidationError, InDataT, InDataExT, ) diff --git a/src/anyconfig/schema/default.py b/src/anyconfig/schema/default.py index 8bd476c0..10bfffe9 100644 --- a/src/anyconfig/schema/default.py +++ b/src/anyconfig/schema/default.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2015 - 2024 Satoru SATOH +# Copyright (C) 2015 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-argument @@ -12,14 +12,14 @@ if typing.TYPE_CHECKING: from .datatypes import ( - InDataT, InDataExT, ResultT + InDataT, InDataExT, ResultT, ) def validate( data: InDataExT, schema: InDataExT, *, ac_schema_safe: bool = True, ac_schema_errors: bool = False, - **options: typing.Any + **options: typing.Any, ) -> ResultT: """Provide a dummy function does not validate at all in actual.""" return (True, "Validation module (jsonschema) is not available") @@ -28,12 +28,12 @@ def validate( def is_valid( data: InDataExT, schema: InDataExT, *, ac_schema_safe: bool = True, ac_schema_errors: bool = False, - **options + **options: typing.Any, ) -> bool: """Provide a dummy function never raise exceptions.""" return True -def gen_schema(data: InDataExT, **options) -> InDataT: +def gen_schema(data: InDataExT, **options: typing.Any) -> InDataT: """Provide a dummy function generates an empty dict in actual.""" return generator.gen_schema(data, **options) diff --git a/src/anyconfig/schema/jsonschema/generator.py b/src/anyconfig/schema/jsonschema/generator.py index 990662e5..cafd348d 100644 --- a/src/anyconfig/schema/jsonschema/generator.py +++ 
b/src/anyconfig/schema/jsonschema/generator.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2015 - 2024 Satoru SATOH +# Copyright (C) 2015 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """JSON schema generator.""" @@ -12,7 +12,7 @@ if typing.TYPE_CHECKING: import collections.abc from ..datatypes import ( - InDataExT, InDataT + InDataExT, InDataT, ) @@ -28,7 +28,7 @@ def _process_options( - **options + **options: typing.Any, ) -> tuple[dict[typing.Any, typing.Any], bool]: """Help to process keyword arguments passed to gen_schema. @@ -36,15 +36,15 @@ def _process_options( """ return ( options.get("ac_schema_typemap", _TYPE_MAP), - bool(options.get("ac_schema_strict", False)) + bool(options.get("ac_schema_strict", False)), ) def array_to_schema( iarr: collections.abc.Iterable[InDataExT], *, - ac_schema_typemap: typing.Optional[dict[type, str]] = None, + ac_schema_typemap: dict[type, str] | None = None, ac_schema_strict: bool = False, - **options + **options: typing.Any, ) -> InDataT: """Generate a JSON schema object with type annotation added for ``iaa```. @@ -63,8 +63,8 @@ def array_to_schema( "items": gen_schema( arr[0] if arr else "str", ac_schema_strict=ac_schema_strict, - **options - ) + **options, + ), } if ac_schema_strict: nitems = len(arr) @@ -76,9 +76,9 @@ def array_to_schema( def object_to_schema( obj: InDataT, *, - ac_schema_typemap: typing.Optional[dict[type, str]] = None, + ac_schema_typemap: dict[type, str] | None = None, ac_schema_strict: bool = False, - **options + **options: typing.Any, ) -> InDataT: """Generate a node represents JSON schema object for ``obj``. @@ -99,7 +99,7 @@ def object_to_schema( v, ac_schema_typemap=ac_schema_typemap, ac_schema_strict=ac_schema_strict, - **options + **options, ) for k, v in obj.items() } @@ -111,7 +111,7 @@ def object_to_schema( def gen_schema( - data: InDataExT, **options + data: InDataExT, **options: typing.Any, ) -> InDataT: """Generate a JSON schema object validates ``data``. 
diff --git a/src/anyconfig/schema/jsonschema/validator.py b/src/anyconfig/schema/jsonschema/validator.py index c251ce81..1032b283 100644 --- a/src/anyconfig/schema/jsonschema/validator.py +++ b/src/anyconfig/schema/jsonschema/validator.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2015 - 2024 Satoru SATOH +# Copyright (C) 2015 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """JSON schema validator.""" @@ -20,7 +20,7 @@ from typing_extensions import TypeGuard from .datatypes import ( - InDataExT, ResultT + InDataExT, ResultT, ) @@ -29,7 +29,9 @@ def is_valid_schema_object(maybe_scm: InDataExT) -> TypeGuard[InDataT]: return maybe_scm and utils.is_dict_like(maybe_scm) -def _validate_all(data: InDataExT, schema: InDataT, **_options) -> ResultT: +def _validate_all( + data: InDataExT, schema: InDataT, **_options: typing.Any, +) -> ResultT: """Do all of the validation checks. See the description of :func:`validate` for more details of parameters and @@ -47,7 +49,7 @@ def _validate_all(data: InDataExT, schema: InDataT, **_options) -> ResultT: def _validate( data: InDataExT, schema: InDataT, *, ac_schema_safe: bool = True, - **options: typing.Any + **options: typing.Any, ) -> ResultT: """Validate ``data`` with ``schema``. @@ -71,7 +73,7 @@ def validate( data: InDataExT, schema: InDataExT, *, ac_schema_safe: bool = True, ac_schema_errors: bool = False, - **options + **options: typing.Any, ) -> ResultT: """Validate target object with given schema object. 
@@ -106,7 +108,7 @@ def is_valid(
     data: InDataExT, schema: InDataExT, *,
     ac_schema_safe: bool = True,
     ac_schema_errors: bool = False,
-    **options
+    **options: typing.Any,
 ) -> bool:
     """Raise ValidationError if ``data`` was invalidated by schema `schema`."""
     if not is_valid_schema_object(schema):
@@ -114,7 +116,7 @@ def is_valid(
     (_success, error_or_errors) = validate(
         data, schema, ac_schema_safe=True,
-        ac_schema_errors=ac_schema_errors, **options
+        ac_schema_errors=ac_schema_errors, **options,
     )
     if error_or_errors:
         msg = f"scm={schema!s}, err={error_or_errors!s}"

From 1a8a83250b8043a2cb8ade3cc06b260c16231352 Mon Sep 17 00:00:00 2001
From: Satoru Sato
Date: Thu, 12 Feb 2026 16:55:59 +0900
Subject: [PATCH 24/27] fix: fix several ruff errors in anyconfig.*, all the rest

fix or add workarounds for several ruff errors like the following in
anyconfig.* (all the rest).

- ANN002: missing-type-args
- ANN003: missing-type-kwargs
- ANN401: any-type
- COM812: missing-trailing-comma
- I001: unsorted-imports
- RUF022: unsorted-dunder-all
- TID252: relative-imports rules
- UP045: non-pep604-annotation-optional
---
 src/anyconfig/models/processor.py  |  4 +--
 src/anyconfig/query/default.py     |  4 +--
 src/anyconfig/query/query.py       | 19 ++++++++------
 src/anyconfig/template/__init__.py | 10 +++----
 src/anyconfig/template/jinja2.py   | 42 ++++++++++++++--------------
 src/anyconfig/utils/__init__.py    |  8 +++---
 src/anyconfig/utils/lists.py       |  6 ++---
 src/anyconfig/utils/utils.py       |  8 +++---
 8 files changed, 54 insertions(+), 47 deletions(-)

diff --git a/src/anyconfig/models/processor.py b/src/anyconfig/models/processor.py
index 36025b2e..f6970483 100644
--- a/src/anyconfig/models/processor.py
+++ b/src/anyconfig/models/processor.py
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2018 - 2024 Satoru SATOH
+# Copyright (C) 2018 - 2026 Satoru SATOH
 # SPDX-License-Identifier: MIT
 #
 """Abstract processor module.
@@ -57,7 +57,7 @@ def extensions(cls) -> tuple[str, ...]: @classmethod def __eq__( - cls, other: builtins.type[Processor] # type: ignore[override] + cls, other: builtins.type[Processor], # type: ignore[override] ) -> bool: """Test equality.""" return cls.cid() == other.cid() diff --git a/src/anyconfig/query/default.py b/src/anyconfig/query/default.py index 3b2f41ee..1f9593dd 100644 --- a/src/anyconfig/query/default.py +++ b/src/anyconfig/query/default.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=unused-argument @@ -14,7 +14,7 @@ def try_query( - data: InDataExT, jexp: MaybeJexp = None, **options + data: InDataExT, jexp: MaybeJexp = None, **options: typing.Any, ) -> InDataExT: """Provide a dummy implementation of :func:`anyconfig.query.try_query`.""" return data diff --git a/src/anyconfig/query/query.py b/src/anyconfig/query/query.py index 8407576a..db694ffe 100644 --- a/src/anyconfig/query/query.py +++ b/src/anyconfig/query/query.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2017 - 2024 Satoru SATOH +# Copyright (C) 2017 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=bare-except @@ -23,11 +23,14 @@ from .datatypes import MaybeJexp from ..common import ( - InDataExT, InDataT + InDataExT, InDataT, ) -def try_query(data: InDataExT, jexp: MaybeJexp = None, **options) -> InDataExT: +def try_query( + data: InDataExT, jexp: MaybeJexp = None, + **options: typing.Any, +) -> InDataExT: """Try to query data with JMESPath expression `jexp`.""" if jexp is None or not jexp: return data @@ -36,12 +39,12 @@ def try_query(data: InDataExT, jexp: MaybeJexp = None, **options) -> InDataExT: warnings.warn( "Could not query because given data is not " f"a mapping object (type? 
{type(data)}", - stacklevel=2 + stacklevel=2, ) return data (odata, exc) = query( - typing.cast("InDataT", data), typing.cast("str", jexp), **options + typing.cast("InDataT", data), typing.cast("str", jexp), **options, ) if exc: raise exc @@ -50,8 +53,8 @@ def try_query(data: InDataExT, jexp: MaybeJexp = None, **options) -> InDataExT: def query( - data: InDataT, jexp: str, **_options -) -> tuple[typing.Optional[InDataT], typing.Optional[Exception]]: + data: InDataT, jexp: str, **_options: typing.Any, +) -> tuple[InDataT | None, Exception | None]: """Filter data with given JMESPath expression. See also: https://github.com/jmespath/jmespath.py and http://jmespath.org. @@ -62,7 +65,7 @@ def query( :return: A tuple of query result and maybe exception if failed """ - exc: typing.Optional[Exception] = None + exc: Exception | None = None try: pexp = jmespath.compile(jexp) return (pexp.search(data), exc) diff --git a/src/anyconfig/template/__init__.py b/src/anyconfig/template/__init__.py index 0b0a73cb..cace10f9 100644 --- a/src/anyconfig/template/__init__.py +++ b/src/anyconfig/template/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc global constants, variables, classes and so on.""" @@ -14,10 +14,10 @@ SUPPORTED = False def try_render( - filepath: typing.Optional[str] = None, - content: typing.Optional[str] = None, - **_options - ) -> typing.Optional[str]: + filepath: str | None = None, + content: str | None = None, + **_options: typing.Any, + ) -> str | None: """Provide a dummy function does nothing but returns None.""" if filepath and content: return None diff --git a/src/anyconfig/template/jinja2.py b/src/anyconfig/template/jinja2.py index 8438026c..79a7b01b 100644 --- a/src/anyconfig/template/jinja2.py +++ b/src/anyconfig/template/jinja2.py @@ -1,7 +1,7 @@ # # Jinja2 (http://jinja.pocoo.org) based template renderer. 
# -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # # pylint: disable=wrong-import-position,wrong-import-order @@ -34,13 +34,13 @@ RENDER_S_OPTS: tuple[str, ...] = ( "ctx", "paths", "filters", - "autoescape" + "autoescape", ) RENDER_OPTS = (*RENDER_S_OPTS, "ask") def tmpl_env( - paths: MaybePathsT = None, *, autoescape: bool = True + paths: MaybePathsT = None, *, autoescape: bool = True, ) -> jinja2.Environment: """Get the template environment object from given ``paths``. @@ -51,12 +51,12 @@ def tmpl_env( return jinja2.Environment( loader=jinja2.FileSystemLoader([str(p) for p in paths]), - autoescape=autoescape # noqa: S701 + autoescape=autoescape, # noqa: S701 ) def make_template_paths( - template_file: pathlib.Path, paths: MaybePathsT = None + template_file: pathlib.Path, paths: MaybePathsT = None, ) -> list[pathlib.Path]: """Make a template paths. @@ -77,7 +77,7 @@ def make_template_paths( def render_s( tmpl_s: str, ctx: MaybeContextT = None, paths: MaybePathsT = None, - filters: MaybeFiltersT = None, *, autoescape: bool = True + filters: MaybeFiltersT = None, *, autoescape: bool = True, ) -> str: """Render a template as a str. @@ -103,7 +103,7 @@ def render_s( except AssertionError as exc: warnings.warn( f"Something went wrong with: paths={paths!r}, exc={exc!s}", - stacklevel=2 + stacklevel=2, ) return tmpl_s @@ -115,7 +115,7 @@ def render_s( return typing.cast( "jinja2.Environment", - tmpl_env(paths, autoescape=autoescape) + tmpl_env(paths, autoescape=autoescape), ).from_string(tmpl_s).render(**ctx) @@ -126,7 +126,7 @@ def render_impl( template_file: pathlib.Path, ctx: MaybeContextT = None, paths: MaybePathsT = None, filters: MaybeFiltersT = None, *, - autoescape: bool = True + autoescape: bool = True, ) -> str: """Render implementation. 
@@ -137,7 +137,7 @@ def render_impl( """ env = tmpl_env( make_template_paths(template_file, paths), - autoescape=autoescape + autoescape=autoescape, ) if env is None: @@ -153,10 +153,12 @@ def render_impl( return env.get_template(pathlib.Path(template_file).name).render(**ctx) -def render(filepath: str, ctx: MaybeContextT = None, - paths: MaybePathsT = None, *, - ask: bool = False, - filters: MaybeFiltersT = None) -> str: +def render( + filepath: str, ctx: MaybeContextT = None, + paths: MaybePathsT = None, *, + ask: bool = False, + filters: MaybeFiltersT = None, +) -> str: """Compile and render template and return the result as a string. :param template_file: Absolute or relative path to the template file @@ -175,7 +177,7 @@ def render(filepath: str, ctx: MaybeContextT = None, usr_tmpl = input( f"{os.linesep}*** Missing template '{mtmpl}'. Please enter " - "absolute or relative path starts from '.' to the template file: " + "absolute or relative path starts from '.' to the template file: ", ) usr_tmpl_2 = pathlib.Path(usr_tmpl.strip()).resolve() paths_2 = make_template_paths(usr_tmpl_2, paths) @@ -184,10 +186,10 @@ def render(filepath: str, ctx: MaybeContextT = None, def try_render( - filepath: typing.Optional[str] = None, - content: typing.Optional[str] = None, - **options -) -> typing.Optional[str]: + filepath: str | None = None, + content: str | None = None, + **options: typing.Any, +) -> str | None: """Compile and render template and return the result as a string. :param filepath: Absolute or relative path to the template file @@ -213,6 +215,6 @@ def try_render( f"Failed to compile '{tmpl_s!r}'. 
It may not be " f"a template.{os.linesep}, exc={exc!s}, " f"filepath={filepath}, options={options!r}", - stacklevel=2 + stacklevel=2, ) return None diff --git a/src/anyconfig/utils/__init__.py b/src/anyconfig/utils/__init__.py index 5c7d57c5..e1f4caff 100644 --- a/src/anyconfig/utils/__init__.py +++ b/src/anyconfig/utils/__init__.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2021 - 2024 Satoru SATOH +# Copyright (C) 2021 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Internal utility functions for anyconfig modules. @@ -17,14 +17,14 @@ - Add to abstract processors such like Parsers (loaders and dumpers). """ from .detectors import ( - is_primitive_type, is_iterable, is_dict_like, is_list_like + is_primitive_type, is_iterable, is_dict_like, is_list_like, ) from .files import get_path_from_stream from .lists import ( - groupby, concat + groupby, concat, ) from .utils import ( - filter_options, noop + filter_options, noop, ) diff --git a/src/anyconfig/utils/lists.py b/src/anyconfig/utils/lists.py index 3444c7c0..1185ce93 100644 --- a/src/anyconfig/utils/lists.py +++ b/src/anyconfig/utils/lists.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc utility routines for anyconfig module.""" @@ -14,7 +14,7 @@ def groupby( itr: collections.abc.Iterable[typing.Any], - key_fn: typing.Optional[collections.abc.Callable[..., typing.Any]] = None + key_fn: collections.abc.Callable[..., typing.Any] | None = None, ) -> collections.abc.Iterable[ tuple[typing.Any, collections.abc.Iterable[typing.Any]] ]: @@ -27,7 +27,7 @@ def groupby( def concat( - xss: collections.abc.Iterable[collections.abc.Iterable[typing.Any]] + xss: collections.abc.Iterable[collections.abc.Iterable[typing.Any]], ) -> list[typing.Any]: """Concatenates a list of lists.""" return list(itertools.chain.from_iterable(xs for xs in xss)) diff --git a/src/anyconfig/utils/utils.py b/src/anyconfig/utils/utils.py index 
bea016b3..852c1445 100644 --- a/src/anyconfig/utils/utils.py +++ b/src/anyconfig/utils/utils.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012 - 2024 Satoru SATOH +# Copyright (C) 2012 - 2026 Satoru SATOH # SPDX-License-Identifier: MIT # """Misc utility functions.""" @@ -11,7 +11,9 @@ import collections.abc -def noop(val: typing.Any, *_args, **_kwargs) -> typing.Any: +def noop( + val: typing.Any, *_args: typing.Any, **_kwargs: typing.Any, +) -> typing.Any: """Do nothing. >>> noop(1) @@ -22,7 +24,7 @@ def noop(val: typing.Any, *_args, **_kwargs) -> typing.Any: def filter_options( keys: collections.abc.Iterable[str], - options: collections.abc.Mapping[str, typing.Any] + options: collections.abc.Mapping[str, typing.Any], ) -> dict[str, typing.Any]: """Filter 'options' with given 'keys'. From bbddcc77fa491cc51f84b35bdf67c8b48c36a44b Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 17:01:43 +0900 Subject: [PATCH 25/27] enhancement: add lint-next target to prepare for ruff-by-default configuration --- tox.ini | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 28d31ac6..d7909651 100644 --- a/tox.ini +++ b/tox.ini @@ -20,7 +20,7 @@ python = 3.10: py310 3.11: py311 3.12: py312 - 3.13: py313, type-check, lint, plugins, min + 3.13: py313, type-check, lint, lint-next, plugins, min [flake8] exclude = .git,.tox,dist,*egg,setup.py @@ -44,6 +44,13 @@ commands = - ruff check src - ruff check src --statistics +[testenv:lint-next] +deps = + -r {toxinidir}/tests/requirements.d/lint.txt +commands = + ruff check src --output-format pylint + ruff check src --statistics + [testenv:type-check] deps = -r {toxinidir}/tests/requirements.d/type-check.txt From a8b87677232137ddf0d20cdf21201ccf00b01093 Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 17:03:10 +0900 Subject: [PATCH 26/27] fix: ruff error, TD006 --- src/anyconfig/processors/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/anyconfig/processors/utils.py b/src/anyconfig/processors/utils.py index 0f5cad46..8764e344 100644 --- a/src/anyconfig/processors/utils.py +++ b/src/anyconfig/processors/utils.py @@ -4,7 +4,7 @@ # # pylint: disable=unidiomatic-typecheck # -# todo(ssato): #189 fix the mypy error, type-var. +# TODO(ssato): #189 fix the mypy error, type-var. # mypy: disable-error-code=type-var """Utility functions for anyconfig.processors.""" from __future__ import annotations From 1ec8adcacc5ece4d4a384ac5af59255a2146cf6b Mon Sep 17 00:00:00 2001 From: Satoru Sato Date: Thu, 12 Feb 2026 17:12:13 +0900 Subject: [PATCH 27/27] fix: standardize the delimiter to '\n' across all platforms Standardize the delimiter to '\n' across all platforms including windows in properties and shellvars plugin. --- src/anyconfig/backend/properties/builtin.py | 3 +-- src/anyconfig/backend/sh/variables.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/anyconfig/backend/properties/builtin.py b/src/anyconfig/backend/properties/builtin.py index 0a718065..fa93038b 100644 --- a/src/anyconfig/backend/properties/builtin.py +++ b/src/anyconfig/backend/properties/builtin.py @@ -30,7 +30,6 @@ """ from __future__ import annotations -import os import re import typing import warnings @@ -176,6 +175,6 @@ def dump_to_stream( """ if utils.is_dict_like(cnf): stream.writelines( - f"{key} = {escape(val)}{os.linesep}" + f"{key} = {escape(val)}\n" for key, val in cnf.items() ) diff --git a/src/anyconfig/backend/sh/variables.py b/src/anyconfig/backend/sh/variables.py index 7bb98ff0..bff38558 100644 --- a/src/anyconfig/backend/sh/variables.py +++ b/src/anyconfig/backend/sh/variables.py @@ -21,7 +21,6 @@ from __future__ import annotations import itertools -import os import re import typing import warnings @@ -123,6 +122,6 @@ def dump_to_stream( """ if utils.is_dict_like(cnf): stream.writelines( - f"{key}='{val}'{os.linesep}" + f"{key}='{val}'\n" for key, val in cnf.items() )