From e97fd76ff303975ed3491e05f9600cffa00f6e2d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Wed, 1 Dec 2021 20:07:30 +0000
Subject: [PATCH 01/55] Rename `config` to `config.setupcfg`

This will facilitate the implementation of other configuration formats
(such as pyproject.toml as initially defined by PEP 621)
---
 setuptools/config/__init__.py                | 13 +++++++++++++
 setuptools/{config.py => config/setupcfg.py} |  0
 2 files changed, 13 insertions(+)
 create mode 100644 setuptools/config/__init__.py
 rename setuptools/{config.py => config/setupcfg.py} (100%)

diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py
new file mode 100644
index 0000000000..7a4febf9c2
--- /dev/null
+++ b/setuptools/config/__init__.py
@@ -0,0 +1,13 @@
+# For backward compatibility, the following classes/functions are exposed
+# from `config.setupcfg`
+from setuptools.config.setupcfg import (
+    ConfigHandler,
+    parse_configuration,
+    read_configuration,
+)
+
+__all__ = [
+    'ConfigHandler',
+    'parse_configuration',
+    'read_configuration'
+]
diff --git a/setuptools/config.py b/setuptools/config/setupcfg.py
similarity index 100%
rename from setuptools/config.py
rename to setuptools/config/setupcfg.py

From e4b1f1fb74e3db278dbaa3602f093e569afb1bd0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Thu, 2 Dec 2021 13:44:45 +0000
Subject: [PATCH 02/55] Extract post-processing functions from config

We can split the process of interpreting configuration files into 2 steps:

1. Parsing the file contents from strings into value objects that can be
   understood by Python (for example turning a string with a comma-separated
   list of keywords into an actual Python list of strings).

2. The expansion (or post-processing) of these values according to the
   semantics ``setuptools`` assigns to them (for example a configuration
   field with the ``file:`` directive should be expanded from a list of file
   paths to a single string with the contents of those files concatenated).

The idea of this change is to extract the functions responsible for (2.)
into a new module, so they can be reused between different config file
formats.
---
 setuptools/config/expand.py            | 205 +++++++++++++++++++++++++
 setuptools/config/setupcfg.py          | 131 ++--------------
 setuptools/tests/config/test_expand.py |  83 ++++++++++
 3 files changed, 297 insertions(+), 122 deletions(-)
 create mode 100644 setuptools/config/expand.py
 create mode 100644 setuptools/tests/config/test_expand.py

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
new file mode 100644
index 0000000000..04e94a3ca0
--- /dev/null
+++ b/setuptools/config/expand.py
@@ -0,0 +1,205 @@
+"""Utility functions to expand configuration directives or special values
+(such as glob patterns).
+
+We can split the process of interpreting configuration files into 2 steps:
+
+1. Parsing the file contents from strings into value objects that can be
+   understood by Python (for example turning a string with a comma-separated
+   list of keywords into an actual Python list of strings).
+
+2. The expansion (or post-processing) of these values according to the
+   semantics ``setuptools`` assigns to them (for example a configuration field
+   with the ``file:`` directive should be expanded from a list of file paths to
+   a single string with the contents of those files concatenated).
+
+This module focuses on the second step, and therefore allows sharing the
+expansion functions among several configuration file formats.
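+
+A rough sketch of the two steps, with hypothetical ``setup.cfg`` values::
+
+    keywords = hello, world        # (1.) parsed into ["hello", "world"]
+    version = file: VERSION.txt    # (2.) expanded into the file contents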
+""" +import ast +import contextlib +import importlib +import io +import os +import sys +from glob import iglob +from itertools import chain + +from distutils.errors import DistutilsOptionError + +chain_iter = chain.from_iterable + + +class StaticModule: + """ + Attempt to load the module by the name + """ + + def __init__(self, name): + spec = importlib.util.find_spec(name) + if spec is None: + raise ModuleNotFoundError(name) + with open(spec.origin) as strm: + src = strm.read() + module = ast.parse(src) + vars(self).update(locals()) + del self.self + + def __getattr__(self, attr): + try: + return next( + ast.literal_eval(statement.value) + for statement in self.module.body + if isinstance(statement, ast.Assign) + for target in statement.targets + if isinstance(target, ast.Name) and target.id == attr + ) + except Exception as e: + raise AttributeError( + "{self.name} has no attribute {attr}".format(**locals()) + ) from e + + +@contextlib.contextmanager +def patch_path(path): + """ + Add path to front of sys.path for the duration of the context. + """ + try: + sys.path.insert(0, path) + yield + finally: + sys.path.remove(path) + + +def glob_relative(patterns): + """Expand the list of glob patterns, but preserving relative paths. + + :param list[str] patterns: List of glob patterns + :rtype: list + """ + glob_characters = ('*', '?', '[', ']', '{', '}') + expanded_values = [] + root_dir = os.getcwd() + for value in patterns: + + # Has globby characters? + if any(char in value for char in glob_characters): + # then expand the glob pattern while keeping paths *relative*: + expanded_values.extend(sorted( + os.path.relpath(path, root_dir) + for path in iglob(os.path.abspath(value), recursive=True))) + + else: + # take the value as-is: + expanded_values.append(value) + + return expanded_values + + +def read_files(filepaths): + """Return the content of the files concatenated using ``\n`` as str + + This function is sandboxed and won't reach anything outside the directory + with ``setup.py``. + """ + root_dir = os.getcwd() + return '\n'.join( + _read_file(path) + for path in filepaths + if _assert_local(path, root_dir) and os.path.isfile(path) + ) + + +def _read_file(filepath): + with io.open(filepath, encoding='utf-8') as f: + return f.read() + + +def _assert_local(filepath, root_dir): + if not os.path.abspath(filepath).startswith(os.path.abspath(root_dir)): + raise DistutilsOptionError(f'Cannot access {filepath!r}') + + return True + + +def read_attr(attr_desc, package_dir=None): + """Reads the value of an attribute from a module. + + This function will try to read the attributed statically first + (via :func:`ast.literal_eval`), and only evaluate the module if it fails. + + Examples: + read_attr("package.attr") + read_attr("package.module.attr") + + :param str attr_desc: Dot-separated string describing how to reach the + attribute (see examples above) + :param dict[str, str] package_dir: Mapping of package names to their + location in disk. 
+    :rtype: str
+    """
+    root_dir = os.getcwd()
+    attrs_path = attr_desc.strip().split('.')
+    attr_name = attrs_path.pop()
+
+    module_name = '.'.join(attrs_path)
+    module_name = module_name or '__init__'
+
+    parent_path = root_dir
+    if package_dir:
+        if attrs_path[0] in package_dir:
+            # A custom path was specified for the module we want to import
+            custom_path = package_dir[attrs_path[0]]
+            parts = custom_path.rsplit('/', 1)
+            if len(parts) > 1:
+                parent_path = os.path.join(root_dir, parts[0])
+                parent_module = parts[1]
+            else:
+                parent_module = custom_path
+            module_name = ".".join([parent_module, *attrs_path[1:]])
+        elif '' in package_dir:
+            # A custom parent directory was specified for all root modules
+            parent_path = os.path.join(root_dir, package_dir[''])
+
+    with patch_path(parent_path):
+        try:
+            # attempt to load value statically
+            return getattr(StaticModule(module_name), attr_name)
+        except Exception:
+            # fallback to simple import
+            module = importlib.import_module(module_name)
+
+            return getattr(module, attr_name)
+
+
+def resolve_class(qualified_class_name):
+    """Given a qualified class name, return the associated class object"""
+    idx = qualified_class_name.rfind('.')
+    class_name = qualified_class_name[idx + 1 :]
+    pkg_name = qualified_class_name[:idx]
+    module = importlib.import_module(pkg_name)
+    return getattr(module, class_name)
+
+
+def find_packages(namespaces=False, **kwargs):
+    """Works similarly to :func:`setuptools.find_packages`, but with all
+    arguments given as keyword arguments. Moreover, ``where`` can be given
+    as a list (the results will be simply concatenated).
+
+    When the additional keyword argument ``namespaces`` is ``True``, it will
+    behave like :func:`setuptools.find_namespace_packages` (i.e. include
+    implicit namespaces as per :pep:`420`).
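+
+    A hypothetical usage sketch (directory names are illustrative)::
+
+        find_packages(where=["src", "plugins"], namespaces=True)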
+ + :rtype: list + """ + + if namespaces: + from setuptools import PEP420PackageFinder as PackageFinder + else: + from setuptools import PackageFinder + + where = kwargs.pop('where', ['.']) + if isinstance(where, str): + where = [where] + + return list(chain_iter(PackageFinder.find(x, **kwargs) for x in where)) diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index b4e968e5ca..648e68c241 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -1,60 +1,16 @@ -import ast -import io +"""Load setuptools configuration from ``setup.cfg`` files""" import os -import sys import warnings import functools -import importlib from collections import defaultdict from functools import partial from functools import wraps -from glob import iglob -import contextlib from distutils.errors import DistutilsOptionError, DistutilsFileError from setuptools.extern.packaging.version import Version, InvalidVersion from setuptools.extern.packaging.specifiers import SpecifierSet - - -class StaticModule: - """ - Attempt to load the module by the name - """ - - def __init__(self, name): - spec = importlib.util.find_spec(name) - with open(spec.origin) as strm: - src = strm.read() - module = ast.parse(src) - vars(self).update(locals()) - del self.self - - def __getattr__(self, attr): - try: - return next( - ast.literal_eval(statement.value) - for statement in self.module.body - if isinstance(statement, ast.Assign) - for target in statement.targets - if isinstance(target, ast.Name) and target.id == attr - ) - except Exception as e: - raise AttributeError( - "{self.name} has no attribute {attr}".format(**locals()) - ) from e - - -@contextlib.contextmanager -def patch_path(path): - """ - Add path to front of sys.path for the duration of the context. - """ - try: - sys.path.insert(0, path) - yield - finally: - sys.path.remove(path) +from setuptools.config import expand def read_configuration(filepath, find_others=False, ignore_option_errors=False): @@ -267,23 +223,7 @@ def _parse_list_glob(cls, value, separator=','): :param separator: List items separator character. :rtype: list """ - glob_characters = ('*', '?', '[', ']', '{', '}') - values = cls._parse_list(value, separator=separator) - expanded_values = [] - for value in values: - - # Has globby characters? 
- if any(char in value for char in glob_characters): - # then expand the glob pattern while keeping paths *relative*: - expanded_values.extend(sorted( - os.path.relpath(path, os.getcwd()) - for path in iglob(os.path.abspath(value)))) - - else: - # take the value as-is: - expanded_values.append(value) - - return expanded_values + return expand.glob_relative(cls._parse_list(value, separator=separator)) @classmethod def _parse_dict(cls, value): @@ -361,21 +301,7 @@ def _parse_file(cls, value): spec = value[len(include_directive) :] filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) - return '\n'.join( - cls._read_file(path) - for path in filepaths - if (cls._assert_local(path) or True) and os.path.isfile(path) - ) - - @staticmethod - def _assert_local(filepath): - if not filepath.startswith(os.getcwd()): - raise DistutilsOptionError('`file:` directive can not access %s' % filepath) - - @staticmethod - def _read_file(filepath): - with io.open(filepath, encoding='utf-8') as f: - return f.read() + return expand.read_files(filepaths) @classmethod def _parse_attr(cls, value, package_dir=None): @@ -392,36 +318,8 @@ def _parse_attr(cls, value, package_dir=None): if not value.startswith(attr_directive): return value - attrs_path = value.replace(attr_directive, '').strip().split('.') - attr_name = attrs_path.pop() - - module_name = '.'.join(attrs_path) - module_name = module_name or '__init__' - - parent_path = os.getcwd() - if package_dir: - if attrs_path[0] in package_dir: - # A custom path was specified for the module we want to import - custom_path = package_dir[attrs_path[0]] - parts = custom_path.rsplit('/', 1) - if len(parts) > 1: - parent_path = os.path.join(os.getcwd(), parts[0]) - module_name = parts[1] - else: - module_name = custom_path - elif '' in package_dir: - # A custom parent directory was specified for all root modules - parent_path = os.path.join(os.getcwd(), package_dir['']) - - with patch_path(parent_path): - try: - # attempt to load value statically - return getattr(StaticModule(module_name), attr_name) - except Exception: - # fallback to simple import - module = importlib.import_module(module_name) - - return getattr(module, attr_name) + attr_desc = value.replace(attr_directive, '') + return expand.read_attr(attr_desc, package_dir) @classmethod def _get_parser_compound(cls, *parse_methods): @@ -642,16 +540,7 @@ def parsers(self): } def _parse_cmdclass(self, value): - def resolve_class(qualified_class_name): - idx = qualified_class_name.rfind('.') - class_name = qualified_class_name[idx + 1 :] - pkg_name = qualified_class_name[:idx] - - module = __import__(pkg_name) - - return getattr(module, class_name) - - return {k: resolve_class(v) for k, v in self._parse_dict(value).items()} + return {k: expand.resolve_class(v) for k, v in self._parse_dict(value).items()} def _parse_packages(self, value): """Parses `packages` option value. @@ -673,11 +562,9 @@ def _parse_packages(self, value): ) if findns: - from setuptools import find_namespace_packages as find_packages - else: - from setuptools import find_packages + find_kwargs["namespaces"] = True - return find_packages(**find_kwargs) + return expand.find_packages(**find_kwargs) def parse_section_packages__find(self, section_options): """Parses `packages.find` configuration file section. 
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py new file mode 100644 index 0000000000..84ab175387 --- /dev/null +++ b/setuptools/tests/config/test_expand.py @@ -0,0 +1,83 @@ +import pytest + +from distutils.errors import DistutilsOptionError +from setuptools.config import expand +from setuptools.sandbox import pushd + + +def write_files(files, root_dir): + for file, content in files.items(): + path = root_dir / file + path.parent.mkdir(exist_ok=True, parents=True) + path.write_text(content) + + +def test_glob_relative(tmp_path): + files = { + "dir1/dir2/dir3/file1.txt", + "dir1/dir2/file2.txt", + "dir1/file3.txt", + "a.ini", + "b.ini", + "dir1/c.ini", + "dir1/dir2/a.ini", + } + + write_files({k: "" for k in files}, tmp_path) + patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"] + with pushd(tmp_path): + assert set(expand.glob_relative(patterns)) == files + + +def test_read_files(tmp_path): + files = { + "a.txt": "a", + "dir1/b.txt": "b", + "dir1/dir2/c.txt": "c" + } + write_files(files, tmp_path) + with pushd(tmp_path): + assert expand.read_files(list(files)) == "a\nb\nc" + + with pushd(tmp_path / "dir1"), pytest.raises(DistutilsOptionError): + expand.read_files(["../a.txt"]) + + +def test_read_attr(tmp_path): + files = { + "pkg/__init__.py": "", + "pkg/sub/__init__.py": "VERSION = '0.1.1'", + "pkg/sub/mod.py": ( + "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n" + "raise SystemExit(1)" + ), + } + write_files(files, tmp_path) + # Make sure it can read the attr statically without evaluating the module + with pushd(tmp_path): + assert expand.read_attr('pkg.sub.VERSION') == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}) + assert values['a'] == 0 + assert values['b'] == {42} + assert values['c'] == (0, 1, 1) + + +def test_resolve_class(): + from distutils.command import sdist + assert expand.resolve_class('distutils.command.sdist') == sdist + + +def test_find_packages(tmp_path): + files = { + "pkg/__init__.py", + "other/__init__.py", + "dir1/dir2/__init__.py", + } + + write_files({k: "" for k in files}, tmp_path) + with pushd(tmp_path): + assert set(expand.find_packages(where=['.'])) == {"pkg", "other"} + expected = {"pkg", "other", "dir2"} + assert set(expand.find_packages(where=['.', "dir1"])) == expected + expected = {"pkg", "other", "dir1", "dir1.dir2"} + assert set(expand.find_packages(namespaces="True")) == expected From 55c0459af9afbc622395593b36a25d04bb363eae Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 2 Dec 2021 14:11:53 +0000 Subject: [PATCH 03/55] Allow root_dir to be explicit in config.expand functions --- setuptools/config/expand.py | 38 ++++++++++++++++---------- setuptools/config/setupcfg.py | 2 +- setuptools/tests/config/test_expand.py | 23 ++++++++++++++++ 3 files changed, 47 insertions(+), 16 deletions(-) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 04e94a3ca0..7b4c077537 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -71,41 +71,46 @@ def patch_path(path): sys.path.remove(path) -def glob_relative(patterns): +def glob_relative(patterns, root_dir=None): """Expand the list of glob patterns, but preserving relative paths. 
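+    (e.g. a hypothetical pattern list ``["**/*.txt"]`` could expand to
+    ``["dir1/a.txt", "dir2/b.txt"]`` while keeping the paths relative)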
     :param list[str] patterns: List of glob patterns
+    :param str root_dir: Path to which globs should be relative
+        (current directory by default)
     :rtype: list
     """
     glob_characters = ('*', '?', '[', ']', '{', '}')
     expanded_values = []
-    root_dir = os.getcwd()
+    root_dir = root_dir or os.getcwd()
     for value in patterns:
 
         # Has globby characters?
         if any(char in value for char in glob_characters):
             # then expand the glob pattern while keeping paths *relative*:
+            glob_path = os.path.abspath(os.path.join(root_dir, value))
             expanded_values.extend(sorted(
                 os.path.relpath(path, root_dir)
-                for path in iglob(os.path.abspath(value), recursive=True)))
+                for path in iglob(glob_path, recursive=True)))
 
         else:
-            # take the value as-is:
-            expanded_values.append(value)
+            # take the value as-is
+            expanded_values.append(os.path.relpath(value, root_dir))
 
     return expanded_values
 
 
-def read_files(filepaths):
+def read_files(filepaths, root_dir=None):
     """Return the content of the files concatenated using ``\n`` as str
 
-    This function is sandboxed and won't reach anything outside the directory
-    with ``setup.py``.
+    This function is sandboxed and won't reach anything outside ``root_dir``
+
+    (By default ``root_dir`` is the current directory).
     """
-    root_dir = os.getcwd()
+    root_dir = os.path.abspath(root_dir or os.getcwd())
+    _filepaths = (os.path.join(root_dir, path) for path in filepaths)
     return '\n'.join(
         _read_file(path)
-        for path in filepaths
+        for path in _filepaths
         if _assert_local(path, root_dir) and os.path.isfile(path)
     )
 
@@ -116,13 +121,14 @@ def _read_file(filepath):
 
 
 def _assert_local(filepath, root_dir):
-    if not os.path.abspath(filepath).startswith(os.path.abspath(root_dir)):
-        raise DistutilsOptionError(f'Cannot access {filepath!r}')
+    if not os.path.abspath(filepath).startswith(root_dir):
+        msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
+        raise DistutilsOptionError(msg)
 
     return True
 
 
-def read_attr(attr_desc, package_dir=None):
+def read_attr(attr_desc, package_dir=None, root_dir=None):
     """Reads the value of an attribute from a module.
 
     This function will try to read the attribute statically first
@@ -135,10 +141,12 @@ def read_attr(attr_desc, package_dir=None):
         attribute (see examples above)
     :param dict[str, str] package_dir: Mapping of package names to their
-        location on disk.
+        location on disk (represented by paths relative to ``root_dir``).
+    :param str root_dir: Path to directory containing all the packages in
+        ``package_dir`` (current directory by default).
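+        A hypothetical src-layout call could look like
+        ``read_attr("pkg.VERSION", {"pkg": "src/pkg"}, "/path/to/project")``.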
:rtype: str """ - root_dir = os.getcwd() + root_dir = root_dir or os.getcwd() attrs_path = attr_desc.strip().split('.') attr_name = attrs_path.pop() diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index 648e68c241..f0dcb91bb1 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -300,7 +300,7 @@ def _parse_file(cls, value): return value spec = value[len(include_directive) :] - filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) + filepaths = (path.strip() for path in spec.split(',')) return expand.read_files(filepaths) @classmethod diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py index 84ab175387..72fb22b2f9 100644 --- a/setuptools/tests/config/test_expand.py +++ b/setuptools/tests/config/test_expand.py @@ -1,3 +1,5 @@ +import os + import pytest from distutils.errors import DistutilsOptionError @@ -27,6 +29,8 @@ def test_glob_relative(tmp_path): patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"] with pushd(tmp_path): assert set(expand.glob_relative(patterns)) == files + # Make sure the same APIs work outside cwd + assert set(expand.glob_relative(patterns, tmp_path)) == files def test_read_files(tmp_path): @@ -42,6 +46,11 @@ def test_read_files(tmp_path): with pushd(tmp_path / "dir1"), pytest.raises(DistutilsOptionError): expand.read_files(["../a.txt"]) + # Make sure the same APIs work outside cwd + assert expand.read_files(list(files), tmp_path) == "a\nb\nc" + with pytest.raises(DistutilsOptionError): + expand.read_files(["../a.txt"], tmp_path) + def test_read_attr(tmp_path): files = { @@ -59,6 +68,10 @@ def test_read_attr(tmp_path): values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}) assert values['a'] == 0 assert values['b'] == {42} + + # Make sure the same APIs work outside cwd + assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path) assert values['c'] == (0, 1, 1) @@ -81,3 +94,13 @@ def test_find_packages(tmp_path): assert set(expand.find_packages(where=['.', "dir1"])) == expected expected = {"pkg", "other", "dir1", "dir1.dir2"} assert set(expand.find_packages(namespaces="True")) == expected + + # Make sure the same APIs work outside cwd + path = str(tmp_path).replace(os.sep, '/') # ensure posix-style paths + dir1_path = str(tmp_path / "dir1").replace(os.sep, '/') + + assert set(expand.find_packages(where=[path])) == {"pkg", "other"} + expected = {"pkg", "other", "dir2"} + assert set(expand.find_packages(where=[path, dir1_path])) == expected + expected = {"pkg", "other", "dir1", "dir1.dir2"} + assert set(expand.find_packages(where=[path], namespaces="True")) == expected From 5fc5170e456cd231a6ab6b7b31474f8fb2cab626 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 3 Dec 2021 11:22:18 +0000 Subject: [PATCH 04/55] Allow single strings in config.expand.read_files --- setuptools/config/expand.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 7b4c077537..21d34d3a7b 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -106,6 +106,9 @@ def read_files(filepaths, root_dir=None): (By default ``root_dir`` is the current directory). 
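+
+    (``filepaths`` may also be given as a single string, e.g. a hypothetical
+    ``read_files("README.rst")``)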
""" + if isinstance(filepaths, (str, bytes)): + filepaths = [filepaths] + root_dir = os.path.abspath(root_dir or os.getcwd()) _filepaths = (os.path.join(root_dir, path) for path in filepaths) return '\n'.join( From a6a2dea1d79c22681dce56e40cdcfd6464182ce4 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 3 Dec 2021 11:22:48 +0000 Subject: [PATCH 05/55] Extract version normalisation from config to expand --- setuptools/config/expand.py | 16 ++++++++++++++++ setuptools/config/setupcfg.py | 13 +------------ 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 21d34d3a7b..6572c2b827 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -214,3 +214,19 @@ def find_packages(namespaces=False, **kwargs): where = [where] return list(chain_iter(PackageFinder.find(x, **kwargs) for x in where)) + + +def version(value): + """When getting the version directly from an attribute, + it should be normalised to string. + """ + if callable(value): + value = value() + + if not isinstance(value, str): + if hasattr(value, '__iter__'): + value = '.'.join(map(str, value)) + else: + value = '%s' % value + + return value diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index f0dcb91bb1..13585b33be 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -494,18 +494,7 @@ def _parse_version(self, value): return version - version = self._parse_attr(value, self.package_dir) - - if callable(version): - version = version() - - if not isinstance(version, str): - if hasattr(version, '__iter__'): - version = '.'.join(map(str, version)) - else: - version = '%s' % version - - return version + return expand.version(self._parse_attr(value, self.package_dir)) class ConfigOptionsHandler(ConfigHandler): From 46644420fdc6cdcd0a466cd3e851e44568ece437 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 6 Dec 2021 23:10:10 +0000 Subject: [PATCH 06/55] Extract function to normalise package_data MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … from config to expand --- setuptools/config/expand.py | 6 ++++++ setuptools/config/setupcfg.py | 10 ++-------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 6572c2b827..6de88def15 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -230,3 +230,9 @@ def version(value): value = '%s' % value return value + + +def canonic_package_data(package_data): + if "*" in package_data: + package_data[""] = package_data.pop("*") + return package_data diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index 13585b33be..6e9117e64d 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -585,14 +585,8 @@ def parse_section_entry_points(self, section_options): self['entry_points'] = parsed def _parse_package_data(self, section_options): - parsed = self._parse_section_to_dict(section_options, self._parse_list) - - root = parsed.get('*') - if root: - parsed[''] = root - del parsed['*'] - - return parsed + package_data = self._parse_section_to_dict(section_options, self._parse_list) + return expand.canonic_package_data(package_data) def parse_section_package_data(self, section_options): """Parses `package_data` configuration file section. 
From 299632a1194683540d3d6f0cbc9bcd30863deaa8 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 6 Dec 2021 23:23:48 +0000 Subject: [PATCH 07/55] Extract function to normalise data_files MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … from config to expand --- setuptools/config/expand.py | 15 +++++++++++++++ setuptools/config/setupcfg.py | 4 ++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 6de88def15..5ede128db0 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -236,3 +236,18 @@ def canonic_package_data(package_data): if "*" in package_data: package_data[""] = package_data.pop("*") return package_data + + +def canonic_data_files(data_files, root_dir=None): + """For compatibility with ``setup.py``, ``data_files`` should be a list + of pairs instead of a dict. + + This function also expands glob patterns. + """ + if isinstance(data_files, list): + return data_files + + return [ + (dest, glob_relative(patterns, root_dir)) + for dest, patterns in data_files.items() + ] diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index 6e9117e64d..ea4b495e30 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -617,5 +617,5 @@ def parse_section_data_files(self, section_options): :param dict section_options: """ - parsed = self._parse_section_to_dict(section_options, self._parse_list_glob) - self['data_files'] = [(k, v) for k, v in parsed.items()] + parsed = self._parse_section_to_dict(section_options, self._parse_list) + self['data_files'] = expand.canonic_data_files(parsed) From 925feb9648f2c538140f18a209a729baa1a6a150 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 6 Dec 2021 23:26:53 +0000 Subject: [PATCH 08/55] Remove unused _parse_list_glob --- setuptools/config/setupcfg.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index ea4b495e30..7ddf024bcc 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -213,18 +213,6 @@ def _parse_list(cls, value, separator=','): return [chunk.strip() for chunk in value if chunk.strip()] - @classmethod - def _parse_list_glob(cls, value, separator=','): - """Equivalent to _parse_list() but expands any glob patterns using glob(). - - However, unlike with glob() calls, the results remain relative paths. - - :param value: - :param separator: List items separator character. - :rtype: list - """ - return expand.glob_relative(cls._parse_list(value, separator=separator)) - @classmethod def _parse_dict(cls, value): """Represents value as a dict. From 160c3a35cf165dcdf861b740c6814db514dda3f9 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 6 Dec 2021 23:37:06 +0000 Subject: [PATCH 09/55] Add cmdclass to expand --- setuptools/config/expand.py | 7 +++++++ setuptools/config/setupcfg.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 5ede128db0..e0da179c33 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -192,6 +192,13 @@ def resolve_class(qualified_class_name): return getattr(module, class_name) +def cmdclass(values): + """Given a dictionary mapping command names to strings for qualified class + names, apply :func:`resolve_class` to the dict values. 
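+
+    e.g. a hypothetical ``cmdclass({"sdist": "pkg.cmds.CustomSdist"})`` would
+    import ``pkg.cmds`` and return ``{"sdist": pkg.cmds.CustomSdist}``.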
+ """ + return {k: resolve_class(v) for k, v in values.items()} + + def find_packages(namespaces=False, **kwargs): """Works similarly to :func:`setuptools.find_packages`, but with all arguments given as keyword arguments. Moreover, ``where`` can be given diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index 7ddf024bcc..80cf454107 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -517,7 +517,7 @@ def parsers(self): } def _parse_cmdclass(self, value): - return {k: expand.resolve_class(v) for k, v in self._parse_dict(value).items()} + return expand.cmdclass(self._parse_dict(value)) def _parse_packages(self, value): """Parses `packages` option value. From 92b2a68b3aabdf095938284dcd51c10f1fcd0dcc Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 7 Dec 2021 17:52:15 +0000 Subject: [PATCH 10/55] Allow cmdclass to be expanded with custom package layouts --- setuptools/config/expand.py | 48 ++++++++++++++++++++++++------------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index e0da179c33..74f38bff59 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -156,47 +156,63 @@ def read_attr(attr_desc, package_dir=None, root_dir=None): module_name = '.'.join(attrs_path) module_name = module_name or '__init__' + parent_path, module_name = _find_module(module_name, package_dir, root_dir) + + with patch_path(parent_path): + try: + # attempt to load value statically + return getattr(StaticModule(module_name), attr_name) + except Exception: + # fallback to simple import + module = importlib.import_module(module_name) + + return getattr(module, attr_name) + + +def _find_module(module_name, package_dir, root_dir): + """Given a module (that could normally be imported by ``module_name`` + after the build is complete), find the path to the parent directory where + it is contained and the canonical name that could be used to import it + considering the ``package_dir`` in the build configuration and ``root_dir`` + """ parent_path = root_dir + module_parts = module_name.split('.') if package_dir: - if attrs_path[0] in package_dir: + if module_parts[0] in package_dir: # A custom path was specified for the module we want to import - custom_path = package_dir[attrs_path[0]] + custom_path = package_dir[module_parts[0]] parts = custom_path.rsplit('/', 1) if len(parts) > 1: parent_path = os.path.join(root_dir, parts[0]) parent_module = parts[1] else: parent_module = custom_path - module_name = ".".join([parent_module, *attrs_path[1:]]) + module_name = ".".join([parent_module, *module_parts[1:]]) elif '' in package_dir: # A custom parent directory was specified for all root modules parent_path = os.path.join(root_dir, package_dir['']) - with patch_path(parent_path): - try: - # attempt to load value statically - return getattr(StaticModule(module_name), attr_name) - except Exception: - # fallback to simple import - module = importlib.import_module(module_name) - - return getattr(module, attr_name) + return parent_path, module_name -def resolve_class(qualified_class_name): +def resolve_class(qualified_class_name, package_dir=None, root_dir=None): """Given a qualified class name, return the associated class object""" + root_dir = root_dir or os.getcwd() idx = qualified_class_name.rfind('.') class_name = qualified_class_name[idx + 1 :] pkg_name = qualified_class_name[:idx] - module = importlib.import_module(pkg_name) + + parent_path, module_name = 
_find_module(pkg_name, package_dir, root_dir) + with patch_path(parent_path): + module = importlib.import_module(module_name) return getattr(module, class_name) -def cmdclass(values): +def cmdclass(values, package_dir=None, root_dir=None): """Given a dictionary mapping command names to strings for qualified class names, apply :func:`resolve_class` to the dict values. """ - return {k: resolve_class(v) for k, v in values.items()} + return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()} def find_packages(namespaces=False, **kwargs): From 9dc0a8f1feee384a5966750eb2a89fad800f3a09 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 7 Dec 2021 18:06:48 +0000 Subject: [PATCH 11/55] Add root_dir option to expand.find_packages --- setuptools/config/expand.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 74f38bff59..c4dc098e6c 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -215,7 +215,7 @@ def cmdclass(values, package_dir=None, root_dir=None): return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()} -def find_packages(namespaces=False, **kwargs): +def find_packages(*, namespaces=False, root_dir=None, **kwargs): """Works similarly to :func:`setuptools.find_packages`, but with all arguments given as keyword arguments. Moreover, ``where`` can be given as a list (the results will be simply concatenated). @@ -232,11 +232,17 @@ def find_packages(namespaces=False, **kwargs): else: from setuptools import PackageFinder + root_dir = root_dir or "." where = kwargs.pop('where', ['.']) if isinstance(where, str): where = [where] + target = (_nest_url_style_path(root_dir, path) for path in where) + return list(chain_iter(PackageFinder.find(x, **kwargs) for x in target)) - return list(chain_iter(PackageFinder.find(x, **kwargs) for x in where)) + +def _nest_url_style_path(parent, path): + path = parent if path == "." else os.path.join(parent, path) + return path.replace(os.sep, "/").rstrip("/") def version(value): From 17802ef79e3a28d2df264d7bc8e4c6cb82a627c4 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 2 Dec 2021 19:23:34 +0000 Subject: [PATCH 12/55] Add `tomli` as vendorised dependency This eventually will allow reading project metadata directly from `pyproject.toml` --- setuptools/_vendor/tomli/__init__.py | 9 + setuptools/_vendor/tomli/_parser.py | 663 +++++++++++++++++++++++++++ setuptools/_vendor/tomli/_re.py | 101 ++++ setuptools/_vendor/tomli/_types.py | 6 + setuptools/_vendor/tomli/py.typed | 1 + setuptools/_vendor/vendored.txt | 1 + setuptools/extern/__init__.py | 2 +- 7 files changed, 782 insertions(+), 1 deletion(-) create mode 100644 setuptools/_vendor/tomli/__init__.py create mode 100644 setuptools/_vendor/tomli/_parser.py create mode 100644 setuptools/_vendor/tomli/_re.py create mode 100644 setuptools/_vendor/tomli/_types.py create mode 100644 setuptools/_vendor/tomli/py.typed diff --git a/setuptools/_vendor/tomli/__init__.py b/setuptools/_vendor/tomli/__init__.py new file mode 100644 index 0000000000..7bcdbab36c --- /dev/null +++ b/setuptools/_vendor/tomli/__init__.py @@ -0,0 +1,9 @@ +"""A lil' TOML parser.""" + +__all__ = ("loads", "load", "TOMLDecodeError") +__version__ = "1.2.2" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT + +from tomli._parser import TOMLDecodeError, load, loads + +# Pretend this exception was created here. 
+TOMLDecodeError.__module__ = "tomli" diff --git a/setuptools/_vendor/tomli/_parser.py b/setuptools/_vendor/tomli/_parser.py new file mode 100644 index 0000000000..89e81c3b39 --- /dev/null +++ b/setuptools/_vendor/tomli/_parser.py @@ -0,0 +1,663 @@ +import string +from types import MappingProxyType +from typing import Any, BinaryIO, Dict, FrozenSet, Iterable, NamedTuple, Optional, Tuple +import warnings + +from tomli._re import ( + RE_DATETIME, + RE_LOCALTIME, + RE_NUMBER, + match_to_datetime, + match_to_localtime, + match_to_number, +) +from tomli._types import Key, ParseFloat, Pos + +ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + +# Neither of these sets include quotation mark or backslash. They are +# currently handled as separate cases in the parser functions. +ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") + +ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS + +ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS + +TOML_WS = frozenset(" \t") +TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") +BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS = frozenset(string.hexdigits) + +BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + { + "\\b": "\u0008", # backspace + "\\t": "\u0009", # tab + "\\n": "\u000A", # linefeed + "\\f": "\u000C", # form feed + "\\r": "\u000D", # carriage return + '\\"': "\u0022", # quote + "\\\\": "\u005C", # backslash + } +) + + +class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML.""" + + +def load(fp: BinaryIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]: + """Parse TOML from a binary file object.""" + s_bytes = fp.read() + try: + s = s_bytes.decode() + except AttributeError: + warnings.warn( + "Text file object support is deprecated in favor of binary file objects." + ' Use `open("foo.toml", "rb")` to open the file in binary mode.', + DeprecationWarning, + stacklevel=2, + ) + s = s_bytes # type: ignore[assignment] + return loads(s, parse_float=parse_float) + + +def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901 + """Parse TOML from a string.""" + + # The spec allows converting "\r\n" to "\n", even in string + # literals. Let's do so to simplify parsing. + src = s.replace("\r\n", "\n") + pos = 0 + out = Output(NestedDict(), Flags()) + header: Key = () + + # Parse one statement at a time + # (typically means one line in TOML source) + while True: + # 1. Skip line leading whitespace + pos = skip_chars(src, pos, TOML_WS) + + # 2. Parse rules. Expect one of the following: + # - end of file + # - end of line + # - comment + # - key/value pair + # - append dict to list (and move to its namespace) + # - create dict (and move to its namespace) + # Skip trailing whitespace when applicable. 
+ try: + char = src[pos] + except IndexError: + break + if char == "\n": + pos += 1 + continue + if char in KEY_INITIAL_CHARS: + pos = key_value_rule(src, pos, out, header, parse_float) + pos = skip_chars(src, pos, TOML_WS) + elif char == "[": + try: + second_char: Optional[str] = src[pos + 1] + except IndexError: + second_char = None + if second_char == "[": + pos, header = create_list_rule(src, pos, out) + else: + pos, header = create_dict_rule(src, pos, out) + pos = skip_chars(src, pos, TOML_WS) + elif char != "#": + raise suffixed_err(src, pos, "Invalid statement") + + # 3. Skip comment + pos = skip_comment(src, pos) + + # 4. Expect end of line or end of file + try: + char = src[pos] + except IndexError: + break + if char != "\n": + raise suffixed_err( + src, pos, "Expected newline or end of document after a statement" + ) + pos += 1 + + return out.data.dict + + +class Flags: + """Flags that map to parsed keys/namespaces.""" + + # Marks an immutable namespace (inline array or inline table). + FROZEN = 0 + # Marks a nest that has been explicitly created and can no longer + # be opened using the "[table]" syntax. + EXPLICIT_NEST = 1 + + def __init__(self) -> None: + self._flags: Dict[str, dict] = {} + + def unset_all(self, key: Key) -> None: + cont = self._flags + for k in key[:-1]: + if k not in cont: + return + cont = cont[k]["nested"] + cont.pop(key[-1], None) + + def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None: + cont = self._flags + for k in head_key: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + for k in rel_key: + if k in cont: + cont[k]["flags"].add(flag) + else: + cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + + def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + cont = self._flags + key_parent, key_stem = key[:-1], key[-1] + for k in key_parent: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + if key_stem not in cont: + cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) + + def is_(self, key: Key, flag: int) -> bool: + if not key: + return False # document root has no flags + cont = self._flags + for k in key[:-1]: + if k not in cont: + return False + inner_cont = cont[k] + if flag in inner_cont["recursive_flags"]: + return True + cont = inner_cont["nested"] + key_stem = key[-1] + if key_stem in cont: + cont = cont[key_stem] + return flag in cont["flags"] or flag in cont["recursive_flags"] + return False + + +class NestedDict: + def __init__(self) -> None: + # The parsed content of the TOML document + self.dict: Dict[str, Any] = {} + + def get_or_create_nest( + self, + key: Key, + *, + access_lists: bool = True, + ) -> dict: + cont: Any = self.dict + for k in key: + if k not in cont: + cont[k] = {} + cont = cont[k] + if access_lists and isinstance(cont, list): + cont = cont[-1] + if not isinstance(cont, dict): + raise KeyError("There is no nest behind this key") + return cont + + def append_nest_to_list(self, key: Key) -> None: + cont = self.get_or_create_nest(key[:-1]) + last_key = key[-1] + if last_key in cont: + list_ = cont[last_key] + try: + list_.append({}) + except AttributeError: + raise KeyError("An object other than list found behind this key") + else: + cont[last_key] = [{}] + + +class Output(NamedTuple): + data: NestedDict + 
flags: Flags + + +def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + try: + while src[pos] in chars: + pos += 1 + except IndexError: + pass + return pos + + +def skip_until( + src: str, + pos: Pos, + expect: str, + *, + error_on: FrozenSet[str], + error_on_eof: bool, +) -> Pos: + try: + new_pos = src.index(expect, pos) + except ValueError: + new_pos = len(src) + if error_on_eof: + raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + + if not error_on.isdisjoint(src[pos:new_pos]): + while src[pos] not in error_on: + pos += 1 + raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + return new_pos + + +def skip_comment(src: str, pos: Pos) -> Pos: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char == "#": + return skip_until( + src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False + ) + return pos + + +def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + while True: + pos_before_skip = pos + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + pos = skip_comment(src, pos) + if pos == pos_before_skip: + return pos + + +def create_dict_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]: + pos += 1 # Skip "[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not declare {key} twice") + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.get_or_create_nest(key) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + + if not src.startswith("]", pos): + raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration') + return pos + 1, key + + +def create_list_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]: + pos += 2 # Skip "[[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + # Free the namespace now that it points to another empty list item... 
+ out.flags.unset_all(key) + # ...but this key precisely is still prohibited from table declaration + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.append_nest_to_list(key) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + + if not src.startswith("]]", pos): + raise suffixed_err(src, pos, 'Expected "]]" at the end of an array declaration') + return pos + 2, key + + +def key_value_rule( + src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat +) -> Pos: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + abs_key_parent = header + key_parent + + if out.flags.is_(abs_key_parent, Flags.FROZEN): + raise suffixed_err( + src, pos, f"Can not mutate immutable namespace {abs_key_parent}" + ) + # Containers in the relative path can't be opened with the table syntax after this + out.flags.set_for_relative_key(header, key, Flags.EXPLICIT_NEST) + try: + nest = out.data.get_or_create_nest(abs_key_parent) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, "Can not overwrite a value") + # Mark inline table and array namespaces recursively immutable + if isinstance(value, (dict, list)): + out.flags.set(header + key, Flags.FROZEN, recursive=True) + nest[key_stem] = value + return pos + + +def parse_key_value_pair( + src: str, pos: Pos, parse_float: ParseFloat +) -> Tuple[Pos, Key, Any]: + pos, key = parse_key(src, pos) + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char != "=": + raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair') + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, value = parse_value(src, pos, parse_float) + return pos, key, value + + +def parse_key(src: str, pos: Pos) -> Tuple[Pos, Key]: + pos, key_part = parse_key_part(src, pos) + key: Key = (key_part,) + pos = skip_chars(src, pos, TOML_WS) + while True: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char != ".": + return pos, key + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, key_part = parse_key_part(src, pos) + key += (key_part,) + pos = skip_chars(src, pos, TOML_WS) + + +def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char in BARE_KEY_CHARS: + start_pos = pos + pos = skip_chars(src, pos, BARE_KEY_CHARS) + return pos, src[start_pos:pos] + if char == "'": + return parse_literal_str(src, pos) + if char == '"': + return parse_one_line_basic_str(src, pos) + raise suffixed_err(src, pos, "Invalid initial character for a key part") + + +def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]: + pos += 1 + return parse_basic_str(src, pos, multiline=False) + + +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]: + pos += 1 + array: list = [] + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + while True: + pos, val = parse_value(src, pos, parse_float) + array.append(val) + pos = skip_comments_and_array_ws(src, pos) + + c = src[pos : pos + 1] + if c == "]": + return pos + 1, array + if c != ",": + raise suffixed_err(src, pos, "Unclosed array") + pos += 1 + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + + +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> 
Tuple[Pos, dict]: + pos += 1 + nested_dict = NestedDict() + flags = Flags() + + pos = skip_chars(src, pos, TOML_WS) + if src.startswith("}", pos): + return pos + 1, nested_dict.dict + while True: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + if flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + try: + nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + nest[key_stem] = value + pos = skip_chars(src, pos, TOML_WS) + c = src[pos : pos + 1] + if c == "}": + return pos + 1, nested_dict.dict + if c != ",": + raise suffixed_err(src, pos, "Unclosed inline table") + if isinstance(value, (dict, list)): + flags.set(key, Flags.FROZEN, recursive=True) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + + +def parse_basic_str_escape( # noqa: C901 + src: str, pos: Pos, *, multiline: bool = False +) -> Tuple[Pos, str]: + escape_id = src[pos : pos + 2] + pos += 2 + if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: + # Skip whitespace until next non-whitespace character or end of + # the doc. Error if non-whitespace is found before newline. + if escape_id != "\\\n": + pos = skip_chars(src, pos, TOML_WS) + try: + char = src[pos] + except IndexError: + return pos, "" + if char != "\n": + raise suffixed_err(src, pos, 'Unescaped "\\" in a string') + pos += 1 + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + return pos, "" + if escape_id == "\\u": + return parse_hex_char(src, pos, 4) + if escape_id == "\\U": + return parse_hex_char(src, pos, 8) + try: + return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + except KeyError: + if len(escape_id) != 2: + raise suffixed_err(src, pos, "Unterminated string") from None + raise suffixed_err(src, pos, 'Unescaped "\\" in a string') from None + + +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]: + return parse_basic_str_escape(src, pos, multiline=True) + + +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]: + hex_str = src[pos : pos + hex_len] + if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): + raise suffixed_err(src, pos, "Invalid hex value") + pos += hex_len + hex_int = int(hex_str, 16) + if not is_unicode_scalar_value(hex_int): + raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + return pos, chr(hex_int) + + +def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]: + pos += 1 # Skip starting apostrophe + start_pos = pos + pos = skip_until( + src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True + ) + return pos + 1, src[start_pos:pos] # Skip ending apostrophe + + +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]: + pos += 3 + if src.startswith("\n", pos): + pos += 1 + + if literal: + delim = "'" + end_pos = skip_until( + src, + pos, + "'''", + error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on_eof=True, + ) + result = src[pos:end_pos] + pos = end_pos + 3 + else: + delim = '"' + pos, result = parse_basic_str(src, pos, multiline=True) + + # Add at maximum two extra apostrophes/quotes if the end sequence + # is 4 or 5 chars long instead of just 3. 
+ if not src.startswith(delim, pos): + return pos, result + pos += 1 + if not src.startswith(delim, pos): + return pos, result + delim + pos += 1 + return pos, result + (delim * 2) + + +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]: + if multiline: + error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape_multiline + else: + error_on = ILLEGAL_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape + result = "" + start_pos = pos + while True: + try: + char = src[pos] + except IndexError: + raise suffixed_err(src, pos, "Unterminated string") from None + if char == '"': + if not multiline: + return pos + 1, result + src[start_pos:pos] + if src.startswith('"""', pos): + return pos + 3, result + src[start_pos:pos] + pos += 1 + continue + if char == "\\": + result += src[start_pos:pos] + pos, parsed_escape = parse_escapes(src, pos) + result += parsed_escape + start_pos = pos + continue + if char in error_on: + raise suffixed_err(src, pos, f"Illegal character {char!r}") + pos += 1 + + +def parse_value( # noqa: C901 + src: str, pos: Pos, parse_float: ParseFloat +) -> Tuple[Pos, Any]: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + + # Basic strings + if char == '"': + if src.startswith('"""', pos): + return parse_multiline_str(src, pos, literal=False) + return parse_one_line_basic_str(src, pos) + + # Literal strings + if char == "'": + if src.startswith("'''", pos): + return parse_multiline_str(src, pos, literal=True) + return parse_literal_str(src, pos) + + # Booleans + if char == "t": + if src.startswith("true", pos): + return pos + 4, True + if char == "f": + if src.startswith("false", pos): + return pos + 5, False + + # Dates and times + datetime_match = RE_DATETIME.match(src, pos) + if datetime_match: + try: + datetime_obj = match_to_datetime(datetime_match) + except ValueError as e: + raise suffixed_err(src, pos, "Invalid date or datetime") from e + return datetime_match.end(), datetime_obj + localtime_match = RE_LOCALTIME.match(src, pos) + if localtime_match: + return localtime_match.end(), match_to_localtime(localtime_match) + + # Integers and "normal" floats. + # The regex will greedily match any type starting with a decimal + # char, so needs to be located after handling of dates and times. 
+ number_match = RE_NUMBER.match(src, pos) + if number_match: + return number_match.end(), match_to_number(number_match, parse_float) + + # Arrays + if char == "[": + return parse_array(src, pos, parse_float) + + # Inline tables + if char == "{": + return parse_inline_table(src, pos, parse_float) + + # Special floats + first_three = src[pos : pos + 3] + if first_three in {"inf", "nan"}: + return pos + 3, parse_float(first_three) + first_four = src[pos : pos + 4] + if first_four in {"-inf", "+inf", "-nan", "+nan"}: + return pos + 4, parse_float(first_four) + + raise suffixed_err(src, pos, "Invalid value") + + +def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: + """Return a `TOMLDecodeError` where error message is suffixed with + coordinates in source.""" + + def coord_repr(src: str, pos: Pos) -> str: + if pos >= len(src): + return "end of document" + line = src.count("\n", 0, pos) + 1 + if line == 1: + column = pos + 1 + else: + column = pos - src.rindex("\n", 0, pos) + return f"line {line}, column {column}" + + return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + + +def is_unicode_scalar_value(codepoint: int) -> bool: + return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) diff --git a/setuptools/_vendor/tomli/_re.py b/setuptools/_vendor/tomli/_re.py new file mode 100644 index 0000000000..9126829741 --- /dev/null +++ b/setuptools/_vendor/tomli/_re.py @@ -0,0 +1,101 @@ +from datetime import date, datetime, time, timedelta, timezone, tzinfo +from functools import lru_cache +import re +from typing import Any, Optional, Union + +from tomli._types import ParseFloat + +# E.g. +# - 00:32:00.999999 +# - 00:32:00 +_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" + +RE_NUMBER = re.compile( + r""" +0 +(?: + x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex + | + b[01](?:_?[01])* # bin + | + o[0-7](?:_?[0-7])* # oct +) +| +[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part +(?P + (?:\.[0-9](?:_?[0-9])*)? # optional fractional part + (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part +) +""", + flags=re.VERBOSE, +) +RE_LOCALTIME = re.compile(_TIME_RE_STR) +RE_DATETIME = re.compile( + fr""" +([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27 +(?: + [T ] + {_TIME_RE_STR} + (?:(Z)|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset +)? +""", + flags=re.VERBOSE, +) + + +def match_to_datetime(match: "re.Match") -> Union[datetime, date]: + """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`. + + Raises ValueError if the match does not correspond to a valid date + or datetime. 
+    """
+    (
+        year_str,
+        month_str,
+        day_str,
+        hour_str,
+        minute_str,
+        sec_str,
+        micros_str,
+        zulu_time,
+        offset_sign_str,
+        offset_hour_str,
+        offset_minute_str,
+    ) = match.groups()
+    year, month, day = int(year_str), int(month_str), int(day_str)
+    if hour_str is None:
+        return date(year, month, day)
+    hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
+    micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+    if offset_sign_str:
+        tz: Optional[tzinfo] = cached_tz(
+            offset_hour_str, offset_minute_str, offset_sign_str
+        )
+    elif zulu_time:
+        tz = timezone.utc
+    else:  # local date-time
+        tz = None
+    return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
+
+
+@lru_cache(maxsize=None)
+def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone:
+    sign = 1 if sign_str == "+" else -1
+    return timezone(
+        timedelta(
+            hours=sign * int(hour_str),
+            minutes=sign * int(minute_str),
+        )
+    )
+
+
+def match_to_localtime(match: "re.Match") -> time:
+    hour_str, minute_str, sec_str, micros_str = match.groups()
+    micros = int(micros_str.ljust(6, "0")) if micros_str else 0
+    return time(int(hour_str), int(minute_str), int(sec_str), micros)
+
+
+def match_to_number(match: "re.Match", parse_float: "ParseFloat") -> Any:
+    if match.group("floatpart"):
+        return parse_float(match.group())
+    return int(match.group(), 0)
diff --git a/setuptools/_vendor/tomli/_types.py b/setuptools/_vendor/tomli/_types.py
new file mode 100644
index 0000000000..e37cc8088f
--- /dev/null
+++ b/setuptools/_vendor/tomli/_types.py
@@ -0,0 +1,6 @@
+from typing import Any, Callable, Tuple
+
+# Type annotations
+ParseFloat = Callable[[str], Any]
+Key = Tuple[str, ...]
+Pos = int
diff --git a/setuptools/_vendor/tomli/py.typed b/setuptools/_vendor/tomli/py.typed
new file mode 100644
index 0000000000..7632ecf775
--- /dev/null
+++ b/setuptools/_vendor/tomli/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index 8216ec9915..1a328e08fd 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -2,3 +2,4 @@ packaging==21.2
 pyparsing==2.2.1
 ordered-set==3.1.1
 more_itertools==8.8.0
+tomli==1.2.2
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index baca1afabe..828f636606 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -69,5 +69,5 @@ def install(self):
         sys.meta_path.append(self)
 
 
-names = 'packaging', 'pyparsing', 'ordered_set', 'more_itertools',
+names = 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', 'tomli'
 VendorImporter(__name__, names, 'setuptools._vendor').install()

From 029b9c5f2d7f8212c48eb3b3621bf28c65cd7e85 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 3 Dec 2021 10:04:00 +0000
Subject: [PATCH 13/55] Add `validate-pyproject` as a vendored dependency

In order to minimise dependencies, `validate-pyproject` has the ability
to "dump" only the code necessary to run the validations into a given
directory. This special strategy is used instead of the default
`pip install -t`.

The idea of using JSONSchema for validation was suggested in #2671, and
the rationale for that approach is further discussed in
https://github.com/abravalheri/validate-pyproject/blob/main/docs/faq.rst

Using a library such as `validate-pyproject` has the advantage of
incentivising reuse and collaboration with other projects.
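
For illustration only, a minimal sketch of how the vendored package is
expected to be consumed (the import path, the sample data and the
printed message are hypothetical; at runtime setuptools would reach
these files via its own vendoring machinery):

    from setuptools._vendor._validate_pyproject import (
        JsonSchemaValueException,
        validate,
    )

    # Hypothetical, already-parsed contents of a pyproject.toml file
    pyproject = {
        "build-system": {
            "requires": ["setuptools"],
            "build-backend": "setuptools.build_meta",
        },
    }

    try:
        validate(pyproject)  # returns True when the document is valid
    except JsonSchemaValueException as ex:
        print(f"Invalid pyproject.toml: {ex.message}")

Note that `validate` raises instead of returning ``False``, so callers
can surface the detailed error message to the user.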
Currently `validate-pyproject` ships a JSONSchema for the proposed use
of `pyproject.toml` as a means of configuration for setuptools.
In the future, if there is interest, setuptools could also ship its own
schema and just use the shared infrastructure of `validate-pyproject`
(by advertising the schemas via entry-points).
---
 pavement.py                                   |   43 +-
 setuptools/_vendor/_validate_pyproject/NOTICE |  439 ++++++++
 .../_vendor/_validate_pyproject/__init__.py   |   31 +
 .../_validate_pyproject/extra_validations.py  |   36 +
 .../fastjsonschema_exceptions.py              |   51 +
 .../fastjsonschema_validations.py             | 1002 +++++++++++++++++
 .../_vendor/_validate_pyproject/formats.py    |  202 ++++
 setuptools/_vendor/vendored.txt               |    1 +
 setuptools/extern/__init__.py                 |    3 +-
 9 files changed, 1806 insertions(+), 2 deletions(-)
 create mode 100644 setuptools/_vendor/_validate_pyproject/NOTICE
 create mode 100644 setuptools/_vendor/_validate_pyproject/__init__.py
 create mode 100644 setuptools/_vendor/_validate_pyproject/extra_validations.py
 create mode 100644 setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py
 create mode 100644 setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py
 create mode 100644 setuptools/_vendor/_validate_pyproject/formats.py

diff --git a/pavement.py b/pavement.py
index 81ff6f1201..ae0e58f13c 100644
--- a/pavement.py
+++ b/pavement.py
@@ -1,8 +1,13 @@
+import os
 import re
 import sys
+import shutil
 import subprocess
+import venv
+import string
+from tempfile import TemporaryDirectory
 
-from paver.easy import task, path as Path
+from paver.easy import info, task, path as Path
 
 
 def remove_all(paths):
@@ -67,4 +72,40 @@ def update_pkg_resources():
 def update_setuptools():
     vendor = Path('setuptools/_vendor')
     install(vendor)
+    install_validate_pyproject(vendor)
     rewrite_packaging(vendor / 'packaging', 'setuptools.extern')
+
+
+def install_validate_pyproject(vendor):
+    """``validate-pyproject`` can be vendorized to remove all dependencies"""
+    req = next(
+        (x for x in (vendor / "vendored.txt").lines() if 'validate-pyproject' in x),
+        "validate-pyproject[all]"
+    )
+
+    pkg, _, _ = req.strip(string.whitespace + "#").partition("#")
+    pkg = pkg.strip()
+
+    opts = {}
+    if sys.version_info[:2] >= (3, 10):
+        opts["ignore_cleanup_errors"] = True
+
+    with TemporaryDirectory(**opts) as tmp:
+        venv.create(tmp, with_pip=True)
+        path = os.pathsep.join(Path(tmp).glob("*"))
+        venv_python = shutil.which("python", path=path)
+        info(f"Temporarily installing {pkg!r}...")
+        subprocess.check_call([venv_python, "-m", "pip", "install", pkg])
+        cmd = [
+            venv_python,
+            "-m",
+            "validate_pyproject.vendoring",
+            "--output-dir",
+            str(vendor / "_validate_pyproject"),
+            "--enable-plugins",
+            "setuptools",
+            "distutils",
+            "--very-verbose"
+        ]
+        subprocess.check_output(cmd)
+        info(f"{pkg!r} vendorized")
diff --git a/setuptools/_vendor/_validate_pyproject/NOTICE b/setuptools/_vendor/_validate_pyproject/NOTICE
new file mode 100644
index 0000000000..020083ac22
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/NOTICE
@@ -0,0 +1,439 @@
+The code contained in this directory was automatically generated using the
+following command:
+
+    python -m validate_pyproject.vendoring --output-dir setuptools/_vendor/_validate_pyproject --enable-plugins setuptools distutils --very-verbose
+
+Please avoid changing it manually.
+ + +You can report issues or suggest changes directly to `validate-pyproject` +(or to the relevant plugin repository) + +- https://github.com/abravalheri/validate-pyproject/issues + + +*** + +The following files include code from opensource projects +(either as direct copies or modified versions): + +- `fastjsonschema_exceptions.py`: + - project: `fastjsonschema` - licensed under BSD-3-Clause + (https://github.com/horejsek/python-fastjsonschema) +- `extra_validations.py` and `format.py`: + - project: `validate-pyproject` - licensed under MPL-2.0 + (https://github.com/abravalheri/validate-pyproject) + + +Additionally the following files are automatically generated by tools provided +by the same projects: + +- `__init__.py` +- `fastjsonschema_validations.py` + +The relevant copyright notes and licenses are included bellow. + + +*** + +`fastjsonschema` +================ + +Copyright (c) 2018, Michal Horejsek +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + + Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +*** + +`validate-pyproject` +==================== + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. 
"Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. 
under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. 
Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. 
This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + https://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + diff --git a/setuptools/_vendor/_validate_pyproject/__init__.py b/setuptools/_vendor/_validate_pyproject/__init__.py new file mode 100644 index 0000000000..2b1e77f369 --- /dev/null +++ b/setuptools/_vendor/_validate_pyproject/__init__.py @@ -0,0 +1,31 @@ +from functools import reduce +from typing import Any, Callable, Dict + +from . import formats +from .extra_validations import EXTRA_VALIDATIONS +from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException +from .fastjsonschema_validations import validate as _validate + +__all__ = [ + "validate", + "FORMAT_FUNCTIONS", + "EXTRA_VALIDATIONS", + "JsonSchemaException", + "JsonSchemaValueException", +] + + +FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = { + fn.__name__.replace("_", "-"): fn + for fn in formats.__dict__.values() + if callable(fn) and not fn.__name__.startswith("_") +} + + +def validate(data: Any) -> bool: + """Validate the given ``data`` object using JSON Schema + This function raises ``JsonSchemaValueException`` if ``data`` is invalid. + """ + _validate(data, custom_formats=FORMAT_FUNCTIONS) + reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data) + return True diff --git a/setuptools/_vendor/_validate_pyproject/extra_validations.py b/setuptools/_vendor/_validate_pyproject/extra_validations.py new file mode 100644 index 0000000000..d7d5b39dd5 --- /dev/null +++ b/setuptools/_vendor/_validate_pyproject/extra_validations.py @@ -0,0 +1,36 @@ +"""The purpose of this module is implement PEP 621 validations that are +difficult to express as a JSON Schema (or that are not supported by the current +JSON Schema library). +""" + +from typing import Mapping, TypeVar + +from .fastjsonschema_exceptions import JsonSchemaValueException + +T = TypeVar("T", bound=Mapping) + + +class RedefiningStaticFieldAsDynamic(JsonSchemaValueException): + """According to PEP 621: + + Build back-ends MUST raise an error if the metadata specifies a field + statically as well as being listed in dynamic. + """ + + +def validate_project_dynamic(pyproject: T) -> T: + project_table = pyproject.get("project", {}) + dynamic = project_table.get("dynamic", []) + + for field in dynamic: + if field in project_table: + msg = f"You cannot provided a value for `project.{field}` and " + msg += "list it under `project.dynamic` at the same time" + name = f"data.project.{field}" + value = {field: project_table[field], "...": " # ...", "dynamic": dynamic} + raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621") + + return pyproject + + +EXTRA_VALIDATIONS = (validate_project_dynamic,) diff --git a/setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py b/setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py new file mode 100644 index 0000000000..63d9819924 --- /dev/null +++ b/setuptools/_vendor/_validate_pyproject/fastjsonschema_exceptions.py @@ -0,0 +1,51 @@ +import re + + +SPLIT_RE = re.compile(r'[\.\[\]]+') + + +class JsonSchemaException(ValueError): + """ + Base exception of ``fastjsonschema`` library. 
+ """ + + +class JsonSchemaValueException(JsonSchemaException): + """ + Exception raised by validation function. Available properties: + + * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``), + * invalid ``value`` (e.g. ``60``), + * ``name`` of a path in the data structure (e.g. ``data.propery[index]``), + * ``path`` as an array in the data structure (e.g. ``['data', 'propery', 'index']``), + * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``), + * ``rule`` which the ``value`` is breaking (e.g. ``maximum``) + * and ``rule_definition`` (e.g. ``42``). + + .. versionchanged:: 2.14.0 + Added all extra properties. + """ + + def __init__(self, message, value=None, name=None, definition=None, rule=None): + super().__init__(message) + self.message = message + self.value = value + self.name = name + self.definition = definition + self.rule = rule + + @property + def path(self): + return [item for item in SPLIT_RE.split(self.name) if item != ''] + + @property + def rule_definition(self): + if not self.rule or not self.definition: + return None + return self.definition.get(self.rule) + + +class JsonSchemaDefinitionException(JsonSchemaException): + """ + Exception raised by generator of validation function. + """ diff --git a/setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py b/setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py new file mode 100644 index 0000000000..fa074b8261 --- /dev/null +++ b/setuptools/_vendor/_validate_pyproject/fastjsonschema_validations.py @@ -0,0 +1,1002 @@ +# noqa +# type: ignore +# flake8: noqa +# pylint: skip-file +# mypy: ignore-errors +# yapf: disable +# pylama:skip=1 + + +# *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code *** + + +VERSION = "2.15.1" +import re +from .fastjsonschema_exceptions import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^.*$': re.compile('^.*$'), + '.+': re.compile('.+'), + '^.+$': re.compile('^.+$'), + 'idn-email_re_pattern': re.compile('^[^@]+@[^@]+\\.[^@]+\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats): + validate_https___www_python_org_dev_peps_pep_0517(data, custom_formats) + return data + +def validate_https___www_python_org_dev_peps_pep_0517(data, custom_formats): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://www.python.org/dev/peps/pep-0517/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': [':pep:`517` defines a build-system independent format for source trees', 'while :pep:`518` provides a way of specifying the minimum system requirements', 'for Python projects.', 'Please notice the ``project`` table (as defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$ref': 'https://www.python.org/dev/peps/pep-0621/'}, 'tool': {'type': 'object', 'properties': {'distutils': {'$ref': 'https://docs.python.org/3/install/'}, 'setuptools': {'$ref': 'https://setuptools.pypa.io/en/latest/references/keywords.html'}}}}, 'project': {'$ref': 'https://www.python.org/dev/peps/pep-0621/'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "build-system" in data_keys: + data_keys.remove("build-system") + data__buildsystem = data["build-system"] + if not isinstance(data__buildsystem, (dict)): + raise JsonSchemaValueException("data.build-system must be object", value=data__buildsystem, name="data.build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type') + data__buildsystem_is_dict = isinstance(data__buildsystem, dict) + if data__buildsystem_is_dict: + data__buildsystem_len = len(data__buildsystem) + if not all(prop in data__buildsystem for prop in ['requires']): + raise JsonSchemaValueException("data.build-system must contain ['requires'] properties", value=data__buildsystem, name="data.build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required') + data__buildsystem_keys = set(data__buildsystem.keys()) + if "requires" in data__buildsystem_keys: + data__buildsystem_keys.remove("requires") + data__buildsystem__requires = data__buildsystem["requires"] + if not isinstance(data__buildsystem__requires, (list, tuple)): + raise JsonSchemaValueException("data.build-system.requires must be array", value=data__buildsystem__requires, name="data.build-system.requires", definition={'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, rule='type') + data__buildsystem__requires_is_list = isinstance(data__buildsystem__requires, (list, tuple)) + if data__buildsystem__requires_is_list: + data__buildsystem__requires_len = len(data__buildsystem__requires) + for data__buildsystem__requires_x, data__buildsystem__requires_item in enumerate(data__buildsystem__requires): + if not isinstance(data__buildsystem__requires_item, (str)): + raise JsonSchemaValueException(""+"data.build-system.requires[{data__buildsystem__requires_x}]".format(**locals())+" must be string", value=data__buildsystem__requires_item, name=""+"data.build-system.requires[{data__buildsystem__requires_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "build-backend" in data__buildsystem_keys: + data__buildsystem_keys.remove("build-backend") + data__buildsystem__buildbackend = data__buildsystem["build-backend"] + if not isinstance(data__buildsystem__buildbackend, (str)): + raise JsonSchemaValueException("data.build-system.build-backend must be string", value=data__buildsystem__buildbackend, name="data.build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='type') + if isinstance(data__buildsystem__buildbackend, str): + if not custom_formats["pep517-backend-reference"](data__buildsystem__buildbackend): + raise JsonSchemaValueException("data.build-system.build-backend must be pep517-backend-reference", value=data__buildsystem__buildbackend, name="data.build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='format') + if "backend-path" in data__buildsystem_keys: + data__buildsystem_keys.remove("backend-path") + data__buildsystem__backendpath = data__buildsystem["backend-path"] + if not isinstance(data__buildsystem__backendpath, (list, tuple)): + raise JsonSchemaValueException("data.build-system.backend-path must be array", value=data__buildsystem__backendpath, name="data.build-system.backend-path", definition={'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and 
running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}, rule='type') + data__buildsystem__backendpath_is_list = isinstance(data__buildsystem__backendpath, (list, tuple)) + if data__buildsystem__backendpath_is_list: + data__buildsystem__backendpath_len = len(data__buildsystem__backendpath) + for data__buildsystem__backendpath_x, data__buildsystem__backendpath_item in enumerate(data__buildsystem__backendpath): + if not isinstance(data__buildsystem__backendpath_item, (str)): + raise JsonSchemaValueException(""+"data.build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals())+" must be string", value=data__buildsystem__backendpath_item, name=""+"data.build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals())+"", definition={'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}, rule='type') + if data__buildsystem_keys: + raise JsonSchemaValueException("data.build-system must not contain "+str(data__buildsystem_keys)+" properties", value=data__buildsystem, name="data.build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='additionalProperties') + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate_https___www_python_org_dev_peps_pep_0621(data__project, custom_formats) + if "tool" in data_keys: + data_keys.remove("tool") + data__tool = data["tool"] + if not isinstance(data__tool, (dict)): + raise JsonSchemaValueException("data.tool must be object", value=data__tool, name="data.tool", definition={'type': 'object', 'properties': {'distutils': {'$ref': 'https://docs.python.org/3/install/'}, 'setuptools': {'$ref': 'https://setuptools.pypa.io/en/latest/references/keywords.html'}}}, rule='type') + data__tool_is_dict = isinstance(data__tool, dict) + if data__tool_is_dict: + data__tool_keys = set(data__tool.keys()) + if "distutils" in data__tool_keys: + data__tool_keys.remove("distutils") + data__tool__distutils = data__tool["distutils"] + validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats) + if "setuptools" in data__tool_keys: + data__tool_keys.remove("setuptools") + data__tool__setuptools = data__tool["setuptools"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats) + if data_keys: + raise JsonSchemaValueException("data must not contain "+str(data_keys)+" properties", value=data, name="data", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://www.python.org/dev/peps/pep-0517/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': [':pep:`517` defines a build-system independent format for source 
trees', 'while :pep:`518` provides a way of specifying the minimum system requirements', 'for Python projects.', 'Please notice the ``project`` table (as defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$ref': 'https://www.python.org/dev/peps/pep-0621/'}, 'tool': {'type': 'object', 'properties': {'distutils': {'$ref': 'https://docs.python.org/3/install/'}, 'setuptools': {'$ref': 'https://setuptools.pypa.io/en/latest/references/keywords.html'}}}}, 'project': {'$ref': 'https://www.python.org/dev/peps/pep-0621/'}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if 
any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, 'license': {'type': 'string', '$$description': ['PROVISIONAL: A string specifying the license of the package', '(might change with PEP 639)'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-expression``?'}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might change with PEP 639)'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-files.glob``?'}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "platforms" in data_keys: + data_keys.remove("platforms") + data__platforms = data["platforms"] + if not isinstance(data__platforms, (list, tuple)): + raise JsonSchemaValueException("data.platforms must be array", value=data__platforms, name="data.platforms", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__platforms_is_list = isinstance(data__platforms, (list, tuple)) + if data__platforms_is_list: + data__platforms_len = len(data__platforms) + for data__platforms_x, data__platforms_item in enumerate(data__platforms): + if not isinstance(data__platforms_item, (str)): + raise JsonSchemaValueException(""+"data.platforms[{data__platforms_x}]".format(**locals())+" must be string", value=data__platforms_item, name=""+"data.platforms[{data__platforms_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "provides" in data_keys: + data_keys.remove("provides") + data__provides = data["provides"] + if not isinstance(data__provides, (list, tuple)): + raise JsonSchemaValueException("data.provides must be array", value=data__provides, name="data.provides", definition={'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__provides_is_list = isinstance(data__provides, (list, tuple)) + if data__provides_is_list: + data__provides_len = len(data__provides) + for data__provides_x, data__provides_item in enumerate(data__provides): + if not isinstance(data__provides_item, (str)): + raise JsonSchemaValueException(""+"data.provides[{data__provides_x}]".format(**locals())+" must be string", value=data__provides_item, name=""+"data.provides[{data__provides_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__provides_item, str): + if not custom_formats["pep508-identifier"](data__provides_item): + raise JsonSchemaValueException(""+"data.provides[{data__provides_x}]".format(**locals())+" must be pep508-identifier", value=data__provides_item, name=""+"data.provides[{data__provides_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "obsoletes" in data_keys: + data_keys.remove("obsoletes") + data__obsoletes = data["obsoletes"] + if not isinstance(data__obsoletes, (list, tuple)): + raise JsonSchemaValueException("data.obsoletes must be array", value=data__obsoletes, name="data.obsoletes", definition={'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__obsoletes_is_list = isinstance(data__obsoletes, (list, tuple)) + if data__obsoletes_is_list: + data__obsoletes_len = len(data__obsoletes) + for data__obsoletes_x, data__obsoletes_item in enumerate(data__obsoletes): + if not isinstance(data__obsoletes_item, (str)): + raise JsonSchemaValueException(""+"data.obsoletes[{data__obsoletes_x}]".format(**locals())+" must be string", value=data__obsoletes_item, 
name=""+"data.obsoletes[{data__obsoletes_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__obsoletes_item, str): + if not custom_formats["pep508-identifier"](data__obsoletes_item): + raise JsonSchemaValueException(""+"data.obsoletes[{data__obsoletes_x}]".format(**locals())+" must be pep508-identifier", value=data__obsoletes_item, name=""+"data.obsoletes[{data__obsoletes_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "zip-safe" in data_keys: + data_keys.remove("zip-safe") + data__zipsafe = data["zip-safe"] + if not isinstance(data__zipsafe, (bool)): + raise JsonSchemaValueException("data.zip-safe must be boolean", value=data__zipsafe, name="data.zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type') + if "script-files" in data_keys: + data_keys.remove("script-files") + data__scriptfiles = data["script-files"] + if not isinstance(data__scriptfiles, (list, tuple)): + raise JsonSchemaValueException("data.script-files must be array", value=data__scriptfiles, name="data.script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type') + data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, tuple)) + if data__scriptfiles_is_list: + data__scriptfiles_len = len(data__scriptfiles) + for data__scriptfiles_x, data__scriptfiles_item in enumerate(data__scriptfiles): + if not isinstance(data__scriptfiles_item, (str)): + raise JsonSchemaValueException(""+"data.script-files[{data__scriptfiles_x}]".format(**locals())+" must be string", value=data__scriptfiles_item, name=""+"data.script-files[{data__scriptfiles_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "eager-resources" in data_keys: + data_keys.remove("eager-resources") + data__eagerresources = data["eager-resources"] + if not isinstance(data__eagerresources, (list, tuple)): + raise JsonSchemaValueException("data.eager-resources must be array", value=data__eagerresources, name="data.eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple)) + if data__eagerresources_is_list: + data__eagerresources_len = len(data__eagerresources) + for data__eagerresources_x, data__eagerresources_item in enumerate(data__eagerresources): + if not isinstance(data__eagerresources_item, (str)): + raise JsonSchemaValueException(""+"data.eager-resources[{data__eagerresources_x}]".format(**locals())+" must be string", value=data__eagerresources_item, name=""+"data.eager-resources[{data__eagerresources_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "packages" in data_keys: + data_keys.remove("packages") + data__packages = data["packages"] + data__packages_one_of_count = 0 + if data__packages_one_of_count < 2: + try: + if not isinstance(data__packages, (list, tuple)): + raise JsonSchemaValueException("data.packages must be array", value=data__packages, name="data.packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': 
{'type': 'string', 'format': 'python-module-name'}}, rule='type') + data__packages_is_list = isinstance(data__packages, (list, tuple)) + if data__packages_is_list: + data__packages_len = len(data__packages) + for data__packages_x, data__packages_item in enumerate(data__packages): + if not isinstance(data__packages_item, (str)): + raise JsonSchemaValueException(""+"data.packages[{data__packages_x}]".format(**locals())+" must be string", value=data__packages_item, name=""+"data.packages[{data__packages_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__packages_item, str): + if not custom_formats["python-module-name"](data__packages_item): + raise JsonSchemaValueException(""+"data.packages[{data__packages_x}]".format(**locals())+" must be python-module-name", value=data__packages_item, name=""+"data.packages[{data__packages_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + data__packages_one_of_count += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats) + data__packages_one_of_count += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count != 1: + raise JsonSchemaValueException("data.packages must be valid exactly by one of oneOf definition", value=data__packages, name="data.packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, rule='oneOf') + if "package-dir" in data_keys: + data_keys.remove("package-dir") + data__packagedir = data["package-dir"] + if not isinstance(data__packagedir, (dict)): + raise JsonSchemaValueException("data.package-dir must be object", value=data__packagedir, name="data.package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type') + data__packagedir_is_dict = isinstance(data__packagedir, dict) + if data__packagedir_is_dict: + data__packagedir_keys = set(data__packagedir.keys()) + for data__packagedir_key, data__packagedir_val in data__packagedir.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedir_key): + if data__packagedir_key in data__packagedir_keys: + data__packagedir_keys.remove(data__packagedir_key) + if not isinstance(data__packagedir_val, (str)): + raise JsonSchemaValueException(""+"data.package-dir.{data__packagedir_key}".format(**locals())+" must be string", 
value=data__packagedir_val, name=""+"data.package-dir.{data__packagedir_key}".format(**locals())+"", definition={'type': 'string'}, rule='type') + if data__packagedir_keys: + raise JsonSchemaValueException("data.package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="data.package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties') + data__packagedir_len = len(data__packagedir) + if data__packagedir_len != 0: + data__packagedir_property_names = True + for data__packagedir_key in data__packagedir: + try: + data__packagedir_key_one_of_count = 0 + if data__packagedir_key_one_of_count < 2: + try: + if isinstance(data__packagedir_key, str): + if not custom_formats["python-module-name"](data__packagedir_key): + raise JsonSchemaValueException("data.package-dir must be python-module-name", value=data__packagedir_key, name="data.package-dir", definition={'format': 'python-module-name'}, rule='format') + data__packagedir_key_one_of_count += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count < 2: + try: + if data__packagedir_key != "": + raise JsonSchemaValueException("data.package-dir must be same as const definition: ", value=data__packagedir_key, name="data.package-dir", definition={'const': ''}, rule='const') + data__packagedir_key_one_of_count += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count != 1: + raise JsonSchemaValueException("data.package-dir must be valid exactly by one of oneOf definition", value=data__packagedir_key, name="data.package-dir", definition={'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedir_property_names = False + if not data__packagedir_property_names: + raise JsonSchemaValueException("data.package-dir must be named by propertyName definition", value=data__packagedir, name="data.package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames') + if "package-data" in data_keys: + data_keys.remove("package-data") + data__packagedata = data["package-data"] + if not isinstance(data__packagedata, (dict)): + raise JsonSchemaValueException("data.package-data must be object", value=data__packagedata, name="data.package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 
'items': {'type': 'string'}}}}, rule='type') + data__packagedata_is_dict = isinstance(data__packagedata, dict) + if data__packagedata_is_dict: + data__packagedata_keys = set(data__packagedata.keys()) + for data__packagedata_key, data__packagedata_val in data__packagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedata_key): + if data__packagedata_key in data__packagedata_keys: + data__packagedata_keys.remove(data__packagedata_key) + if not isinstance(data__packagedata_val, (list, tuple)): + raise JsonSchemaValueException(""+"data.package-data.{data__packagedata_key}".format(**locals())+" must be array", value=data__packagedata_val, name=""+"data.package-data.{data__packagedata_key}".format(**locals())+"", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__packagedata_val_is_list = isinstance(data__packagedata_val, (list, tuple)) + if data__packagedata_val_is_list: + data__packagedata_val_len = len(data__packagedata_val) + for data__packagedata_val_x, data__packagedata_val_item in enumerate(data__packagedata_val): + if not isinstance(data__packagedata_val_item, (str)): + raise JsonSchemaValueException(""+"data.package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals())+" must be string", value=data__packagedata_val_item, name=""+"data.package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if data__packagedata_keys: + raise JsonSchemaValueException("data.package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="data.package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__packagedata_len = len(data__packagedata) + if data__packagedata_len != 0: + data__packagedata_property_names = True + for data__packagedata_key in data__packagedata: + try: + data__packagedata_key_one_of_count = 0 + if data__packagedata_key_one_of_count < 2: + try: + if isinstance(data__packagedata_key, str): + if not custom_formats["python-module-name"](data__packagedata_key): + raise JsonSchemaValueException("data.package-data must be python-module-name", value=data__packagedata_key, name="data.package-data", definition={'format': 'python-module-name'}, rule='format') + data__packagedata_key_one_of_count += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count < 2: + try: + if data__packagedata_key != "*": + raise JsonSchemaValueException("data.package-data must be same as const definition: *", value=data__packagedata_key, name="data.package-data", definition={'const': '*'}, rule='const') + data__packagedata_key_one_of_count += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count != 1: + raise JsonSchemaValueException("data.package-data must be valid exactly by one of oneOf definition", value=data__packagedata_key, name="data.package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedata_property_names = False + if not 
data__packagedata_property_names: + raise JsonSchemaValueException("data.package-data must be named by propertyName definition", value=data__packagedata, name="data.package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "include-package-data" in data_keys: + data_keys.remove("include-package-data") + data__includepackagedata = data["include-package-data"] + if not isinstance(data__includepackagedata, (bool)): + raise JsonSchemaValueException("data.include-package-data must be boolean", value=data__includepackagedata, name="data.include-package-data", definition={'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, rule='type') + if "exclude-package-data" in data_keys: + data_keys.remove("exclude-package-data") + data__excludepackagedata = data["exclude-package-data"] + if not isinstance(data__excludepackagedata, (dict)): + raise JsonSchemaValueException("data.exclude-package-data must be object", value=data__excludepackagedata, name="data.exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict) + if data__excludepackagedata_is_dict: + data__excludepackagedata_keys = set(data__excludepackagedata.keys()) + for data__excludepackagedata_key, data__excludepackagedata_val in data__excludepackagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__excludepackagedata_key): + if data__excludepackagedata_key in data__excludepackagedata_keys: + data__excludepackagedata_keys.remove(data__excludepackagedata_key) + if not isinstance(data__excludepackagedata_val, (list, tuple)): + raise JsonSchemaValueException(""+"data.exclude-package-data.{data__excludepackagedata_key}".format(**locals())+" must be array", value=data__excludepackagedata_val, name=""+"data.exclude-package-data.{data__excludepackagedata_key}".format(**locals())+"", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__excludepackagedata_val_is_list = isinstance(data__excludepackagedata_val, (list, tuple)) + if data__excludepackagedata_val_is_list: + data__excludepackagedata_val_len = len(data__excludepackagedata_val) + for data__excludepackagedata_val_x, data__excludepackagedata_val_item in enumerate(data__excludepackagedata_val): + if not isinstance(data__excludepackagedata_val_item, (str)): + raise JsonSchemaValueException(""+"data.exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals())+" must be string", value=data__excludepackagedata_val_item, 
name=""+"data.exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if data__excludepackagedata_keys: + raise JsonSchemaValueException("data.exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="data.exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__excludepackagedata_len = len(data__excludepackagedata) + if data__excludepackagedata_len != 0: + data__excludepackagedata_property_names = True + for data__excludepackagedata_key in data__excludepackagedata: + try: + data__excludepackagedata_key_one_of_count = 0 + if data__excludepackagedata_key_one_of_count < 2: + try: + if isinstance(data__excludepackagedata_key, str): + if not custom_formats["python-module-name"](data__excludepackagedata_key): + raise JsonSchemaValueException("data.exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="data.exclude-package-data", definition={'format': 'python-module-name'}, rule='format') + data__excludepackagedata_key_one_of_count += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count < 2: + try: + if data__excludepackagedata_key != "*": + raise JsonSchemaValueException("data.exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="data.exclude-package-data", definition={'const': '*'}, rule='const') + data__excludepackagedata_key_one_of_count += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count != 1: + raise JsonSchemaValueException("data.exclude-package-data must be valid exactly by one of oneOf definition", value=data__excludepackagedata_key, name="data.exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__excludepackagedata_property_names = False + if not data__excludepackagedata_property_names: + raise JsonSchemaValueException("data.exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="data.exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "namespace-packages" in data_keys: + data_keys.remove("namespace-packages") + data__namespacepackages = data["namespace-packages"] + if not isinstance(data__namespacepackages, (list, tuple)): + raise JsonSchemaValueException("data.namespace-packages must be array", value=data__namespacepackages, name="data.namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 
'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type') + data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple)) + if data__namespacepackages_is_list: + data__namespacepackages_len = len(data__namespacepackages) + for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages): + if not isinstance(data__namespacepackages_item, (str)): + raise JsonSchemaValueException(""+"data.namespace-packages[{data__namespacepackages_x}]".format(**locals())+" must be string", value=data__namespacepackages_item, name=""+"data.namespace-packages[{data__namespacepackages_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__namespacepackages_item, str): + if not custom_formats["python-module-name"](data__namespacepackages_item): + raise JsonSchemaValueException(""+"data.namespace-packages[{data__namespacepackages_x}]".format(**locals())+" must be python-module-name", value=data__namespacepackages_item, name=""+"data.namespace-packages[{data__namespacepackages_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + if "py-modules" in data_keys: + data_keys.remove("py-modules") + data__pymodules = data["py-modules"] + if not isinstance(data__pymodules, (list, tuple)): + raise JsonSchemaValueException("data.py-modules must be array", value=data__pymodules, name="data.py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type') + data__pymodules_is_list = isinstance(data__pymodules, (list, tuple)) + if data__pymodules_is_list: + data__pymodules_len = len(data__pymodules) + for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules): + if not isinstance(data__pymodules_item, (str)): + raise JsonSchemaValueException(""+"data.py-modules[{data__pymodules_x}]".format(**locals())+" must be string", value=data__pymodules_item, name=""+"data.py-modules[{data__pymodules_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__pymodules_item, str): + if not custom_formats["python-module-name"](data__pymodules_item): + raise JsonSchemaValueException(""+"data.py-modules[{data__pymodules_x}]".format(**locals())+" must be python-module-name", value=data__pymodules_item, name=""+"data.py-modules[{data__pymodules_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + if "data-files" in data_keys: + data_keys.remove("data-files") + data__datafiles = data["data-files"] + if not isinstance(data__datafiles, (dict)): + raise JsonSchemaValueException("data.data-files must be object", value=data__datafiles, name="data.data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__datafiles_is_dict = isinstance(data__datafiles, dict) + if data__datafiles_is_dict: + data__datafiles_keys = set(data__datafiles.keys()) + for data__datafiles_key, data__datafiles_val in data__datafiles.items(): + if REGEX_PATTERNS['^.*$'].search(data__datafiles_key): + if data__datafiles_key in data__datafiles_keys: + data__datafiles_keys.remove(data__datafiles_key) + if not isinstance(data__datafiles_val, (list, tuple)): + raise JsonSchemaValueException(""+"data.data-files.{data__datafiles_key}".format(**locals())+" must be array", value=data__datafiles_val, name=""+"data.data-files.{data__datafiles_key}".format(**locals())+"", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__datafiles_val_is_list = isinstance(data__datafiles_val, (list, tuple)) + if data__datafiles_val_is_list: + data__datafiles_val_len = len(data__datafiles_val) + for data__datafiles_val_x, data__datafiles_val_item in enumerate(data__datafiles_val): + if not isinstance(data__datafiles_val_item, (str)): + raise JsonSchemaValueException(""+"data.data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals())+" must be string", value=data__datafiles_val_item, name=""+"data.data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "cmdclass" in data_keys: + data_keys.remove("cmdclass") + data__cmdclass = data["cmdclass"] + if not isinstance(data__cmdclass, (dict)): + raise JsonSchemaValueException("data.cmdclass must be object", value=data__cmdclass, name="data.cmdclass", definition={'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, rule='type') + data__cmdclass_is_dict = isinstance(data__cmdclass, dict) + if data__cmdclass_is_dict: + data__cmdclass_keys = set(data__cmdclass.keys()) + for data__cmdclass_key, data__cmdclass_val in data__cmdclass.items(): + if REGEX_PATTERNS['^.*$'].search(data__cmdclass_key): + if data__cmdclass_key in data__cmdclass_keys: + data__cmdclass_keys.remove(data__cmdclass_key) + if not isinstance(data__cmdclass_val, (str)): + raise JsonSchemaValueException(""+"data.cmdclass.{data__cmdclass_key}".format(**locals())+" must be string", value=data__cmdclass_val, name=""+"data.cmdclass.{data__cmdclass_key}".format(**locals())+"", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type') + if isinstance(data__cmdclass_val, str): + if not custom_formats["python-qualified-identifier"](data__cmdclass_val): + raise JsonSchemaValueException(""+"data.cmdclass.{data__cmdclass_key}".format(**locals())+" must be python-qualified-identifier", value=data__cmdclass_val, name=""+"data.cmdclass.{data__cmdclass_key}".format(**locals())+"", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format') + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (dict)): + raise 
JsonSchemaValueException("data.dynamic must be object", value=data__dynamic, name="data.dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, 'license': {'type': 'string', '$$description': ['PROVISIONAL: A string specifying the license of the package', '(might change with PEP 639)'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-expression``?'}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might change with PEP 639)'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-files.glob``?'}}}, rule='type') + data__dynamic_is_dict = isinstance(data__dynamic, dict) + if data__dynamic_is_dict: + data__dynamic_keys = set(data__dynamic.keys()) + if "version" in data__dynamic_keys: + data__dynamic_keys.remove("version") + data__dynamic__version = data__dynamic["version"] + data__dynamic__version_one_of_count = 0 + if data__dynamic__version_one_of_count < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats) + data__dynamic__version_one_of_count += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats) + data__dynamic__version_one_of_count += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count != 1: + raise JsonSchemaValueException("data.dynamic.version must be valid exactly by one of oneOf definition", value=data__dynamic__version, name="data.dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, rule='oneOf') + if "classifiers" in data__dynamic_keys: + data__dynamic_keys.remove("classifiers") + data__dynamic__classifiers = data__dynamic["classifiers"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats) + if "description" in data__dynamic_keys: + data__dynamic_keys.remove("description") + data__dynamic__description = data__dynamic["description"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats) + if "entry-points" in data__dynamic_keys: + data__dynamic_keys.remove("entry-points") + data__dynamic__entrypoints = data__dynamic["entry-points"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats) + if "readme" in data__dynamic_keys: + data__dynamic_keys.remove("readme") + data__dynamic__readme = data__dynamic["readme"] + data__dynamic__readme_any_of_count = 0 + if not data__dynamic__readme_any_of_count: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats) + data__dynamic__readme_any_of_count += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count: + try: + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_keys = set(data__dynamic__readme.keys()) + if "content-type" in data__dynamic__readme_keys: + data__dynamic__readme_keys.remove("content-type") + data__dynamic__readme__contenttype = data__dynamic__readme["content-type"] + if not isinstance(data__dynamic__readme__contenttype, (str)): + raise JsonSchemaValueException("data.dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="data.dynamic.readme.content-type", definition={'type': 'string'}, rule='type') + data__dynamic__readme_any_of_count += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count: + raise JsonSchemaValueException("data.dynamic.readme must be valid by one of anyOf definition", value=data__dynamic__readme, name="data.dynamic.readme", definition={'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf') + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_len = len(data__dynamic__readme) + if not all(prop in data__dynamic__readme for prop in ['file']): + raise JsonSchemaValueException("data.dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="data.dynamic.readme", definition={'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required') + if "license" in data__dynamic_keys: + data__dynamic_keys.remove("license") + data__dynamic__license = data__dynamic["license"] + if not isinstance(data__dynamic__license, (str)): + raise JsonSchemaValueException("data.dynamic.license must be string", value=data__dynamic__license, name="data.dynamic.license", definition={'type': 'string', '$$description': ['PROVISIONAL: A string specifying the license 
of the package', '(might change with PEP 639)'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-expression``?'}, rule='type') + if "license-files" in data__dynamic_keys: + data__dynamic_keys.remove("license-files") + data__dynamic__licensefiles = data__dynamic["license-files"] + if not isinstance(data__dynamic__licensefiles, (list, tuple)): + raise JsonSchemaValueException("data.dynamic.license-files must be array", value=data__dynamic__licensefiles, name="data.dynamic.license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might change with PEP 639)'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-files.glob``?'}, rule='type') + data__dynamic__licensefiles_is_list = isinstance(data__dynamic__licensefiles, (list, tuple)) + if data__dynamic__licensefiles_is_list: + data__dynamic__licensefiles_len = len(data__dynamic__licensefiles) + for data__dynamic__licensefiles_x, data__dynamic__licensefiles_item in enumerate(data__dynamic__licensefiles): + if not isinstance(data__dynamic__licensefiles_item, (str)): + raise JsonSchemaValueException(""+"data.dynamic.license-files[{data__dynamic__licensefiles_x}]".format(**locals())+" must be string", value=data__dynamic__licensefiles_item, name=""+"data.dynamic.license-files[{data__dynamic__licensefiles_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + else: data__dynamic["license-files"] = ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'] + if data_keys: + raise JsonSchemaValueException("data must not contain "+str(data_keys)+" properties", value=data, name="data", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if 
any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, 'license': {'type': 'string', '$$description': ['PROVISIONAL: A string specifying the license of the package', '(might change with PEP 639)'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-expression``?'}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might change with PEP 639)'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Maybe ``license-files.glob``?'}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
+    return data
+
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats):
+    if not isinstance(data, (dict)):
+        raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type')
+    data_is_dict = isinstance(data, dict)
+    if data_is_dict:
+        data_len = len(data)
+        if not all(prop in data for prop in ['file']):
+            raise JsonSchemaValueException("data must contain ['file'] properties", value=data, name="data", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required')
+        data_keys = set(data.keys())
+        if "file" in data_keys:
+            data_keys.remove("file")
+            data__file = data["file"]
+            data__file_one_of_count = 0
+            if data__file_one_of_count < 2:
+                try:
+                    if not isinstance(data__file, (str)):
+                        raise JsonSchemaValueException("data.file must be string", value=data__file, name="data.file", definition={'type': 'string'}, rule='type')
+                    data__file_one_of_count += 1
+                except JsonSchemaValueException: pass
+            if data__file_one_of_count < 2:
+                try:
+                    if not isinstance(data__file, (list, tuple)):
+                        raise JsonSchemaValueException("data.file must be array", value=data__file, name="data.file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+                    data__file_is_list = isinstance(data__file, (list, tuple))
+                    if data__file_is_list:
+                        data__file_len = len(data__file)
+                        for data__file_x, data__file_item in enumerate(data__file):
+                            if not isinstance(data__file_item, (str)):
+                                raise JsonSchemaValueException(""+"data.file[{data__file_x}]".format(**locals())+" must be string", value=data__file_item, name=""+"data.file[{data__file_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type')
+                    data__file_one_of_count += 1
+                except JsonSchemaValueException: pass
+            if data__file_one_of_count != 1:
+                raise JsonSchemaValueException("data.file must be valid exactly by one of oneOf definition", value=data__file, name="data.file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
+        if data_keys:
+            raise JsonSchemaValueException("data must not contain "+str(data_keys)+" properties", value=data, name="data", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties')
+    return data
+
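+# Editorial sketch, not part of the generator output: assuming a
+# ``pyproject.toml`` entry such as ``version = {file = ["VERSION.txt"]}``
+# under ``[tool.setuptools.dynamic]``, the parsed value
+# ``{"file": ["VERSION.txt"]}`` satisfies the ``oneOf`` check above, while
+# ``{"file": 42}`` fails both branches and an extra key such as
+# ``{"file": "x", "spam": 1}`` trips the ``additionalProperties`` check,
+# each raising JsonSchemaValueException.
+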
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats):
+    if not isinstance(data, (dict)):
+        raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type')
+    data_is_dict = isinstance(data, dict)
+    if data_is_dict:
+        data_len = len(data)
+        if not all(prop in data for prop in ['attr']):
+            raise JsonSchemaValueException("data must contain ['attr'] properties", value=data, name="data", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required')
+        data_keys = set(data.keys())
+        if "attr" in data_keys:
+            data_keys.remove("attr")
+            data__attr = data["attr"]
+            if not isinstance(data__attr, (str)):
+                raise JsonSchemaValueException("data.attr must be string", value=data__attr, name="data.attr", definition={'type': 'string'}, rule='type')
+        if data_keys:
+            raise JsonSchemaValueException("data must not contain "+str(data_keys)+" properties", value=data, name="data", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties')
+    return data
+
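+# Editorial sketch, not part of the generator output: assuming
+# ``version = {attr = "mypkg.__version__"}`` in ``[tool.setuptools.dynamic]``
+# (``mypkg`` being a hypothetical package), the parsed value
+# ``{"attr": "mypkg.__version__"}`` passes, while a table missing the
+# ``attr`` key fails the ``required`` check above.
+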
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats):
+    if not isinstance(data, (dict)):
+        raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type')
+    data_is_dict = isinstance(data, dict)
+    if data_is_dict:
+        data_keys = set(data.keys())
+        if "find" in data_keys:
+            data_keys.remove("find")
+            data__find = data["find"]
+            if not isinstance(data__find, (dict)):
+                raise JsonSchemaValueException("data.find must be object", value=data__find, name="data.find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='type')
+            data__find_is_dict = isinstance(data__find, dict)
+            if data__find_is_dict:
+                data__find_keys = set(data__find.keys())
+                if "where" in data__find_keys:
+                    data__find_keys.remove("where")
+                    data__find__where = data__find["where"]
+                    if not isinstance(data__find__where, (list, tuple)):
+                        raise JsonSchemaValueException("data.find.where must be array", value=data__find__where, name="data.find.where", definition={'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, rule='type')
+                    data__find__where_is_list = isinstance(data__find__where, (list, tuple))
+                    if data__find__where_is_list:
+                        data__find__where_len = len(data__find__where)
+                        for data__find__where_x, data__find__where_item in enumerate(data__find__where):
+                            if not isinstance(data__find__where_item, (str)):
+                                raise JsonSchemaValueException(""+"data.find.where[{data__find__where_x}]".format(**locals())+" must be string", value=data__find__where_item, name=""+"data.find.where[{data__find__where_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type')
+                if "exclude" in data__find_keys:
+                    data__find_keys.remove("exclude")
+                    data__find__exclude = data__find["exclude"]
+                    if not isinstance(data__find__exclude, (list, tuple)):
+                        raise JsonSchemaValueException("data.find.exclude must be array", value=data__find__exclude, name="data.find.exclude", definition={'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__exclude_is_list = isinstance(data__find__exclude, (list, tuple)) + if data__find__exclude_is_list: + data__find__exclude_len = len(data__find__exclude) + for data__find__exclude_x, data__find__exclude_item in enumerate(data__find__exclude): + if not isinstance(data__find__exclude_item, (str)): + raise JsonSchemaValueException(""+"data.find.exclude[{data__find__exclude_x}]".format(**locals())+" must be string", value=data__find__exclude_item, name=""+"data.find.exclude[{data__find__exclude_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "include" in data__find_keys: + data__find_keys.remove("include") + data__find__include = data__find["include"] + if not isinstance(data__find__include, (list, tuple)): + raise JsonSchemaValueException("data.find.include must be array", value=data__find__include, name="data.find.include", definition={'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__include_is_list = isinstance(data__find__include, (list, tuple)) + if data__find__include_is_list: + data__find__include_len = len(data__find__include) + for data__find__include_x, data__find__include_item in enumerate(data__find__include): + if not isinstance(data__find__include_item, (str)): + raise JsonSchemaValueException(""+"data.find.include[{data__find__include_x}]".format(**locals())+" must be string", value=data__find__include_item, name=""+"data.find.include[{data__find__include_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "namespaces" in data__find_keys: + data__find_keys.remove("namespaces") + data__find__namespaces = data__find["namespaces"] + if not isinstance(data__find__namespaces, (bool)): + raise JsonSchemaValueException("data.find.namespaces must be boolean", value=data__find__namespaces, name="data.find.namespaces", definition={'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}, rule='type') + if data__find_keys: + raise JsonSchemaValueException("data.find must not contain "+str(data__find_keys)+" properties", value=data__find, name="data.find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='additionalProperties') + if data_keys: + raise JsonSchemaValueException("data must not contain "+str(data_keys)+" properties", value=data, name="data", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties') + return data + +def validate_https___docs_python_org_3_install(data, custom_formats): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "global" in data_keys: + data_keys.remove("global") + data__global = data["global"] + if not isinstance(data__global, (dict)): + raise JsonSchemaValueException("data.global must be object", value=data__global, name="data.global", definition={'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}, rule='type') + for data_key, data_val in data.items(): + if REGEX_PATTERNS['.+'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (dict)): + raise JsonSchemaValueException(""+"data.{data_key}".format(**locals())+" must be object", value=data_val, name=""+"data.{data_key}".format(**locals())+"", definition={'type': 'object'}, rule='type') + return data + +def validate_https___www_python_org_dev_peps_pep_0621(data, custom_formats): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://www.python.org/dev/peps/pep-0621/', 'title': '``project`` table', '$$description': ['Data structure for the **project** table 
inside ``pyproject.toml``', '(as defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'if': {'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("data must contain ['name'] properties", value=data, name="data", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://www.python.org/dev/peps/pep-0621/', 'title': '``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'if': {'not': {'required': ['version'], 
'$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}}}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("data.name must be string", value=data__name, name="data.name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__name, str): + if not custom_formats["pep508-identifier"](data__name): + raise JsonSchemaValueException("data.name must be pep508-identifier", value=data__name, name="data.name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. 
MUST be statically defined.', 'format': 'pep508-identifier'}, rule='format') + if "version" in data_keys: + data_keys.remove("version") + data__version = data["version"] + if not isinstance(data__version, (str)): + raise JsonSchemaValueException("data.version must be string", value=data__version, name="data.version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='type') + if isinstance(data__version, str): + if not custom_formats["pep440"](data__version): + raise JsonSchemaValueException("data.version must be pep440", value=data__version, name="data.version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='format') + if "description" in data_keys: + data_keys.remove("description") + data__description = data["description"] + if not isinstance(data__description, (str)): + raise JsonSchemaValueException("data.description must be string", value=data__description, name="data.description", definition={'type': 'string', '$$description': ['The `summary description of the project', '`_']}, rule='type') + if "readme" in data_keys: + data_keys.remove("readme") + data__readme = data["readme"] + data__readme_one_of_count = 0 + if data__readme_one_of_count < 2: + try: + if not isinstance(data__readme, (str)): + raise JsonSchemaValueException("data.readme must be string", value=data__readme, name="data.readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type') + data__readme_one_of_count += 1 + except JsonSchemaValueException: pass + if data__readme_one_of_count < 2: + try: + if not isinstance(data__readme, (dict)): + raise JsonSchemaValueException("data.readme must be object", value=data__readme, name="data.readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type') + data__readme_any_of_count = 0 + if not data__readme_any_of_count: + try: + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['file']): + raise JsonSchemaValueException("data.readme must contain ['file'] properties", value=data__readme, name="data.readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "file" in data__readme_keys: + data__readme_keys.remove("file") + data__readme__file = data__readme["file"] + if not isinstance(data__readme__file, (str)): + raise JsonSchemaValueException("data.readme.file must be string", value=data__readme__file, name="data.readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type') + data__readme_any_of_count += 1 + except JsonSchemaValueException: pass + if not data__readme_any_of_count: + try: + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['text']): + raise JsonSchemaValueException("data.readme must contain ['text'] properties", value=data__readme, name="data.readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "text" in data__readme_keys: + data__readme_keys.remove("text") + data__readme__text = data__readme["text"] + if not isinstance(data__readme__text, (str)): + raise JsonSchemaValueException("data.readme.text must be string", value=data__readme__text, name="data.readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type') + data__readme_any_of_count += 1 + except JsonSchemaValueException: pass + if not data__readme_any_of_count: + raise JsonSchemaValueException("data.readme must be valid by one of anyOf definition", value=data__readme, name="data.readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf') + data__readme_is_dict = isinstance(data__readme, dict) + if data__readme_is_dict: + data__readme_len = len(data__readme) + if not all(prop in data__readme for prop in ['content-type']): + raise JsonSchemaValueException("data.readme must contain ['content-type'] properties", value=data__readme, name="data.readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required') + data__readme_keys = set(data__readme.keys()) + if "content-type" in data__readme_keys: + data__readme_keys.remove("content-type") + data__readme__contenttype = data__readme["content-type"] + if not isinstance(data__readme__contenttype, (str)): + raise JsonSchemaValueException("data.readme.content-type must be string", value=data__readme__contenttype, name="data.readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type') + data__readme_one_of_count += 1 + except JsonSchemaValueException: pass + if data__readme_one_of_count != 1: + raise JsonSchemaValueException("data.readme must be valid exactly by one of oneOf definition", value=data__readme, name="data.readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf') + if "requires-python" in data_keys: + data_keys.remove("requires-python") + data__requirespython = data["requires-python"] + if not isinstance(data__requirespython, (str)): + raise JsonSchemaValueException("data.requires-python must be string", value=data__requirespython, name="data.requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, rule='type') + if isinstance(data__requirespython, str): + if not custom_formats["pep508-versionspec"](data__requirespython): + raise JsonSchemaValueException("data.requires-python must be pep508-versionspec", value=data__requirespython, name="data.requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, rule='format') + if "license" in data_keys: + data_keys.remove("license") + data__license = data["license"] + data__license_one_of_count = 0 + if data__license_one_of_count < 2: + try: + data__license_is_dict = isinstance(data__license, dict) + if data__license_is_dict: + data__license_len = len(data__license) + if not all(prop in data__license for prop in ['file']): + raise JsonSchemaValueException("data.license must contain ['file'] properties", value=data__license, name="data.license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required') + data__license_keys = set(data__license.keys()) + if "file" in data__license_keys: + data__license_keys.remove("file") + data__license__file = data__license["file"] + if not isinstance(data__license__file, (str)): + raise JsonSchemaValueException("data.license.file must be string", value=data__license__file, name="data.license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type') + data__license_one_of_count += 1 + except JsonSchemaValueException: pass + if data__license_one_of_count < 2: + try: + data__license_is_dict = isinstance(data__license, dict) + if data__license_is_dict: + data__license_len = len(data__license) + if not all(prop in data__license for prop in ['text']): + raise JsonSchemaValueException("data.license must contain ['text'] properties", value=data__license, name="data.license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='required') + data__license_keys = set(data__license.keys()) + if "text" in data__license_keys: + data__license_keys.remove("text") + data__license__text = data__license["text"] + if not isinstance(data__license__text, (str)): + raise JsonSchemaValueException("data.license.text must be string", value=data__license__text, name="data.license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}, rule='type') + data__license_one_of_count += 1 + except JsonSchemaValueException: pass + if data__license_one_of_count != 1: + raise JsonSchemaValueException("data.license must be valid exactly by one of oneOf 
definition", value=data__license, name="data.license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf') + if "authors" in data_keys: + data_keys.remove("authors") + data__authors = data["authors"] + if not isinstance(data__authors, (list, tuple)): + raise JsonSchemaValueException("data.authors must be array", value=data__authors, name="data.authors", definition={'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, rule='type') + data__authors_is_list = isinstance(data__authors, (list, tuple)) + if data__authors_is_list: + data__authors_len = len(data__authors) + for data__authors_x, data__authors_item in enumerate(data__authors): + validate_https___www_python_org_dev_peps_pep_0621___definitions_author(data__authors_item, custom_formats) + if "maintainers" in data_keys: + data_keys.remove("maintainers") + data__maintainers = data["maintainers"] + if not isinstance(data__maintainers, (list, tuple)): + raise JsonSchemaValueException("data.maintainers must be array", value=data__maintainers, name="data.maintainers", definition={'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, rule='type') + data__maintainers_is_list = isinstance(data__maintainers, (list, tuple)) + if data__maintainers_is_list: + data__maintainers_len = len(data__maintainers) + for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers): + validate_https___www_python_org_dev_peps_pep_0621___definitions_author(data__maintainers_item, custom_formats) + if "keywords" in data_keys: + data_keys.remove("keywords") + data__keywords = data["keywords"] + if not isinstance(data__keywords, (list, tuple)): + raise JsonSchemaValueException("data.keywords must be array", value=data__keywords, name="data.keywords", definition={'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, rule='type') + data__keywords_is_list = isinstance(data__keywords, (list, tuple)) + if data__keywords_is_list: + data__keywords_len = len(data__keywords) + for data__keywords_x, data__keywords_item in enumerate(data__keywords): + if not isinstance(data__keywords_item, (str)): + raise JsonSchemaValueException(""+"data.keywords[{data__keywords_x}]".format(**locals())+" must be string", value=data__keywords_item, name=""+"data.keywords[{data__keywords_x}]".format(**locals())+"", definition={'type': 'string'}, rule='type') + if "classifiers" in data_keys: + data_keys.remove("classifiers") + data__classifiers = data["classifiers"] + if not isinstance(data__classifiers, (list, tuple)): + raise JsonSchemaValueException("data.classifiers must be array", value=data__classifiers, name="data.classifiers", definition={'type': 'array', 'items': {'type': 'string', 
'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, rule='type') + data__classifiers_is_list = isinstance(data__classifiers, (list, tuple)) + if data__classifiers_is_list: + data__classifiers_len = len(data__classifiers) + for data__classifiers_x, data__classifiers_item in enumerate(data__classifiers): + if not isinstance(data__classifiers_item, (str)): + raise JsonSchemaValueException(""+"data.classifiers[{data__classifiers_x}]".format(**locals())+" must be string", value=data__classifiers_item, name=""+"data.classifiers[{data__classifiers_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, rule='type') + if isinstance(data__classifiers_item, str): + if not custom_formats["trove-classifier"](data__classifiers_item): + raise JsonSchemaValueException(""+"data.classifiers[{data__classifiers_x}]".format(**locals())+" must be trove-classifier", value=data__classifiers_item, name=""+"data.classifiers[{data__classifiers_x}]".format(**locals())+"", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, rule='format') + if "urls" in data_keys: + data_keys.remove("urls") + data__urls = data["urls"] + if not isinstance(data__urls, (dict)): + raise JsonSchemaValueException("data.urls must be object", value=data__urls, name="data.urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='type') + data__urls_is_dict = isinstance(data__urls, dict) + if data__urls_is_dict: + data__urls_keys = set(data__urls.keys()) + for data__urls_key, data__urls_val in data__urls.items(): + if REGEX_PATTERNS['^.+$'].search(data__urls_key): + if data__urls_key in data__urls_keys: + data__urls_keys.remove(data__urls_key) + if not isinstance(data__urls_val, (str)): + raise JsonSchemaValueException(""+"data.urls.{data__urls_key}".format(**locals())+" must be string", value=data__urls_val, name=""+"data.urls.{data__urls_key}".format(**locals())+"", definition={'type': 'string', 'format': 'url'}, rule='type') + if isinstance(data__urls_val, str): + if not custom_formats["url"](data__urls_val): + raise JsonSchemaValueException(""+"data.urls.{data__urls_key}".format(**locals())+" must be url", value=data__urls_val, name=""+"data.urls.{data__urls_key}".format(**locals())+"", definition={'type': 'string', 'format': 'url'}, rule='format') + if data__urls_keys: + raise JsonSchemaValueException("data.urls must not contain "+str(data__urls_keys)+" properties", value=data__urls, name="data.urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='additionalProperties') + if "scripts" in data_keys: + data_keys.remove("scripts") + data__scripts = data["scripts"] + validate_https___www_python_org_dev_peps_pep_0621___definitions_entry_point_group(data__scripts, custom_formats) + if "gui-scripts" in data_keys: + data_keys.remove("gui-scripts") + data__guiscripts = data["gui-scripts"] + validate_https___www_python_org_dev_peps_pep_0621___definitions_entry_point_group(data__guiscripts, custom_formats) + if "entry-points" in data_keys: + data_keys.remove("entry-points") + data__entrypoints = 
data["entry-points"] + data__entrypoints_is_dict = isinstance(data__entrypoints, dict) + if data__entrypoints_is_dict: + data__entrypoints_keys = set(data__entrypoints.keys()) + for data__entrypoints_key, data__entrypoints_val in data__entrypoints.items(): + if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key): + if data__entrypoints_key in data__entrypoints_keys: + data__entrypoints_keys.remove(data__entrypoints_key) + validate_https___www_python_org_dev_peps_pep_0621___definitions_entry_point_group(data__entrypoints_val, custom_formats) + if data__entrypoints_keys: + raise JsonSchemaValueException("data.entry-points must not contain "+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="data.entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, rule='additionalProperties') + data__entrypoints_len = len(data__entrypoints) + if data__entrypoints_len != 0: + data__entrypoints_property_names = True + for data__entrypoints_key in data__entrypoints: + try: + if isinstance(data__entrypoints_key, str): + if not custom_formats["python-entrypoint-group"](data__entrypoints_key): + raise JsonSchemaValueException("data.entry-points must be python-entrypoint-group", value=data__entrypoints_key, name="data.entry-points", definition={'format': 'python-entrypoint-group'}, rule='format') + except JsonSchemaValueException: + data__entrypoints_property_names = False + if not data__entrypoints_property_names: + raise JsonSchemaValueException("data.entry-points must be named by propertyName definition", value=data__entrypoints, name="data.entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, rule='propertyNames') + if "dependencies" in data_keys: + data_keys.remove("dependencies") + data__dependencies = data["dependencies"] + if not isinstance(data__dependencies, (list, tuple)): + raise JsonSchemaValueException("data.dependencies must be array", value=data__dependencies, name="data.dependencies", definition={'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, rule='type') + data__dependencies_is_list = isinstance(data__dependencies, (list, tuple)) + if data__dependencies_is_list: + data__dependencies_len = len(data__dependencies) + for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies): + validate_https___www_python_org_dev_peps_pep_0621___definitions_dependency(data__dependencies_item, custom_formats) + if "optional-dependencies" in data_keys: + data_keys.remove("optional-dependencies") + data__optionaldependencies = data["optional-dependencies"] + if not isinstance(data__optionaldependencies, (dict)): + raise JsonSchemaValueException("data.optional-dependencies must be object", value=data__optionaldependencies, name="data.optional-dependencies", definition={'type': 'object', 'description': 'Optional 
dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, rule='type') + data__optionaldependencies_is_dict = isinstance(data__optionaldependencies, dict) + if data__optionaldependencies_is_dict: + data__optionaldependencies_keys = set(data__optionaldependencies.keys()) + for data__optionaldependencies_key, data__optionaldependencies_val in data__optionaldependencies.items(): + if REGEX_PATTERNS['^.+$'].search(data__optionaldependencies_key): + if data__optionaldependencies_key in data__optionaldependencies_keys: + data__optionaldependencies_keys.remove(data__optionaldependencies_key) + if not isinstance(data__optionaldependencies_val, (list, tuple)): + raise JsonSchemaValueException(""+"data.optional-dependencies.{data__optionaldependencies_key}".format(**locals())+" must be array", value=data__optionaldependencies_val, name=""+"data.optional-dependencies.{data__optionaldependencies_key}".format(**locals())+"", definition={'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}, rule='type') + data__optionaldependencies_val_is_list = isinstance(data__optionaldependencies_val, (list, tuple)) + if data__optionaldependencies_val_is_list: + data__optionaldependencies_val_len = len(data__optionaldependencies_val) + for data__optionaldependencies_val_x, data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val): + validate_https___www_python_org_dev_peps_pep_0621___definitions_dependency(data__optionaldependencies_val_item, custom_formats) + if data__optionaldependencies_keys: + raise JsonSchemaValueException("data.optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="data.optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, rule='additionalProperties') + data__optionaldependencies_len = len(data__optionaldependencies) + if data__optionaldependencies_len != 0: + data__optionaldependencies_property_names = True + for data__optionaldependencies_key in data__optionaldependencies: + try: + if isinstance(data__optionaldependencies_key, str): + if not custom_formats["pep508-identifier"](data__optionaldependencies_key): + raise JsonSchemaValueException("data.optional-dependencies must be pep508-identifier", value=data__optionaldependencies_key, name="data.optional-dependencies", definition={'format': 'pep508-identifier'}, rule='format') + except JsonSchemaValueException: + data__optionaldependencies_property_names = False + if not data__optionaldependencies_property_names: + raise JsonSchemaValueException("data.optional-dependencies must be named by propertyName definition", value=data__optionaldependencies, name="data.optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, rule='propertyNames') + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (list, tuple)): + raise JsonSchemaValueException("data.dynamic must be 
array", value=data__dynamic, name="data.dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type') + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_len = len(data__dynamic) + for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic): + if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']: + raise JsonSchemaValueException(""+"data.dynamic[{data__dynamic_x}]".format(**locals())+" must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name=""+"data.dynamic[{data__dynamic_x}]".format(**locals())+"", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum') + try: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['version']): + raise JsonSchemaValueException("data must contain ['version'] properties", value=data, name="data", definition={'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, rule='required') + except JsonSchemaValueException: pass + else: + raise JsonSchemaValueException("data must not be valid by not definition", value=data, name="data", definition={'not': {'required': ['version'], '$$description': ['version is statically defined in the ``version`` field']}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not') + except JsonSchemaValueException: + pass + else: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_contains = False + for data__dynamic_key in data__dynamic: + try: + if data__dynamic_key != "version": + raise JsonSchemaValueException("data.dynamic must be same as const definition: version", value=data__dynamic_key, name="data.dynamic", definition={'const': 'version'}, rule='const') + data__dynamic_contains = True + break + except JsonSchemaValueException: pass + if not data__dynamic_contains: + raise JsonSchemaValueException("data.dynamic must contain one of contains definition", value=data__dynamic, name="data.dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version should be listed in ``dynamic``']}, rule='contains') + return data + +def validate_https___www_python_org_dev_peps_pep_0621___definitions_dependency(data, custom_formats): + if not isinstance(data, (str)): + raise JsonSchemaValueException("data must be string", value=data, name="data", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type') + if isinstance(data, str): + if not custom_formats["pep508"](data): + raise JsonSchemaValueException("data must be pep508", value=data, name="data", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format') + return data + +def validate_https___www_python_org_dev_peps_pep_0621___definitions_entry_point_group(data, custom_formats): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key, data_val in data.items(): + if REGEX_PATTERNS['^.+$'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (str)): + raise JsonSchemaValueException(""+"data.{data_key}".format(**locals())+" must be string", value=data_val, name=""+"data.{data_key}".format(**locals())+"", definition={'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='type') + if isinstance(data_val, str): + if not custom_formats["python-entrypoint-reference"](data_val): + raise JsonSchemaValueException(""+"data.{data_key}".format(**locals())+" must be python-entrypoint-reference", value=data_val, name=""+"data.{data_key}".format(**locals())+"", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='format') + if data_keys: + raise JsonSchemaValueException("data must not contain "+str(data_keys)+" properties", value=data, name="data", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='additionalProperties') + data_len = len(data) + if data_len != 0: + data_property_names = True + for data_key in data: + try: + if isinstance(data_key, str): + if not custom_formats["python-entrypoint-name"](data_key): + raise JsonSchemaValueException("data must be python-entrypoint-name", value=data_key, name="data", definition={'format': 'python-entrypoint-name'}, rule='format') + except JsonSchemaValueException: + data_property_names = False + if not data_property_names: + raise JsonSchemaValueException("data must be named by propertyName definition", value=data, name="data", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames') + return data + +def validate_https___www_python_org_dev_peps_pep_0621___definitions_author(data, custom_formats): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("data must be object", value=data, name="data", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type')
+    data_is_dict = isinstance(data, dict)
+    if data_is_dict:
+        data_keys = set(data.keys())
+        if "name" in data_keys:
+            data_keys.remove("name")
+            data__name = data["name"]
+            if not isinstance(data__name, (str)):
+                raise JsonSchemaValueException("data.name must be string", value=data__name, name="data.name", definition={'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, rule='type')
+        if "email" in data_keys:
+            data_keys.remove("email")
+            data__email = data["email"]
+            if not isinstance(data__email, (str)):
+                raise JsonSchemaValueException("data.email must be string", value=data__email, name="data.email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='type')
+            if isinstance(data__email, str):
+                if not REGEX_PATTERNS["idn-email_re_pattern"].match(data__email):
+                    raise JsonSchemaValueException("data.email must be idn-email", value=data__email, name="data.email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='format')
+    return data
\ No newline at end of file
diff --git a/setuptools/_vendor/_validate_pyproject/formats.py b/setuptools/_vendor/_validate_pyproject/formats.py
new file mode 100644
index 0000000000..cc8566af93
--- /dev/null
+++ b/setuptools/_vendor/_validate_pyproject/formats.py
@@ -0,0 +1,202 @@
+import logging
+import re
+import string
+from itertools import chain
+from urllib.parse import urlparse
+
+_logger = logging.getLogger(__name__)
+
+# -------------------------------------------------------------------------------------
+# PEP 440
+
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)
+
+
+def pep440(version: str) -> bool:
+    return VERSION_REGEX.match(version) is not None
+
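+# A couple of illustrative checks: pep440("1.0.post1") and pep440("1.0a1")
+# are True, while pep440("not a version") is False.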
+
+# -------------------------------------------------------------------------------------
+# PEP 508
+
+PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
+PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)
+
+
+def pep508_identifier(name: str) -> bool:
+    return PEP508_IDENTIFIER_REGEX.match(name) is not None
+
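+# Illustrative checks: pep508_identifier("my-package") is True, while
+# pep508_identifier("-leading-dash") is False (names must start and end
+# with a letter or digit).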
+
+try:
+    try:
+        from packaging import requirements as _req
+    except ImportError:  # pragma: no cover
+        # let's try setuptools vendored version
+        from setuptools._vendor.packaging import requirements as _req  # type: ignore
+
+    def pep508(value: str) -> bool:
+        try:
+            _req.Requirement(value)
+            return True
+        except _req.InvalidRequirement:
+            return False
+
+
+except ImportError:  # pragma: no cover
+    _logger.warning(
+        "Could not find an installation of `packaging`. Requirements, dependencies and "
+        "versions might not be validated. "
+        "To enforce validation, please install `packaging`."
+    )
+
+    def pep508(value: str) -> bool:
+        return True
+
+
+def pep508_versionspec(value: str) -> bool:
+    """Expression that can be used to specify/lock versions (including ranges)"""
+    if any(c in value for c in (";", "]", "@")):
+        # In PEP 508:
+        # conditional markers, extras and URL specs are not included in the
+        # versionspec
+        return False
+    # Let's pretend we have a dependency called `requirement` with the given
+    # version spec, so that we can re-use the pep508 function for validation:
+    return pep508(f"requirement{value}")
+
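+# Illustrative checks: pep508_versionspec(">=3.8,<4") is True, while
+# pep508_versionspec(">=3.8; python_version<'3.10'") is False (markers,
+# introduced by ";", are not part of a version specifier).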
+
+# -------------------------------------------------------------------------------------
+# PEP 517
+
+
+def pep517_backend_reference(value: str) -> bool:
+    module, _, obj = value.partition(":")
+    identifiers = (i.strip() for i in chain(module.split("."), obj.split(".")))
+    return all(python_identifier(i) for i in identifiers if i)
+
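+# Illustrative checks: pep517_backend_reference("setuptools.build_meta:__legacy__")
+# is True, while pep517_backend_reference("not a backend") is False.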
+
+# -------------------------------------------------------------------------------------
+# Classifiers - PEP 301
+
+
+try:
+    from trove_classifiers import classifiers as _trove_classifiers
+
+    def trove_classifier(value: str) -> bool:
+        return value in _trove_classifiers
+
+
+except ImportError:  # pragma: no cover
+
+    class _TroveClassifier:
+        def __init__(self):
+            self._warned = False
+            self.__name__ = "trove-classifier"
+
+        def __call__(self, value: str) -> bool:
+            if self._warned is False:
+                self._warned = True
+                _logger.warning("Install ``trove-classifiers`` to ensure validation.")
+            return True
+
+    trove_classifier = _TroveClassifier()
+
+
+# -------------------------------------------------------------------------------------
+# Non-PEP related
+
+
+def url(value: str) -> bool:
+    try:
+        parts = urlparse(value)
+        return bool(parts.scheme and parts.netloc)
+        # ^  TODO: should we enforce the scheme to be http(s)?
+    except Exception:
+        return False
+
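+# Illustrative checks: url("https://pypi.org") is True, but url("pypi.org")
+# is False (a scheme such as "https://" is required).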
+
+# https://packaging.python.org/specifications/entry-points/
+ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
+ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
+RECOMMENDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
+RECOMMENDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMENDED_ENTRYPOINT_PATTERN}$", re.I)
+ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
+ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
+
+
+def python_identifier(value: str) -> bool:
+    return value.isidentifier()
+
+
+def python_qualified_identifier(value: str) -> bool:
+    if value.startswith(".") or value.endswith("."):
+        return False
+    return all(python_identifier(m) for m in value.split("."))
+
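+# Illustrative checks: python_qualified_identifier("pkg.mod.attr") is True,
+# while python_qualified_identifier(".pkg") is False.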
+
+def python_module_name(value: str) -> bool:
+    return python_qualified_identifier(value)
+
+
+def python_entrypoint_group(value: str) -> bool:
+    return ENTRYPOINT_GROUP_REGEX.match(value) is not None
+
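+# Illustrative checks: python_entrypoint_group("console_scripts") is True,
+# while python_entrypoint_group("my scripts") is False (no spaces allowed).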
+
+def python_entrypoint_name(value: str) -> bool:
+    if not ENTRYPOINT_REGEX.match(value):
+        return False
+    if not RECOMMENDED_ENTRYPOINT_REGEX.match(value):
+        msg = f"Entry point `{value}` does not follow recommended pattern: "
+        msg += RECOMMENDED_ENTRYPOINT_PATTERN
+        _logger.warning(msg)
+    return True
+
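+# Illustrative checks: python_entrypoint_name("my-plugin") is True;
+# python_entrypoint_name("my plugin") is also True but logs a warning,
+# since it deviates from the recommended pattern.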
+
+def python_entrypoint_reference(value: str) -> bool:
+    if ":" not in value:
+        return False
+    module, _, rest = value.partition(":")
+    if "[" in rest:
+        obj, _, extras_ = rest.partition("[")
+        if extras_.strip()[-1] != "]":
+            return False
+        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
+        if not all(pep508_identifier(e) for e in extras):
+            return False
+        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
+    else:
+        obj = rest
+
+    identifiers = chain(module.split("."), obj.split("."))
+    return all(python_identifier(i.strip()) for i in identifiers)
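+
+# Illustrative checks: python_entrypoint_reference("pkg.module:obj.attr") is
+# True, while python_entrypoint_reference("pkg.module") is False (the ":" is
+# required).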
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index 1a328e08fd..c1f7237ccc 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -3,3 +3,4 @@ pyparsing==2.2.1
 ordered-set==3.1.1
 more_itertools==8.8.0
 tomli==1.2.2
+# validate-pyproject[all]==0.3.1  # Special handling, don't remove
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 828f636606..cb0fe6c7b8 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -69,5 +69,6 @@ def install(self):
             sys.meta_path.append(self)
 
 
-names = 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', 'tomli'
+names = ('packaging', 'pyparsing', 'ordered_set', 'more_itertools',
+         'tomli', '_validate_pyproject')
 VendorImporter(__name__, names, 'setuptools._vendor').install()

From 55a9eb7ef94cff24558138997345a7994304b217 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 3 Dec 2021 15:54:12 +0000
Subject: [PATCH 14/55] Implement read_configuration from pyproject.toml

This is the first step towards making setuptools understand
`pyproject.toml` as a configuration file.

The implementation deliberately splits the act of loading the
configuration from a file into 2 stages: the reading of the file itself
and the expansion of directives (and other derived information).
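
As an illustration, a minimal sketch of the intended two-stage usage
(the file path is hypothetical):

    from setuptools.config.pyprojecttoml import (
        read_configuration,
        expand_configuration,
    )

    # stage 1: parse the TOML document, but do not resolve directives
    config = read_configuration("pyproject.toml", expand=False)

    # stage 2: expand `file`/`attr` directives and derived values
    expanded = expand_configuration(config, root_dir=".")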
---
 setuptools/config/pyprojecttoml.py            | 150 ++++++++++++++++++
 setuptools/tests/config/test_pyprojecttoml.py | 103 ++++++++++++
 2 files changed, 253 insertions(+)
 create mode 100644 setuptools/config/pyprojecttoml.py
 create mode 100644 setuptools/tests/config/test_pyprojecttoml.py

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
new file mode 100644
index 0000000000..dc6aab6e4f
--- /dev/null
+++ b/setuptools/config/pyprojecttoml.py
@@ -0,0 +1,150 @@
+"""Load setuptools configuration from ``pyproject.toml`` files"""
+import os
+from contextlib import contextmanager
+from distutils import log
+from functools import partial
+
+from setuptools.extern import tomli
+from setuptools.extern._validate_pyproject import validate
+from setuptools.config import expand as _expand
+from setuptools.errors import OptionError
+
+
+def read_configuration(filepath, expand=True, ignore_option_errors=False):
+    """Read given configuration file and returns options from it as a dict.
+
+    :param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
+        format.
+
+    :param bool expand: Whether to expand directives and other computed values
+        (i.e. post-process the given configuration)
+
+    :param bool ignore_option_errors: Whether to silently ignore
+        options whose values could not be resolved (e.g. due to exceptions
+        in directives such as ``file:`` or ``attr:``).
+        If ``False``, exceptions are propagated as expected.
+
+    :rtype: dict
+    """
+    filepath = os.path.abspath(filepath)
+
+    with open(filepath, "rb") as file:
+        asdict = tomli.load(file)
+
+    with _ignore_errors(ignore_option_errors):
+        validate(asdict)
+
+    if expand:
+        root_dir = os.path.dirname(filepath)
+        return expand_configuration(asdict, root_dir, ignore_option_errors)
+
+    return asdict
+
+
+def expand_configuration(config, root_dir=None, ignore_option_errors=False):
+    """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
+    find their final values.
+
+    :param dict config: Dict containing the configuration for the distribution
+    :param str root_dir: Top-level directory for the distribution/project
+        (the same directory where ``pyproject.toml`` is placed)
+    :param bool ignore_option_errors: see :func:`read_configuration`
+
+    :rtype: dict
+    """
+    root_dir = root_dir or os.getcwd()
+    project_cfg = config.get("project", {})
+    setuptools_cfg = config.get("tool", {}).get("setuptools", {})
+    package_dir = setuptools_cfg.get("package-dir")
+
+    _expand_all_dynamic(project_cfg, setuptools_cfg, root_dir, ignore_option_errors)
+    _expand_packages(setuptools_cfg, root_dir, ignore_option_errors)
+    _canonic_package_data(setuptools_cfg)
+    _canonic_package_data(setuptools_cfg, "exclude-package-data")
+
+    process = partial(_process_field, ignore_option_errors=ignore_option_errors)
+    cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
+    data_files = partial(_expand.canonic_data_files, root_dir=root_dir)
+    process(setuptools_cfg, "data-files", data_files)
+    process(setuptools_cfg, "cmdclass", cmdclass)
+
+    return config
+
+
+def _expand_all_dynamic(project_cfg, setuptools_cfg, root_dir, ignore_option_errors):
+    silent = ignore_option_errors
+    dynamic_cfg = setuptools_cfg.get("dynamic", {})
+    package_dir = setuptools_cfg.get("package-dir", None)
+    special = ("license", "readme", "version")
+    # license-files are handled directly in the metadata, so no expansion
+    # readme and version need special handling
+    dynamic = project_cfg.get("dynamic", [])
+    regular_dynamic = (x for x in dynamic if x not in special)
+
+    for field in regular_dynamic:
+        value = _expand_dynamic(dynamic_cfg, field, package_dir, root_dir, silent)
+        project_cfg[field] = value
+
+    if "version" in dynamic and "version" in dynamic_cfg:
+        version = _expand_dynamic(dynamic_cfg, "version", package_dir, root_dir, silent)
+        project_cfg["version"] = _expand.version(version)
+
+    if "readme" in dynamic:
+        project_cfg["readme"] = _expand_readme(dynamic_cfg, root_dir, silent)
+
+
+def _expand_dynamic(dynamic_cfg, field, package_dir, root_dir, ignore_option_errors):
+    if field in dynamic_cfg:
+        directive = dynamic_cfg[field]
+        if "file" in directive:
+            return _expand.read_files(directive["file"], root_dir)
+        if "attr" in directive:
+            return _expand.read_attr(directive["attr"], package_dir, root_dir)
+    elif not ignore_option_errors:
+        msg = f"Impossible to expand dynamic value of {field!r}. "
+        msg += f"No configuration found for `tool.setuptools.dynamic.{field}`"
+        raise OptionError(msg)
+    return None
+
+
+def _expand_readme(dynamic_cfg, root_dir, ignore_option_errors):
+    silent = ignore_option_errors
+    return {
+        "text": _expand_dynamic(dynamic_cfg, "readme", None, root_dir, silent),
+        "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst")
+    }
+
+
+def _expand_packages(setuptools_cfg, root_dir, ignore_option_errors=False):
+    packages = setuptools_cfg.get("packages")
+    if packages is None:
+        return
+
+    find = packages.get("find")
+    if isinstance(find, dict):
+        find["root_dir"] = root_dir
+        with _ignore_errors(ignore_option_errors):
+            setuptools_cfg["packages"] = _expand.find_packages(**find)
+
+
+def _process_field(container, field, fn, ignore_option_errors=False):
+    if field in container:
+        with _ignore_errors(ignore_option_errors):
+            container[field] = fn(container[field])
+
+
+def _canonic_package_data(setuptools_cfg, field="package-data"):
+    package_data = setuptools_cfg.get(field, {})
+    return _expand.canonic_package_data(package_data)
+
+
+@contextmanager
+def _ignore_errors(ignore_option_errors):
+    if not ignore_option_errors:
+        yield
+        return
+
+    try:
+        yield
+    except Exception as ex:
+        log.debug(f"Ignored error: {ex.__class__.__name__} - {ex}")
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
new file mode 100644
index 0000000000..8b1e95b570
--- /dev/null
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -0,0 +1,103 @@
+from setuptools.config.pyprojecttoml import read_configuration, expand_configuration
+
+EXAMPLE = """
+[project]
+name = "myproj"
+keywords = ["some", "key", "words"]
+dynamic = ["version", "readme"]
+requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+dependencies = [
+    'importlib-metadata>=0.12;python_version<"3.8"',
+    'importlib-resources>=1.0;python_version<"3.7"',
+    'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
+]
+
+[project.optional-dependencies]
+docs = [
+    "sphinx>=3",
+    "sphinx-argparse>=0.2.5",
+    "sphinx-rtd-theme>=0.4.3",
+]
+testing = [
+    "pytest>=1",
+    "coverage>=3,<5",
+]
+
+[project.scripts]
+exec = "pkg.__main__:exec"
+
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+package-dir = {"" = "src"}
+zip-safe = true
+platforms = ["any"]
+
+[tool.setuptools.packages.find]
+where = ["src"]
+namespaces = true
+
+[tool.setuptools.cmdclass]
+sdist = "pkg.mod.CustomSdist"
+
+[tool.setuptools.dynamic.version]
+attr = "pkg.__version__.VERSION"
+
+[tool.setuptools.dynamic.readme]
+file = ["README.md"]
+content-type = "text/markdown"
+
+[tool.setuptools.package-data]
+"*" = ["*.txt"]
+
+[tool.setuptools.data-files]
+"data" = ["files/*.txt"]
+
+[tool.distutils.sdist]
+formats = "gztar"
+
+[tool.distutils.bdist_wheel]
+universal = true
+"""
+
+
+def test_read_configuration(tmp_path):
+    pyproject = tmp_path / "pyproject.toml"
+
+    files = [
+        "src/pkg/__init__.py",
+        "src/other/nested/__init__.py",
+        "files/file.txt"
+    ]
+    for file in files:
+        (tmp_path / file).parent.mkdir(exist_ok=True, parents=True)
+        (tmp_path / file).touch()
+
+    pyproject.write_text(EXAMPLE)
+    (tmp_path / "README.md").write_text("hello world")
+    (tmp_path / "src/pkg/mod.py").write_text("class CustomSdist: pass")
+    (tmp_path / "src/pkg/__version__.py").write_text("VERSION = (3, 10)")
+    (tmp_path / "src/pkg/__main__.py").write_text("def exec(): print('hello')")
+
+    config = read_configuration(pyproject, expand=False)
+    assert config["project"].get("version") is None
+    assert config["project"].get("readme") is None
+
+    expanded = expand_configuration(config, tmp_path)
+    assert read_configuration(pyproject, expand=True) == expanded
+    assert expanded["project"]["version"] == "3.10"
+    assert expanded["project"]["readme"]["text"] == "hello world"
+    assert set(expanded["tool"]["setuptools"]["packages"]) == {
+        "pkg",
+        "other",
+        "other.nested",
+    }
+    assert "" in expanded["tool"]["setuptools"]["package-data"]
+    assert "*" not in expanded["tool"]["setuptools"]["package-data"]
+    assert expanded["tool"]["setuptools"]["data-files"] == [
+        ("data", ["files/file.txt"])
+    ]

From ae065872ddbb4ef99709b4cd269f083ebd1efbb7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 11:32:10 +0000
Subject: [PATCH 15/55] Expand dynamic entry_points from pyproject.toml

The user might specify dynamic `entry-points` via a `file:`
directive (a similar feature for `setup.cfg` is documented in
[declarative config]).

The changes introduced here add the ability to expand them
when reading the configuration from `pyproject.toml`.

[declarative config]: https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
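
For example (file names are illustrative), a package could declare:

    [project]
    dynamic = ["entry-points", "scripts", "gui-scripts"]

    [tool.setuptools.dynamic.entry-points]
    file = "entry-points.txt"

and list the groups in `entry-points.txt` using the usual INI syntax,
e.g. a `[console_scripts]` section with `name = module:function`
entries. When `scripts`/`gui-scripts` are also dynamic, the
`console_scripts` and `gui_scripts` groups are moved into the
corresponding `project` fields.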
---
 setuptools/config/expand.py                   | 15 ++++++++
 setuptools/config/pyprojecttoml.py            | 19 ++++++++--
 setuptools/tests/config/test_pyprojecttoml.py | 35 +++++++++++++++++++
 3 files changed, 67 insertions(+), 2 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index c4dc098e6c..26b7f75ae0 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -23,6 +23,7 @@
 import sys
 from glob import iglob
 from itertools import chain
+from configparser import ConfigParser
 
 from distutils.errors import DistutilsOptionError
 
@@ -280,3 +281,17 @@ def canonic_data_files(data_files, root_dir=None):
         (dest, glob_relative(patterns, root_dir))
         for dest, patterns in data_files.items()
     ]
+
+
+def entry_points(text, text_source="entry-points"):
+    """Given the contents of entry-points file,
+    process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
+    The first level keys are entry-point groups, the second level keys are
+    entry-point names, and the second level values are references to objects
+    (that correspond to the entry-point value).
+    """
+    parser = ConfigParser(default_section=None)
+    parser.read_string(text, text_source)
+    groups = {k: dict(v.items()) for k, v in parser.items()}
+    groups.pop(parser.default_section, None)
+    return groups
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index dc6aab6e4f..12898de73d 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -75,9 +75,9 @@ def _expand_all_dynamic(project_cfg, setuptools_cfg, root_dir, ignore_option_err
     silent = ignore_option_errors
     dynamic_cfg = setuptools_cfg.get("dynamic", {})
     package_dir = setuptools_cfg.get("package-dir", None)
-    special = ("license", "readme", "version")
+    special = ("license", "readme", "version", "entry-points", "scripts", "gui-scripts")
     # license-files are handled directly in the metadata, so no expansion
-    # readme and version need special handling
+    # readme, version and "entry-points" need special handling
     dynamic = project_cfg.get("dynamic", [])
     regular_dynamic = (x for x in dynamic if x not in special)
 
@@ -92,6 +92,11 @@ def _expand_all_dynamic(project_cfg, setuptools_cfg, root_dir, ignore_option_err
     if "readme" in dynamic:
         project_cfg["readme"] = _expand_readme(dynamic_cfg, root_dir, silent)
 
+    if "entry-points" in dynamic:
+        field = "entry-points"
+        value = _expand_dynamic(dynamic_cfg, field, package_dir, root_dir, silent)
+        project_cfg.update(_expand_entry_points(value, dynamic))
+
 
 def _expand_dynamic(dynamic_cfg, field, package_dir, root_dir, ignore_option_errors):
     if field in dynamic_cfg:
@@ -115,6 +120,16 @@ def _expand_readme(dynamic_cfg, root_dir, ignore_option_errors):
     }
 
 
+def _expand_entry_points(text, dynamic):
+    groups = _expand.entry_points(text)
+    expanded = {"entry-points": groups}
+    if "scripts" in dynamic and "console_scripts" in groups:
+        expanded["scripts"] = groups.pop("console_scripts")
+    if "gui-scripts" in dynamic and "gui_scripts" in groups:
+        expanded["gui-scripts"] = groups.pop("gui_scripts")
+    return expanded
+
+
 def _expand_packages(setuptools_cfg, root_dir, ignore_option_errors=False):
     packages = setuptools_cfg.get("packages")
     if packages is None:
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index 8b1e95b570..088ca1a72f 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -1,3 +1,5 @@
+from configparser import ConfigParser
+
 from setuptools.config.pyprojecttoml import read_configuration, expand_configuration
 
 EXAMPLE = """
@@ -101,3 +103,36 @@ def test_read_configuration(tmp_path):
     assert expanded["tool"]["setuptools"]["data-files"] == [
         ("data", ["files/file.txt"])
     ]
+
+
+ENTRY_POINTS = {
+    "console_scripts": {"a": "mod.a:func"},
+    "gui_scripts": {"b": "mod.b:func"},
+    "other": {"c": "mod.c:func [extra]"},
+}
+
+
+def test_expand_entry_point(tmp_path):
+    entry_points = ConfigParser()
+    entry_points.read_dict(ENTRY_POINTS)
+    with open(tmp_path / "entry-points.txt", "w") as f:
+        entry_points.write(f)
+
+    tool = {"setuptools": {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}}
+    project = {"dynamic": ["scripts", "gui-scripts", "entry-points"]}
+    pyproject = {"project": project, "tool": tool}
+    expanded = expand_configuration(pyproject, tmp_path)
+    assert len(expanded["project"]["scripts"]) == 1
+    assert expanded["project"]["scripts"]["a"] == "mod.a:func"
+    assert len(expanded["project"]["gui-scripts"]) == 1
+    assert expanded["project"]["gui-scripts"]["b"] == "mod.b:func"
+    print(expanded["project"]["entry-points"])
+    assert len(expanded["project"]["entry-points"]) == 1
+    assert expanded["project"]["entry-points"]["other"]["c"] == "mod.c:func [extra]"
+
+    project = {"dynamic": ["entry-points"]}
+    pyproject = {"project": project, "tool": tool}
+    expanded = expand_configuration(pyproject, tmp_path)
+    assert len(expanded["project"]["entry-points"]) == 3
+    assert "scripts" not in expanded["project"]
+    assert "gui-scripts" not in expanded["project"]

From a54da74a131f87fd3698a649c5cbb61e444c8084 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 11:35:41 +0000
Subject: [PATCH 16/55] Add means to apply and compare pyproject.toml metadata

The `setuptools.metadata` module contains functions that allow applying
metadata-related configuration read from a `pyproject.toml` file to an
existing `dist` object. It also allows comparing metadata obtained from
different places to check whether they are equivalent.
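
A rough sketch of the intended usage (the path is illustrative):

    from setuptools import metadata
    from setuptools.config.pyprojecttoml import read_configuration
    from setuptools.dist import Distribution

    config = read_configuration("pyproject.toml")
    core_md = metadata.from_pyproject(config, root_dir=".")

    dist = Distribution({})
    metadata.apply(core_md, dist)  # configures ``dist`` in place

    # ``compare`` returns True/False, or 1/-1 for subset relations
    assert metadata.compare(core_md, core_md) is True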
---
 setuptools/metadata.py            | 319 ++++++++++++++++++++++++++++++
 setuptools/tests/test_metadata.py | 132 +++++++++++++
 2 files changed, 451 insertions(+)
 create mode 100644 setuptools/metadata.py
 create mode 100644 setuptools/tests/test_metadata.py

diff --git a/setuptools/metadata.py b/setuptools/metadata.py
new file mode 100644
index 0000000000..fa5a06640f
--- /dev/null
+++ b/setuptools/metadata.py
@@ -0,0 +1,319 @@
+"""Collection of functions and constants to help dealing with `core metadata`_
+(e.g. obtaining it from ``pyproject.toml``, comparing, applying to a dist
+object, etc..).
+
+.. _core metadata: https://packaging.python.org/en/latest/specifications/core-metadata
+"""
+import os
+from email.headerregistry import Address
+from functools import partial
+from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Set, Union
+
+from setuptools.extern.packaging import version
+from setuptools.extern.packaging.requirements import Requirement
+
+if TYPE_CHECKING:
+    from setuptools.dist import Distribution  # noqa
+
+_Path = Union[os.PathLike, str, None]
+_DictOrStr = Union[dict, str]
+_CorrespFn = Callable[[Any, dict, _Path], None]
+_Correspondence = Union[str, _CorrespFn]
+
+
+CORE_METADATA = (
+    "Metadata-Version",
+    "Name",
+    "Version",
+    "Dynamic",
+    "Platform",
+    "Supported-Platform",
+    "Summary",
+    "Description",
+    "Description-Content-Type",
+    "Keywords",
+    "Home-page",
+    "Download-URL",
+    "Author",
+    "Author-email",
+    "Maintainer",
+    "Maintainer-email",
+    "License",
+    "License-File",  # Not standard yet
+    "Classifier",
+    "Requires-Dist",
+    "Requires-Python",
+    "Requires-External",
+    "Project-URL",
+    "Provides-Extra",
+    "Provides-Dist",
+    "Obsoletes-Dist",
+)
+
+MULTIPLE_USE = (
+    "Dynamic",
+    "Platform",
+    "Supported-Platform",
+    "License-File",  # Not standard yet
+    "Classifier",
+    "Requires-Dist",
+    "Requires-External",
+    "Project-URL",
+    "Provides-Extra",
+    "Provides-Dist",
+    "Obsoletes-Dist",
+)
+
+UPDATES = {
+    "requires": "requires_dist",  # PEP 314 => PEP 345
+    "provides": "provides_dist",  # PEP 314 => PEP 345
+    "obsoletes": "obsoletes_dist",  # PEP 314 => PEP 345
+}
+"""Fields whose names where updated but whose syntax remained basically the same
+(can be safely upgraded).
+This mapping uses the JSON key normalisation from :pep:`566#json-compatible-metadata`
+"""
+
+LIST_VALUES = {*MULTIPLE_USE, "Keywords"}
+DEFAULT_LICENSE_FILES = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')
+# defaults from the `wheel` package
+
+
+def json_compatible_key(key: str) -> str:
+    """As defined in :pep:`566#json-compatible-metadata`"""
+    return key.lower().replace("-", "_")
+
+
+RFC822_KEYS = {json_compatible_key(k): k for k in CORE_METADATA}
+"""Mapping between JSON compatible keys (:pep:`566#json-compatible-metadata`)
+and email-header style (:rfc:`822`) core metadata keys.
+"""
+
+
+def normalise_key(key: str) -> str:
+    key = json_compatible_key(key)
+    if key[-1] == "s" and key[:-1] in RFC822_KEYS:
+        # Sometimes some keys come in the plural (e.g. "classifiers", "license_files")
+        return key[:-1]
+    return key
+
+
+def _summary(val: str, dest: dict, _root_dir: _Path):
+    from setuptools.dist import single_line
+    dest.update(summary=single_line(val))
+
+
+def _description(val: _DictOrStr, dest: dict, root_dir: _Path):
+    if isinstance(val, str):
+        text = val
+        ctype = "text/x-rst"
+    else:
+        from setuptools.config import expand
+
+        text = expand.read_files(val["file"]) if "file" in val else val["text"]
+        ctype = val["content-type"]
+
+    dest.update(description=text, description_content_type=ctype)
+
+
+def _license(val: dict, dest: dict, root_dir: _Path):
+    if "file" in val:
+        dest.update(license_file=val["file"])
+    else:
+        dest.update(license=val["text"])
+
+
+def _people(val: List[dict], dest: dict, _root_dir: _Path, kind: str):
+    field = []
+    email_field = []
+    for person in val:
+        if "name" not in person:
+            email_field.append(person["email"])
+        elif "email" not in person:
+            field.append(person["name"])
+        else:
+            addr = Address(display_name=person["name"], addr_spec=person["email"])
+            email_field.append(str(addr))
+
+    if field:
+        dest.update({kind: field})  # ``kind`` is a variable, not a literal key
+    if email_field:
+        dest.update({f"{kind}_email": email_field})
+
+
+def _urls(val: dict, dest: dict, _root_dir: _Path):
+    special = ("download_url", "home_page")
+    mapping = {x.replace("_", ""): x for x in special}
+    for key, url in val.items():
+        norm_key = json_compatible_key(key).replace("_", "")
+        if norm_key in mapping:
+            dest[mapping[norm_key]] = url
+    dest["project_url"] = [", ".join(i) for i in val.items()]
+
+
+def _dependencies(val: list, dest: dict, _root_dir: _Path):
+    requires_dist = dest.setdefault("requires_dist", [])
+    requires_dist.extend(val)
+
+
+def _add_extra(dep: str, extra_name: str) -> str:
+    cond_expr = f"extra == '{extra_name}'"
+    joiner = " and " if ";" in dep else "; "
+    return joiner.join((dep, cond_expr))
+
+
+def _optional_dependencies(val: dict, dest: dict, root_dir: _Path):
+    extra = set(dest.get("provides_extra", []))
+    for key, deps in val.items():
+        extra.add(key)
+        cond_deps = [_add_extra(x, key) for x in deps]
+        _dependencies(cond_deps, dest, root_dir)
+    dest["provides_extra"] = list(extra)
+
+
+PYPROJECT_CORRESPONDENCE: Dict[str, _CorrespFn] = {
+    "description": _summary,
+    "readme": _description,
+    "license": _license,
+    "authors": partial(_people, kind="author"),
+    "maintainers": partial(_people, kind="maintainer"),
+    "urls": _urls,
+    "dependencies": _dependencies,
+    "optional_dependencies": _optional_dependencies
+}
+
+TOOL_SPECIFIC = ("provides", "obsoletes", "platforms")
+
+
+def from_pyproject(pyproject: dict, root_dir: _Path = None) -> dict:
+    """Given a dict representing the contents of a ``pyproject.toml``,
+    already validated and with directives and dynamic values expanded,
+    return a JSON-like metadata dict as defined in
+    :pep:`566#json-compatible-metadata`
+
+    This function is "forgiving" with its inputs, but strict with its outputs.
+    """
+    metadata = {}
+    project = pyproject.get("project", {}).copy()
+    dynamic = {normalise_key(k) for k in project.pop("dynamic", [])}
+    _from_project_table(metadata, project, dynamic, root_dir)
+
+    tool_table = pyproject.get("tool", {}).get("setuptools", {})
+    _from_tool_table(metadata, tool_table)
+
+    dynamic_cfg = tool_table.get("dynamic", {})
+    _finalize_dynamic(metadata, dynamic, dynamic_cfg, root_dir)
+
+    return metadata
+
+
+def _finalize_dynamic(metadata: dict, dynamic: set, dynamic_cfg: dict, root_dir: _Path):
+    from setuptools.config import expand
+
+    # Dynamic license needs special handling (cannot be expanded in terms of PEP 621)
+    # due to the mutually exclusive ``text`` and ``file`` keys
+    dynamic_license = {"license", "license_files"}
+    dynamic_cfg.setdefault("license_files", DEFAULT_LICENSE_FILES)
+    keys = set(dynamic_cfg) & dynamic_license if "license" in dynamic else set()
+
+    for key in keys:
+        json_key = json_compatible_key(key)
+        val = dynamic_cfg[key]
+        if json_key == "license_files":
+            files = {v: v for v in expand.glob_relative(val, root_dir)}  # deduplicate
+            val = [v for v in files.keys() if not v.endswith("~")]
+        metadata[normalise_key(key)] = val
+        dynamic.discard("license")
+
+    if dynamic:
+        metadata["dynamic"] = sorted(list(dynamic))
+
+
+def _from_project_table(metadata: dict, project: dict, dynamic: set, root_dir: _Path):
+    for key, val in project.items():
+        if not val:
+            continue
+        json_key = json_compatible_key(key)
+        norm_key = normalise_key(json_key)
+        if norm_key in dynamic:
+            dynamic.remove(norm_key)
+        if json_key in PYPROJECT_CORRESPONDENCE:
+            PYPROJECT_CORRESPONDENCE[json_key](val, metadata, root_dir)
+        elif norm_key in RFC822_KEYS:
+            metadata[norm_key] = val
+
+
+def _from_tool_table(metadata: dict, tool_table: dict):
+    for key in TOOL_SPECIFIC:
+        if key in tool_table:
+            norm_key = normalise_key(UPDATES.get(key, key))
+            metadata[norm_key] = tool_table[key]
+
+
+SETUPTOOLS_RENAMES = {"long_description_content_type": "description_content_type"}
+OUTDATED_SETTERS = {"requires_dist": "requires"}
+
+
+def apply(metadata: dict, dist: "Distribution", _source: str = "pyproject.toml"):
+    """Apply a JSON-like ``metadata`` dict as defined in
+    :pep:`566#json-compatible-metadata` into a ``Distribution`` object
+    (configuring the distribution object accordingly)
+    """
+    metadata_obj = dist.metadata
+    norm_attrs = ((normalise_key(x), x) for x in metadata_obj.__dict__)
+    norm_attrs = ((UPDATES.get(k, k), v) for k, v in norm_attrs)
+    norm_attrs = ((SETUPTOOLS_RENAMES.get(k, k), v) for k, v in norm_attrs)
+    metadata_attrs = ((k, v) for k, v in norm_attrs if k in RFC822_KEYS)
+    metadata_setters = {
+        k: getattr(metadata_obj, f"set_{v}", partial(setattr, metadata_obj, v))
+        for k, v in metadata_attrs
+    }
+
+    for key, value in metadata.items():
+        norm_key = normalise_key(key)
+        if norm_key in OUTDATED_SETTERS:
+            setattr(metadata_obj, OUTDATED_SETTERS[norm_key], value)
+        elif norm_key in metadata_setters:
+            metadata_setters[norm_key](value)
+        else:
+            setattr(metadata_obj, norm_key, value)
+
+
+def compare(metadata1: dict, metadata2: dict) -> Union[bool, int]:
+    """Compare ``metadata1`` and ``metadata2`` and return:
+    - ``True`` if ``metadata1 == metadata2``
+    - ``1`` if ``metadata1`` is a subset of ``metadata2``
+    - ``-1`` if ``metadata2`` is a subset of ``metadata1``
+    - ``False`` otherwise
+
+    Both ``metadata1`` and ``metadata2`` should be dicts containing
+    JSON-compatible metadata, as defined in :pep:`566#json-compatible-metadata`.
+    Extra keys will be ignored.
+    """
+    valid_keys = set(RFC822_KEYS)
+    return_value: Union[bool, int] = True
+    metadata1_keys = valid_keys & set(metadata1)
+    metadata2_keys = valid_keys & set(metadata2)
+    if metadata1_keys - metadata2_keys and metadata2_keys - metadata1_keys:
+        # each dict contains keys the other lacks => neither is a subset
+        return False
+    if metadata1_keys - metadata2_keys:
+        return_value = -1
+    elif metadata2_keys - metadata1_keys:
+        return_value = 1
+
+    for key in (metadata1_keys & metadata2_keys):
+        value1, value2 = metadata1[key], metadata2[key]
+        if key == "version":
+            value1, value2 = version.parse(value1), version.parse(value2)
+        elif key == "requires_dist":
+            value1, value2 = _norm_reqs(value1), _norm_reqs(value2)
+        if RFC822_KEYS.get(key, key) in LIST_VALUES:
+            value1, value2 = set(value1), set(value2)
+        if value1 != value2:
+            return False
+
+    return return_value
+
+
+def _norm_reqs(reqs: Iterable[str]) -> Set[str]:
+    return {str(Requirement(req)) for req in reqs}
diff --git a/setuptools/tests/test_metadata.py b/setuptools/tests/test_metadata.py
new file mode 100644
index 0000000000..71aa2c0a25
--- /dev/null
+++ b/setuptools/tests/test_metadata.py
@@ -0,0 +1,132 @@
+from setuptools import metadata as meta
+from setuptools.config.pyprojecttoml import read_configuration
+from setuptools.dist import Distribution
+
+EXAMPLE = """
+[project]
+name = "myproj"
+keywords = ["some", "key", "words"]
+dynamic = ["version", "readme", "license"]
+requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+dependencies = [
+    'importlib_metadata>=0.12;python_version<"3.8"',
+    'importlib_resources>=1.0;python_version<"3.7"',
+    'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
+]
+
+[project.optional-dependencies]
+docs = [
+    "sphinx>=3",
+    "sphinx-argparse>=0.2.5",
+    "sphinx-rtd-theme>=0.4.3",
+]
+testing = [
+    "pytest>=1",
+    "coverage>=3,<5",
+]
+
+[project.scripts]
+exec = "pkg.__main__:exec"
+
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+package-dir = {"" = "src"}
+zip-safe = true
+platforms = ["any"]
+
+[tool.setuptools.packages.find]
+where = ["src"]
+namespaces = true
+
+[tool.setuptools.cmdclass]
+sdist = "pkg.mod.CustomSdist"
+
+[tool.setuptools.dynamic]
+license = "MIT"
+
+[tool.setuptools.dynamic.version]
+attr = "pkg.__version__.VERSION"
+
+[tool.setuptools.dynamic.readme]
+file = ["README.md"]
+content-type = "text/markdown"
+
+[tool.setuptools.package-data]
+"*" = ["*.txt"]
+
+[tool.setuptools.data-files]
+"data" = ["files/*.txt"]
+
+[tool.distutils.sdist]
+formats = "gztar"
+
+[tool.distutils.bdist_wheel]
+universal = true
+"""
+
+
+def _project_files(root_dir):
+    pyproject = root_dir / "pyproject.toml"
+
+    files = ["src/pkg/__init__.py", "src/other/nested/__init__.py", "files/file.txt"]
+    for file in files:
+        (root_dir / file).parent.mkdir(exist_ok=True, parents=True)
+        (root_dir / file).touch()
+
+    pyproject.write_text(EXAMPLE)
+    (root_dir / "LICENSE.txt").write_text("MIT")
+    (root_dir / "README.md").write_text("hello world")
+    (root_dir / "src/pkg/mod.py").write_text("class CustomSdist: pass")
+    (root_dir / "src/pkg/__version__.py").write_text("VERSION = (3, 10)")
+    (root_dir / "src/pkg/__main__.py").write_text("def exec(): print('hello')")
+
+
+EXPECTED_METADATA = {
+    "name": "myproj",
+    "version": "3.10",
+    "keywords": ["some", "key", "words"],
+    "license": "MIT",
+    "license_file": ["LICENSE.txt"],
+    "description": "hello world",
+    "description_content_type": "text/markdown",
+    "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+    "platform": ["any"],
+    "provides_extra": ["docs", "testing"],
+    "requires_dist": [
+        'importlib_metadata>=0.12;python_version<"3.8"',
+        'importlib_resources>=1.0;python_version<"3.7"',
+        'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
+        "sphinx>=3; extra == 'docs'",
+        "sphinx-argparse>=0.2.5; extra == 'docs'",
+        "sphinx-rtd-theme>=0.4.3; extra == 'docs'",
+        "pytest>=1; extra == 'testing'",
+        "coverage>=3,<5; extra == 'testing'",
+    ],
+}
+
+
+def test_from_pyproject(tmp_path):
+    _project_files(tmp_path)
+    pyproject = tmp_path / "pyproject.toml"
+    metadata = meta.from_pyproject(read_configuration(pyproject), root_dir=tmp_path)
+    cmp = meta.compare(metadata, EXPECTED_METADATA)
+    if cmp is not True:
+        print("cmp:", cmp)
+        assert metadata == EXPECTED_METADATA  # just so pytest will print the diff
+
+
+def test_apply(tmp_path):
+    _project_files(tmp_path)
+    pyproject = tmp_path / "pyproject.toml"
+    metadata = meta.from_pyproject(read_configuration(pyproject), root_dir=tmp_path)
+    dist = Distribution({})
+    meta.apply(metadata, dist)
+    internal_meta = dist.metadata
+    assert internal_meta.name == EXPECTED_METADATA["name"]
+    assert (
+        internal_meta.long_description_content_type
+        == EXPECTED_METADATA["description_content_type"]
+    )

From 8c5a4b7f7e8c0d27d4c9085dc09e0c5c4df5e51d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 11:42:38 +0000
Subject: [PATCH 17/55] Add means to apply and compare options from
 pyproject.toml

The `setuptools.options` module contains functions that allow applying
non-metadata-related configuration that comes from a `pyproject.toml`
file to an existing `dist` object. As with `setuptools.metadata`,
comparing options is also supported.
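
Usage mirrors `setuptools.metadata` (a sketch, with an illustrative
path):

    from setuptools import options
    from setuptools.config.pyprojecttoml import read_configuration
    from setuptools.dist import Distribution

    config = read_configuration("pyproject.toml")
    opts = options.from_pyproject(config, root_dir=".")

    dist = Distribution({})
    options.apply(opts, dist)  # sets dist.packages, dist.cmdclass, etc.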
---
 setuptools/options.py            | 154 +++++++++++++++++++++++++++++++
 setuptools/tests/test_options.py | 136 +++++++++++++++++++++++++++
 2 files changed, 290 insertions(+)
 create mode 100644 setuptools/options.py
 create mode 100644 setuptools/tests/test_options.py

diff --git a/setuptools/options.py b/setuptools/options.py
new file mode 100644
index 0000000000..438a291c53
--- /dev/null
+++ b/setuptools/options.py
@@ -0,0 +1,154 @@
+"""Collection of functions and constants to help dealing with setuptools and
+distutils configuration options.
+(e.g. obtaining it from ``pyproject.toml``, comparing, applying to a dist
+object, etc..).
+"""
+import os
+from typing import TYPE_CHECKING, Any, Iterable, List, Set, Tuple, Union
+
+if TYPE_CHECKING:
+    from setuptools.dist import Distribution  # noqa
+
+Scalar = Union[int, float, bool, None, str]
+_Path = Union[os.PathLike, str, None]
+
+OPTIONS = {
+    # "obsoletes", "provides" => covered in metadata
+    # "install_requires",  => covered in metadata
+    # "setup_requires",  =>  replaced by build.requirements
+    # "tests_require",  => deprecated
+    # "python_requires", => covered in metadata
+    "zip_safe",
+    "package_dir",
+    "scripts",
+    "eager_resources",
+    "dependency_links",
+    "namespace_packages",
+    "py_modules",
+    "packages",
+    "package_data",
+    "include_package_data",
+    "exclude_package_data",
+    "data_files",
+    "entry_points",
+    "cmdclass",
+}
+
+TOOL_TABLE_RENAMES = {"script_files": "scripts"}
+
+SCALAR_VALUES = {"zip_safe", "include_package_data"}
+DICT_VALUES = {
+    "package_dir",
+    "package_data",
+    "exclude_package_data",
+    "entry_points",
+    "cmdclass"
+}
+LIST_VALUES = OPTIONS - SCALAR_VALUES - DICT_VALUES
+
+
+def normalise_key(key: str) -> str:
+    return key.lower().replace("-", "_")
+
+
+def from_pyproject(pyproject: dict, root_dir: _Path = None) -> dict:
+    """Given a dict representing the contents of a ``pyproject.toml``,
+    already validated and with directives and dynamic values expanded,
+    return dict with setuptools specific options.
+
+    This function is "forgiving" with its inputs, but strict with its outputs.
+    """
+    options = {}
+    tool_table = pyproject.get("tool", {}).get("setuptools", {})
+    for key, value in tool_table.items():
+        norm_key = normalise_key(key)
+        norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
+        if norm_key in OPTIONS:
+            options[norm_key] = value
+
+    # entry-points
+    project = pyproject.get("project", {})
+    entry_points = _normalise_entry_points(project)
+    if entry_points:
+        options["entry_points"] = entry_points
+
+    return options
+
+
+def _normalise_entry_points(project: dict):
+    entry_points = project.get("entry-points", project.get("entry_points", {}))
+    renaming = {"scripts": "console_scripts", "gui_scripts": "scripts"}
+    for key, value in project.items():
+        norm_key = normalise_key(key)
+        if norm_key in renaming and value:
+            entry_points[renaming[norm_key]] = value
+
+    return {
+        name: [f"{k} = {v}" for k, v in group.items()]
+        for name, group in entry_points.items()
+    }
+
+
+def apply(options: dict, dist: "Distribution", _source: str = "pyproject.toml"):
+    """Apply ``options`` into a ``Distribution`` object
+    (configuring the distribution object accordingly).
+
+    ``options`` should be a dict similar to the ones returned by
+    :func:`from_pyproject`.
+    """
+    for key, value in options.items():
+        setattr(dist, key, value)
+
+
+
+def compare(options1: dict, options2: dict) -> Union[bool, int]:
+    """Compare ``options1`` and ``options2`` and return:
+    - ``True`` if ``options1 == options2``
+    - ``1`` if ``options1`` is a subset of ``options2``
+    - ``-1`` if ``options2`` is a subset of ``options1``
+    - ``False`` otherwise
+
+    Both ``options1`` and ``options2`` should be dicts similar to the ones
+    returned by :func:`from_pyproject`. Extra keys will be ignored.
+    """
+    valid_keys = OPTIONS
+    return_value: Union[bool, int] = True
+    options1_keys = valid_keys & set(options1)
+    options2_keys = valid_keys & set(options2)
+    if options1_keys ^ options2_keys:
+        return False
+    if options1_keys - options2_keys:
+        return_value = -1
+    elif options2_keys - options1_keys:
+        return_value = 1
+
+    for key in (options1_keys & options2_keys):
+        value1, value2 = options1[key], options2[key]
+        if key == "data_files":
+            value1, value2 = _norm_items(value1), _norm_items(value2)
+        elif key == "cmdclass":
+            value1 = {(k, v.__qualname__) for k, v in value1.items()}
+            value2 = {(k, v.__qualname__) for k, v in value2.items()}
+        elif key in DICT_VALUES:
+            value1, value2 = _norm_items(value1.items()), _norm_items(value2.items())
+        elif key in LIST_VALUES:
+            value1, value2 = set(value1), set(value2)
+        if value1 != value2:
+            return False
+
+    return return_value
+
+
+def _norm_items(
+    items: Iterable[Tuple[str, Union[Scalar, list]]]
+) -> Set[Tuple[Scalar, ...]]:
+    return {_comparable_items(i) for i in items}
+
+
+def _comparable_items(
+    items: Tuple[str, Union[Scalar, List[Scalar]]]
+) -> Tuple[Scalar, ...]:
+    key, values = items
+    if isinstance(values, list):
+        return (key, *sorted(values))
+    return (key, values)
diff --git a/setuptools/tests/test_options.py b/setuptools/tests/test_options.py
new file mode 100644
index 0000000000..cd7a9a28e2
--- /dev/null
+++ b/setuptools/tests/test_options.py
@@ -0,0 +1,136 @@
+from configparser import ConfigParser
+from unittest.mock import Mock
+
+from setuptools import options
+from setuptools.config.pyprojecttoml import read_configuration
+from setuptools.dist import Distribution
+
+EXAMPLE = """
+[project]
+name = "myproj"
+keywords = ["some", "key", "words"]
+dynamic = ["version", "readme", "license"]
+requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+dependencies = [
+    'importlib_metadata>=0.12;python_version<"3.8"',
+    'importlib_resources>=1.0;python_version<"3.7"',
+    'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
+]
+
+[project.optional-dependencies]
+docs = [
+    "sphinx>=3",
+    "sphinx-argparse>=0.2.5",
+    "sphinx-rtd-theme>=0.4.3",
+]
+testing = [
+    "pytest>=1",
+    "coverage>=3,<5",
+]
+
+[project.scripts]
+exec = "pkg.__main__:exec"
+
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+package-dir = {"" = "src"}
+zip-safe = true
+platforms = ["any"]
+include-package-data = true
+
+[tool.setuptools.packages.find]
+where = ["src"]
+namespaces = true
+
+[tool.setuptools.cmdclass]
+sdist = "pkg.mod.CustomSdist"
+
+[tool.setuptools.dynamic]
+license = "MIT"
+license-files = ["LICENSE.txt"]
+
+[tool.setuptools.dynamic.version]
+attr = "pkg.__version__.VERSION"
+
+[tool.setuptools.dynamic.readme]
+file = ["README.md"]
+content-type = "text/markdown"
+
+[tool.setuptools.package-data]
+"*" = ["*.txt"]
+
+[tool.setuptools.data-files]
+"data" = ["files/*.txt"]
+
+[tool.distutils.sdist]
+formats = "gztar"
+
+[tool.distutils.bdist_wheel]
+universal = true
+"""
+
+
+ENTRY_POINTS = {
+    "console_scripts": {"a": "mod.a:func"},
+    "gui_scripts": {"b": "mod.b:func"},
+    "other": {"c": "mod.c:func [extra]"},
+}
+
+
+def _project_files(root_dir):
+    pyproject = root_dir / "pyproject.toml"
+
+    files = ["src/pkg/__init__.py", "src/other/nested/__init__.py", "files/file.txt"]
+    for file in files:
+        (root_dir / file).parent.mkdir(exist_ok=True, parents=True)
+        (root_dir / file).touch()
+
+    pyproject.write_text(EXAMPLE)
+    (root_dir / "README.md").write_text("hello world")
+    (root_dir / "src/pkg/mod.py").write_text("class CustomSdist: pass")
+    (root_dir / "src/pkg/__version__.py").write_text("VERSION = (3, 10)")
+    (root_dir / "src/pkg/__main__.py").write_text("def exec(): print('hello')")
+
+    entry_points = ConfigParser()
+    entry_points.read_dict(ENTRY_POINTS)
+    with open(root_dir / "entry-points.txt", "w") as f:
+        entry_points.write(f)
+
+
+EXPECTED_OPTIONS = {
+    "zip_safe": True,
+    "include_package_data": True,
+    "package_dir": {"": "src"},
+    "packages": ["pkg", "other", "other.nested"],
+    "package_data": {"": ["*.txt"]},
+    "data_files": [("data", ["files/file.txt"])],
+    "cmdclass": {"sdist": Mock(__qualname__="pkg.mod.CustomSdist")},
+    "entry_points": {"console_scripts": ["exec = pkg.__main__:exec"]}
+}
+
+
+def test_from_pyproject(tmp_path):
+    _project_files(tmp_path)
+    pyproject = tmp_path / "pyproject.toml"
+    opts = options.from_pyproject(read_configuration(pyproject), root_dir=tmp_path)
+    cmp = options.compare(opts, EXPECTED_OPTIONS)
+    if cmp is not True:
+        print("cmp:", cmp)
+        assert opts == EXPECTED_OPTIONS  # just so pytest will print the diff
+
+
+def test_apply(tmp_path):
+    _project_files(tmp_path)
+    pyproject = tmp_path / "pyproject.toml"
+    opts = options.from_pyproject(read_configuration(pyproject), root_dir=tmp_path)
+    dist = Distribution({})
+    options.apply(opts, dist)
+    assert dist.zip_safe is True
+    assert dist.include_package_data is True
+    assert set(dist.data_files[0][1]) == {"files/file.txt"}
+    cls = dist.cmdclass["sdist"]
+    assert f"{cls.__module__}.{cls.__name__}" == "pkg.mod.CustomSdist"
+    assert set(dist.entry_points["console_scripts"]) == {"exec = pkg.__main__:exec"}

From 99287868055b164c138fd2bc7b6a492638095ac9 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 13:07:14 +0000
Subject: [PATCH 18/55] Handle distutils command options in setuptools.options

This change allows comparing and applying command options stored under
the `[tool.distutils.<command>]` tables in `pyproject.toml`.
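
For example, given the configuration below (as exercised in the test
suite):

    [tool.distutils.sdist]
    formats = "gztar"

after `options.apply` the distribution object ends up with
`dist.command_options["sdist"]["formats"] == ("pyproject.toml", "gztar")`.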
---
 setuptools/options.py            | 139 ++++++++++++++++++++++++++-----
 setuptools/tests/test_options.py |   8 +-
 2 files changed, 125 insertions(+), 22 deletions(-)

diff --git a/setuptools/options.py b/setuptools/options.py
index 438a291c53..61b09550cc 100644
--- a/setuptools/options.py
+++ b/setuptools/options.py
@@ -4,9 +4,12 @@
 object, etc..).
 """
 import os
-from typing import TYPE_CHECKING, Any, Iterable, List, Set, Tuple, Union
+from itertools import chain
+from typing import (TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple,
+                    Type, Union)
 
 if TYPE_CHECKING:
+    from pkg_resources import EntryPoint  # noqa
     from setuptools.dist import Distribution  # noqa
 
 Scalar = Union[int, float, bool, None, str]
@@ -32,6 +35,7 @@
     "data_files",
     "entry_points",
     "cmdclass",
+    "command_options",
 }
 
 TOOL_TABLE_RENAMES = {"script_files": "scripts"}
@@ -42,7 +46,8 @@
     "package_data",
     "exclude_package_data",
     "entry_points",
-    "cmdclass"
+    "cmdclass",
+    "command_options",
 }
 LIST_VALUES = OPTIONS - SCALAR_VALUES - DICT_VALUES
 
@@ -59,23 +64,26 @@ def from_pyproject(pyproject: dict, root_dir: _Path = None) -> dict:
     This function is "forgiving" with its inputs, but strict with its outputs.
     """
     options = {}
+    _ = root_dir  # argument exists for symmetry with setuptools.metadata
+
+    valid_options = OPTIONS - {"command_options"}
+
     tool_table = pyproject.get("tool", {}).get("setuptools", {})
     for key, value in tool_table.items():
         norm_key = normalise_key(key)
         norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
-        if norm_key in OPTIONS:
+        if norm_key in valid_options:
             options[norm_key] = value
 
-    # entry-points
-    project = pyproject.get("project", {})
-    entry_points = _normalise_entry_points(project)
-    if entry_points:
-        options["entry_points"] = entry_points
+    _normalise_entry_points(pyproject, options)
+    _copy_command_options(pyproject, options)
 
     return options
 
 
-def _normalise_entry_points(project: dict):
+def _normalise_entry_points(pyproject: dict, options: dict):
+    project = pyproject.get("project", {})
+
     entry_points = project.get("entry-points", project.get("entry_points", {}))
     renaming = {"scripts": "console_scripts", "gui_scripts": "scripts"}
     for key, value in project.items():
@@ -83,22 +91,83 @@ def _normalise_entry_points(project: dict):
         if norm_key in renaming and value:
             entry_points[renaming[norm_key]] = value
 
-    return {
-        name: [f"{k} = {v}" for k, v in group.items()]
-        for name, group in entry_points.items()
-    }
+    if entry_points:
+        options["entry_points"] = {
+            name: [f"{k} = {v}" for k, v in group.items()]
+            for name, group in entry_points.items()
+        }
+
+
+def _copy_command_options(pyproject: dict, options: dict):
+    from distutils import log
+
+    from pkg_resources import iter_entry_points
+    from setuptools.dist import Distribution
+
+    tool_table = pyproject.get("tool", {})
+    valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
+
+    cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {}).items()
+    entry_points = (_load_ep(ep) for ep in iter_entry_points('distutils.commands'))
+    entry_points = (ep for ep in entry_points if ep)
+    for cmd, cmd_class in chain(entry_points, cmdclass):
+        opts = valid_options.get(cmd, set())
+        opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
+        valid_options[cmd] = opts
+
+    cmd_opts = {}
+    for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items():
+        cmd = normalise_key(cmd)
+        valid = valid_options.get(cmd, set())
+        cmd_opts.setdefault(cmd, {})
+        for key, value in config.items():
+            key = normalise_key(key)
+            cmd_opts[cmd][key] = value
+            if key not in valid:
+                # To avoid removing options that are specified dynamically we
+                # just log a warn...
+                log.warn(f"Command option {cmd}.{key} is not defined")
+
+    if cmd_opts:
+        options["command_options"] = cmd_opts
+
+
+def _load_ep(ep: "EntryPoint") -> Optional[Tuple[str, Type]]:
+    # Ignore any error: a broken third-party entry point should not crash the build
+    try:
+        return (ep.name, ep.load())
+    except Exception as ex:
+        from distutils import log
+        msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}"
+        log.warn(f"{msg}: {ex}")
+        return None
+
 
+def _normalise_cmd_option_key(name: str) -> str:
+    return normalise_key(name).strip("_=")
 
-def apply(options: dict, dist: "Distribution", _source: str = "pyproject.toml"):
+
+def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]:
+    return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc}
+
+
+def apply(options: dict, dist: "Distribution", source: str = "pyproject.toml"):
     """Apply ``options`` into a ``Distribution`` object
     (configuring the distribution object accordingly).
 
     ``options`` should be a dict similar to the ones returned by
-    :func:`from_pyproject`.
+    :func:`from_pyproject` (already validated and expanded).
     """
     for key, value in options.items():
         setattr(dist, key, value)
 
+    command_options = dist.command_options
+    for cmd, opts in options.get("command_options", {}).items():
+        dest = command_options.setdefault(cmd, {})
+        for key, value in opts.items():
+            # drop any dashed variant left behind by other config sources,
+            # then store the normalised key together with its source
+            dest.pop(key.replace("_", "-"), None)
+            dest[normalise_key(key)] = (source, value)
 
 
 def compare(options1: dict, options2: dict) -> Union[bool, int]:
@@ -112,15 +181,11 @@ def compare(options1: dict, options2: dict) -> Union[bool, int]:
     returned by :func:`from_pyproject`. Extra keys will be ignored.
     """
     valid_keys = OPTIONS
-    return_value: Union[bool, int] = True
     options1_keys = valid_keys & set(options1)
     options2_keys = valid_keys & set(options2)
-    if options1_keys ^ options2_keys:
+    return_value: Union[bool, int] = _compare_sets(options1_keys, options2_keys)
+    if return_value is False:
         return False
-    if options1_keys - options2_keys:
-        return_value = -1
-    elif options2_keys - options1_keys:
-        return_value = 1
 
     for key in (options1_keys & options2_keys):
         value1, value2 = options1[key], options2[key]
@@ -129,6 +194,17 @@ def compare(options1: dict, options2: dict) -> Union[bool, int]:
         elif key == "cmdclass":
             value1 = {(k, v.__qualname__) for k, v in value1.items()}
             value2 = {(k, v.__qualname__) for k, v in value2.items()}
+        elif key == "command_options":
+            value1 = _norm_items(_comparable_cmd_opts(value1).items())
+            value2 = _norm_items(_comparable_cmd_opts(value2).items())
+            cmp = _compare_sets(value1, value2)
+            # Let's be more relaxed with command options, since they can be read
+            # from other files on disk
+            all_int = isinstance(cmp, int) and isinstance(return_value, int)
+            if cmp is False or (cmp != return_value and all_int):
+                return False
+            return_value = cmp
+            continue
         elif key in DICT_VALUES:
             value1, value2 = _norm_items(value1.items()), _norm_items(value2.items())
         elif key in LIST_VALUES:
@@ -139,12 +215,33 @@ def compare(options1: dict, options2: dict) -> Union[bool, int]:
     return return_value
 
 
+def _compare_sets(value1: set, value2: set) -> Union[bool, int]:
+    """
+    ``True`` if ``value1 == ``value2``
+    ``1`` if ``value1`` is a subset of ``value2``
+    ``-1`` if ``value2`` is a subset of ``value1``
+    ``False`` otherwise
+    """
+    return_value: Union[bool, int] = True
+    if value1 ^ value2:
+        return False
+    if value1 - value2:
+        return_value = -1
+    elif value2 - value1:
+        return_value = 1
+    return return_value
+
+
 def _norm_items(
     items: Iterable[Tuple[str, Union[Scalar, list]]]
 ) -> Set[Tuple[Scalar, ...]]:
     return {_comparable_items(i) for i in items}
 
 
+def _comparable_cmd_opts(value: Dict[str, Dict[str, Any]]) -> Dict[str, Any]:
+    return {f"{cmd}.{k}": v for cmd, opts in value.items() for k, v in opts.items()}
+
+
 def _comparable_items(
     items: Tuple[str, Union[Scalar, List[Scalar]]]
 ) -> Tuple[Scalar, ...]:
diff --git a/setuptools/tests/test_options.py b/setuptools/tests/test_options.py
index cd7a9a28e2..9c1ba9f6a5 100644
--- a/setuptools/tests/test_options.py
+++ b/setuptools/tests/test_options.py
@@ -108,7 +108,11 @@ def _project_files(root_dir):
     "package_data": {"": ["*.txt"]},
     "data_files": [("data", ["files/file.txt"])],
     "cmdclass": {"sdist": Mock(__qualname__="pkg.mod.CustomSdist")},
-    "entry_points": {"console_scripts": ["exec = pkg.__main__:exec"]}
+    "entry_points": {"console_scripts": ["exec = pkg.__main__:exec"]},
+    "command_options": {
+        "sdist": {"formats": "gztar"},
+        "bdist_wheel": {"universal": True},
+    }
 }
 
 
@@ -134,3 +138,5 @@ def test_apply(tmp_path):
     cls = dist.cmdclass["sdist"]
     assert f"{cls.__module__}.{cls.__name__}" == "pkg.mod.CustomSdist"
     assert set(dist.entry_points["console_scripts"]) == {"exec = pkg.__main__:exec"}
+    assert dist.command_options["sdist"]["formats"] == ("pyproject.toml", "gztar")
+    assert dist.command_options["bdist_wheel"]["universal"] == ("pyproject.toml", True)

From 39eeeae4a92dd9873bb21f5477fa6b4b28f03063 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 14:12:15 +0000
Subject: [PATCH 19/55] Rename setuptools.config.{setupcfg => legacy_setupcfg}

In #2685, the plan the community seems to agree on is to always
translate `setup.cfg` automatically into a `pyproject.toml` equivalent
and then proceed to read/parse the configuration.

The objective of this change is to make room to implement this way of
reading `setup.cfg` in a new `setuptools.config.setupcfg` module, while
keeping the legacy way of handling `setup.cfg` around.

The rationale for keeping the legacy way around is that, to avoid
breaking existing packages during a transition period, we can compare
the results of the old and the new way of parsing the configuration
(e.g. via `setuptools.{metadata,options}.compare`) and, in case they
conflict, use the old way and emit a warning asking the user to report
the error.
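
A small sketch of how `compare` supports this (the metadata dicts here
are hand-written for illustration):

    from setuptools import metadata

    old = {"name": "pkg", "version": "1.0"}
    new = {"name": "pkg", "version": "1.0", "summary": "demo"}

    # ``new`` carries everything ``old`` does plus extra keys, so the
    # result is the subset marker ``1`` rather than ``True`` (equal)
    assert metadata.compare(old, new) == 1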
---
 setuptools/config/__init__.py                         | 2 +-
 setuptools/config/{setupcfg.py => legacy_setupcfg.py} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename setuptools/config/{setupcfg.py => legacy_setupcfg.py} (100%)

diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py
index 7a4febf9c2..98d1a39b11 100644
--- a/setuptools/config/__init__.py
+++ b/setuptools/config/__init__.py
@@ -1,6 +1,6 @@
 # For backward compatibility, the following classes/functions are exposed
 # from `config.setupcfg`
-from setuptools.config.setupcfg import (
+from setuptools.config.legacy_setupcfg import (
     ConfigHandler,
     parse_configuration,
     read_configuration,
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/legacy_setupcfg.py
similarity index 100%
rename from setuptools/config/setupcfg.py
rename to setuptools/config/legacy_setupcfg.py

From c63c58ac79c8a3927373355c9b345fea7eeb432a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 3 Dec 2021 08:35:32 +0000
Subject: [PATCH 20/55] Add `ini2toml` as a vendored dependency

Some `ini2toml` modules are removed, since they are not necessary for
setuptools' use case and might require extra dependencies (e.g. for CLI
usage, writing TOML files, or preserving formatting).

--

The automatic conversion of `setup.cfg` into `pyproject.toml` as the
*one true way* of doing declarative builds was first suggested in #1688.

--

There are advantages and disadvantages in using an external tool such
as `ini2toml` for the automatic conversion.

The main advantage is that users can use the exact same tool for
converting their old packages, and the conversion will match what
setuptools does internally. This means that, on top of the automatic
conversion, users are also offered an alternative to explicitly update
their package configuration.
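
For instance, the full (non-vendored) `ini2toml` package can perform
the same conversion by hand -- a sketch, assuming the API documented
upstream:

    from ini2toml.api import Translator

    with open("setup.cfg") as f:
        toml_str = Translator().translate(f.read(), "setup.cfg")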

The main disadvantage is that `ini2toml` works in a way that is a bit
more complex than simply parsing the `setup.cfg` file and dumping it as
`pyproject.toml` (we can think of `ini2toml` as transforming an
"AST-equivalent" representation of the INI and TOML formats -- this is
necessary for other use cases covered by the library).
In order to minimise this complexity, some parts of the
package are stripped out during the vendoring process.

Also note that currently `ini2toml` ships a "built-in" plugin for
setuptools. In the future, if there is interest, there is also the
possibility of moving this plugin directly under setuptools repository.
---
 pavement.py                                   |  21 +
 setuptools/_vendor/ini2toml/__init__.py       |  25 +
 .../_vendor/ini2toml/base_translator.py       | 153 ++++
 .../_vendor/ini2toml/drivers/__init__.py      |   0
 .../_vendor/ini2toml/drivers/configparser.py  |  25 +
 .../ini2toml/drivers/plain_builtins.py        |  73 ++
 setuptools/_vendor/ini2toml/errors.py         |  69 ++
 .../_vendor/ini2toml/intermediate_repr.py     | 277 ++++++
 .../_vendor/ini2toml/plugins/__init__.py      |  90 ++
 .../_vendor/ini2toml/plugins/best_effort.py   |  62 ++
 .../ini2toml/plugins/setuptools_pep621.py     | 853 ++++++++++++++++++
 setuptools/_vendor/ini2toml/profile.py        |  60 ++
 setuptools/_vendor/ini2toml/py.typed          |   0
 .../_vendor/ini2toml/transformations.py       | 365 ++++++++
 setuptools/_vendor/ini2toml/types.py          | 121 +++
 setuptools/_vendor/vendored.txt               |   1 +
 setuptools/extern/__init__.py                 |   2 +-
 17 files changed, 2196 insertions(+), 1 deletion(-)
 create mode 100644 setuptools/_vendor/ini2toml/__init__.py
 create mode 100644 setuptools/_vendor/ini2toml/base_translator.py
 create mode 100644 setuptools/_vendor/ini2toml/drivers/__init__.py
 create mode 100644 setuptools/_vendor/ini2toml/drivers/configparser.py
 create mode 100644 setuptools/_vendor/ini2toml/drivers/plain_builtins.py
 create mode 100644 setuptools/_vendor/ini2toml/errors.py
 create mode 100644 setuptools/_vendor/ini2toml/intermediate_repr.py
 create mode 100644 setuptools/_vendor/ini2toml/plugins/__init__.py
 create mode 100644 setuptools/_vendor/ini2toml/plugins/best_effort.py
 create mode 100644 setuptools/_vendor/ini2toml/plugins/setuptools_pep621.py
 create mode 100644 setuptools/_vendor/ini2toml/profile.py
 create mode 100644 setuptools/_vendor/ini2toml/py.typed
 create mode 100644 setuptools/_vendor/ini2toml/transformations.py
 create mode 100644 setuptools/_vendor/ini2toml/types.py

diff --git a/pavement.py b/pavement.py
index ae0e58f13c..301945c76f 100644
--- a/pavement.py
+++ b/pavement.py
@@ -60,7 +60,9 @@ def install(vendor):
     remove_all(vendor.glob('*.dist-info'))
     remove_all(vendor.glob('*.egg-info'))
     remove_all(vendor.glob('six.py'))
+    remove_ini2toml_extras(vendor)
     (vendor / '__init__.py').write_text('')
+    (vendor / 'bin').rmtree()
 
 
 def update_pkg_resources():
@@ -109,3 +111,22 @@ def install_validate_pyproject(vendor):
         ]
         subprocess.check_output(cmd)
         info(f"{pkg!r} vendorized")
+
+
+def remove_ini2toml_extras(vendor):
+    """Remove unnecessary files from `ini2toml`"""
+    unnecessary = [
+        "ini2toml/__main__.py",
+        "ini2toml/api.py",
+        "ini2toml/cli.py",
+        "ini2toml/drivers/configupdater.py",
+        "ini2toml/drivers/full_toml.py",
+        "ini2toml/drivers/lite_toml.py",
+        "ini2toml/plugins/coverage.py",
+        "ini2toml/plugins/isort.py",
+        "ini2toml/plugins/mypy.py",
+        "ini2toml/plugins/profile_independent_tasks.py",
+        "ini2toml/plugins/pytest.py",
+        "ini2toml/translator.py",
+    ]
+    remove_all(vendor / file for file in unnecessary)
diff --git a/setuptools/_vendor/ini2toml/__init__.py b/setuptools/_vendor/ini2toml/__init__.py
new file mode 100644
index 0000000000..0d8dbdf8cb
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/__init__.py
@@ -0,0 +1,25 @@
+import sys
+
+if sys.version_info[:2] >= (3, 8):  # pragma: no cover
+    # TODO: Import directly (no need for conditional) when `python_requires = >= 3.8`
+    from importlib.metadata import PackageNotFoundError, version
+else:  # pragma: no cover
+    try:
+        from importlib_metadata import PackageNotFoundError, version
+    except ImportError:
+        from pkg_resources import DistributionNotFound as PackageNotFoundError
+
+        def version(name):
+            import pkg_resources
+
+            return pkg_resources.get_distribution(name).version
+
+
+try:
+    # Change here if project is renamed and does not equal the package name
+    dist_name = __name__
+    __version__ = version(dist_name)
+except PackageNotFoundError:  # pragma: no cover
+    __version__ = "unknown"
+finally:
+    del version, PackageNotFoundError
diff --git a/setuptools/_vendor/ini2toml/base_translator.py b/setuptools/_vendor/ini2toml/base_translator.py
new file mode 100644
index 0000000000..c7fcafe24b
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/base_translator.py
@@ -0,0 +1,153 @@
+from functools import reduce
+from types import MappingProxyType
+from typing import Dict, Generic, List, Mapping, Sequence, TypeVar
+
+from . import types  # Structural/Abstract types
+from .errors import (
+    AlreadyRegisteredAugmentation,
+    InvalidAugmentationName,
+    UndefinedProfile,
+)
+from .profile import Profile, ProfileAugmentation
+from .transformations import apply
+
+T = TypeVar("T")
+EMPTY = MappingProxyType({})  # type: ignore
+
+
+class BaseTranslator(Generic[T]):
+    """Translator object that follows the public API defined in
+    :class:`ini2toml.types.Translator`. See :doc:`dev-guide` for a quick explanation of
+    concepts such as plugins, profiles, profile augmentations, etc.
+
+    Arguments
+    ---------
+
+    ini_loads_fn:
+        function to convert the ``.ini/.cfg`` file into an :class:`intermediate
+        representation <ini2toml.intermediate_repr.IntermediateRepr>` object.
+        Possible values for this argument include:
+
+        - :func:`ini2toml.drivers.configparser.parse` (when comments can be simply
+          removed)
+        - :func:`ini2toml.drivers.configupdater.parse` (when you wish to preserve
+          comments in the TOML output)
+
+    toml_dumps_fn:
+        function to convert the :class:`intermediate representation
+        <ini2toml.intermediate_repr.IntermediateRepr>` object into (ideally)
+        a TOML string.
+        If you don't exactly need a TOML string (maybe you want your TOML to
+        be represented by :class:`bytes` or simply the equivalent :obj:`dict`) you can
+        also pass a ``Callable[[IntermediateRepr], T]`` function for any desired ``T``.
+
+        Possible values for this argument include:
+
+        - :func:`ini2toml.drivers.lite_toml.convert` (when comments can be simply
+          removed)
+        - :func:`ini2toml.drivers.full_toml.convert` (when you wish to preserve
+          comments in the TOML output)
+        - :func:`ini2toml.drivers.plain_builtins.convert` (when you wish to retrieve a
+          :class:`dict` equivalent to the TOML, instead of string with the TOML syntax)
+
+    plugins:
+        list of plugin activation functions. By default no plugin will be activated.
+    profiles:
+        list of profile objects; by default no profile will be pre-loaded (plugins
+        can still add them).
+    profile_augmentations:
+        list of profile augmentations. By default no profile augmentation will be
+        pre-loaded (plugins can still add them).
+    ini_parser_opts:
+        syntax options for parsing ``.ini/.cfg`` files (see
+        :mod:`~configparser.ConfigParser` and :mod:`~configupdater.ConfigUpdater`).
+        By default it uses the standard configuration of the selected parser (depending
+        on the choice of ``ini_loads_fn``).
+
+    Tip
+    ---
+
+    Most of the time the usage of :class:`~ini2toml.translator.Translator` is
+    preferred over :class:`~ini2toml.base_translator.BaseTranslator` (unless you are
+    vendoring ``ini2toml`` and want to reduce the number of files included in your
+    project).
+    """
+
+    profiles: Dict[str, types.Profile]
+    plugins: List[types.Plugin]
+
+    def __init__(
+        self,
+        ini_loads_fn: types.IniLoadsFn,
+        toml_dumps_fn: types.IReprCollapseFn[T],
+        plugins: Sequence[types.Plugin] = (),
+        profiles: Sequence[types.Profile] = (),
+        profile_augmentations: Sequence[types.ProfileAugmentation] = (),
+        ini_parser_opts: Mapping = EMPTY,
+    ):
+        self.plugins = list(plugins)
+        self.ini_parser_opts = ini_parser_opts
+        self.profiles = {p.name: p for p in profiles}
+        self.augmentations: Dict[str, types.ProfileAugmentation] = {
+            (p.name or p.fn.__name__): p for p in profile_augmentations
+        }
+
+        self._loads_fn = ini_loads_fn
+        self._dumps_fn = toml_dumps_fn
+
+        for activate in self.plugins:
+            activate(self)
+
+    def loads(self, text: str) -> types.IntermediateRepr:
+        return self._loads_fn(text, self.ini_parser_opts)
+
+    def dumps(self, irepr: types.IntermediateRepr) -> T:
+        return self._dumps_fn(irepr)
+
+    def __getitem__(self, profile_name: str) -> types.Profile:
+        """Retrieve an existing profile (or create a new one)."""
+        if profile_name not in self.profiles:
+            profile = Profile(profile_name)
+            if self.ini_parser_opts:
+                profile = profile.replace(ini_parser_opts=self.ini_parser_opts)
+            self.profiles[profile_name] = profile
+        return self.profiles[profile_name]
+
+    def augment_profiles(
+        self,
+        fn: types.ProfileAugmentationFn,
+        active_by_default: bool = False,
+        name: str = "",
+        help_text: str = "",
+    ):
+        """Register a profile augmentation function to be called after the
+        profile is selected and before the actual translation (see :doc:`dev-guide`).
+        """
+        name = (name or fn.__name__).strip()
+        InvalidAugmentationName.check(name)
+        AlreadyRegisteredAugmentation.check(name, fn, self.augmentations)
+        help_text = help_text or fn.__doc__ or ""
+        obj = ProfileAugmentation(fn, active_by_default, name, help_text)
+        self.augmentations[name] = obj
+
+    def _add_augmentations(
+        self, profile: types.Profile, explicit_activation: Mapping[str, bool] = EMPTY
+    ) -> types.Profile:
+        for aug in self.augmentations.values():
+            if aug.is_active(explicit_activation.get(aug.name)):
+                aug.fn(profile)
+        return profile
+
+    def translate(
+        self,
+        ini: str,
+        profile_name: str,
+        active_augmentations: Mapping[str, bool] = EMPTY,
+    ) -> T:
+        UndefinedProfile.check(profile_name, list(self.profiles.keys()))
+        profile = self._add_augmentations(self[profile_name], active_augmentations)
+
+        ini = reduce(apply, profile.pre_processors, ini)
+        irepr = self.loads(ini)
+        irepr = reduce(apply, profile.intermediate_processors, irepr)
+        toml = self.dumps(irepr)
+        return reduce(apply, profile.post_processors, toml)
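
For illustration, a minimal sketch of how this translator can be wired with the
drivers added in this patch (assuming the vendored package is importable as
``ini2toml``, and that ``Profile.pre_processors`` is a plain list, as used by
``translate`` above):

    from ini2toml.base_translator import BaseTranslator
    from ini2toml.drivers.configparser import parse
    from ini2toml.drivers.plain_builtins import convert

    translator = BaseTranslator(ini_loads_fn=parse, toml_dumps_fn=convert)
    profile = translator["demo"]              # implicitly creates the profile
    profile.pre_processors.append(str.lower)  # pre-process the raw INI text
    result = translator.translate("[A]\nx = 1\n", profile_name="demo")
    # result == {"a": {"x": "1"}}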
diff --git a/setuptools/_vendor/ini2toml/drivers/__init__.py b/setuptools/_vendor/ini2toml/drivers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/ini2toml/drivers/configparser.py b/setuptools/_vendor/ini2toml/drivers/configparser.py
new file mode 100644
index 0000000000..68c2a3e0ed
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/drivers/configparser.py
@@ -0,0 +1,25 @@
+from configparser import ConfigParser
+from types import MappingProxyType
+from typing import Mapping
+
+from ..types import IntermediateRepr
+
+EMPTY: Mapping = MappingProxyType({})
+
+
+def parse(text: str, opts: Mapping = EMPTY) -> IntermediateRepr:
+    cfg = ConfigParser(**opts)
+    cfg.read_string(text)
+    irepr = IntermediateRepr()
+    for name, section in cfg.items():
+        if name == "DEFAULT":
+            continue
+        irepr.append(name, translate_section(section))
+    return irepr
+
+
+def translate_section(section: Mapping):
+    irepr = IntermediateRepr()
+    for name, value in section.items():
+        irepr.append(name, value)
+    return irepr
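
A small usage sketch (assuming a top-level ``ini2toml`` import): this parser
drops comments and skips the ``DEFAULT`` section:

    from ini2toml.drivers.configparser import parse

    irepr = parse("[metadata]\nname = demo\n")
    list(irepr.keys())         # -> ['metadata']
    irepr["metadata"]["name"]  # -> 'demo'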
diff --git a/setuptools/_vendor/ini2toml/drivers/plain_builtins.py b/setuptools/_vendor/ini2toml/drivers/plain_builtins.py
new file mode 100644
index 0000000000..d57762e948
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/drivers/plain_builtins.py
@@ -0,0 +1,73 @@
+"""The purpose of this module is to "collapse" the intermediate representation of a TOML
+document into Python builtin data types (mainly a composition of :class:`dict`,
+:class:`list`, :class:`int`, :class:`float`, :class:`bool`, :class:`str`).
+
+This is **not a loss-less** process, since comments are not preserved.
+"""
+from collections.abc import Mapping, MutableMapping
+from functools import singledispatch
+from typing import Any, TypeVar
+
+from ..errors import InvalidTOMLKey
+from ..types import Commented, CommentedKV, CommentedList, HiddenKey, IntermediateRepr
+
+__all__ = [
+    "convert",
+    "collapse",
+]
+
+M = TypeVar("M", bound=MutableMapping)
+
+
+def convert(irepr: IntermediateRepr) -> dict:
+    return collapse(irepr)
+
+
+@singledispatch
+def collapse(obj):
+    # Catch all
+    return obj
+
+
+@collapse.register(Commented)
+def _collapse_commented(obj: Commented) -> Any:
+    return obj.value_or(None)
+
+
+@collapse.register(CommentedList)
+def _collapse_commented_list(obj: CommentedList) -> list:
+    return [collapse(v) for v in obj.as_list()]
+
+
+@collapse.register(CommentedKV)
+def _collapse_commented_kv(obj: CommentedKV) -> dict:
+    return {k: collapse(v) for k, v in obj.as_dict().items()}
+
+
+@collapse.register(Mapping)
+def _collapse_mapping(obj: Mapping) -> dict:
+    return _convert_irepr_to_dict(obj, {})
+
+
+@collapse.register(list)
+def _collapse_list(obj: list) -> list:
+    return [collapse(e) for e in obj]
+
+
+def _convert_irepr_to_dict(irepr: Mapping, out: M) -> M:
+    for key, value in irepr.items():
+        if isinstance(key, HiddenKey):
+            continue
+        elif isinstance(key, tuple):
+            parent_key, *rest = key
+            if not isinstance(parent_key, str):
+                raise InvalidTOMLKey(key)
+            p = out.setdefault(parent_key, {})
+            if not isinstance(p, MutableMapping):
+                msg = f"Value for `{parent_key}` expected to be Mapping, found {p!r}"
+                raise ValueError(msg)
+            nested_key = rest[0] if len(rest) == 1 else tuple(rest)
+            _convert_irepr_to_dict({nested_key: value}, p)
+        elif isinstance(key, (int, str)):
+            out[str(key)] = collapse(value)
+    return out
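
A minimal sketch of the collapsing behaviour (assuming top-level ``ini2toml``
imports):

    from ini2toml.drivers.plain_builtins import collapse
    from ini2toml.intermediate_repr import Commented, CommentedKV, IntermediateRepr

    collapse(Commented(42, comment="answer"))              # -> 42 (comment dropped)
    collapse(CommentedKV([Commented([("a", "1")], "c")]))  # -> {'a': '1'}
    collapse(IntermediateRepr({("tool", "demo"): {"k": "v"}}))
    # -> {'tool': {'demo': {'k': 'v'}}} (tuple keys become nested dicts)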
diff --git a/setuptools/_vendor/ini2toml/errors.py b/setuptools/_vendor/ini2toml/errors.py
new file mode 100644
index 0000000000..337428d84e
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/errors.py
@@ -0,0 +1,69 @@
+from textwrap import dedent
+from typing import Callable, List, Mapping, Sequence
+
+from . import types
+
+
+class UndefinedProfile(ValueError):
+    """The given profile ('{name}') is not registered with ``ini2toml``.
+    Are you sure you have the right plugins installed and loaded?
+    """
+
+    def __init__(self, name: str, available: Sequence[str]):
+        msg = self.__class__.__doc__ or ""
+        super().__init__(msg.format(name=name) + f"Available: {', '.join(available)}")
+
+    @classmethod
+    def check(cls, name: str, available: List[str]):
+        if name not in available:
+            raise cls(name, available)
+
+
+class AlreadyRegisteredAugmentation(ValueError):
+    """The profile augmentation '{name}' is already registered for '{existing}'.
+
+    Some installed plugins seem to be in conflict with each other,
+    please check '{new}' and '{existing}'.
+    If you are the developer behind one of them, please use a different name.
+    """
+
+    def __init__(self, name: str, new: Callable, existing: Callable):
+        existing_id = f"{existing.__module__}.{existing.__qualname__}"
+        new_id = f"{new.__module__}.{new.__qualname__}"
+        msg = dedent(self.__class__.__doc__ or "")
+        super().__init__(msg.format(name=name, new=new_id, existing=existing_id))
+
+    @classmethod
+    def check(
+        cls, name: str, fn: Callable, registry: Mapping[str, types.ProfileAugmentation]
+    ):
+        if name in registry:
+            raise cls(name, fn, registry[name].fn)
+
+
+class InvalidAugmentationName(ValueError):
+    """Profile augmentations should be valid python identifiers"""
+
+    def __init__(self, name: str):
+        msg = self.__class__.__doc__ or ""
+        super().__init__(f"{msg} ('{name}' given)")
+
+    @classmethod
+    def check(cls, name: str):
+        if not name.isidentifier():
+            raise cls(name)
+
+
+class InvalidTOMLKey(ValueError):
+    """{key!r} is not a valid key in the intermediate TOML representation"""
+
+    def __init__(self, key):
+        msg = self.__doc__.format(key=key)
+        super().__init__(msg)
+
+
+class InvalidCfgBlock(ValueError):  # pragma: no cover -- not supposed to happen
+    """Something is wrong with the provided CFG AST, the given block is not valid."""
+
+    def __init__(self, block):
+        super().__init__(f"{block.__class__}: {block}", {"block_object": block})
diff --git a/setuptools/_vendor/ini2toml/intermediate_repr.py b/setuptools/_vendor/ini2toml/intermediate_repr.py
new file mode 100644
index 0000000000..02b974ea61
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/intermediate_repr.py
@@ -0,0 +1,277 @@
+"""Intermediate representations used by ``ini2toml`` when transforming between
+the INI and TOML syntaxes.
+"""
+from collections import UserList
+from enum import Enum
+from itertools import chain
+from pprint import pformat
+from textwrap import indent
+from types import MappingProxyType
+from typing import (
+    Any,
+    Dict,
+    Generic,
+    Iterable,
+    List,
+    Mapping,
+    MutableMapping,
+    Optional,
+    Sequence,
+    Tuple,
+    TypeVar,
+    Union,
+    cast,
+)
+from uuid import uuid4
+
+T = TypeVar("T")
+S = TypeVar("S")
+R = TypeVar("R", bound="IntermediateRepr")
+
+KV = Tuple[str, T]
+
+NotGiven = Enum("NotGiven", "NOT_GIVEN")
+NOT_GIVEN = NotGiven.NOT_GIVEN
+
+EMPTY: Mapping = MappingProxyType({})
+
+
+class HiddenKey:
+    __slots__ = ("_value",)
+
+    def __init__(self):
+        self._value = uuid4().int
+
+    def __eq__(self, other):
+        return self.__class__ is other.__class__ and self._value == other._value
+
+    def __hash__(self):
+        return hash((self.__class__.__name__, self._value))
+
+    def __str__(self):
+        return f"{self.__class__.__name__}()"
+
+    __repr__ = __str__
+
+
+class WhitespaceKey(HiddenKey):
+    pass
+
+
+class CommentKey(HiddenKey):
+    pass
+
+
+Key = Union[str, HiddenKey, Tuple[Union[str, HiddenKey], ...]]
+
+
+class IntermediateRepr(MutableMapping):
+    def __init__(
+        self,
+        elements: Mapping[Key, Any] = EMPTY,
+        order: Sequence[Key] = (),
+        inline_comment: str = "",
+        **kwargs,
+    ):
+        el = chain(elements.items(), kwargs.items())
+        self.elements: Dict[Key, Any] = {}
+        self.order: List[Key] = []
+        self.inline_comment = inline_comment
+        self.elements.update(el)
+        self.order.extend(order or self.elements.keys())
+        elem_not_in_order = any(k not in self.order for k in self.elements)
+        order_not_in_elem = any(k not in self.elements for k in self.order)
+        if elem_not_in_order or order_not_in_elem:
+            raise ValueError(f"{order} and {elements} need to have the same keys")
+
+    def __repr__(self):
+        inner = ",\n".join(
+            indent(f"{k}={pformat(getattr(self, k))}", "    ")
+            for k in ("elements", "order", "inline_comment")
+        )
+        return f"{self.__class__.__name__}(\n{inner}\n)"
+
+    def __eq__(self, other):
+        L = len(self)
+        if not (
+            isinstance(other, self.__class__)
+            and self.inline_comment == other.inline_comment
+            and len(other) == L
+        ):
+            return False
+        self_ = [(str(k), v) for k, v in self.items()]
+        other_ = [(str(k), v) for k, v in other.items()]
+        return all(self_[i] == other_[i] for i in range(L))
+
+    def rename(self, old_key: Key, new_key: Key, ignore_missing=False):
+        """This method renames an existing key, without changing its position.
+        Notice that ``new_key`` cannot be already present, and that trying to rename
+        a non-pre-existing key will also result in error (unless
+        ``ignore_missing=True``).
+        """
+        if old_key == new_key:
+            return self
+        if new_key in self.order:
+            raise KeyError(f"new_key={new_key!r} already exists")
+        if old_key not in self.order and ignore_missing:
+            return self
+        i = self.order.index(old_key)
+        self.order[i] = new_key
+        self.elements[new_key] = self.elements.pop(old_key)
+        return self
+
+    def insert(self, position: int, key: Key, value: Any):
+        """Simulate the position-aware :meth:`collections.abc.MutableMapping.insert`
+        method, but also require a ``key`` to be specified.
+        """
+        if key in self.order:
+            raise KeyError(f"key={key!r} already exists")
+        self.order.insert(position, key)
+        self.elements[key] = value
+
+    def index(self, key: Key) -> int:
+        """Find the position of ``key``"""
+        return self.order.index(key)
+
+    def append(self, key: Key, value: Any):
+        """Simulate the position-aware :meth:`collections.abc.MutableMapping.append`
+        method, but also require a ``key`` to be specified.
+        """
+        self.insert(len(self.order), key, value)
+
+    def copy(self: R) -> R:
+        return self.__class__(self.elements.copy(), self.order[:], self.inline_comment)
+
+    def replace_first_remove_others(
+        self, existing_keys: Sequence[Key], new_key: Key, value: Any
+    ):
+        """Find the first key in ``existing_keys`` that existing in the intermediate
+        representation, and replaces it with ``new_key`` (similar to
+        :meth:`replace`).
+        All the other keys in ``existing_keys`` are removed and the value of
+        ``new_key`` is set to ``value``.
+        """
+        idx = [self.index(k) for k in existing_keys if k in self]
+        if not idx:
+            i = len(self)
+        else:
+            i = sorted(idx)[0]
+            for key in existing_keys:
+                self.pop(key, None)
+        self.insert(i, new_key, value)
+        return i
+
+    def __getitem__(self, key: Key):
+        return self.elements[key]
+
+    def __setitem__(self, key: Key, value: Any):
+        if key not in self.elements:
+            self.order.append(key)
+        self.elements[key] = value
+
+    def __delitem__(self, key: Key):
+        del self.elements[key]
+        self.order.remove(key)
+
+    def __iter__(self):
+        return iter(self.order)
+
+    def __len__(self):
+        return len(self.order)
+
+
+# These objects hold information about the processed values + comments
+# in such a way that we can later convert them to TOML while still preserving
+# the comments (if we want to).
+
+
+class Commented(Generic[T]):
+    def __init__(
+        self,
+        value: Union[T, NotGiven] = NOT_GIVEN,
+        comment: Optional[str] = None,
+    ):
+        self.value = value
+        self.comment = comment
+
+    def __eq__(self, other):
+        return (
+            self.__class__ is other.__class__
+            and self.value == other.value
+            and self.comment == other.comment
+        )
+
+    def comment_only(self):
+        return self.value is NOT_GIVEN
+
+    def has_comment(self):
+        return bool(self.comment)
+
+    def value_or(self, fallback: S) -> Union[T, S]:
+        return fallback if self.value is NOT_GIVEN else self.value
+
+    def as_commented_list(self) -> "CommentedList[T]":
+        value = [] if self.value is NOT_GIVEN else [self.value]
+        return CommentedList([Commented(value, self.comment)])
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}({self.value!r}, {self.comment!r})"
+
+
+class CommentedList(Generic[T], UserList):
+    def __init__(self, data: Sequence[Commented[List[T]]] = ()):
+        super().__init__(data)
+
+    def as_list(self) -> list:
+        out = []
+        for entry in self:
+            values = entry.value_or([])
+            for value in values:
+                out.append(value)
+        return out
+
+    def insert_line(self, i, values: Iterable[T], comment: Optional[str] = None):
+        values = list(values)
+        if values or comment:
+            self.insert(i, Commented(values, comment))
+
+
+class CommentedKV(Generic[T], UserList):
+    def __init__(self, data: Sequence[Commented[List[KV[T]]]] = ()):
+        super().__init__(data)
+
+    def find(self, key: str) -> Optional[Tuple[int, int]]:
+        for i, row in enumerate(self):
+            for j, item in enumerate(row.value_or([])):
+                if item[0] == key:
+                    return (i, j)
+        return None
+
+    def insert_line(self, i, values: Iterable[KV[T]], comment: Optional[str] = None):
+        values = list(values)
+        if values or comment:
+            self.insert(i, Commented(values, comment))
+        return self
+
+    def as_dict(self) -> dict:
+        out = {}
+        for entry in self:
+            values = (v for v in entry.value_or([cast(KV, ())]) if v)
+            for k, v in values:
+                out[k] = v
+        return out
+
+    def to_ir(self) -> IntermediateRepr:
+        """:class:`CommentedKV` are usually intended to represent INI options, while
+        :class:`IntermediateRepr` are usually intended to represent INI sections.
+        Therefore this function allows "promoting" an option-equivalent to a
+        section-equivalent representation.
+        """
+        irepr = IntermediateRepr()
+        for row in self:
+            for key, value in row.value_or([]):
+                irepr[key] = value
+            if row.has_comment():
+                irepr[key] = Commented(value, row.comment)
+
+        return irepr
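
A short sketch of the position-aware operations (assuming a top-level
``ini2toml`` import):

    from ini2toml.intermediate_repr import IntermediateRepr

    ir = IntermediateRepr({"a": 1, "b": 2})
    ir.rename("a", "alpha")  # key renamed in place, position preserved
    ir.insert(0, "z", 0)     # position-aware insertion
    ir.replace_first_remove_others(["alpha", "b"], "ab", 3)
    list(ir.items())         # -> [('z', 0), ('ab', 3)]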
diff --git a/setuptools/_vendor/ini2toml/plugins/__init__.py b/setuptools/_vendor/ini2toml/plugins/__init__.py
new file mode 100644
index 0000000000..b543ae9fb1
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/plugins/__init__.py
@@ -0,0 +1,90 @@
+# The code in this module is mostly borrowed/adapted from PyScaffold and was originally
+# published under the MIT license
+# The original PyScaffold license can be found in 'tests/examples/pyscaffold'
+
+import sys
+from textwrap import dedent
+from typing import Callable, Iterable, List, Optional
+
+from .. import __version__
+from ..types import Plugin
+
+ENTRYPOINT_GROUP = "ini2toml.processing"
+
+try:
+    if sys.version_info[:2] >= (3, 8):  # pragma: no cover
+        # TODO: Import directly (no conditional) when `python_requires = >= 3.8`
+        from importlib.metadata import EntryPoint, entry_points
+    else:  # pragma: no cover
+        from importlib_metadata import EntryPoint, entry_points
+
+    def iterate_entry_points(group=ENTRYPOINT_GROUP) -> Iterable[EntryPoint]:
+        """Produces a generator yielding an EntryPoint object for each plugin registered
+        via `setuptools`_ entry point mechanism.
+
+        This method can be used in conjunction with :obj:`load_from_entry_point` to
+        filter the plugins before actually loading them.
+
+
+        .. _setuptools: https://setuptools.pypa.io/en/latest/userguide/entry_point.html
+        """  # noqa
+        entries = entry_points()
+        if hasattr(entries, "select"):
+            # The select method was introduced in importlib_metadata 3.9/3.10
+            # and the previous dict interface was declared deprecated
+            entries_ = entries.select(group=group)  # type: ignore
+        else:
+            # TODO: Once Python 3.10 becomes the oldest version supported, this fallback
+            #       and conditional statement can be removed.
+            entries_ = (plugin for plugin in entries.get(group, []))
+        return sorted(entries_, key=lambda e: e.name)
+
+except ImportError:  # pragma: no cover
+    from pkg_resources import EntryPoint, iter_entry_points  # type: ignore
+
+    def iterate_entry_points(group=ENTRYPOINT_GROUP) -> Iterable[EntryPoint]:
+        return iter_entry_points(group)
+
+
+def load_from_entry_point(entry_point: EntryPoint) -> Plugin:
+    """Carefully load the plugin, raising a meaningful message in case of errors"""
+    try:
+        return entry_point.load()
+    except Exception as ex:
+        raise ErrorLoadingPlugin(entry_point=entry_point) from ex
+
+
+def list_from_entry_points(
+    group: str = ENTRYPOINT_GROUP,
+    filtering: Callable[[EntryPoint], bool] = lambda _: True,
+) -> List[Plugin]:
+    """Produces a list of plugin objects for each plugin registered
+    via `setuptools`_ entry point mechanism.
+
+    Args:
+        group: name of the setuptools entry point group where the plugins are
+            registered
+        filtering: function returning a boolean deciding if the entry point should be
+            loaded and included (or not) in the final list. A ``True`` return means the
+            plugin should be included.
+
+    .. _setuptools: https://setuptools.pypa.io/en/latest/userguide/entry_point.html
+    """  # noqa
+    return [
+        load_from_entry_point(e) for e in iterate_entry_points(group) if filtering(e)
+    ]
+
+
+class ErrorLoadingPlugin(RuntimeError):
+    """There was an error loading '{plugin}'.
+    Please make sure you have installed a version of the plugin that is compatible
+    with {package} {version}. You can also try uninstalling it.
+    """
+
+    def __init__(self, plugin: str = "", entry_point: Optional[EntryPoint] = None):
+        if entry_point and not plugin:
+            plugin = getattr(entry_point, "module", entry_point.name)
+
+        sub = dict(package=__package__, version=__version__, plugin=plugin)
+        msg = dedent(self.__doc__ or "").format(**sub).splitlines()
+        super().__init__(f"{msg[0]}\n{' '.join(msg[1:])}")
diff --git a/setuptools/_vendor/ini2toml/plugins/best_effort.py b/setuptools/_vendor/ini2toml/plugins/best_effort.py
new file mode 100644
index 0000000000..3122aa22ff
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/plugins/best_effort.py
@@ -0,0 +1,62 @@
+import re
+from functools import partial
+from typing import TypeVar
+
+from ..transformations import split_comment, split_kv_pairs, split_list, split_scalar
+from ..types import HiddenKey, IntermediateRepr, Translator
+
+M = TypeVar("M", bound=IntermediateRepr)
+
+SECTION_SPLITTER = re.compile(r"\.|:|\\")
+KEY_SEP = "="
+
+
+def activate(translator: Translator):
+    profile = translator["best_effort"]
+    plugin = BestEffort()
+    profile.intermediate_processors.append(plugin.process_values)
+    profile.help_text = plugin.__doc__ or ""
+
+
+class BestEffort:
+    """Guess option value conversion based on the string format"""
+
+    def __init__(
+        self,
+        key_sep=KEY_SEP,
+        section_splitter=SECTION_SPLITTER,
+    ):
+        self.key_sep = key_sep
+        self.section_splitter = section_splitter
+        self.split_dict = partial(split_kv_pairs, key_sep=KEY_SEP)
+
+    def process_values(self, doc: M) -> M:
+        doc_items = list(doc.items())
+        for name, section in doc_items:
+            doc[name] = self.apply_best_effort_to_section(section)
+            # Separate nested sections
+            if self.section_splitter.search(name):
+                keys = tuple(self.section_splitter.split(name))
+                doc.rename(name, keys)
+        return doc
+
+    def apply_best_effort_to_section(self, section: M) -> M:
+        options = list(section.items())
+        # Convert option values:
+        for field, value in options:
+            self.apply_best_effort(section, field, value)
+        return section
+
+    def apply_best_effort(self, container: M, field: str, value: str):
+        if isinstance(field, HiddenKey):
+            return
+        lines = value.splitlines()
+        if len(lines) > 1:
+            if self.key_sep in value:
+                container[field] = self.split_dict(value)
+            else:
+                container[field] = split_list(value)
+        elif field.endswith("version"):
+            container[field] = split_comment(value)
+        else:
+            container[field] = split_scalar(value)
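
A sketch of what this plugin does to an intermediate document (assuming
top-level ``ini2toml`` imports):

    from ini2toml.intermediate_repr import IntermediateRepr
    from ini2toml.plugins.best_effort import BestEffort

    doc = IntermediateRepr({"tool.demo": IntermediateRepr({"flag": "true"})})
    BestEffort().process_values(doc)
    # "tool.demo" is renamed to the nested key ("tool", "demo"), and the value
    # of "flag" is coerced according to its string format (via split_scalar)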
diff --git a/setuptools/_vendor/ini2toml/plugins/setuptools_pep621.py b/setuptools/_vendor/ini2toml/plugins/setuptools_pep621.py
new file mode 100644
index 0000000000..05b2d5555e
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/plugins/setuptools_pep621.py
@@ -0,0 +1,853 @@
+import logging
+import re
+import warnings
+from functools import partial, reduce
+from itertools import chain, zip_longest
+from typing import (
+    Any,
+    Dict,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
+
+try:
+    from packaging.requirements import Requirement
+except ImportError:  # pragma: no cover
+    from setuptools.extern.packaging.requirements import Requirement  # type: ignore
+
+from ..transformations import (
+    apply,
+    coerce_bool,
+    deprecated,
+    kebab_case,
+    noop,
+    split_comment,
+    split_kv_pairs,
+    split_list,
+)
+from ..types import Commented, CommentedKV, CommentedList, CommentKey, HiddenKey
+from ..types import IntermediateRepr as IR
+from ..types import Transformation, Translator, WhitespaceKey
+from .best_effort import BestEffort
+
+try:
+    from setuptools._distutils import command as distutils_commands
+except ImportError:  # pragma: no cover
+    from distutils import command as distutils_commands
+
+R = TypeVar("R", bound=IR)
+
+RenameRules = Dict[Tuple[str, ...], Union[Tuple[Union[str, int], ...], None]]
+ProcessingRules = Dict[Tuple[str, ...], Transformation]
+
+
+_logger = logging.getLogger(__name__)
+
+chain_iter = chain.from_iterable
+
+# Functions that split values from comments and parse those values
+split_list_comma = partial(split_list, sep=",", subsplit_dangling=False)
+split_list_semi = partial(split_list, sep=";", subsplit_dangling=False)
+split_hash_comment = partial(split_comment, comment_prefixes="#")  # avoid splitting `;`
+split_bool = partial(split_comment, coerce_fn=coerce_bool)
+split_kv_of_lists = partial(split_kv_pairs, coerce_fn=split_list_comma)
+# URLs can contain the # symbol
+split_kv_urls = partial(split_kv_pairs, comment_prefixes=(" #",))
+split_url = partial(split_comment, comment_prefixes=(" #",))
+
+SECTION_SPLITTER = re.compile(r"\.|:")
+SETUPTOOLS_SECTIONS = ("metadata", "options")
+SKIP_CHILD_NORMALISATION = (
+    "options.entry_points",
+    "options.package_data",
+    "options.exclude_package_data",
+    "options.extras_require",
+    "options.data_files",
+)
+COMMAND_SECTIONS = (
+    "global",
+    "alias",
+    "install",
+    "develop",
+    "sdist",
+    "bdist",
+    "bdist_wheel",
+    *getattr(distutils_commands, "__all__", []),
+)
+DEFAULT_LICENSE_FILES = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
+# defaults from the `wheel` package
+
+
+def activate(translator: Translator):
+    plugin = SetuptoolsPEP621()
+    profile = translator["setup.cfg"]
+    profile.intermediate_processors += [plugin.normalise_keys, plugin.pep621_transform]
+    profile.help_text = plugin.__doc__ or ""
+
+
+class SetuptoolsPEP621:
+    """Convert settings to 'pyproject.toml' based on :pep:`621`"""
+
+    BUILD_REQUIRES = ("setuptools", "wheel")
+
+    def __init__(self):
+        self._be = BestEffort(key_sep="=")
+
+    @classmethod
+    def template(
+        cls,
+        ir_cls: Type[R] = IR,  # type: ignore
+        build_requires: Sequence[str] = (),
+    ) -> R:
+        build_system = {
+            "requires": [*(build_requires or cls.BUILD_REQUIRES)],
+            # ^ NOTE: the code ahead assumes no version
+            "build-backend": "setuptools.build_meta",
+        }
+        tpl = {
+            "metadata": ir_cls(),  # NOTE: will be renamed later
+            "build-system": ir_cls(build_system),  # type: ignore
+            "tool": ir_cls(),
+        }
+        return ir_cls(tpl)  # type: ignore
+
+    def setupcfg_aliases(self):
+        """``setup.cfg`` aliases as defined in:
+        https://setuptools.pypa.io/en/stable/userguide/declarative_config.html
+        """
+        return {
+            "classifier": "classifiers",
+            "summary": "description",
+            "platform": "platforms",
+            "license-file": "license-files",
+            "home-page": "url",
+        }
+
+    def processing_rules(self) -> ProcessingRules:
+        """Value type processing, as defined in:
+        https://setuptools.pypa.io/en/stable/userguide/declarative_config.html
+        """
+        # Fields not listed below will be transformed via split_comment by default
+        return {
+            ("metadata", "version"): directive("file", "attr"),
+            ("metadata", "classifiers"): directive("file", orelse=split_list_comma),
+            ("metadata", "keywords"): split_list_comma,
+            ("metadata", "description"): directive("file"),
+            # ---
+            ("metadata", "long-description"): directive("file", orelse=noop),
+            ("metadata", "long-description-content-type"): split_hash_comment,
+            # => NOTICE: further processed via
+            #            `merge_and_rename_long_description_and_content_type`
+            # ---
+            ("metadata", "license-files"): split_list_comma,
+            # => NOTICE: in PEP 621, it should be a single file
+            #            further processed via `handle_license_and_files`
+            # ---
+            ("metadata", "url"): split_url,
+            ("metadata", "download-url"): split_url,
+            ("metadata", "project-urls"): split_kv_urls,
+            # => NOTICE: further processed via `merge_and_rename_urls`
+            # ---- Not covered by PEP 621 ----
+            ("metadata", "platforms"): split_list_comma,
+            # ---
+            ("metadata", "provides"): split_list_comma,
+            ("metadata", "requires"): deprecated("requires", split_list_comma),
+            ("metadata", "obsoletes"): split_list_comma,
+            # => NOTICE: not supported by pip
+            # ---- Options ----
+            ("options", "zip-safe"): split_bool,
+            ("options", "setup-requires"): split_deps,
+            ("options", "install-requires"): split_deps,
+            ("options", "tests-require"): split_deps,
+            ("options", "scripts"): split_list_comma,
+            ("options", "eager-resources"): split_list_comma,
+            ("options", "dependency-links"): deprecated(
+                "dependency-links", split_list_comma
+            ),  # noqa
+            ("options", "entry-points"): directive(
+                "file", orelse=value_error("option.entry-points")
+            ),
+            ("options", "include-package-data"): split_bool,
+            ("options", "package-dir"): split_kv_pairs,
+            ("options", "namespace-packages"): split_list_comma,
+            ("options", "py-modules"): split_list_comma,
+            ("options", "cmdclass"): split_kv_pairs,
+            ("options", "data-files"): deprecated("data-files", split_kv_of_lists),
+            ("options", "packages"): directive(
+                "find", "find_namespace", orelse=split_list_comma
+            ),
+            ("options.packages.find", "include"): split_list_comma,
+            ("options.packages.find", "exclude"): split_list_comma,
+            ("options.packages.find", "exclude"): split_list_comma,
+        }
+        # See also dependent_processing_rules
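+        # For instance, with the rules above a `setup.cfg` entry such as
+        #     version = attr: mypkg.__version__
+        # (hypothetical `mypkg`) is parsed as a directive (later handled by
+        # `handle_dynamic`), while
+        #     keywords = python, packaging
+        # becomes the list ["python", "packaging"] via `split_list_comma`.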
+
+    def dependent_processing_rules(self, doc: IR) -> ProcessingRules:
+        """Dynamically create processing rules, such as :func:`processing_rules` based
+        on the existing document.
+        """
+        groups: Mapping[str, Transformation] = {
+            "options.extras-require": split_deps,
+            "options.package-data": split_list_comma,
+            "options.exclude-package-data": split_list_comma,
+            "options.data-files": split_list_comma,
+            "options.entry-points": split_kv_pairs,
+        }
+        return {
+            (g, k): fn
+            for g, fn in groups.items()
+            for k in doc.get(g, ())
+            if isinstance(k, str)
+        }
+
+    def apply_value_processing(self, doc: R) -> R:
+        """Process ``setup.cfg`` values according to :meth:`processing_rules` and
+        :meth:`dependent_processing_rules`.
+
+        This function assumes all field names were normalised by :meth:`normalise_keys`.
+        """
+        default = {
+            (name, option): split_comment
+            for name, section in doc.items()
+            if name in ("metadata", "options")
+            for option in section
+            if isinstance(option, (str, tuple)) and not isinstance(option, HiddenKey)
+        }
+        transformations: dict = {
+            **default,
+            **self.processing_rules(),
+            **self.dependent_processing_rules(doc),
+        }
+        for (section, option), fn in transformations.items():
+            value = doc.get(section, {}).get(option, None)
+            if value is not None:
+                doc[section][option] = fn(value)
+        return doc
+
+    def merge_and_rename_urls(self, doc: R) -> R:
+        """The following renames can be applied when comparing setuptools metadata and
+        :pep:`621`::
+
+            url => urls.homepage
+            download-url => urls.download
+            project-urls.* => urls.*
+        """
+        metadata: IR = doc["metadata"]
+        new_urls = [
+            (dest, metadata.pop(orig))
+            for orig, dest in [("url", "Homepage"), ("download-url", "Download")]
+            if orig in metadata
+        ]
+        urls = metadata.get("project-urls", CommentedKV())
+        for k, v in reversed(new_urls):
+            urls.insert_line(0, [(k, v.value)], v.comment)
+
+        if urls.as_dict():
+            keys = ("project-urls", "url", "download-url")
+            metadata.replace_first_remove_others(keys, "urls", urls)
+        return doc
+
+    def merge_authors_maintainers_and_emails(self, doc: R) -> R:
+        """When transforming setuptools metadata and :pep:`621`, we have to merge
+        ``author/maintainer`` and ``author-email/maintainer-email`` into a single
+        dict-like object with 2 keys.
+        Some projects also provide multiple, comma separated, values for each field.
+        In that case we assume that the value for the i-th author/maintainer should be
+        paired with to the i-th author-email/maintainer-email value.
+        """
+        metadata: IR = doc["metadata"]
+
+        def _split_values(field):
+            commented: Commented[str] = metadata.get(field, Commented())
+            values = commented.value_or("").strip().split(",")
+            return (v.strip() for v in values), commented.comment
+
+        for key in ("author", "maintainer"):
+            fields = (key, f"{key}-email")
+            values, comments = zip(*(_split_values(f) for f in fields))
+            combined = (
+                {k: v for k, v in zip(("name", "email"), person_data) if v}
+                # ^-- Remove empty fields
+                for person_data in zip_longest(*values, fillvalue="")
+            )
+            people = [IR(c) for c in combined if c]  # type: ignore[arg-type]
+
+            if people:
+                # author/maintainer => author**S**/maintainer**S**
+                i = metadata.replace_first_remove_others(fields, f"{key}s", people)
+                for j, cmt in enumerate(c for c in comments if c):
+                    metadata.insert(j + i + 1, CommentKey(), cmt)
+        return doc
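+        # For example (hypothetical values):
+        #     author = Jane, John
+        #     author-email = jane@x.org, john@x.org
+        # is merged into:
+        #     authors = [{name = "Jane", email = "jane@x.org"},
+        #                {name = "John", email = "john@x.org"}]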
+
+    def merge_and_rename_long_description_and_content_type(self, doc: R) -> R:
+        """:pep:`621` offers a single field (``readme``) to cover things present in two
+        fields in ``setup.cfg``::
+
+            long_description.file => readme.file
+            long_description => readme.text
+            long-description-content-type => readme.content-type
+
+        We also have to be aware that :pep:`621` accepts a single file, so the option
+        of combining multiple files as presented in ``setup.cfg`` has to be handled
+        via ``dynamic``.
+        """
+        metadata: IR = doc["metadata"]
+        long_desc: Union[Directive, str, None] = metadata.get("long-description")
+        if not long_desc:
+            metadata.pop("long-description", None)
+            metadata.pop("long-description-content-type", None)
+            return doc
+
+        readme: Dict[str, Any] = {}
+        dynamic = False
+        if isinstance(long_desc, Directive):
+            # In PEP 621 "readme" should be a single file
+            files: CommentedList[str] = long_desc["file"]
+            files_list = files.as_list()
+            if len(files_list) == 1:
+                readme = {"file": Commented(files_list[0], files[0].comment)}
+            else:
+                readme = dict(long_desc)
+                dynamic = True
+        else:
+            readme = {"text": long_desc.strip()}
+
+        content_type = metadata.pop("long-description-content-type", None)
+        if content_type:
+            readme["content-type"] = content_type
+
+        if dynamic:
+            metadata.setdefault("dynamic", []).append("readme")
+            doc.setdefault("options.dynamic", IR()).append("readme", readme)
+            metadata.pop("long-description")
+            return doc
+
+        if len(list(readme.keys())) == 1 and "file" in readme:
+            metadata["long-description"] = readme["file"]
+        else:
+            metadata["long-description"] = IR(readme)  # type: ignore[arg-type]
+        metadata.rename("long-description", "readme")
+        return doc
+
+    def handle_license_and_files(self, doc: R) -> R:
+        """In :pep:`621` we have a single field for license, which might have a single
+        value (file path) or a dict-like structure::
+
+            license-files => license.file
+            license => license.text
+
+        We also have to be aware that :pep:`621` accepts a single file, so the option
+        of combining multiple files as presented in ``setup.cfg`` has to be handled
+        via ``dynamic``.
+        """
+        metadata: IR = doc["metadata"]
+        files: Optional[CommentedList[str]] = metadata.get("license-files")
+        # Setuptools automatically includes license files even when they are not
+        # listed in `setup.cfg`, so let's fall back to the defaults and make the
+        # field dynamic
+        files_as_list = (files and files.as_list()) or list(DEFAULT_LICENSE_FILES)
+        text = metadata.get("license")
+
+        # PEP 621 specifies a single "file". If there is more, we need to use "dynamic"
+        if files_as_list and (
+            len(files_as_list) > 1
+            or any(char in files_as_list[0] for char in "*?[")  # glob pattern
+            or text  # PEP 621 forbids both license and license-files at the same time
+        ):
+            metadata.setdefault("dynamic", []).append("license")
+            dynamic = doc.setdefault("options.dynamic", IR())
+            if text:
+                dynamic.append("license", text)
+            dynamic.append("license-files", files_as_list)
+            # 'file' and 'text' are mutually exclusive in PEP 621
+            metadata.pop("license", None)
+            metadata.pop("license-files", None)
+            return doc
+
+        if files_as_list:
+            files = cast(CommentedList[str], files)
+            license = IR(file=Commented(files_as_list[0], files[0].comment))
+        elif text:
+            license = IR(text=metadata["license"])
+        else:
+            return doc
+
+        fields = ("license-files", "license")
+        metadata.replace_first_remove_others(fields, "license", license)
+        return doc
+
+    def move_and_split_entrypoints(self, doc: R) -> R:
+        """In ``setup.cfg`` there is no special treatment for entry-points that will be
+        transformed in console/GUI scripts. On the other hand :pep:`621` defines
+        separated fields::
+
+            entry-points.console-scripts => scripts
+            entry-points.gui-scripts => gui-scripts
+            entry-points.* => "entry-points".*
+        """
+        entrypoints: IR = doc.get("options.entry-points", IR())
+        if not entrypoints:
+            doc.pop("options.entry-points", None)
+            return doc
+        doc.rename("options.entry-points", "project:entry-points")
+        # ^ use `:` to guarantee it is split later
+        script_keys = ["console-scripts", "gui-scripts"]
+        script_keys += [k.replace("-", "_") for k in script_keys]
+        keys = (k for k in script_keys if k in entrypoints)
+        for key in keys:
+            scripts: CommentedKV = entrypoints.pop(key)
+            new_key = key.replace("_", "-").replace("console-", "")
+            doc.append(f"project:{new_key}", scripts.to_ir())
+        if not entrypoints or all(isinstance(k, WhitespaceKey) for k in entrypoints):
+            doc.pop("project:entry-points")
+        return doc
+
+    def move_options_missing_in_pep621(self, doc: R) -> R:
+        """:pep:`621` specifies as project metadata values that are covered
+        in ``setup.cfg "options"`` section.
+        """
+        # First we handle simple options
+        naming = {
+            "python-requires": "requires-python",
+            "install-requires": "dependencies",
+            "entry-points": "entry-points",
+        }
+        metadata = doc.setdefault("metadata", IR())
+        options = doc.setdefault("options", IR())
+        metadata.update({v: options.pop(k) for k, v in naming.items() if k in options})
+
+        # Then we handle entire sections:
+        naming = {"extras-require": "optional-dependencies"}
+        for src, target in naming.items():
+            doc.rename(f"options.{src}", f"project:{target}", ignore_missing=True)
+
+        return doc
+
+    def remove_metadata_not_in_pep621(self, doc: R) -> R:
+        """:pep:`621` does not cover all project metadata in ``setup.cfg "metadata"``
+        section. That is left as "tool" specific configuration.
+        """
+        specific = ["platforms", "provides", "obsoletes"]
+        metadata, options = doc["metadata"], doc["options"]
+        options.update({k: metadata.pop(k) for k in specific if k in metadata})
+        return doc
+
+    def rename_script_files(self, doc: R) -> R:
+        """``setuptools`` define a ``options.scripts`` parameters that refer to
+        script files, not created via entry-points.
+        To avoid confusion with :pep:`621` scripts (generated via entry-points)
+        let's rename this field to ``script-files``
+        """
+        doc["options"].rename("scripts", "script-files", ignore_missing=True)
+        return doc
+
+    def handle_packages_find(self, doc: R) -> R:
+        """``setup.cfg`` uses a option + a section to define ``options.packages.find``
+        and its ``find_namespace`` variant. This does not work very well with the
+        convention used for the TOML encoding, since the option and the section would
+        end up with the same name (and overwriting each other). Therefore we need to
+        "merge" them.
+        """
+        options = doc["options"]
+        # Abort when not using find or find_namespace
+        packages = options.get("packages")
+        if not isinstance(packages, Directive):
+            return doc
+        prefix = packages.kind.replace("_", "-")
+        # Enhancement #1: Unify find and find_namespaces, using `namespaces` as a flag
+        options["packages"] = Directive("find", {"namespaces": "namespace" in prefix})
+        if "options.packages.find" in doc:
+            packages = options.pop("packages")
+            doc["options.packages.find"].update(packages["find"])
+            # Enhancement #2: ``where`` accepts multiple values (array)
+            where = doc["options.packages.find"].get("where", None)
+            if where:
+                doc["options.packages.find"]["where"] = _ensure_where_list(where)
+        return doc
+
+    def handle_dynamic(self, doc: R) -> R:
+        """All the configuration fields in :pep:`621` that are dynamically discovered at
+        build time have to be explicitly list in ``dynamic``.
+        This function moves directive usages (e.g. ``file:`` and ``attr:``) to a
+        tool-specific subtable (``tool.setuptools.dynamic``), and add the corresponding
+        field to ``dynamic``.
+        Since ``version`` is a mandatory core metadata, it will be added to ``dynamic``
+        when not explicitly set (in that case plugins such as ``setuptools_scm`` are
+        expected to provide a value at runtime).
+        """
+        potential = ["version", "classifiers", "description"]
+        # directives = {k[-1]: v for k, v in self.setupcfg_directives().items()}
+        metadata, options = doc["metadata"], doc["options"]
+
+        field_values = ((f, metadata.get(f)) for f in potential)
+        fields = [f for f, v in field_values if isinstance(v, Directive)]
+
+        dynamic = {f: metadata.pop(f, None) for f in fields}
+        if "version" not in metadata and "version" not in dynamic:
+            msg = (
+                "No `version` was found in `[metadata]`, `ini2toml` will assume it is "
+                "defined by tools like `setuptools-scm` or in `setup.py`. "
+                "Automatically adding it to `dynamic` (in accordance with PEP 621)"
+            )
+            _logger.debug(msg)
+            fields.insert(0, "version")
+
+        extras: List[str] = []
+        ep = metadata.pop("entry-points", options.pop("entry-points", None))
+        if isinstance(ep, Directive):
+            fields.append("entry-points")
+            dynamic["entry-points"] = ep
+            extras = ["scripts", "gui-scripts"]
+        if not fields:
+            return doc
+        metadata.setdefault("dynamic", []).extend(fields + extras)
+
+        if dynamic:
+            doc.setdefault("options.dynamic", IR()).update(dynamic)
+            # ^ later `options.dynamic` is converted to `tool.setuptools.dynamic`
+        return doc
+
+    def fix_extras_require(self, doc: R) -> R:
+        """`extras-require` can have markers embeded in the extra group
+        they need to be removed and added to the dependencies themselves
+        """
+        if "project:optional-dependencies" not in doc:
+            return doc
+
+        extras = doc["project:optional-dependencies"]
+        keys = list(extras.keys())  # Eager, so we can modify extras
+        for key in keys:
+            if not isinstance(key, str):
+                continue
+            extra_name, _, marker = key.partition(":")
+            extra_name, marker = extra_name.strip(), marker.strip()
+            if not marker:
+                continue
+            values = extras[key]
+            extras.rename(key, extra_name)
+            extras[extra_name] = [_add_marker(r, marker) for r in values.as_list()]
+
+        return doc
+
+    def move_setup_requires(self, doc: R) -> R:
+        """Move ``setup_requires`` to the equivalent field in :pep:`518`, and add
+        mandatory build dependencies if they are missing and
+        """
+        options = doc["options"]
+        build_system = doc["build-system"]
+        if "setup-requires" in options:
+            msg = "The field 'setup_requires' is deprecated. "
+            msg += "Converting to `build-system.requires` as specified by PEP 518."
+            warnings.warn(msg, DeprecationWarning)
+            requirements: CommentedList[str] = options.pop("setup-requires")
+            # Deduplicate
+            existing = {Requirement(r).name: r for r in requirements.as_list()}
+            mandatory = {
+                Requirement(r).name: r
+                for r in chain(build_system.get("requires", []), self.BUILD_REQUIRES)
+            }
+            new = [r for name, r in mandatory.items() if name not in existing]
+            for req in reversed(new):
+                requirements.insert_line(0, (req,))
+            build_system["requires"] = requirements
+
+        return doc
+
+    def move_tests_require(self, doc: R) -> R:
+        """Move ``tests_require`` to a ``testing`` extra as optional dependency
+        (this option is deprecated in setuptools (the test command is deprecated).
+
+        It assumes ``move_options_missing_in_pep621`` already run (to populate
+        ``project:optional-dependencies``.
+        """
+        if "tests-require" in doc["options"]:
+            msg = "The field 'tests_require' is deprecated and no longer supported. "
+            msg += "Dependencies will be converted to optional (`testing` extra). "
+            msg += "You can use a tool like `tox` or `nox` to replace this workflow."
+            warnings.warn(msg, DeprecationWarning)
+            reqs: CommentedList[str] = doc["options"].pop("tests-require")
+            if "project:optional-dependencies" not in doc:
+                doc["project:optional-dependencies"] = IR(testing=reqs)
+                return doc
+
+            opt_deps = doc["project:optional-dependencies"]
+            if "testing" not in opt_deps:
+                opt_deps["testing"] = reqs
+
+            testing: CommentedList[str] = opt_deps["testing"]
+            test_deps = {Requirement(r).name: r for r in reqs.as_list()}
+            existing_deps = {Requirement(r).name: r for r in testing.as_list()}
+            new = [r for name, r in test_deps.items() if name not in existing_deps]
+            for req in new:
+                testing.insert_line(len(testing), (req,))
+
+        return doc
+
+    def make_include_package_data_explicit(self, doc: R) -> R:
+        options = doc["options"]
+        if "include-package-data" not in options:
+            # This allows setuptools to decide to change the default from False to True,
+            # when adopting PEP 621
+            options["include-package-data"] = False
+
+        return doc
+
+    def parse_setup_py_command_options(self, doc: R) -> R:
+        """``distutils`` commands can accept arguments from ``setup.cfg`` files.
+        This function moves these arguments to their own ``distutils``
+        tool-specific sub-table.
+        """
+        sections = list(doc.keys())
+        commands = _distutils_commands()
+        for k in sections:
+            if isinstance(k, str) and k in commands:
+                section = self._be.apply_best_effort_to_section(doc[k])
+                for option in section:
+                    if isinstance(option, str):
+                        section.rename(option, self.normalise_key(option))
+                doc[k] = section
+                doc.rename(k, ("distutils", k))
+        return doc
+
+    def split_subtables(self, out: R) -> R:
+        """``setuptools`` emulate nested sections (e.g.: ``options.extras_require``)
+        which can be directly expressed in TOML via sub-tables.
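+
+        Example (illustrative sketch of the key renaming)::
+
+            "options.packages.find"  =>  ("tool", "setuptools", "packages", "find")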
+        """
+        sections = [
+            k
+            for k in out.keys()
+            if isinstance(k, str) and (k.startswith("options.") or ":" in k)
+        ]
+        for section in sections:
+            new_key = SECTION_SPLITTER.split(section)
+            if section.startswith("options."):
+                new_key = ["tool", "setuptools", *new_key[1:]]
+            out.rename(section, tuple(new_key))
+        return out
+
+    def ensure_pep518(self, doc: R) -> R:
+        """:pep:`518` specifies that any other tool adding configuration under
+        ``pyproject.toml`` should use the ``tool`` table. This means that the only
+        top-level keys are ``build-system``, ``project`` and ``tool``.
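+
+        Example (illustrative sketch): a ``[mypy]`` section in ``setup.cfg`` would be
+        moved to the ``[tool.mypy]`` table in ``pyproject.toml``.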
+        """
+        allowed = ("build-system", "project", "tool", "metadata", "options")
+        allowed_prefixes = ("options.", "project:")
+        for k in list(doc.keys()):
+            key = k
+            rest: Sequence = ()
+            if isinstance(k, tuple) and not isinstance(key, HiddenKey):
+                key, *rest = k
+            if isinstance(key, HiddenKey):
+                continue
+            if not (key in allowed or any(key.startswith(p) for p in allowed_prefixes)):
+                doc.rename(k, ("tool", key, *rest))
+        return doc
+
+    def pep621_transform(self, doc: R) -> R:
+        """Rules are applied sequentially and therefore can interfere with the following
+        ones. Please notice that renaming is applied after value processing.
+        """
+        transformations = [
+            # --- value processing and type changes ---
+            self.apply_value_processing,
+            # --- transformations mainly focusing on PEP 621 ---
+            self.merge_and_rename_urls,
+            self.merge_authors_maintainers_and_emails,
+            self.merge_and_rename_long_description_and_content_type,
+            self.handle_license_and_files,
+            self.move_and_split_entrypoints,
+            self.move_options_missing_in_pep621,
+            self.remove_metadata_not_in_pep621,
+            # --- General fixes ---
+            self.fix_extras_require,
+            self.rename_script_files,
+            self.handle_packages_find,
+            self.handle_dynamic,
+            self.move_setup_requires,
+            self.move_tests_require,
+            self.make_include_package_data_explicit,
+            # --- distutils ---
+            self.parse_setup_py_command_options,
+            # --- final steps ---
+            self.split_subtables,
+            self.ensure_pep518,
+        ]
+        out = self.template(doc.__class__)
+        out.update(doc)
+        out.setdefault("metadata", IR())
+        out.setdefault("options", IR())
+        out = reduce(apply, transformations, out)
+        out.rename("metadata", "project", ignore_missing=True)
+        out.rename("options", ("tool", "setuptools"), ignore_missing=True)
+        return out
+
+    def normalise_keys(self, cfg: R) -> R:
+        """Normalise keys in ``setup.cfg``, by replacing aliases with cannonic names
+        and replacing the snake case with kebab case.
+
+        .. note:: Although setuptools recently deprecated kebab case in ``setup.cfg``,
+           ``pyproject.toml`` uses it as a convention (as established in :pep:`517`,
+           :pep:`518` and :pep:`621`), so this normalisation makes more sense for the
+           translation.
+        """
+        # Normalise for the same convention as pyproject
+        for i in range(len(cfg.order)):
+            section_name = cfg.order[i]
+            if not isinstance(section_name, str):
+                continue
+            if not any(section_name.startswith(s) for s in SETUPTOOLS_SECTIONS):
+                continue
+            section = cfg[section_name]
+            cfg.rename(section_name, kebab_case(section_name))
+            if any(section_name.startswith(s) for s in SKIP_CHILD_NORMALISATION):
+                continue
+            for j in range(len(section.order)):
+                option_name = section.order[j]
+                if not isinstance(option_name, str):
+                    continue
+                section.rename(option_name, self.normalise_key(option_name))
+        # Normalise aliases
+        metadata = cfg.get("metadata")
+        if not metadata:
+            return cfg
+        for alias, canonical in self.setupcfg_aliases().items():
+            if alias in metadata:
+                msg = f"{alias!r} is deprecated. Translating to {canonical!r} instead."
+                warnings.warn(msg, DeprecationWarning)
+                metadata.rename(alias, canonical)
+        return cfg
+
+    def normalise_key(self, key: str) -> str:
+        """Normalise a single key for option"""
+        return kebab_case(key)
+
+
+# ---- Helpers ----
+
+
+class Directive(dict):
+    """Represent a setuptools' setup.cfg directive (e.g 'file:', 'attr:')
+
+    In TOML these directives can be represented as dict-like objects, however in the
+    conversion algorithm we need to be able to differentiate between them.
+    By inheriting from dict, we can use directive classes interchangeably but also check
+    using `isinstance(obj, Directive)`.
+    """
+
+    def __init__(self, kind: str, args: Any):
+        self.kind = kind
+        self.args = args
+        super().__init__(((kind, args),))
+
+
+def directive(*directives: str, orelse=split_comment):
+    """:obj:`~functools.partial` form of :func:`split_directive`"""
+    directives = directives or ("file", "attr")
+    return partial(split_directive, directives=directives, orelse=orelse)
+
+
+def split_directive(
+    value: str, directives: Sequence[str] = ("file", "attr"), orelse=split_comment
+):
+    candidates = (d for d in directives if value.strip().startswith(f"{d}:"))
+    directive = next(candidates, None)
+    if directive is None:
+        return orelse(value)
+
+    raw_value = value.lstrip()[len(directive) + 1 :].strip()
+    if directive == "file":
+        return Directive(directive, split_list_comma(raw_value))
+    return Directive(directive, split_comment(raw_value))
+
+
+def value_error(field: str):
+    """Simply raise a :exc:`ValueError` when used as a transformation function"""
+
+    def _fn(value):
+        raise ValueError(f"Invalid value for {field!r}: {value!r}")
+
+    return _fn
+
+
+def _distutils_commands() -> Set[str]:
+    try:
+        from . import iterate_entry_points
+
+        commands = [ep.name for ep in iterate_entry_points("distutils.commands")]
+    except Exception:
+        commands = []
+    return {*commands, *COMMAND_SECTIONS}
+
+
+def _ensure_where_list(where):
+    if isinstance(where, Commented):
+        return where.as_commented_list()
+
+    return [where]
+
+
+def _add_marker(dep: str, marker: str) -> str:
+    joiner = " and " if ";" in dep else "; "
+    return joiner.join((dep, marker))
+
+
+def split_deps(value):
+    """Setuptools seem to accept line continuations for markers
+    (with comments in the middle), and that is more difficult to process.
+    e.g.: https://github.com/jaraco/zipp
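+
+    Example of input handled here (illustrative)::
+
+        python-dateutil; \
+            python_version > "3.6"
+
+    The continuation is fused back into a single logical line,
+    roughly ``python-dateutil; python_version > "3.6"``.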
+    """
+    internal: CommentedList[str] = split_list_semi(value)
+    lines = list(internal)
+    L = len(lines)
+    i = j = 0
+    remove = []
+    while i < L:
+        line = lines[i]
+        if line.comment_only() or not line.value:
+            i += 1
+            continue
+        while line.value and line.value[-1].strip()[-1] == "\\":
+            comments: List[Tuple[int, str]] = []
+            for j in range(i + 1, L):
+                # Find the non commented / non empty line
+                following = lines[j]
+                if following.value_or(None):
+                    line = _fuse_lines(line, following)
+                    lines[i] = line
+                    if len(comments) == 1 and not line.has_comment():
+                        # If just one comment was found in between,
+                        # use it as an inline comment
+                        remove.append(comments[0][0])
+                        line.comment = comments[0][1]
+                    remove.append(j)
+                    i = j
+                    break
+                if following.comment:
+                    # Store the comments, they might be used as inline
+                    comments.append((j, following.comment))
+        i += 1
+
+    for i in reversed(remove):  # backwards otherwise we lose track of the indexes
+        lines.pop(i)
+
+    return CommentedList(lines)
+
+
+def _fuse_lines(line1: Commented[List[str]], line2: Commented[List[str]]):
+    """Fuse 2 lines in a CommentedList that accidentally split a single
+    value between them
+    """
+    values1 = line1.value
+    values2 = line2.value
+    # Requires line1 and line2 to not be empty
+    assert isinstance(values1, list) and isinstance(values2, list)
+    keep1, keep2 = values1[:-1], values2[1:]
+    shared = values1[-1].strip().strip("\\").strip() + " " + values2[0].strip()
+    return Commented(keep1 + [shared] + keep2, line2.comment)
diff --git a/setuptools/_vendor/ini2toml/profile.py b/setuptools/_vendor/ini2toml/profile.py
new file mode 100644
index 0000000000..f7083dbc8e
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/profile.py
@@ -0,0 +1,60 @@
+import inspect
+from typing import Optional, Sequence, TypeVar
+
+from .types import IntermediateProcessor, ProfileAugmentationFn, TextProcessor
+
+P = TypeVar("P", bound="Profile")
+
+
+def replace(self: P, **changes) -> P:
+    """Works similarly to :func:`dataclasses.replace`"""
+    sig = inspect.signature(self.__class__)
+    kwargs = {x: getattr(self, x) for x in sig.parameters}
+    kwargs.update(changes)
+    return self.__class__(**kwargs)
+
+
+class Profile:
+    """Profile object that follows the public API defined in
+    :class:`ini2toml.types.Profile`.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        help_text: str = "",
+        pre_processors: Sequence[TextProcessor] = (),
+        intermediate_processors: Sequence[IntermediateProcessor] = (),
+        post_processors: Sequence[TextProcessor] = (),
+        ini_parser_opts: Optional[dict] = None,
+    ):
+        self.name = name
+        self.help_text = help_text
+        self.pre_processors = list(pre_processors)
+        self.intermediate_processors = list(intermediate_processors)
+        self.post_processors = list(post_processors)
+        self.ini_parser_opts = ini_parser_opts
+
+    replace = replace
+
+
+class ProfileAugmentation:
+    def __init__(
+        self,
+        fn: ProfileAugmentationFn,
+        active_by_default: bool = False,
+        name: str = "",
+        help_text: str = "",
+    ):
+        self.fn = fn
+        self.active_by_default = active_by_default
+        self.name = name
+        self.help_text = help_text
+
+    def is_active(self, explicitly_active: Optional[bool] = None) -> bool:
+        """``explicitly_active`` is a tree-state variable: ``True`` if the user
+        explicitly asked for the augmentation, ``False`` if the user explicitly denied
+        the augmentation, or ``None`` otherwise.
+        """
+        activation = explicitly_active
+        return activation is True or (activation is None and self.active_by_default)
diff --git a/setuptools/_vendor/ini2toml/py.typed b/setuptools/_vendor/ini2toml/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/ini2toml/transformations.py b/setuptools/_vendor/ini2toml/transformations.py
new file mode 100644
index 0000000000..7257995e81
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/transformations.py
@@ -0,0 +1,365 @@
+"""Reusable value and type casting transformations"""
+import warnings
+from collections.abc import MutableMapping
+from functools import reduce, wraps
+from typing import (
+    Any,
+    Callable,
+    List,
+    Optional,
+    Sequence,
+    Tuple,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
+
+from .types import Commented, CommentedKV, CommentedList
+
+CP = ("#", ";")
+"""Default Comment Prefixes"""
+
+S = TypeVar("S")
+T = TypeVar("T")
+U = TypeVar("U")
+V = TypeVar("V")
+X = TypeVar("X")
+Y = TypeVar("Y")
+M = TypeVar("M", bound=MutableMapping)
+KV = Tuple[str, T]
+
+FN = Callable[[X], Y]
+
+Scalar = Union[int, float, bool, str]  # TODO: missing time and datetime
+"""Simple data types with TOML correspondence"""
+
+CoerceFn = Callable[[str], T]
+"""Functions that know how to parser/coerce string values into different types"""
+
+Transformation = Union[Callable[[str], Any], Callable[[M], M]]
+"""There are 2 main types of transformation:
+
+- The first one is a simple transformation that processes a string value (coming from an
+  option in the original CFG/INI file) into a value with an equivalent TOML data type.
+  For example: transforming ``"2"`` (string) into ``2`` (integer).
+- The second one tries to preserve metadata (such as comments) from the original CFG/INI
+  file. This kind of transformation processes a string value into an intermediary
+  representation (e.g. :obj:`Commented`, :obj:`CommentedList`, :obj:`CommentedKV`)
+  that needs to be properly handled before being added to the TOML document.
+
+At a higher level, we can also consider an ensemble of transformations that transforms an
+entire table of the TOML document.
+"""
+
+TF = TypeVar("TF", bound=Transformation)
+
+
+# ---- Simple value processors ----
+# These functions return plain objects, that can be directly added to the TOML document
+
+
+def noop(x: T) -> T:
+    return x
+
+
+def is_true(value: str) -> bool:
+    value = value.lower()
+    return value in ("true", "1", "yes", "on")
+
+
+def is_false(value: str) -> bool:
+    value = value.lower()
+    return value in ("false", "0", "no", "off", "none", "null", "nil")
+
+
+def is_float(value: str) -> bool:
+    cleaned = value.lower().lstrip("+-").replace(".", "").replace("_", "")
+    return (cleaned.isdecimal() and value.count(".") <= 1) or cleaned in ("inf", "nan")
+
+
+def coerce_bool(value: str) -> bool:
+    if is_true(value):
+        return True
+    if is_false(value):
+        return False
+    raise ValueError(f"{value!r} cannot be converted to boolean")
+
+
+def coerce_scalar(value: str) -> Scalar:
+    """Try to convert the given string to a proper "scalar" type (e.g. integer, float,
+    bool, ...) with a direct TOML equivalent.
+    If the conversion is unknown or not possible, it will return the same input value
+    (as string).
+
+    .. note:: This function "guesses" the value type based in heuristics and/or regular
+       expressions, therefore there is no guarantee the output has the same type as
+       intended by the original author.
+
+    .. note:: Currently date/time-related types are not supported.
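+
+    Examples (illustrative)::
+
+        coerce_scalar("42")     # -> 42
+        coerce_scalar("1.5")    # -> 1.5
+        coerce_scalar("yes")    # -> True
+        coerce_scalar("hello")  # -> "hello"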
+    """
+    value = value.strip()
+    if value.isdecimal():
+        return int(value)
+    if is_float(value):
+        return float(value)
+    if is_true(value):
+        return True
+    elif is_false(value):
+        return False
+    # Do we need this? Or is there a better way? How about time objects
+    # > try:
+    # >     return datetime.fromisoformat(value)
+    # > except ValueError:
+    # >     pass
+    return value
+
+
+def kebab_case(field: str) -> str:
+    return field.lower().replace("_", "-")
+
+
+def deprecated(
+    name: str, fn: TF = noop, instead: str = ""  # type: ignore[assignment]
+) -> TF:
+    """Wrapper around the ``fn`` transformation to warn user about deprecation."""
+    extra = f". Use {instead!r} instead" if instead else ""
+
+    @wraps(fn)
+    def _fn(*args, **kwargs):
+        warnings.warn(f"{name!r} is deprecated{extra}", DeprecationWarning)
+        return fn(*args, **kwargs)
+
+    return cast(TF, _fn)
+
+
+# ---- Complex value processors ----
+# These functions return an intermediate representation of the value,
+# that need `apply` to be added to a container
+
+
+@overload
+def split_comment(value: str, *, comment_prefixes=CP) -> Commented[str]:
+    ...
+
+
+@overload
+def split_comment(
+    value: str, coerce_fn: CoerceFn[T], comment_prefixes=CP
+) -> Commented[T]:
+    ...
+
+
+def split_comment(value, coerce_fn=noop, comment_prefixes=CP):
+    if not isinstance(value, str):
+        return value
+    value = value.strip()
+    prefixes = [p for p in comment_prefixes if p in value]
+
+    # We only process inline comments for single-line options
+    if not prefixes or len(value.splitlines()) > 1:
+        return Commented(coerce_fn(value))
+
+    if any(value.startswith(p) for p in comment_prefixes):
+        return Commented(comment=_strip_comment(value, comment_prefixes))
+
+    prefix = prefixes[0]  # We can only analyse one...
+    value, _, cmt = value.partition(prefix)
+    return Commented(coerce_fn(value.strip()), _strip_comment(cmt, comment_prefixes))
+
+
+def split_scalar(value: str, *, comment_prefixes=CP) -> Commented[Scalar]:
+    return split_comment(value, coerce_scalar, comment_prefixes)
+
+
+@overload
+def split_list(
+    value: str, sep: str = ",", *, subsplit_dangling=True, comment_prefixes=CP
+) -> CommentedList[str]:
+    ...
+
+
+@overload
+def split_list(
+    value: str,
+    sep: str = ",",
+    *,
+    coerce_fn: CoerceFn[T],
+    subsplit_dangling=True,
+    comment_prefixes=CP,
+) -> CommentedList[T]:
+    ...
+
+
+@overload
+def split_list(
+    value: str,
+    sep: str,
+    coerce_fn: CoerceFn[T],
+    subsplit_dangling=True,
+    comment_prefixes=CP,
+) -> CommentedList[T]:
+    ...
+
+
+def split_list(
+    value, sep=",", coerce_fn=noop, subsplit_dangling=True, comment_prefixes=CP
+):
+    """Value encoded as a (potentially) dangling list values separated by ``sep``.
+
+    This function will first try to split the value by lines (dangling list) using
+    :func:`str.splitlines`. Then, if ``subsplit_dangling=True``, it will split each line
+    using ``sep``. As a result a list of items is obtained.
+    For each item in this list ``coerce_fn`` is applied.
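+
+    Example (illustrative)::
+
+        split_list("a, b\nc  # comment").as_list()  # -> ["a", "b", "c"]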
+    """
+    if not isinstance(value, str):
+        return value
+    comment_prefixes = [p for p in comment_prefixes if sep not in p]
+
+    values = value.strip().splitlines()
+    if not subsplit_dangling and len(values) > 1:
+        sep += "\n"  # force a pattern that cannot be found in a split line
+
+    def _split(line: str) -> list:
+        return [coerce_fn(v.strip()) for v in line.split(sep) if v]
+
+    return CommentedList([split_comment(v, _split, comment_prefixes) for v in values])
+
+
+@overload
+def split_kv_pairs(
+    value: str,
+    key_sep: str = "=",
+    *,
+    pair_sep=",",
+    subsplit_dangling=True,
+    comment_prefixes=CP,
+) -> CommentedKV[str]:
+    ...
+
+
+@overload
+def split_kv_pairs(
+    value: str,
+    key_sep: str = "=",
+    *,
+    coerce_fn: CoerceFn[T],
+    pair_sep=",",
+    subsplit_dangling=True,
+    comment_prefixes=CP,
+) -> CommentedKV[T]:
+    ...
+
+
+@overload
+def split_kv_pairs(
+    value: str,
+    key_sep: str,
+    coerce_fn: CoerceFn[T],
+    pair_sep=",",
+    subsplit_dangling=True,
+    comment_prefixes=CP,
+) -> CommentedKV[T]:
+    ...
+
+
+def split_kv_pairs(
+    value,
+    key_sep="=",
+    coerce_fn=noop,
+    pair_sep=",",
+    subsplit_dangling=True,
+    comment_prefixes=CP,
+):
+    """Value encoded as a (potentially) dangling list of key-value pairs.
+
+    This function will first try to split the value by lines (dangling list) using
+    :func:`str.splitlines`. Then, if ``subsplit_dangling=True``, it will split each line
+    using ``pair_sep``. As a result a list of key-value pairs is obtained.
+    For each item in this list, the key is separated from the value by ``key_sep``.
+    ``coerce_fn`` is used to convert the value in each pair.
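+
+    Example (illustrative; the result is a :obj:`CommentedKV` wrapping the pairs)::
+
+        split_kv_pairs("a=1, b=2")  # roughly {"a": "1", "b": "2"}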
+    """
+    prefixes = [p for p in comment_prefixes if key_sep not in p and pair_sep not in p]
+
+    values = value.strip().splitlines()
+    if not subsplit_dangling and len(values) > 1:
+        pair_sep += "\n"  # force a pattern that cannot be found in a split line
+
+    def _split_kv(line: str) -> List[KV]:
+        pairs = (
+            item.split(key_sep, maxsplit=1)
+            for item in line.strip().split(pair_sep)
+            if key_sep in item
+        )
+        return [(k.strip(), coerce_fn(v.strip())) for k, v in pairs]
+
+    return CommentedKV([split_comment(v, _split_kv, prefixes) for v in values])
+
+
+# ---- Public Helpers ----
+
+
+def remove_prefixes(text: str, prefixes: Sequence[str]):
+    text = text.strip()
+    for prefix in prefixes:
+        if prefix and text.startswith(prefix):
+            return text[len(prefix) :].strip()
+    return text
+
+
+def apply(x, fn):
+    """Useful to reduce over a list of functions"""
+    return fn(x)
+
+
+@overload
+def pipe(fn1: FN[S, T], fn2: FN[T, U]) -> FN[S, U]:
+    ...
+
+
+@overload
+def pipe(fn1: FN[S, T], fn2: FN[T, U], fn3: FN[U, V]) -> FN[S, V]:
+    ...
+
+
+@overload
+def pipe(fn1: FN[S, T], fn2: FN[T, U], fn3: FN[U, V], fn4: FN[V, X]) -> FN[S, X]:
+    ...
+
+
+@overload
+def pipe(
+    fn1: FN[S, T], fn2: FN[T, U], fn3: FN[U, V], fn4: FN[V, X], fn5: FN[X, Y]
+) -> FN[S, Y]:
+    ...
+
+
+@overload
+def pipe(
+    fn1: FN[S, T],
+    fn2: FN[T, U],
+    fn3: FN[U, V],
+    fn4: FN[V, X],
+    fn5: FN[X, Y],
+    *fn: FN[Y, Y],
+) -> FN[S, Y]:
+    ...
+
+
+def pipe(*fns):
+    """Compose 1-argument functions respecting the sequence they should be applied:
+
+    .. code-block:: python
+
+        pipe(fn1, fn2, fn3, ..., fnN)(x) == fnN(...(fn3(fn2(fn1(x)))))
+    """
+    return lambda x: reduce(apply, fns, x)
+
+
+# ---- Private Helpers ----
+
+
+def _strip_comment(msg: Optional[str], prefixes: Sequence[str] = CP) -> Optional[str]:
+    if not msg:
+        return None
+    return remove_prefixes(msg, prefixes)
diff --git a/setuptools/_vendor/ini2toml/types.py b/setuptools/_vendor/ini2toml/types.py
new file mode 100644
index 0000000000..af5c7d813d
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/types.py
@@ -0,0 +1,121 @@
+import sys
+from collections.abc import Mapping, MutableMapping
+from typing import TYPE_CHECKING, Any, Callable, List, Optional, TypeVar, Union
+
+from .intermediate_repr import (
+    KV,
+    Commented,
+    CommentedKV,
+    CommentedList,
+    CommentKey,
+    HiddenKey,
+    IntermediateRepr,
+    Key,
+    WhitespaceKey,
+)
+
+if sys.version_info < (3, 8):  # pragma: no cover
+    # TODO: Import directly when `python_requires = >= 3.8`
+    if TYPE_CHECKING:
+        from typing_extensions import Protocol
+    else:
+        # Not a real replacement but allows getting rid of the dependency
+        from abc import ABC as Protocol
+else:  # pragma: no cover
+    from typing import Protocol
+
+
+R = TypeVar("R", bound=IntermediateRepr)
+T = TypeVar("T")
+M = TypeVar("M", bound=MutableMapping)
+
+Scalar = Union[int, float, bool, str]  # TODO: missing time and datetime
+CoerceFn = Callable[[str], T]
+Transformation = Union[Callable[[str], Any], Callable[[M], M]]
+
+TextProcessor = Callable[[str], str]
+IntermediateProcessor = Callable[[R], R]
+
+
+IniLoadsFn = Callable[[str, Mapping], IntermediateRepr]
+IReprCollapseFn = Callable[[IntermediateRepr], T]
+TomlDumpsFn = IReprCollapseFn[str]
+
+
+class CLIChoice(Protocol):
+    name: str
+    help_text: str
+
+
+class Profile(Protocol):
+    name: str
+    help_text: str
+    pre_processors: List[TextProcessor]
+    intermediate_processors: List[IntermediateProcessor]
+    post_processors: List[TextProcessor]
+
+
+class ProfileAugmentation(Protocol):
+    active_by_default: bool
+    name: str
+    help_text: str
+
+    def fn(self, profile: Profile):
+        ...
+
+    def is_active(self, explicitly_active: Optional[bool] = None) -> bool:
+        """``explicitly_active`` is a tree-state variable: ``True`` if the user
+        explicitly asked for the augmentation, ``False`` if the user explicitly denied
+        the augmentation, or ``None`` otherwise.
+        """
+
+
+class Translator(Protocol):
+    def __getitem__(self, profile_name: str) -> Profile:
+        """Create and register (and return) a translation :class:`Profile`
+        (or return a previously registered one) (see :ref:`core-concepts`).
+        """
+
+    def augment_profiles(
+        self,
+        fn: "ProfileAugmentationFn",
+        active_by_default: bool = False,
+        name: str = "",
+        help_text: str = "",
+    ):
+        """Register a profile augmentation function (see :ref:`core-concepts`).
+        The keywords ``name`` and ``help_text`` can be used to customise the description
+        featured in ``ini2toml``'s CLI, but when these arguments are not given (or empty
+        strings), ``name`` is taken from ``fn.__name__`` and ``help_text`` is taken from
+        ``fn.__doc__`` (docstring).
+        """
+
+
+Plugin = Callable[[Translator], None]
+ProfileAugmentationFn = Callable[[Profile], None]
+
+
+__all__ = [
+    "CLIChoice",
+    "CommentKey",
+    "Commented",
+    "CommentedKV",
+    "CommentedList",
+    "HiddenKey",
+    "IniLoadsFn",
+    "IntermediateProcessor",
+    "IntermediateRepr",
+    "Key",
+    "KV",
+    "Plugin",
+    "Profile",
+    "ProfileAugmentation",
+    "ProfileAugmentationFn",
+    "TextProcessor",
+    "Translator",
+    "TomlDumpsFn",
+    "WhitespaceKey",
+    "Scalar",
+    "CoerceFn",
+    "Transformation",
+]
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index c1f7237ccc..0ad24bf908 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -3,4 +3,5 @@ pyparsing==2.2.1
 ordered-set==3.1.1
 more_itertools==8.8.0
 tomli==1.2.2
+ini2toml==0.6
 # validate-pyproject[all]==0.3.1  # Special handling, don't remove
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index cb0fe6c7b8..ba4ba12453 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -70,5 +70,5 @@ def install(self):
 
 
 names = ('packaging', 'pyparsing', 'ordered_set', 'more_itertools',
-         'tomli', '_validate_pyproject')
+         'tomli', 'ini2toml', '_validate_pyproject')
 VendorImporter(__name__, names, 'setuptools._vendor').install()

From 6e86080ecdb46df0a41c9ccac85b43913dbf5d57 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 18:40:13 +0000
Subject: [PATCH 21/55] Automatically convert `setup.cfg` => `pyproject.toml`

This is the initial implementation of the "configuration driver"
that indirectly reads `setup.cfg` by first converting it to a data
structure corresponding to `pyproject.toml` and then expanding it.

This idea is based on the approach defined in #2685.

LIMITATION: Unlike the `legacy_setupcfg` "configuration driver",
`setupcfg` does not support reading other distutils configuration files.
The `find_others` flag is removed because of that.
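
A rough usage sketch of the new driver (functions introduced below):

    from setuptools.config.setupcfg import read_configuration
    config = read_configuration("setup.cfg")  # pyproject.toml-shaped dict
    name = config["project"]["name"]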
---
 setuptools/config/pyprojecttoml.py       |  5 +-
 setuptools/config/setupcfg.py            | 67 +++++++++++++++++
 setuptools/tests/config/test_setupcfg.py | 93 ++++++++++++++++++++++++
 3 files changed, 164 insertions(+), 1 deletion(-)
 create mode 100644 setuptools/config/setupcfg.py
 create mode 100644 setuptools/tests/config/test_setupcfg.py

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 12898de73d..18bf31c49e 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -7,7 +7,7 @@
 from setuptools.extern import tomli
 from setuptools.extern._validate_pyproject import validate
 from setuptools.config import expand as _expand
-from setuptools.errors import OptionError
+from setuptools.errors import OptionError, FileError
 
 
 def read_configuration(filepath, expand=True, ignore_option_errors=False):
@@ -28,6 +28,9 @@ def read_configuration(filepath, expand=True, ignore_option_errors=False):
     """
     filepath = os.path.abspath(filepath)
 
+    if not os.path.isfile(filepath):
+        raise FileError(f"Configuration file {filepath!r} does not exist.")
+
     with open(filepath, "rb") as file:
         asdict = tomli.load(file)
 
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
new file mode 100644
index 0000000000..ce87843b16
--- /dev/null
+++ b/setuptools/config/setupcfg.py
@@ -0,0 +1,67 @@
+"""Automatically convert ``setup.cfg`` file into a ``pyproject.toml``-equivalent
+in memory data structure, and then proceed to load the configuration.
+"""
+import os
+from typing import Union
+
+from setuptools.errors import FileError
+from setuptools.extern.ini2toml.base_translator import BaseTranslator
+from setuptools.extern.ini2toml.drivers import configparser as configparser_driver
+from setuptools.extern.ini2toml.drivers import plain_builtins as plain_builtins_driver
+from setuptools.extern.ini2toml.plugins import setuptools_pep621 as setuptools_plugin
+
+from setuptools.config import pyprojecttoml as pyproject_config
+
+
+_Path = Union[os.PathLike, str, None]
+
+
+def convert(setupcfg_file: _Path) -> dict:
+    """Convert the ``setup.cfg`` file into a data struct similar to
+    the one that would be obtained by parsing a ``pyproject.toml``
+    """
+    with open(setupcfg_file, "r") as f:
+        ini_text = f.read()
+
+    translator = BaseTranslator(
+        ini_loads_fn=configparser_driver.parse,
+        toml_dumps_fn=plain_builtins_driver.convert,
+        plugins=[setuptools_plugin.activate],
+        ini_parser_opts={},
+    )
+    return translator.translate(ini_text, profile_name="setup.cfg")
+
+
+expand_configuration = pyproject_config.expand_configuration
+
+
+def read_configuration(
+    filepath: _Path, expand: bool = True, ignore_option_errors: bool = False
+):
+    """Read given configuration file and returns options from it as a dict.
+
+    :param str filepath: Path to the configuration file to get options from.
+
+    :param bool expand: Whether to expand directives and other computed values
+        (i.e. post-process the given configuration)
+
+    :param bool ignore_option_errors: Whether to silently ignore options whose
+        values could not be resolved (e.g. due to exceptions in directives such
+        as ``file:``, ``attr:``, etc.).
+        If ``False``, exceptions are propagated as expected.
+
+    :rtype: dict
+    """
+    filepath = os.path.abspath(filepath)
+
+    if not os.path.isfile(filepath):
+        raise FileError(f"Configuration file {filepath!r} does not exist.")
+
+    asdict = convert(filepath)
+
+    with pyproject_config._ignore_errors(ignore_option_errors):
+        pyproject_config.validate(asdict)
+
+    if expand:
+        root_dir = os.path.dirname(filepath)
+        return expand_configuration(asdict, root_dir, ignore_option_errors)
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
new file mode 100644
index 0000000000..4e10f834ff
--- /dev/null
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -0,0 +1,93 @@
+from textwrap import dedent
+
+from setuptools.config.setupcfg import convert, read_configuration
+
+EXAMPLE = {
+    "LICENSE": "----- MIT LICENSE TEXT PLACEHOLDER ----",
+    "README.md": "hello world",
+    "pyproject.toml": dedent("""\
+        [build-system]
+        requires = ["setuptools>=42", "wheel"]
+        build-backend = "setuptools.build_meta"
+    """),
+    "setup.cfg": dedent("""\
+        [metadata]
+        name = example-pkg
+        version = 0.0.1
+        author = Example Author
+        author_email = author@example.com
+        description = A small example package
+        long_description = file: README.md
+        long_description_content_type = text/markdown
+        url = https://github.com/pypa/sampleproject
+        project_urls =
+            Bug Tracker = https://github.com/pypa/sampleproject/issues
+        classifiers =
+            Programming Language :: Python :: 3
+            License :: OSI Approved :: MIT License
+            Operating System :: OS Independent
+
+        [options]
+        package_dir =
+            = src
+        packages = find:
+        python_requires = >=3.6
+        install_requires =
+            peppercorn
+        entry_points = file: entry_points.ini
+
+        [options.extras_require]
+        dev =
+            check-manifest
+        test =
+            coverage
+
+        [options.packages.find]
+        where = src
+    """),
+    "entry_points.ini": dedent("""\
+        [my.plugin.group]
+        add_one = example_package.example:add_one
+    """),
+    "src/example_package/__init__.py": "",
+    "src/example_package/example.py": "def add_one(number):\n    return number + 1",
+    "src/example_package/package_data.csv": "42",
+    "src/example_package/nested/__init__.py": "",
+}
+
+
+def create_project(parent_dir, files):
+    for file, content in files.items():
+        path = parent_dir / file
+        path.parent.mkdir(exist_ok=True, parents=True)
+        path.write_text(content)
+
+
+def test_convert(tmp_path):
+    create_project(tmp_path, EXAMPLE)
+    pyproject = convert(tmp_path / "setup.cfg")
+    project = pyproject["project"]
+    assert project["name"] == "example-pkg"
+    assert project["version"] == "0.0.1"
+    assert project["readme"]["file"] == "README.md"
+    assert project["readme"]["content-type"] == "text/markdown"
+    assert project["urls"]["Homepage"] == "https://github.com/pypa/sampleproject"
+    assert set(project["dependencies"]) == {"peppercorn"}
+    assert set(project["optional-dependencies"]["dev"]) == {"check-manifest"}
+    assert set(project["optional-dependencies"]["test"]) == {"coverage"}
+    setuptools = pyproject["tool"]["setuptools"]
+    assert set(setuptools["dynamic"]["entry-points"]["file"]) == {"entry_points.ini"}
+    assert setuptools["packages"]["find"]["where"] == ["src"]
+    assert setuptools["packages"]["find"]["namespaces"] is False
+
+
+def test_read_configuration(tmp_path):
+    create_project(tmp_path, EXAMPLE)
+    pyproject = read_configuration(tmp_path / "setup.cfg")
+    project = pyproject["project"]
+    ep_value = "example_package.example:add_one"
+    assert project["entry-points"]["my.plugin.group"]["add_one"] == ep_value
+    setuptools = pyproject["tool"]["setuptools"]
+    assert set(setuptools["packages"]) == {"example_package", "example_package.nested"}

From 36518e9eda563b013e46522a9c2e5037f9131e42 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 20:57:16 +0000
Subject: [PATCH 22/55] Avoid manipulating sys.path in config.expand

Instead we can use `importlib` to load modules directly from a specific
path.

(This also seems to prevent some order-dependent errors in the tests?)
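
The core of the change is roughly the following stdlib pattern (a minimal
sketch with a hypothetical module name/path; see `_find_spec`/`_load_spec`
below):

    import importlib.util

    spec = importlib.util.spec_from_file_location("pkg.mod", "src/pkg/mod.py")
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)  # no sys.path manipulation needed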
---
 setuptools/config/expand.py            | 67 ++++++++++++++------------
 setuptools/tests/config/test_expand.py |  4 +-
 setuptools/tests/test_config.py        | 25 +++++-----
 3 files changed, 50 insertions(+), 46 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 26b7f75ae0..06e52e458c 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -16,7 +16,6 @@
 functions among several configuration file formats.
 """
 import ast
-import contextlib
 import importlib
 import io
 import os
@@ -35,10 +34,7 @@ class StaticModule:
     Attempt to load the module by the name
     """
 
-    def __init__(self, name):
-        spec = importlib.util.find_spec(name)
-        if spec is None:
-            raise ModuleNotFoundError(name)
+    def __init__(self, name, spec):
         with open(spec.origin) as strm:
             src = strm.read()
         module = ast.parse(src)
@@ -60,18 +56,6 @@ def __getattr__(self, attr):
             ) from e
 
 
-@contextlib.contextmanager
-def patch_path(path):
-    """
-    Add path to front of sys.path for the duration of the context.
-    """
-    try:
-        sys.path.insert(0, path)
-        yield
-    finally:
-        sys.path.remove(path)
-
-
 def glob_relative(patterns, root_dir=None):
     """Expand the list of glob patterns, but preserving relative paths.
 
@@ -153,21 +137,37 @@ def read_attr(attr_desc, package_dir=None, root_dir=None):
     root_dir = root_dir or os.getcwd()
     attrs_path = attr_desc.strip().split('.')
     attr_name = attrs_path.pop()
-
     module_name = '.'.join(attrs_path)
     module_name = module_name or '__init__'
+    parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
+    spec = _find_spec(module_name, path, parent_path)
 
-    parent_path, module_name = _find_module(module_name, package_dir, root_dir)
+    try:
+        return getattr(StaticModule(module_name, spec), attr_name)
+    except Exception:
+        # fallback to evaluate module
+        module = _load_spec(spec, module_name)
+        return getattr(module, attr_name)
 
-    with patch_path(parent_path):
-        try:
-            # attempt to load value statically
-            return getattr(StaticModule(module_name), attr_name)
-        except Exception:
-            # fallback to simple import
-            module = importlib.import_module(module_name)
 
-    return getattr(module, attr_name)
+def _find_spec(module_name, module_path, parent_path):
+    spec = importlib.util.spec_from_file_location(module_name, module_path)
+    spec = spec or importlib.util.find_spec(module_name)
+
+    if spec is None:
+        raise ModuleNotFoundError(module_name)
+
+    return spec
+
+
+def _load_spec(spec, module_name):
+    name = getattr(spec, "__name__", module_name)
+    if name in sys.modules:
+        return sys.modules[name]
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[name] = module  # cache (it also ensures `==` works on loaded items)
+    spec.loader.exec_module(module)
+    return module
 
 
 def _find_module(module_name, package_dir, root_dir):
@@ -193,7 +193,13 @@ def _find_module(module_name, package_dir, root_dir):
             # A custom parent directory was specified for all root modules
             parent_path = os.path.join(root_dir, package_dir[''])
 
-    return parent_path, module_name
+    path_start = os.path.join(parent_path, *module_name.split("."))
+    candidates = chain(
+        (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
+        iglob(f"{path_start}.*")
+    )
+    module_path = next((x for x in candidates if os.path.isfile(x)), None)
+    return parent_path, module_path, module_name
 
 
 def resolve_class(qualified_class_name, package_dir=None, root_dir=None):
@@ -203,9 +209,8 @@ def resolve_class(qualified_class_name, package_dir=None, root_dir=None):
     class_name = qualified_class_name[idx + 1 :]
     pkg_name = qualified_class_name[:idx]
 
-    parent_path, module_name = _find_module(pkg_name, package_dir, root_dir)
-    with patch_path(parent_path):
-        module = importlib.import_module(module_name)
+    parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
+    module = _load_spec(_find_spec(module_name, path, parent_path), module_name)
     return getattr(module, class_name)
 
 
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
index 72fb22b2f9..11dc74aa3d 100644
--- a/setuptools/tests/config/test_expand.py
+++ b/setuptools/tests/config/test_expand.py
@@ -76,8 +76,8 @@ def test_read_attr(tmp_path):
 
 
 def test_resolve_class():
-    from distutils.command import sdist
-    assert expand.resolve_class('distutils.command.sdist') == sdist
+    from setuptools.command.sdist import sdist
+    assert expand.resolve_class("setuptools.command.sdist.sdist") == sdist
 
 
 def test_find_packages(tmp_path):
diff --git a/setuptools/tests/test_config.py b/setuptools/tests/test_config.py
index 005742e4be..02c4224f3e 100644
--- a/setuptools/tests/test_config.py
+++ b/setuptools/tests/test_config.py
@@ -1,8 +1,8 @@
-import types
-import sys
+import os
 
 import contextlib
 import configparser
+import importlib
 
 import pytest
 
@@ -10,7 +10,6 @@
 from mock import patch
 from setuptools.dist import Distribution, _Distribution
 from setuptools.config import ConfigHandler, read_configuration
-from distutils.core import Command
 from .textwrap import DALS
 
 
@@ -858,23 +857,23 @@ def test_python_requires_invalid(self, tmpdir):
             with get_dist(tmpdir) as dist:
                 dist.parse_config_files()
 
-    def test_cmdclass(self, tmpdir):
-        class CustomCmd(Command):
-            pass
-
-        m = types.ModuleType('custom_build', 'test package')
-
-        m.__dict__['CustomCmd'] = CustomCmd
-
-        sys.modules['custom_build'] = m
+    def test_cmdclass(self, tmpdir, monkeypatch):
+        module_path = os.path.join(tmpdir, "custom_build.py")
+        with open(module_path, "w") as f:
+            f.write("from distutils.core import Command\n")
+            f.write("class CustomCmd(Command): pass\n")
 
         fake_env(
             tmpdir,
             '[options]\n' 'cmdclass =\n' '    customcmd = custom_build.CustomCmd\n',
         )
 
+        with monkeypatch.context() as m:
+            m.syspath_prepend(tmpdir)
+            custom_build = importlib.import_module("custom_build")
+
         with get_dist(tmpdir) as dist:
-            assert dist.cmdclass == {'customcmd': CustomCmd}
+            assert dist.cmdclass == {'customcmd': custom_build.CustomCmd}
 
 
 saved_dist_init = _Distribution.__init__

From 3c293b53e4ea67c4d9a01a414fcdc0460cb99a92 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 22:02:40 +0000
Subject: [PATCH 23/55] Add means to reconstruct metadata dict from dist object

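The new helper produces a JSON-like dict (PEP 566) from an already
configured `Distribution` object, e.g. (sketch based on the test below):

    reconstructed = meta.from_dist(dist)
    assert meta.compare(metadata, reconstructed) is True
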
---
 setuptools/metadata.py            | 51 +++++++++++++++++++++++++------
 setuptools/tests/test_metadata.py |  7 +++++
 2 files changed, 49 insertions(+), 9 deletions(-)

diff --git a/setuptools/metadata.py b/setuptools/metadata.py
index fa5a06640f..2ee6df32c9 100644
--- a/setuptools/metadata.py
+++ b/setuptools/metadata.py
@@ -7,6 +7,7 @@
 import os
 from email.headerregistry import Address
 from functools import partial
+from itertools import chain
 from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Set, Union
 
 from setuptools.extern.packaging import version
@@ -84,7 +85,7 @@ def json_compatible_key(key: str) -> str:
     return key.lower().replace("-", "_")
 
 
-RFC822_KEYS = {json_compatible_key(k): k for k in CORE_METADATA}
+RFC822_MAP = {json_compatible_key(k): k for k in CORE_METADATA}
 """Mapping between JSON compatible keys (:pep:`566#json-compatible-metadata`)
 and email-header style (:rfc:`822`) core metadata keys.
 """
@@ -92,7 +93,7 @@ def json_compatible_key(key: str) -> str:
 
 def normalise_key(key: str) -> str:
     key = json_compatible_key(key)
-    if key[-1] == "s" and key[:-1] in RFC822_KEYS:
+    if key[-1] == "s" and key[:-1] in RFC822_MAP:
         # Sometimes some keys come in the plural (e.g. "classifiers", "license_files")
         return key[:-1]
     return key
@@ -239,7 +240,7 @@ def _from_project_table(metadata: dict, project: dict, dynamic: set, root_dir: _
             dynamic.remove(norm_key)
         if json_key in PYPROJECT_CORRESPONDENCE:
             PYPROJECT_CORRESPONDENCE[json_key](val, metadata, root_dir)
-        elif norm_key in RFC822_KEYS:
+        elif norm_key in RFC822_MAP:
             metadata[norm_key] = val
 
 
@@ -250,8 +251,13 @@ def _from_tool_table(metadata: dict, tool_table: dict):
             metadata[norm_key] = tool_table[key]
 
 
-SETUPTOOLS_RENAMES = {"long_description_content_type": "description_content_type"}
+SETUPTOOLS_RENAMES = {
+    "long_description_content_type": "description_content_type",
+    "license_files": "license_file"
+}
 OUTDATED_SETTERS = {"requires_dist": "requires"}
+SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls",
+                      "provides_extras", "license_file", "license_files"}
 
 
 def apply(metadata: dict, dist: "Distribution", _source: str = "pyproject.toml"):
@@ -260,10 +266,12 @@ def apply(metadata: dict, dist: "Distribution", _source: str = "pyproject.toml")
     (configuring the distribution object accordingly)
     """
     metadata_obj = dist.metadata
-    norm_attrs = ((normalise_key(x), x) for x in metadata_obj.__dict__)
+    keys = set(metadata_obj.__dict__) | SETUPTOOLS_PATCHES
+    print(f"{keys=}")
+    norm_attrs = ((normalise_key(x), x) for x in keys)
     norm_attrs = ((UPDATES.get(k, k) , v) for k, v in norm_attrs)
     norm_attrs = ((SETUPTOOLS_RENAMES.get(k, k) , v) for k, v in norm_attrs)
-    metadata_attrs = ((k, v) for k, v in norm_attrs if k in RFC822_KEYS)
+    metadata_attrs = ((k, v) for k, v in norm_attrs if k in RFC822_MAP)
     metadata_setters = {
         k: getattr(metadata_obj, f"set_{v}", partial(setattr, metadata_obj, v))
         for k, v in metadata_attrs
@@ -271,7 +279,9 @@ def apply(metadata: dict, dist: "Distribution", _source: str = "pyproject.toml")
 
     for key, value in metadata.items():
         norm_key = normalise_key(key)
-        if norm_key in OUTDATED_SETTERS:
+        if norm_key == "license_file":
+            metadata_obj.license_files = value  # plural for setuptools
+        elif norm_key in OUTDATED_SETTERS:
             setattr(metadata_obj, OUTDATED_SETTERS[norm_key], value)
         elif norm_key in metadata_setters:
             metadata_setters[norm_key](value)
@@ -290,7 +300,7 @@ def compare(metadata1: dict, metadata2: dict) -> Union[bool, int]:
     JSON-compatible metadata, as defined in :pep:`566#json-compatible-metadata`.
     Extra keys will be ignored.
     """
-    valid_keys = set(RFC822_KEYS)
+    valid_keys = set(RFC822_MAP)
     return_value: Union[bool, int] = True
     metadata1_keys = valid_keys & set(metadata1)
     metadata2_keys = valid_keys & set(metadata2)
@@ -307,7 +317,7 @@ def compare(metadata1: dict, metadata2: dict) -> Union[bool, int]:
             value1, value2 = version.parse(value1), version.parse(value2)
         elif key == "requires_dist":
             value1, value2 = _norm_reqs(value1), _norm_reqs(value2)
-        if RFC822_KEYS.get(key, key) in LIST_VALUES:
+        if RFC822_MAP.get(key, key) in LIST_VALUES:
             value1, value2 = set(value1), set(value2)
         if value1 != value2:
             return False
@@ -317,3 +327,26 @@ def compare(metadata1: dict, metadata2: dict) -> Union[bool, int]:
 
 def _norm_reqs(reqs: Iterable[str]) -> Set[str]:
     return set(map(lambda req: str(Requirement(req)), reqs))
+
+
+def from_dist(dist: "Distribution") -> dict:
+    """Given a distribution object, extract core metadata from it,
+    and return it as a JSON-like dict as defined in
+    :pep:`566#json-compatible-metadata`.
+    """
+    metadata = {}
+    target = dist.metadata
+    old = chain(UPDATES.items(), SETUPTOOLS_RENAMES.items(), OUTDATED_SETTERS.items())
+    outdated = {v: k for k, v in old}
+    for key in set(RFC822_MAP):
+        candidates = [outdated.get(key, key)]
+        if key[-1] != 's':
+            candidates.append(f"{key}s")  # sometimes the key is in the plural form
+        attr = next((k for k in candidates if hasattr(target, k)), None)
+        if attr is None:
+            continue
+        value = getattr(target, attr)
+        if value or value is False:
+            metadata[key] = value
+
+    return metadata
diff --git a/setuptools/tests/test_metadata.py b/setuptools/tests/test_metadata.py
index 71aa2c0a25..e3b7b62d76 100644
--- a/setuptools/tests/test_metadata.py
+++ b/setuptools/tests/test_metadata.py
@@ -126,7 +126,14 @@ def test_apply(tmp_path):
     meta.apply(metadata, dist)
     internal_meta = dist.metadata
     assert internal_meta.name == EXPECTED_METADATA["name"]
+    assert internal_meta.license_files == EXPECTED_METADATA["license_file"]
     assert (
         internal_meta.long_description_content_type
         == EXPECTED_METADATA["description_content_type"]
     )
+
+    reconstructed = meta.from_dist(dist)
+    cmp = meta.compare(metadata, reconstructed)
+    if cmp is not True:
+        print("cmp:", cmp)
+        assert metadata == reconstructed  # just so pytest will print the diff

From ce32429b36ac7eb38d7c646c325b3758a03fa0c0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 22:19:14 +0000
Subject: [PATCH 24/55] Add means to reconstruct options dict from dist

---
 setuptools/options.py            | 17 +++++++++++++++++
 setuptools/tests/test_options.py |  6 ++++++
 2 files changed, 23 insertions(+)

diff --git a/setuptools/options.py b/setuptools/options.py
index 61b09550cc..f0c238ea16 100644
--- a/setuptools/options.py
+++ b/setuptools/options.py
@@ -249,3 +249,20 @@ def _comparable_items(
     if isinstance(values, list):
         return (key, *sorted(values))
     return (key, values)
+
+
+def from_dist(dist: "Distribution") -> dict:
+    """Given a distribution object, extract options from it"""
+    options = {}
+    for key in OPTIONS:
+        value = getattr(dist, key, None)
+        if value or value is False:
+            options[key] = value
+
+    for cmd, opts in dist.command_options.items():
+        command_options = options.setdefault("command_options", {})
+        for key, (_src, value) in opts.items():
+            dest = command_options.setdefault(cmd, {})
+            dest[key] = value
+
+    return options
diff --git a/setuptools/tests/test_options.py b/setuptools/tests/test_options.py
index 9c1ba9f6a5..9c8d49956b 100644
--- a/setuptools/tests/test_options.py
+++ b/setuptools/tests/test_options.py
@@ -140,3 +140,9 @@ def test_apply(tmp_path):
     assert set(dist.entry_points["console_scripts"]) == {"exec = pkg.__main__:exec"}
     assert dist.command_options["sdist"]["formats"] == ("pyproject.toml", "gztar")
     assert dist.command_options["bdist_wheel"]["universal"] == ("pyproject.toml", True)
+
+    reconstructed = options.from_dist(dist)
+    cmp = options.compare(opts, reconstructed)
+    if cmp is not True:
+        print("cmp:", cmp)
+        assert opts == reconstructed  # just so pytest will print the diff

From f868ad2ab305229eddcbe4aa7e29b76281b28c2e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 Dec 2021 22:27:14 +0000
Subject: [PATCH 25/55] Rename and adapt test_config to test_legacy_setupcfg

---
 setuptools/config/__init__.py                        | 12 +++---------
 setuptools/tests/config/__init__.py                  |  0
 .../tests_legacy_setupcfg.py}                        |  6 +++---
 3 files changed, 6 insertions(+), 12 deletions(-)
 create mode 100644 setuptools/tests/config/__init__.py
 rename setuptools/tests/{test_config.py => config/tests_legacy_setupcfg.py} (99%)

diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py
index 98d1a39b11..1802e4d99a 100644
--- a/setuptools/config/__init__.py
+++ b/setuptools/config/__init__.py
@@ -1,13 +1,7 @@
 # For backward compatibility, the following classes/functions are exposed
-# from `config.setupcfg`
-from setuptools.config.legacy_setupcfg import (
-    ConfigHandler,
-    parse_configuration,
-    read_configuration,
-)
+# from `config.legacy_setupcfg`
+from setuptools.config.legacy_setupcfg import parse_configuration
 
 __all__ = [
-    'ConfigHandler',
-    'parse_configuration',
-    'read_configuration'
+    'parse_configuration',  # still required by setuptools.dist
 ]
diff --git a/setuptools/tests/config/__init__.py b/setuptools/tests/config/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/test_config.py b/setuptools/tests/config/tests_legacy_setupcfg.py
similarity index 99%
rename from setuptools/tests/test_config.py
rename to setuptools/tests/config/tests_legacy_setupcfg.py
index 02c4224f3e..84cdf62b8f 100644
--- a/setuptools/tests/test_config.py
+++ b/setuptools/tests/config/tests_legacy_setupcfg.py
@@ -7,10 +7,10 @@
 import pytest
 
 from distutils.errors import DistutilsOptionError, DistutilsFileError
-from mock import patch
+from unittest.mock import patch
 from setuptools.dist import Distribution, _Distribution
-from setuptools.config import ConfigHandler, read_configuration
-from .textwrap import DALS
+from setuptools.config.legacy_setupcfg import ConfigHandler, read_configuration
+from ..textwrap import DALS
 
 
 class ErrConfigHandler(ConfigHandler):

From d134aaf8756304755d61fb955575781426fe2ff8 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 18:41:52 +0000
Subject: [PATCH 26/55] Improve circular imports

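The main pattern applied here is deferring imports that would otherwise
create a cycle into the function body, as done in `dist.py` below:

    def parse_config_files(self, filenames=None, ignore_option_errors=False):
        from setuptools.config.legacy_setupcfg import parse_configuration
        ...
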
---
 setuptools/config/legacy_setupcfg.py | 3 ++-
 setuptools/config/pyprojecttoml.py   | 5 +++--
 setuptools/config/setupcfg.py        | 2 +-
 setuptools/dist.py                   | 3 ++-
 4 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/setuptools/config/legacy_setupcfg.py b/setuptools/config/legacy_setupcfg.py
index 80cf454107..1785690840 100644
--- a/setuptools/config/legacy_setupcfg.py
+++ b/setuptools/config/legacy_setupcfg.py
@@ -10,7 +10,8 @@
 from distutils.errors import DistutilsOptionError, DistutilsFileError
 from setuptools.extern.packaging.version import Version, InvalidVersion
 from setuptools.extern.packaging.specifiers import SpecifierSet
-from setuptools.config import expand
+
+from . import expand
 
 
 def read_configuration(filepath, find_others=False, ignore_option_errors=False):
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 18bf31c49e..6cd7a95d22 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -1,13 +1,14 @@
 """Load setuptools configuration from ``pyproject.toml`` files"""
 import os
 from contextlib import contextmanager
-from distutils import log
 from functools import partial
 
 from setuptools.extern import tomli
 from setuptools.extern._validate_pyproject import validate
-from setuptools.config import expand as _expand
 from setuptools.errors import OptionError, FileError
+from distutils import log
+
+from . import expand as _expand
 
 
 def read_configuration(filepath, expand=True, ignore_option_errors=False):
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index ce87843b16..4b9b5f52c3 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -10,7 +10,7 @@
 from setuptools.extern.ini2toml.drivers import plain_builtins as plain_builtins_driver
 from setuptools.extern.ini2toml.plugins import setuptools_pep621 as setuptools_plugin
 
-from setuptools.config import pyprojecttoml as pyproject_config
+from . import pyprojecttoml as pyproject_config
 
 
 _Path = Union[os.PathLike, str, None]
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 37a10d1dcd..37a4194de3 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -36,7 +36,6 @@
 import setuptools.command
 from setuptools import windows_support
 from setuptools.monkey import get_unpatched
-from setuptools.config import parse_configuration
 import pkg_resources
 from setuptools.extern.packaging import version
 
@@ -799,6 +798,8 @@ def parse_config_files(self, filenames=None, ignore_option_errors=False):
         and loads configuration.
 
         """
+        from setuptools.config.legacy_setupcfg import parse_configuration
+
         self._parse_config_files(filenames=filenames)
 
         parse_configuration(

From a3169580b3cf6d3a18ed053b421d6a0614c0efc4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 18:42:45 +0000
Subject: [PATCH 27/55] Fix config.pyproject attempting to expand already
 expanded packages

---
 setuptools/config/pyprojecttoml.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 6cd7a95d22..a5d306a7e8 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -136,7 +136,7 @@ def _expand_entry_points(text, dynamic):
 
 def _expand_packages(setuptools_cfg, root_dir, ignore_option_errors=False):
     packages = setuptools_cfg.get("packages")
-    if packages is None:
+    if packages is None or isinstance(packages, (list, tuple)):
         return
 
     find = packages.get("find")

From 25ddead898065c4b0210c74675030a10fa1ee900 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 18:44:10 +0000
Subject: [PATCH 28/55] Improve setuptools.{metadata,options} conversion and
 comparison
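
The comparison helpers introduced here share a small three-way protocol.
A sketch of the intended `_compare_sets` semantics (the real helper is
added to `setuptools/metadata.py` in the diff below):

    def compare_sets(value1: set, value2: set):
        """True: equal; 1: value1 is a subset; -1: value2 is a subset;
        False: each set has elements the other lacks."""
        if value1 == value2:
            return True
        if value1 > value2:
            return -1
        if value1 < value2:
            return 1
        return False

    assert compare_sets({"a"}, {"a"}) is True
    assert compare_sets({"a"}, {"a", "b"}) == 1
    assert compare_sets({"a", "b"}, {"a"}) == -1
    assert compare_sets({"a"}, {"b"}) is False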

---
 setuptools/metadata.py |  64 +++++++++++++++++++------
 setuptools/options.py  | 106 ++++++++++++++++++++++-------------------
 2 files changed, 106 insertions(+), 64 deletions(-)

diff --git a/setuptools/metadata.py b/setuptools/metadata.py
index 2ee6df32c9..23e9bee0ac 100644
--- a/setuptools/metadata.py
+++ b/setuptools/metadata.py
@@ -105,12 +105,12 @@ def _summary(val: str, dest: dict, _root_dir: _Path):
 
 
 def _description(val: _DictOrStr, dest: dict, root_dir: _Path):
+    from setuptools.config import expand
+
     if isinstance(val, str):
-        text = val
+        text = expand.read_files([val])
         ctype = "text/x-rst"
     else:
-        from setuptools.config import expand
-
         text = expand.read_files(val["file"]) if "file" in val else val["text"]
         ctype = val["content-type"]
 
@@ -194,6 +194,9 @@ def from_pyproject(pyproject: dict, root_dir: _Path = None) -> dict:
 
     This function is "forgiving" with its inputs, but strict with its outputs.
     """
+    if not pyproject:
+        return {}
+
     metadata = {}
     project = pyproject.get("project", {}).copy()
     dynamic = {normalise_key(k) for k in project.pop("dynamic", [])}
@@ -223,8 +226,9 @@ def _finalize_dynamic(metadata: dict, dynamic: set, dynamic_cfg: dict, root_dir:
         if json_key == "license_files":
             files = {v: v for v in expand.glob_relative(val, root_dir)}  # deduplicate
             val = [v for v in files.keys() if not v.endswith("~")]
-        metadata[normalise_key(key)] = val
         dynamic.discard("license")
+        if val:
+            metadata[normalise_key(key)] = val
 
     if dynamic:
         metadata["dynamic"] = sorted(list(dynamic))
@@ -252,8 +256,12 @@ def _from_tool_table(metadata: dict, tool_table: dict):
 
 
 SETUPTOOLS_RENAMES = {
+    "long_description": "description",
     "long_description_content_type": "description_content_type",
-    "license_files": "license_file"
+    "license_files": "license_file",
+    "python_requires": "requires_python",
+    "description": "summary",
+    "url": "home_page",
 }
 OUTDATED_SETTERS = {"requires_dist": "requires"}
 SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls",
@@ -267,7 +275,6 @@ def apply(metadata: dict, dist: "Distribution", _source: str = "pyproject.toml")
     """
     metadata_obj = dist.metadata
     keys = set(metadata_obj.__dict__) | SETUPTOOLS_PATCHES
-    print(f"{keys=}")
     norm_attrs = ((normalise_key(x), x) for x in keys)
+    norm_attrs = ((UPDATES.get(k, k), v) for k, v in norm_attrs)
+    norm_attrs = ((SETUPTOOLS_RENAMES.get(k, k), v) for k, v in norm_attrs)
@@ -281,6 +288,12 @@ def apply(metadata: dict, dist: "Distribution", _source: str = "pyproject.toml")
         norm_key = normalise_key(key)
         if norm_key == "license_file":
             metadata_obj.license_files = value  # plural for setuptools
+        elif norm_key == "project_url":
+            urls = {}
+            for url in value:
+                name, _, address = url.partition(",")
+                urls[name.strip()] = address.strip()
+            metadata_obj.project_urls = urls
         elif norm_key in OUTDATED_SETTERS:
             setattr(metadata_obj, OUTDATED_SETTERS[norm_key], value)
         elif norm_key in metadata_setters:
@@ -301,15 +314,11 @@ def compare(metadata1: dict, metadata2: dict) -> Union[bool, int]:
     Extra keys will be ignored.
     """
     valid_keys = set(RFC822_MAP)
-    return_value: Union[bool, int] = True
     metadata1_keys = valid_keys & set(metadata1)
     metadata2_keys = valid_keys & set(metadata2)
-    if metadata1_keys ^ metadata2_keys:
+    return_value = _compare_sets(metadata1_keys, metadata2_keys)
+    if return_value is False:
         return False
-    if metadata1_keys - metadata2_keys:
-        return_value = -1
-    elif metadata2_keys - metadata1_keys:
-        return_value = 1
 
     for key in (metadata1_keys & metadata2_keys):
         value1, value2 = metadata1[key], metadata2[key]
@@ -319,12 +328,35 @@ def compare(metadata1: dict, metadata2: dict) -> Union[bool, int]:
             value1, value2 = _norm_reqs(value1), _norm_reqs(value2)
         if RFC822_MAP.get(key, key) in LIST_VALUES:
             value1, value2 = set(value1), set(value2)
-        if value1 != value2:
+
+        if isinstance(value1, set) and isinstance(value2, set):
+            cmp = _compare_sets(value1, value2)
+            diff_returns = (cmp != return_value and return_value is not True)
+            if cmp is False or (cmp is not True and diff_returns):
+                return False
+            return_value = cmp
+        elif value1 != value2:
             return False
 
     return return_value
 
 
+def _compare_sets(value1: set, value2: set) -> Union[bool, int]:
+    """
+    ``True`` if ``value1 == value2``
+    ``1`` if ``value1`` is a subset of ``value2``
+    ``-1`` if ``value2`` is a subset of ``value1``
+    ``False`` otherwise
+    """
+    if value1 == value2:
+        return True
+    if value1 > value2:
+        return -1
+    if value1 < value2:
+        return 1
+    return False  # each set has elements the other lacks
+
+
 def _norm_reqs(reqs: Iterable[str]) -> Set[str]:
     return set(map(lambda req: str(Requirement(req)), reqs))
 
@@ -339,14 +371,16 @@ def from_dist(dist: "Distribution") -> dict:
     old = chain(UPDATES.items(), SETUPTOOLS_RENAMES.items(), OUTDATED_SETTERS.items())
     outdated = {v: k for k, v in old}
     for key in set(RFC822_MAP):
-        candidates = [outdated.get(key, key)]
+        candidates = [outdated.get(key, key), key]
         if key[-1] != 's':
             candidates.append(f"{key}s")  # sometimes the key is in the plural form
         attr = next((k for k in candidates if hasattr(target, k)), None)
         if attr is None:
             continue
         value = getattr(target, attr)
-        if value or value is False:
+        if key == "project_url" and isinstance(value, dict) and value:
+            metadata[key] = [", ".join(i) for i in value.items()]
+        elif value or value is False:
             metadata[key] = value
 
     return metadata
diff --git a/setuptools/options.py b/setuptools/options.py
index f0c238ea16..87ee5a6f8a 100644
--- a/setuptools/options.py
+++ b/setuptools/options.py
@@ -4,9 +4,11 @@
 object, etc..).
 """
 import os
+from collections.abc import Mapping
 from itertools import chain
 from typing import (TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple,
                     Type, Union)
+from types import MappingProxyType
 
 if TYPE_CHECKING:
     from pkg_resources import EntryPoint  # noqa
@@ -14,6 +16,7 @@
 
 Scalar = Union[int, float, bool, None, str]
 _Path = Union[os.PathLike, str, None]
+EMPTY = MappingProxyType({})  # Immutable dict-like
 
 OPTIONS = {
     # "obsoletes", "provides" => covered in metadata
@@ -63,6 +66,9 @@ def from_pyproject(pyproject: dict, root_dir: _Path = None) -> dict:
 
     This function is "forgiving" with its inputs, but strict with its outputs.
     """
+    if not pyproject:
+        return {}
+
     options = {}
     _ = root_dir  # argument exists for symmetry with setuptools.metadata
 
@@ -101,19 +107,9 @@ def _normalise_entry_points(pyproject: dict, options: dict):
 def _copy_command_options(pyproject: dict, options: dict):
     from distutils import log
 
-    from pkg_resources import iter_entry_points
-    from setuptools.dist import Distribution
-
     tool_table = pyproject.get("tool", {})
-    valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
-
-    cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {}).items()
-    entry_points = (_load_ep(ep) for ep in iter_entry_points('distutils.commands'))
-    entry_points = (ep for ep in entry_points if ep)
-    for cmd, cmd_class in chain(entry_points, cmdclass):
-        opts = valid_options.get(cmd, set())
-        opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
-        valid_options[cmd] = opts
+    cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
+    valid_options = _valid_command_options(cmdclass)
 
     cmd_opts = {}
     for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items():
@@ -132,6 +128,22 @@ def _copy_command_options(pyproject: dict, options: dict):
         options["command_options"] = cmd_opts
 
 
+def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
+    from pkg_resources import iter_entry_points
+    from setuptools.dist import Distribution
+
+    valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
+
+    entry_points = (_load_ep(ep) for ep in iter_entry_points('distutils.commands'))
+    entry_points = (ep for ep in entry_points if ep)
+    for cmd, cmd_class in chain(entry_points, cmdclass.items()):
+        opts = valid_options.get(cmd, set())
+        opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
+        valid_options[cmd] = opts
+
+    return valid_options
+
+
 def _load_ep(ep: "EntryPoint") -> Optional[Tuple[str, Type]]:
     # Ignore all the errors
     try:
@@ -180,56 +192,43 @@ def compare(options1: dict, options2: dict) -> Union[bool, int]:
     Both ``options1`` and ``options2`` should be dicts similar to the ones
     returned by :func:`from_pyproject`. Extra keys will be ignored.
     """
+    from .metadata import _compare_sets
+
     valid_keys = OPTIONS
     options1_keys = valid_keys & set(options1)
     options2_keys = valid_keys & set(options2)
-    return_value: Union[bool, int] = _compare_sets(options1_keys, options2_keys)
+    return_value = _compare_sets(options1_keys, options2_keys)
     if return_value is False:
         return False
 
     for key in (options1_keys & options2_keys):
-        value1, value2 = options1[key], options1[key]
-        if key == "data_files":
-            value1, value2 = _norm_items(value1), _norm_items(value2)
-        elif key == "cmdclass":
-            value1 = {(k, v.__qualname__) for k, v in value1.items()}
-            value2 = {(k, v.__qualname__) for k, v in value2.items()}
-        elif key == "command_options":
-            value1 = _norm_items(_comparable_cmd_opts(value1).items())
-            value2 = _norm_items(_comparable_cmd_opts(value2).items())
+        value1, value2 = _norm_values(key, options1[key], options2[key])
+        if isinstance(value1, set) and isinstance(value2, set):
             cmp = _compare_sets(value1, value2)
-            # Let's be more relaxed with command options, since they can be read
-            # from other files in disk
-            all_int = isinstance(cmp, int) and isinstance(return_value, int)
-            if cmp is False or (cmp != return_value and all_int):
+            diff_returns = (cmp != return_value and return_value is not True)
+            if cmp is False or (cmp is not True and diff_returns):
                 return False
             return_value = cmp
-            continue
-        elif key in DICT_VALUES:
-            value1, value2 = _norm_items(value1.items()), _norm_items(value2.items())
-        elif key in LIST_VALUES:
-            value1, value2 = set(value1), set(value2)
-        if value1 != value2:
+        elif value1 != value2:
             return False
 
     return return_value
 
 
-def _compare_sets(value1: set, value2: set) -> Union[bool, int]:
-    """
-    ``True`` if ``value1 == ``value2``
-    ``1`` if ``value1`` is a subset of ``value2``
-    ``-1`` if ``value2`` is a subset of ``value1``
-    ``False`` otherwise
-    """
-    return_value: Union[bool, int] = True
-    if value1 ^ value2:
-        return False
-    if value1 - value2:
-        return_value = -1
-    elif value2 - value1:
-        return_value = 1
-    return return_value
+def _norm_values(key: str, value1, value2) -> tuple:
+    if key == "data_files":
+        value1, value2 = _norm_items(value1), _norm_items(value2)
+    elif key == "cmdclass":
+        value1 = {(k, v.__qualname__) for k, v in value1.items()}
+        value2 = {(k, v.__qualname__) for k, v in value2.items()}
+    elif key == "command_options":
+        value1 = _norm_items(_comparable_cmd_opts(value1).items())
+        value2 = _norm_items(_comparable_cmd_opts(value2).items())
+    elif key in DICT_VALUES:
+        value1, value2 = _norm_items(value1.items()), _norm_items(value2.items())
+    elif key in LIST_VALUES:
+        value1, value2 = set(value1), set(value2)
+    return value1, value2
 
 
 def _norm_items(
@@ -254,15 +253,24 @@ def _comparable_items(
 def from_dist(dist: "Distribution") -> dict:
     """Given a distribution object, extract options from it"""
     options = {}
-    for key in OPTIONS:
+    for key in OPTIONS - {"command_options"}:
         value = getattr(dist, key, None)
         if value or value is False:
             options[key] = value
 
+    valid_cmd = set(_valid_command_options(dist.cmdclass)) - {"metadata", "options"}
+
+    command_options = {}
     for cmd, opts in dist.command_options.items():
-        command_options = options.setdefault("command_options", {})
+        if cmd not in valid_cmd:
+            continue
         for key, (_src, value) in opts.items():
             dest = command_options.setdefault(cmd, {})
             dest[key] = value
 
+    if command_options:
+        options["command_options"] = command_options
+
+    # TODO: make sure entry_points are not strings (parse if they are)
+
     return options

From a307352005646a490368403f03884731295bdaa3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 19:51:06 +0000
Subject: [PATCH 29/55] Separate read and apply operations in
 config.legacy_setupcfg

This makes it easier to re-use the `_apply` function.

The `_apply` function can be used to apply a `setup.cfg`
file to an existing dist object; the metadata and options
can then be obtained via the `setuptools.{metadata,options}` modules.
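
For example (a sketch, assuming the `setuptools.{metadata,options}`
modules introduced in the previous patch are available):

    from setuptools.dist import Distribution
    from setuptools import metadata, options
    from setuptools.config import legacy_setupcfg

    dist = Distribution()
    legacy_setupcfg._apply("setup.cfg", dist)
    meta_dict = metadata.from_dist(dist)
    opts_dict = options.from_dist(dist)
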
---
 setuptools/config/legacy_setupcfg.py | 24 ++++++++++++++----------
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git a/setuptools/config/legacy_setupcfg.py b/setuptools/config/legacy_setupcfg.py
index 1785690840..bcb26fd6e6 100644
--- a/setuptools/config/legacy_setupcfg.py
+++ b/setuptools/config/legacy_setupcfg.py
@@ -30,7 +30,17 @@ def read_configuration(filepath, find_others=False, ignore_option_errors=False):
 
     :rtype: dict
     """
-    from setuptools.dist import Distribution, _Distribution
+    from setuptools.dist import Distribution
+
+    dist = Distribution()
+    filenames = dist.find_config_files() if find_others else []
+    handlers = _apply(filepath, dist, filenames, ignore_option_errors)
+    return configuration_to_dict(handlers)
+
+
+def _apply(filepath, dist, other_files=(), ignore_option_errors=False):
+    """Read configuration from ``filepath`` and applies to the ``dist`` object."""
+    from setuptools.dist import _Distribution
 
     filepath = os.path.abspath(filepath)
 
@@ -39,24 +49,18 @@ def read_configuration(filepath, find_others=False, ignore_option_errors=False):
 
     current_directory = os.getcwd()
     os.chdir(os.path.dirname(filepath))
+    filenames = [*other_files, filepath]
 
     try:
-        dist = Distribution()
-
-        filenames = dist.find_config_files() if find_others else []
-        if filepath not in filenames:
-            filenames.append(filepath)
-
         _Distribution.parse_config_files(dist, filenames=filenames)
-
         handlers = parse_configuration(
             dist, dist.command_options, ignore_option_errors=ignore_option_errors
         )
-
+        dist._finalize_license_files()
     finally:
         os.chdir(current_directory)
 
-    return configuration_to_dict(handlers)
+    return handlers
 
 
 def _get_option(target_obj, key):

From edc89922f804760201eaf6a37917bfa408489251 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 20:05:35 +0000
Subject: [PATCH 30/55] Add a way of displaying the diff between 2 objects

This will be useful for displaying a message to the user
when the configuration obtained by reading `setup.cfg` via
config.legacy_setupcfg differs from the one obtained via config.setupcfg.

Knowing this diff also facilitates debugging.
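
Typical usage (sketch):

    from setuptools.diff_utils import diff

    legacy = {"name": "example", "install_requires": ["b", "a"]}
    new = {"name": "example", "install_requires": ["a", "b", "c"]}

    # Lists are sorted before serialisation, so only the genuine
    # difference ("c") shows up in the unified diff.
    print(diff(legacy, new, label1="legacy_setupcfg", label2="setupcfg"))
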
---
 setuptools/diff_utils.py            | 37 +++++++++++++++++++++
 setuptools/tests/test_diff_utils.py | 50 +++++++++++++++++++++++++++++
 2 files changed, 87 insertions(+)
 create mode 100644 setuptools/diff_utils.py
 create mode 100644 setuptools/tests/test_diff_utils.py

diff --git a/setuptools/diff_utils.py b/setuptools/diff_utils.py
new file mode 100644
index 0000000000..42cc1aeea8
--- /dev/null
+++ b/setuptools/diff_utils.py
@@ -0,0 +1,37 @@
+import json
+import difflib
+
+
+def diff(obj1, obj2, label1="left", label2="right") -> str:
+    """Attempt to create a string representation of the difference between 2
+    objects. If the objects contain a weird data type that cannot be serialised to
+    JSON, this function will simply convert it to str.
+    """
+
+    left = json.dumps(_make_comparable(obj1), indent=2, sort_keys=True)
+    right = json.dumps(_make_comparable(obj2), indent=2, sort_keys=True)
+    delta = difflib.unified_diff(
+        left.splitlines(True),
+        right.splitlines(True),
+        fromfile=label1,
+        tofile=label2
+    )
+    return "".join(delta)
+
+
+def _make_comparable(obj):
+    # It is not easy to force JSONEncoder to sort arrays, so we pre-process
+    if isinstance(obj, (str, int, float, bool)) or obj is None:
+        return obj
+
+    if isinstance(obj, (list, tuple)):
+        comparable = [_make_comparable(x) for x in obj]
+        try:
+            return sorted(comparable)
+        except Exception:
+            return comparable
+
+    if isinstance(obj, dict):
+        return {k: _make_comparable(v) for k, v in obj.items()}
+
+    return str(obj)
diff --git a/setuptools/tests/test_diff_utils.py b/setuptools/tests/test_diff_utils.py
new file mode 100644
index 0000000000..5296142a1e
--- /dev/null
+++ b/setuptools/tests/test_diff_utils.py
@@ -0,0 +1,50 @@
+from textwrap import dedent
+
+import pytest
+
+from setuptools.diff_utils import diff
+
+
+class _Obj:
+    def __init__(self, _repr):
+        self._repr = _repr
+
+    def __str__(self):
+        return str(self._repr)
+
+
+@pytest.mark.parametrize("example1,example2", [
+    ({"a": (1, 2, 3), "b": [-1, 1]}, {"b": [1, -1], "a": [1, 2, 3]}),
+    (_Obj(1), _Obj(1))
+])
+def test_no_diff(example1, example2):
+    delta = diff(example1, example2)
+    assert len(delta.strip()) == 0
+
+
+@pytest.mark.parametrize("example1,example2", [
+    ({"a": (1, 2, 3), "b": [-1, 1]}, {"b": [1], "c": [1, 2, 3]}),
+])
+def test_diff(example1, example2):
+    delta = diff(example1, example2)
+    expected = """\
+        --- left
+        +++ right
+        @@ -1,11 +1,10 @@
+         {
+        -  "a": [
+        +  "b": [
+        +    1
+        +  ],
+        +  "c": [
+             1,
+             2,
+             3
+        -  ],
+        -  "b": [
+        -    -1,
+        -    1
+           ]
+         }
+    """
+    assert delta.strip() == dedent(expected).strip()

From c431c0632c2b65b0d2042c0897addcc8487ae76a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 20:21:34 +0000
Subject: [PATCH 31/55] Add checks to make sure automatic conversion of configs
 works
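
A sketch of how these checks are meant to be driven (the function raises
on mismatch instead of returning False):

    from setuptools.config._backward_compatibility import (
        ensure_compatible_conversion,
        FailedExperimentalConversion,
    )

    try:
        ensure_compatible_conversion("setup.cfg", ignore_option_errors=False)
    except FailedExperimentalConversion as ex:
        # Conversion crashed, or the converted configuration is not
        # equivalent to the legacy one: warn instead of failing hard.
        ex.warn()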

---
 setuptools/config/_backward_compatibility.py | 123 +++++++++++++++++++
 1 file changed, 123 insertions(+)
 create mode 100644 setuptools/config/_backward_compatibility.py

diff --git a/setuptools/config/_backward_compatibility.py b/setuptools/config/_backward_compatibility.py
new file mode 100644
index 0000000000..abc6de4aa1
--- /dev/null
+++ b/setuptools/config/_backward_compatibility.py
@@ -0,0 +1,123 @@
+"""Simple checks to make sure the configuration that is automatically converted
+to the ``pyproject.toml`` format results in the same outcome (in terms of the
+``Distribution`` object parameters) as the one that would be previously obtained
+via ``setup.cfg``.
+"""
+import os
+import warnings
+from textwrap import dedent
+from typing import Union, Tuple, Optional
+
+
+_Path = Union[os.PathLike, str]
+
+
+EXCLUDED_FROM_COMPARISON = [
+    "metadata_version",  # not currently handled/considered
+    # Setuptools normalisations might make the following fields differ:
+    "provides_extra", "requires_dist",
+    # PEP 621 is specific about using Author/Maintainer-email when both
+    # name and emails are provided
+    "author", "maintainer", "author_email", "maintainer_email",
+]
+
+
+def ensure_compatible_conversion(filepath: _Path, ignore_option_errors: bool) -> bool:
+    from setuptools import metadata, options
+    from setuptools.diff_utils import diff
+
+    new, legacy = _read_configs(filepath, ignore_option_errors)
+
+    metas = (new["metadata"].copy(), legacy["metadata"].copy())
+    for meta in metas:
+        for field in EXCLUDED_FROM_COMPARISON:
+            meta.pop(field, None)
+
+    cmp = metadata.compare(*metas)
+    is_compatible = cmp is True or cmp == -1  # -1 => first is superset of second
+    if not is_compatible:
+        labels = ("pyproject.toml-style metadata", "setupt.cfg-style metadata")
+        raise NonEquivalentConversion(filepath, diff(*metas, *labels))
+
+    cmp = options.compare(new["options"], legacy["options"])
+    is_compatible = cmp is True or cmp == -1  # -1 => first is superset of second
+    if not is_compatible:
+        labels = ("pyproject.toml-style options", "setupt.cfg-style options")
+        delta = diff(new["options"], legacy["options"], *labels)
+        raise NonEquivalentConversion(filepath, delta)
+
+    return True
+
+
+def _read_configs(filepath: _Path, ignore_option_errors: bool) -> Tuple[dict, dict]:
+    from setuptools import metadata, options
+    from setuptools.dist import Distribution
+
+    from . import setupcfg, legacy_setupcfg
+
+    try:
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            config = setupcfg.read_configuration(filepath, True, ignore_option_errors)
+    except Exception as ex:
+        raise FailedExperimentalConversion(filepath) from ex
+
+    new = {
+        "metadata": metadata.from_pyproject(config),
+        "options": options.from_pyproject(config),
+    }
+
+    dist = Distribution()
+    with warnings.catch_warnings():
+        warnings.simplefilter("ignore")
+        legacy_setupcfg._apply(filepath, dist, (), ignore_option_errors)
+
+    legacy = {
+        "metadata": metadata.from_dist(dist),
+        "options": options.from_dist(dist),
+    }
+
+    return new, legacy
+
+
+class FailedExperimentalConversion(Exception):
+    """\
+    Some errors happened when trying to automatically convert configurations
+    from {file!r} (`setup.cfg` style) to `pyproject.toml` style.
+    """
+
+    _ISSUES_NOTE = """\
+    Please make sure you have a valid package configuration.
+    Note that setuptools support for configuration via `pyproject.toml` is
+    still **EXPERIMENTAL**. You can help by reporting this issue to:
+    \t- https://github.com/abravalheri/ini2toml/issues (automatic conversion)
+    \t- https://github.com/abravalheri/validate_pyproject/issues (validation)
+    \t- https://github.com/pypa/setuptools/issues (non conversion-related problems)
+    Please provide as much information to replicate this error as possible.
+    Pull requests are welcome and encouraged.
+    """
+
+    def __init__(self, filepath: _Path, msg: Optional[str] = None):
+        msg = (msg or self.__class__.__doc__).format(file=os.path.abspath(filepath))
+        super().__init__(dedent(msg) + "\n" + dedent(self._ISSUES_NOTE))
+
+    def warn(self):
+        """Issue a warn with the same error message.
+        For situations that are possible to workaround, but it is good to tell the user
+        """
+        warnings.warn(str(self), category=FailedConversionWarning, stacklevel=2)
+
+
+class NonEquivalentConversion(FailedExperimentalConversion):
+    """\
+    Failed automatic conversion of `setup.cfg`-style configuration to `pyproject.toml`,
+    the outcome configuration is not equivalent:
+    \n{diff}
+    """
+
+    def __init__(self, filepath: _Path, delta: str):
+        super().__init__(filepath, self.__class__.__doc__.format(diff=delta))
+
+
+class FailedConversionWarning(UserWarning):
+    """Warning associated with ``FailedExperimentalConversion``"""

From 045123a18aaa700412580a03effced3d90c6e2ec Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 20:25:31 +0000
Subject: [PATCH 32/55] Add a new public API for reading setuptools
 configuration

Additionally, deprecate the old `setup.cfg`-only API.
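
Intended usage of the new API (sketch):

    from setuptools import config
    from setuptools.dist import Distribution

    cfg = config.read("pyproject.toml")  # syntax inferred from the extension
    # cfg = config.read("setup.cfg")     # triggers the automatic conversion

    dist = Distribution()
    config.apply(cfg, dist)
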
---
 setuptools/config/__init__.py | 133 +++++++++++++++++++++++++++++++++-
 1 file changed, 129 insertions(+), 4 deletions(-)

diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py
index 1802e4d99a..8c66bbe4dd 100644
--- a/setuptools/config/__init__.py
+++ b/setuptools/config/__init__.py
@@ -1,7 +1,132 @@
-# For backward compatibility, the following classes/functions are exposed
-# from `config.legacy_setupcfg`
-from setuptools.config.legacy_setupcfg import parse_configuration
+import os
+import sys
+import warnings
+from functools import wraps
+from textwrap import dedent
+from typing import TYPE_CHECKING, Callable, Optional, TypeVar, Union
+
+if sys.version_info[:2] >= (3, 8):  # pragma: no cover
+    from typing import Literal
+
+    Syntax = Optional[Literal["ini", "cfg", "toml"]]
+else:  # pragma: no cover
+    Syntax = Optional[str]
+
+if TYPE_CHECKING:
+    from setuptools.dist import Distribution
+
+Fn = TypeVar("Fn", bound=Callable)
+_Path = Union[os.PathLike, str]
+
 
 __all__ = [
-    'parse_configuration',  # still required by setuptools.dist
+    "parse_configuration",
+    "read_configuration",
+    "read",
+    "apply"
 ]
+
+
+# -------- Backward compatibility -------
+
+
+def _deprecation_notice(fn: Fn) -> Fn:
+    from setuptools import SetuptoolsDeprecationWarning
+
+    @wraps(fn)
+    def _wrapper(*args, **kwargs):
+        msg = f"""\
+            As setuptools moves its configuration towards `pyproject.toml`,
+            `{fn.__name__}` has been deprecated.
+
+            For the time being, the `setuptools.config.legacy_setupcfg` module
+            provides backwards compatibility, but it might be removed in the future.
+            Users are encouraged to use:
+
+            `setuptools.config.read`: to obtain a dict corresponding to the
+                 data structure stored in the `pyproject.toml` format.
+            `setuptools.config.apply`: to apply the configurations read into an
+                existing `setuptools.dist.Distribution` object.
+        """
+        warnings.warn(dedent(msg), SetuptoolsDeprecationWarning)
+        return fn(*args, **kwargs)
+
+    return _wrapper
+
+
+@_deprecation_notice
+def read_configuration(filepath, find_others=False, ignore_option_errors=False):
+    from .legacy_setupcfg import read_configuration as _legacy
+    return _legacy(filepath, find_others, ignore_option_errors)
+
+
+@_deprecation_notice
+def parse_configuration(distribution, command_options, ignore_option_errors=False):
+    from .legacy_setupcfg import parse_configuration as _legacy
+    return _legacy(distribution, command_options, ignore_option_errors)
+
+
+# -------- New API -------
+
+
+def read(
+    filepath: _Path,
+    expand: bool = True,
+    ignore_option_errors: bool = False,
+    syntax: Syntax = None,
+) -> dict:
+    """Read configuration from ``pyproject.toml``.
+
+    If a config file with the legacy ``setup.cfg`` format is provided,
+    this function will attempt to automatically convert it to the new format.
+    If this conversion fails, a `FailedExperimentalConversion` error is raised.
+
+    :param bool expand: Whether to expand directives and other computed values
+        (i.e. post-process the given configuration)
+
+    :param bool ignore_option_errors: Whether to silently ignore
+        options, values of which could not be resolved (e.g. due to exceptions
+        in directives such as file:, attr:, etc.).
+        If False exceptions are propagated as expected.
+
+    :param syntax: One of `ini`, `cfg` or `toml` (optional). When not provided, setuptools
+        will attempt to guess based on the file name.
+    """
+    from . import setupcfg, pyprojecttoml
+
+    if syntax is None:
+        _, ext = os.path.splitext(filepath)
+        if ext not in {".ini", ".cfg", ".toml"}:
+            msg = f"Could not infer the configuration language for {filepath!r}. "
+            msg += 'Please specify the `syntax` argument (e.g. `syntax="toml"`)'
+            raise ValueError(msg)
+        syntax = ext.strip(".")
+
+    if syntax != "toml":
+        from ._backward_compatibility import ensure_compatible_conversion
+
+        ensure_compatible_conversion(filepath, ignore_option_errors)
+        # ^-- To support the transition period we do a comparison and fail if it differs
+        #     TODO: Remove once the transition period ends
+        return setupcfg.read_configuration(filepath, expand, ignore_option_errors)
+
+    return pyprojecttoml.read_configuration(filepath, expand, ignore_option_errors)
+
+
+def apply(
+    config: dict,
+    dist: "Distribution",
+    source: str = "pyproject.toml"
+) -> "Distribution":
+    """Apply configurations from a dict (that was loaded via the ``read`` function)
+    into a distribution object.
+    """
+    from setuptools import metadata, options
+
+    meta = metadata.from_pyproject(config)
+    metadata.apply(meta, dist)
+
+    opts = options.from_pyproject(config)
+    options.apply(opts, dist)
+
+    return dist

From 38db91c6b203c3af47d80c79b543f2e514ee718f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 20:29:02 +0000
Subject: [PATCH 33/55] Ensure examples work with the automatic conversion of
 setup.cfg

---
 .gitignore                                    |  1 +
 setuptools/tests/config/setupcfg_examples.txt | 22 +++++++++
 .../config/test_backward_compatibility.py     | 47 +++++++++++++++++++
 3 files changed, 70 insertions(+)
 create mode 100644 setuptools/tests/config/setupcfg_examples.txt
 create mode 100644 setuptools/tests/config/test_backward_compatibility.py

diff --git a/.gitignore b/.gitignore
index dc14826ec8..7114d90316 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,4 @@ setuptools.egg-info
 .idea/
 .pytest_cache/
 .mypy_cache/
+/setuptools/tests/config/downloads
diff --git a/setuptools/tests/config/setupcfg_examples.txt b/setuptools/tests/config/setupcfg_examples.txt
new file mode 100644
index 0000000000..a0c2ef3416
--- /dev/null
+++ b/setuptools/tests/config/setupcfg_examples.txt
@@ -0,0 +1,22 @@
+# ====================================================================
+# Some popular packages that use setup.cfg (and others not so popular)
+# Reference: https://hugovk.github.io/top-pypi-packages/
+# ====================================================================
+https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg
+https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg
+https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg
+https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg
+https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg
+https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg
+https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg
+https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg
+https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg
+https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
+https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
+https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
+https://github.com/tqdm/tqdm/raw/fc69d5dcf578f7c7986fa76841a6b793f813df35/setup.cfg
+https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
+https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
+https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
+https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg
+https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg
diff --git a/setuptools/tests/config/test_backward_compatibility.py b/setuptools/tests/config/test_backward_compatibility.py
new file mode 100644
index 0000000000..26a9441966
--- /dev/null
+++ b/setuptools/tests/config/test_backward_compatibility.py
@@ -0,0 +1,47 @@
+import re
+from pathlib import Path
+from urllib.request import urlopen
+from unittest.mock import Mock
+
+import pytest
+
+from setuptools import config
+from setuptools.config import expand
+
+
+EXAMPLES = (Path(__file__).parent / "setupcfg_examples.txt").read_text()
+EXAMPLE_URLS = [x for x in EXAMPLES.splitlines() if not x.startswith("#")]
+DOWNLOAD_DIR = Path(__file__).parent / "downloads"
+
+
+@pytest.mark.parametrize("url", EXAMPLE_URLS)
+@pytest.mark.filterwarnings("ignore")
+def test_successful_conversion(url, monkeypatch):
+    monkeypatch.setattr(expand, 'read_attr', Mock(return_value="0.0.1"))
+    example = retrieve_file(url, DOWNLOAD_DIR)
+    config.read(example, syntax="ini")
+
+
+NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
+
+
+def retrieve_file(url, download_dir):
+    file_name = url.strip()
+    for part in NAME_REMOVE:
+        file_name = file_name.replace(part, '').strip().strip('/:').strip()
+    file_name = re.sub(r"[^\-_\.\w\d]+", "_", file_name)
+    path = Path(download_dir, file_name)
+    if not path.exists():
+        download_dir.mkdir(exist_ok=True, parents=True)
+        download(url, path)
+    return path
+
+
+def download(url, dest):
+    with urlopen(url) as f:
+        data = f.read()
+
+    with open(dest, "wb") as f:
+        f.write(data)
+
+    assert Path(dest).exists()

From 629d26b474be4e85d4c742744e84b50aeee310bc Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 Dec 2021 21:30:43 +0000
Subject: [PATCH 34/55] Make it possible to ignore `setup.cfg` in
 dist.parse_config_files
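
A sketch of the new escape hatch (mirroring the test added below):

    from setuptools.dist import Distribution

    dist = Distribution({"skip_setupcfg": True})
    dist.parse_config_files()  # any setup.cfg found is now ignored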

---
 setuptools/dist.py            |  4 ++++
 setuptools/tests/test_dist.py | 20 ++++++++++++++++++++
 2 files changed, 24 insertions(+)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 37a4194de3..fdb2836b74 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -453,6 +453,7 @@ def __init__(self, attrs=None):
         self.patch_missing_pkg_info(attrs)
         self.dependency_links = attrs.pop('dependency_links', [])
         self.setup_requires = attrs.pop('setup_requires', [])
+        self.skip_setupcfg = attrs.pop("skip_setupcfg", False)
         for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
             vars(self).setdefault(ep.name, None)
         _Distribution.__init__(
@@ -657,6 +658,9 @@ def _parse_config_files(self, filenames=None):  # noqa: C901
         if filenames is None:
             filenames = self.find_config_files()
 
+        if self.skip_setupcfg:
+            filenames = [x for x in filenames if os.path.basename(x) != 'setup.cfg']
+
         if DEBUG:
             self.announce("Distribution.parse_config_files():")
 
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index c4279f0bc4..321134ec2b 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -374,3 +374,23 @@ def test_check_specifier():
 )
 def test_rfc822_unescape(content, result):
     assert (result or content) == rfc822_unescape(rfc822_escape(content))
+
+
+def test_skip_setupcfg(tmpdir):
+    setupcfg = """\
+    [metadata]
+    name = example
+    version = 0.0.1
+    """
+    tmpdir.join("setup.cfg").write(DALS(setupcfg))
+    with tmpdir.as_cwd():
+        dist = Distribution()
+        dist.parse_config_files()
+        assert dist.get_name() == "example"
+        assert dist.get_version() == "0.0.1"
+
+        dist = Distribution()
+        dist.skip_setupcfg = True
+        dist.parse_config_files()
+        assert dist.get_name() == "UNKNOWN"
+        assert dist.get_version() == "0.0.0"

From a4010919e5500b004c43a741931c98d408872dfb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 18:52:32 +0000
Subject: [PATCH 35/55] Clean metadata/options if the file is not being used to
 configure setuptools
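
A sketch of the expected behaviour: a ``pyproject.toml`` that only
configures other tools (no ``[project]`` and no ``[tool.setuptools]``
tables) should yield an empty configuration:

    import pathlib
    from setuptools.config import pyprojecttoml

    pathlib.Path("pyproject.toml").write_text("[tool.black]\nline-length = 88\n")
    assert pyprojecttoml.read_configuration("pyproject.toml") == {}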

---
 setuptools/config/pyprojecttoml.py |  4 ++++
 setuptools/config/setupcfg.py      | 20 ++++++++++++++++++++
 2 files changed, 24 insertions(+)

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index a5d306a7e8..ba98fcc5ec 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -34,6 +34,10 @@ def read_configuration(filepath, expand=True, ignore_option_errors=False):
 
     with open(filepath, "rb") as file:
         asdict = tomli.load(file)
+    project_table = asdict.get("project")
+    tool_table = asdict.get("tool", {}).get("setuptools")
+    if not asdict or not(project_table or tool_table):
+        return {}  # User is not using pyproject to configure setuptools
 
     with _ignore_errors(ignore_option_errors):
         validate(asdict)
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 4b9b5f52c3..40519b494b 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -2,6 +2,7 @@
 in memory data structure, and then proceed to load the configuration.
 """
 import os
+from configparser import ConfigParser
 from typing import Union
 
 from setuptools.errors import FileError
@@ -59,9 +60,28 @@ def read_configuration(
 
     asdict = convert(filepath)
 
+    with open(filepath, "r") as f:
+        cfg = ConfigParser()
+        cfg.read_file(f)
+
+    if not(
+        "metadata" in cfg
+        or any(x.startswith("option") for x in cfg.sections())
+    ):
+        # Secondary use of `setup.cfg`, probably for distutils commands or other tools
+        asdict.pop("project", None)
+        asdict.get("tool", {}).pop("setuptools", None)
+
     with pyproject_config._ignore_errors(ignore_option_errors):
         pyproject_config.validate(asdict)
 
+    if "options" in cfg and "setup_requires" in cfg["options"]:
+        # TODO: Workaround needed while `setup_requires` is still handled
+        tool_table = asdict.setdefault("tool", {}).setdefault("setuptools", {})
+        tool_table["setup_requires"] = cfg["options"]["setup_requires"]
+
     if expand:
         root_dir = os.path.dirname(filepath)
         return expand_configuration(asdict, root_dir, ignore_option_errors)
+
+    return asdict

From dabbbf1d96664e99dd6c7904c2d508f715ff051a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 19:11:35 +0000
Subject: [PATCH 36/55] Use run_setup to obtain a dist object and apply the new
 configs

The `run_setup` function from `distutils.core` allows retrieving the
distribution object before the configuration is read.
Then using the new functions from the `setuptools.config` package
we can apply the TOML-style configuration (either from a
`pyproject.toml` file or from an automatically converted `setup.cfg`).

To ensure backward compatibility, in the case of discrepancies in the
conversion of the `setup.cfg` files, the implementation falls back to
the legacy configuration procedure.
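
The resulting precedence, roughly (a sketch, not the actual
implementation): `pyproject.toml` is applied last, so it wins over an
auto-converted `setup.cfg`, which in turn wins over what `setup.py`
passed to `setup()`:

    import os
    from distutils.core import run_setup
    import setuptools.config

    dist = run_setup("setup.py", stop_after="init")
    if os.path.exists("setup.cfg"):
        setuptools.config.apply(setuptools.config.read("setup.cfg"), dist)
    if os.path.exists("pyproject.toml"):
        setuptools.config.apply(setuptools.config.read("pyproject.toml"), dist)
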
---
 setuptools/build_meta.py | 66 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 66 insertions(+)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index d0ac613ba3..20f78a65ae 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -126,6 +126,32 @@ def suppress_known_deprecation():
         yield
 
 
+@contextlib.contextmanager
+def _patch_distutils_exec():
+    """Make sure distutils uses the code exec-ing enhancements"""
+    orig_exec = exec
+    if hasattr(distutils.core, "run_commands"):
+        yield  # do nothing, already using the improved version of distutils
+        return
+
+    def _exec(code, global_vars):
+        try:
+            _, tmp = tempfile.mkstemp(suffix="setup.py")
+            with open(tmp, "wb") as f:
+                f.write(code)
+            with tokenize.open(tmp) as f:
+                code = f.read().replace(r'\r\n', r'\n')
+        finally:
+            os.remove(tmp)
+        orig_exec(code, {**global_vars, "__name__": "__main__"})
+
+    distutils.core.exec = _exec
+    try:
+        yield
+    finally:
+        distutils.core.exec = orig_exec
+
+
 class _BuildMetaBackend(object):
 
     def _fix_config(self, config_settings):
@@ -133,6 +159,46 @@ def _fix_config(self, config_settings):
         config_settings.setdefault('--global-option', [])
         return config_settings
 
+    def _get_dist(self, setup_script="setup.py", config_file="pyproject.toml",
+                  legacy_config_file="setup.cfg"):
+        """Retrieve a distribution object already configured."""
+        import setuptools.config
+
+        read_opts = {}
+        if os.path.exists(setup_script):
+            with no_install_setup_requires(), _patch_distutils_exec():
+                dist = distutils.core.run_setup(setup_script, stop_after="init")
+            # read_opts['ignore_option_errors'] = True
+        else:
+            dist = setuptools.dist.Distribution()
+
+        if os.path.exists(legacy_config_file):
+            from setuptools.config import _backward_compatibility
+
+            try:
+                config = setuptools.config.read(legacy_config_file, **read_opts)
+                setuptools.config.apply(config, dist)
+                dist.skip_setupcfg = True
+                tool_table = config.get("tool", {}).get("setuptools", {})
+                # TODO: Remove when `setup_requires` is no longer supported
+                dist.setup_requires = tool_table.get("setup_requires", [])
+            except _backward_compatibility.FailedExperimentalConversion as e:
+                # Let's take a conservative approach during the transition between
+                # `setup.cfg` and `pyproject.toml`:
+                # In the case there is a problem with the automatic conversion
+                # we tell the user (so they can open an issue or fix bad configuration)
+                # but still fallback to the old procedure.
+                # TODO: Just fail after the transition period ends.
+                e.warn()
+
+        dist.parse_config_files()  # Should we read files out of the dist dir??
+
+        if os.path.exists(config_file):
+            config = setuptools.config.read(config_file)
+            setuptools.config.apply(config, dist)
+
+        return dist
+
     def _get_build_requires(self, config_settings, requirements):
         config_settings = self._fix_config(config_settings)
 

From 972248fa06f6c814ca51d6fea4dfd3c43bf1eca9 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 19:21:08 +0000
Subject: [PATCH 37/55] Use the distribution object to run commands in
 build_meta

Once we have access to the distribution object, it is no longer
necessary to fully run the setup script.
Instead, the backend can have more control over the process.

This change builds on top of the `_get_dist` function and the
`distutils.core.run_commands` to trigger commands on the distribution
object.

For the legacy backend, the setup script still runs traditionally
(but the corner cases are handled directly by `distutils.core.run_setup`).
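
Conceptually, the backend now drives commands like this (sketch):

    import sys
    from setuptools.dist import Distribution

    # instead of mutating sys.argv and exec-ing setup.py:
    dist = Distribution({"name": "example", "version": "0.0.1"})
    dist.script_name = sys.argv[0]
    dist.script_args = ["clean"]  # any distutils/setuptools command
    dist.parse_command_line()
    dist.run_commands()
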
---
 setuptools/build_meta.py | 62 +++++++++++++++++++---------------------
 1 file changed, 29 insertions(+), 33 deletions(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 20f78a65ae..2ee75e1b58 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -111,14 +111,6 @@ def _file_with_extension(directory, extension):
     return file
 
 
-def _open_setup_script(setup_script):
-    if not os.path.exists(setup_script):
-        # Supply a default setup.py
-        return io.StringIO(u"from setuptools import setup; setup()")
-
-    return getattr(tokenize, 'open', open)(setup_script)
-
-
 @contextlib.contextmanager
 def suppress_known_deprecation():
     with warnings.catch_warnings():
@@ -212,16 +204,20 @@ def _get_build_requires(self, config_settings, requirements):
 
         return requirements
 
-    def run_setup(self, setup_script='setup.py'):
+    def run_command(self, *args):
         # Note that we can reuse our build directory between calls
         # Correctness comes first, then optimization later
-        __file__ = setup_script
-        __name__ = '__main__'
-
-        with _open_setup_script(__file__) as f:
-            code = f.read().replace(r'\r\n', r'\n')
-
-        exec(compile(code, __file__, 'exec'), locals())
+        dist = self._get_dist()
+        dist.script_name = sys.argv[0]
+        dist.script_args = args
+        dist.parse_command_line()
+
+        if hasattr(distutils.core, 'run_commands'):
+            return distutils.core.run_commands(dist)
+        try:  # TODO: remove fallback once setuptools can use local distutils
+            dist.run_commands()
+        except Exception as ex:
+            raise SystemExit("error: " + str(ex))
 
     def get_requires_for_build_wheel(self, config_settings=None):
         config_settings = self._fix_config(config_settings)
@@ -234,10 +230,7 @@ def get_requires_for_build_sdist(self, config_settings=None):
 
     def prepare_metadata_for_build_wheel(self, metadata_directory,
                                          config_settings=None):
-        sys.argv = sys.argv[:1] + [
-            'dist_info', '--egg-base', metadata_directory]
-        with no_install_setup_requires():
-            self.run_setup()
+        self.run_command('dist_info', '--egg-base', metadata_directory)
 
         dist_info_directory = metadata_directory
         while True:
@@ -274,11 +267,10 @@ def _build_with_temp_dir(self, setup_command, result_extension,
         # Build in a temporary directory, then copy to the target.
         os.makedirs(result_directory, exist_ok=True)
         with tempfile.TemporaryDirectory(dir=result_directory) as tmp_dist_dir:
-            sys.argv = (sys.argv[:1] + setup_command +
-                        ['--dist-dir', tmp_dist_dir] +
-                        config_settings["--global-option"])
-            with no_install_setup_requires():
-                self.run_setup()
+            self.run_command(
+                *setup_command, "--dist-dir", tmp_dist_dir,
+                *config_settings["--global-option"]
+            )
 
             result_basename = _file_with_extension(
                 tmp_dist_dir, result_extension)
@@ -313,25 +305,30 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
     packaging mechanism,
     and will eventually be removed.
     """
-    def run_setup(self, setup_script='setup.py'):
+    setup_script = "setup.py"
+
+    def run_command(self, *args):
         # In order to maintain compatibility with scripts assuming that
         # the setup.py script is in a directory on the PYTHONPATH, inject
         # '' into sys.path. (pypa/setuptools#1642)
         sys_path = list(sys.path)           # Save the original path
 
+        setup_script = self.setup_script
+        if not os.path.exists(setup_script) or os.stat(setup_script).st_size == 0:
+            msg = f"Empty or missing {setup_script!r}. A valid script that calls "
+            msg += "`setup()` is required by the legacy backend."
+            raise ValueError(msg)
+
         script_dir = os.path.dirname(os.path.abspath(setup_script))
         if script_dir not in sys.path:
             sys.path.insert(0, script_dir)
 
         # Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
         # get the directory of the source code. They expect it to refer to the
-        # setup.py script.
-        sys_argv_0 = sys.argv[0]
-        sys.argv[0] = setup_script
-
+        # setup.py script. ==> This is already handled in distutils.core
         try:
-            super(_BuildMetaLegacyBackend,
-                  self).run_setup(setup_script=setup_script)
+            with no_install_setup_requires(), _patch_distutils_exec():
+                distutils.core.run_setup(setup_script, args)
         finally:
             # While PEP 517 frontends should be calling each hook in a fresh
             # subprocess according to the standard (and thus it should not be
@@ -339,7 +336,6 @@ def run_setup(self, setup_script='setup.py'):
             # the original path so that the path manipulation does not persist
             # within the hook after run_setup is called.
             sys.path[:] = sys_path
-            sys.argv[0] = sys_argv_0
 
 
 # The primary backend

From a3563766c8dd3a5e5c6959d9cdbffe848bf03be0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 19:25:31 +0000
Subject: [PATCH 38/55] Replace _get_build_requires by using the dist object

Now that the dist object is available for the backend, we can simplify
the workarounds to obtain the `setup_requires` (just reading the
equivalent attribute in the dist object should suffice)
---
 setuptools/build_meta.py | 48 ++++++----------------------------------
 1 file changed, 7 insertions(+), 41 deletions(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 2ee75e1b58..fa3f1c3dd6 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -26,7 +26,6 @@
 Again, this is not a formal definition! Just a "taste" of the module.
 """
 
-import io
 import os
 import sys
 import tokenize
@@ -34,6 +33,7 @@
 import contextlib
 import tempfile
 import warnings
+from itertools import chain
 
 import setuptools
 import distutils
@@ -45,35 +45,7 @@
            'prepare_metadata_for_build_wheel',
            'build_wheel',
            'build_sdist',
-           '__legacy__',
-           'SetupRequirementsError']
-
-
-class SetupRequirementsError(BaseException):
-    def __init__(self, specifiers):
-        self.specifiers = specifiers
-
-
-class Distribution(setuptools.dist.Distribution):
-    def fetch_build_eggs(self, specifiers):
-        specifier_list = list(map(str, parse_requirements(specifiers)))
-
-        raise SetupRequirementsError(specifier_list)
-
-    @classmethod
-    @contextlib.contextmanager
-    def patch(cls):
-        """
-        Replace
-        distutils.dist.Distribution with this class
-        for the duration of this context.
-        """
-        orig = distutils.core.Distribution
-        distutils.core.Distribution = cls
-        try:
-            yield
-        finally:
-            distutils.core.Distribution = orig
+           '__legacy__']
 
 
 @contextlib.contextmanager
@@ -192,17 +164,11 @@ def _get_dist(self, setup_script="setup.py", config_file="pyproject.toml",
         return dist
 
     def _get_build_requires(self, config_settings, requirements):
-        config_settings = self._fix_config(config_settings)
-
-        sys.argv = sys.argv[:1] + ['egg_info'] + \
-            config_settings["--global-option"]
-        try:
-            with Distribution.patch():
-                self.run_setup()
-        except SetupRequirementsError as e:
-            requirements += e.specifiers
-
-        return requirements
+        dist = self._get_dist()
+        parsed = chain(parse_requirements(requirements),
+                       parse_requirements(dist.setup_requires))
+        deduplicated = {r.key: str(r) for r in parsed}
+        return list(deduplicated.values())
 
     def run_command(self, *args):
         # Note that we can reuse our build directory between calls

From 3dd56ed1dbf9ba52e2a478d18477a358209d88bd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 19:28:40 +0000
Subject: [PATCH 39/55] Change test_build_meta accordingly

After the changes in build_meta, some adjustments to the associated test
suite are required.

It seems that some assertions are not really needed, but it is important
to clarify that. For the time being, some skips are added until further
clarification.
---
 setuptools/tests/test_build_meta.py | 42 +++++++++++++++++++++++++----
 1 file changed, 37 insertions(+), 5 deletions(-)

diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 0f4a1a7363..e4c5b9458e 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -3,7 +3,7 @@
 import tarfile
 import importlib
 from concurrent import futures
-import re
+import warnings
 
 import pytest
 from jaraco import path
@@ -120,6 +120,22 @@ def run():
             print('hello')
         """)
     },
+    {
+        'setup.cfg': DALS("""
+        [metadata]
+        name = foo
+        version = 0.0.0
+
+        [options]
+        py_modules=hello
+        setup_requires=six
+        """),
+        'setup.py': "__import__('setuptools').setup()",
+        'hello.py': DALS("""
+        def run():
+            print('hello')
+        """)
+    },
 ]
 
 
@@ -131,9 +147,18 @@ def get_build_backend(self):
 
     @pytest.fixture(params=defns)
     def build_backend(self, tmpdir, request):
+        if 'legacy' in self.backend_name and 'setup.py' not in request.param:
+            pytest.skip("Legacy backend needs 'setup.py'")
+            # TODO: Is there a motivation behind testing the legacy backend
+            #       without a setup.py script?
+
         path.build(request.param, prefix=str(tmpdir))
         with tmpdir.as_cwd():
-            yield self.get_build_backend()
+            with warnings.catch_warnings():
+                # Most of the scripts in this test use `setup_requires`
+                # and ini2toml will issue a deprecation warning for that field
+                warnings.simplefilter("ignore", category=DeprecationWarning)
+                yield self.get_build_backend()
 
     def test_get_requires_for_build_wheel(self, build_backend):
         actual = build_backend.get_requires_for_build_wheel()
@@ -437,6 +462,10 @@ def run():
             """)
     }
 
+    @pytest.mark.skip("TODO: Clarify the use case motivating this test")
+    # Why is it necessary that the assertion error is raised?
+    # Or is it just a side effect that was found to happen,
+    # but it is not necessary for setuptools to work properly?
     def test_sys_argv_passthrough(self, tmpdir_cwd):
         path.build(self._sys_argv_0_passthrough)
         build_backend = self.get_build_backend()
@@ -448,9 +477,12 @@ def test_build_with_empty_setuppy(self, build_backend, build_hook):
         files = {'setup.py': ''}
         path.build(files)
 
-        with pytest.raises(
-                ValueError,
-                match=re.escape('No distribution was found.')):
+        error_messages = (
+            r"(setup\(\)' was never called)|"
+            "(Empty or missing 'setup.py')"
+        )
+
+        with pytest.raises((ValueError, RuntimeError), match=error_messages):
             getattr(build_backend, build_hook)("temp")
 
 

From 373e201f9168779f7d5f5b99afc152c08a486db4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 19:31:27 +0000
Subject: [PATCH 40/55] Prevent bootstrap problems with vendored dependencies
 in config

The `setuptools.config` module needs at least 3 vendored dependencies to
work, but it seems that this might cause some bootstrapping problems.

This change implements a workaround for that (and adds better debugging
messages).
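
The workaround follows the usual vendoring fallback pattern (a distilled
sketch of the `load_file` change below):

    import sys

    try:
        from setuptools.extern import tomli
    except ImportError:  # bootstrap: the extern shim is not wired up yet
        saved_path = sys.path.copy()
        try:
            from setuptools import _vendor
            sys.path.append(_vendor.__path__[0])
            import tomli
        finally:
            sys.path = saved_path
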
---
 setuptools/config/pyprojecttoml.py | 47 ++++++++++++++++++++++++++----
 setuptools/config/setupcfg.py      |  2 +-
 2 files changed, 43 insertions(+), 6 deletions(-)

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index ba98fcc5ec..aafec14f46 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -1,15 +1,53 @@
 """Load setuptools configuration from ``pyproject.toml`` files"""
 import os
+import sys
 from contextlib import contextmanager
 from functools import partial
+from typing import Union
+import json
 
-from setuptools.extern import tomli
-from setuptools.extern._validate_pyproject import validate
 from setuptools.errors import OptionError, FileError
 from distutils import log
 
 from . import expand as _expand
 
+_Path = Union[str, os.PathLike]
+
+
+def load_file(filepath: _Path):
+    try:
+        from setuptools.extern import tomli
+    except ImportError:  # Bootstrap problem (?) diagnosed by test_distutils_adoption
+        sys_path = sys.path.copy()
+        try:
+            from setuptools import _vendor
+            sys.path.append(_vendor.__path__[0])
+            import tomli
+        finally:
+            sys.path = sys_path
+
+    with open(filepath, "rb") as file:
+        return tomli.load(file)
+
+
+def validate(config: dict, filepath: _Path):
+    from setuptools.extern import _validate_pyproject
+    from setuptools.extern._validate_pyproject import fastjsonschema_exceptions
+
+    try:
+        return _validate_pyproject.validate(config)
+    except fastjsonschema_exceptions.JsonSchemaValueException as ex:
+        msg = [f"Schema: {ex}"]
+        if ex.value:
+            msg.append(f"Given value:\n{json.dumps(ex.value, indent=2)}")
+        if ex.rule:
+            msg.append(f"Offending rule: {json.dumps(ex.rule, indent=2)}")
+        if ex.definition:
+            msg.append(f"Definition:\n{json.dumps(ex.definition, indent=2)}")
+
+        log.error("\n\n".join(msg) + "\n")
+        raise
+
 
 def read_configuration(filepath, expand=True, ignore_option_errors=False):
     """Read given configuration file and returns options from it as a dict.
@@ -32,15 +70,14 @@ def read_configuration(filepath, expand=True, ignore_option_errors=False):
     if not os.path.isfile(filepath):
         raise FileError(f"Configuration file {filepath!r} does not exist.")
 
-    with open(filepath, "rb") as file:
-        asdict = tomli.load(file)
+    asdict = load_file(filepath) or {}
     project_table = asdict.get("project")
     tool_table = asdict.get("tool", {}).get("setuptools")
     if not asdict or not(project_table or tool_table):
         return {}  # User is not using pyproject to configure setuptools
 
     with _ignore_errors(ignore_option_errors):
-        validate(asdict)
+        validate(asdict, filepath)
 
     if expand:
         root_dir = os.path.dirname(filepath)
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 40519b494b..4dcde4ae2f 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -73,7 +73,7 @@ def read_configuration(
         asdict.get("tool", {}).pop("setuptools", None)
 
     with pyproject_config._ignore_errors(ignore_option_errors):
-        pyproject_config.validate(asdict)
+        pyproject_config.validate(asdict, filepath)
 
     if "options" in cfg and "setup_requires" in cfg["options"]:
         # TODO: Workaround needed while `setup_requires` is still handled

From 0c18c78ab54c009b3b3b182ef3033c957c23cf16 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 17:16:06 +0000
Subject: [PATCH 41/55] Prevent tests from copying the .git/.tox and other
 heavy folders

The `tmp_src` fixture copies the setuptools directory to prevent errors
that appear when running tests concurrently. However, it seems to copy
everything, including the `.git` directory (and possibly others such as
`.tox`). These directories can be quite heavy and error-prone to copy.

The changes introduced here prevent copying these unnecessary
folders/files. As a side effect, the tests should run slightly faster.
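
For reference, a similar effect could be obtained with `shutil.copytree`'s
`ignore` hook (a sketch, with placeholder paths); the difference is that
`ignore_patterns` prunes matches at every directory level, while the change
below only filters the repository root:

    import shutil

    shutil.copytree(
        "path/to/setuptools-checkout",  # placeholder source directory
        "path/to/tmp/src",              # placeholder destination
        ignore=shutil.ignore_patterns(".*", "dist", "build", "docs"),
    )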
---
 setuptools/tests/fixtures.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index a5a172e0f9..229c900ead 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -2,6 +2,7 @@
 import sys
 import shutil
 import subprocess
+from pathlib import Path
 
 import pytest
 
@@ -40,8 +41,17 @@ def tmp_src(request, tmp_path):
     when they are not being executed sequentially.
     """
     tmp_src_path = tmp_path / 'src'
-    shutil.copytree(request.config.rootdir, tmp_src_path)
-    return tmp_src_path
+    tmp_src_path.mkdir(exist_ok=True, parents=True)
+    for item in Path(request.config.rootdir).glob("*"):
+        name = item.name
+        if str(name).startswith(".") or name in ("dist", "build", "docs"):
+            # Avoid copying unnecessary folders, especially the .git one
+            # that can contain lots of files and is error-prone to copy
+            continue
+        copy = shutil.copy2 if item.is_file() else shutil.copytree
+        copy(item, tmp_src_path / item.name)
+
+    yield tmp_src_path
 
 
 @pytest.fixture(autouse=True, scope="session")

From e128ba4ceb3ada35f0a6df8d26000a9fe45a6719 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 22:01:00 +0000
Subject: [PATCH 42/55] Add backend test with pyproject.toml-only config

---
 setuptools/build_meta.py            |  2 +
 setuptools/tests/test_build_meta.py | 92 ++++++++++++++++++++++++++++-
 2 files changed, 93 insertions(+), 1 deletion(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index fa3f1c3dd6..3c884eb775 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -161,6 +161,8 @@ def _get_dist(self, setup_script="setup.py", config_file="pyproject.toml",
             config = setuptools.config.read(config_file)
             setuptools.config.apply(config, dist)
 
+        dist.finalize_options()
+
         return dist
 
     def _get_build_requires(self, config_settings, requirements):
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index e4c5b9458e..8772ae0f80 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -2,8 +2,9 @@
 import shutil
 import tarfile
 import importlib
-from concurrent import futures
 import warnings
+from concurrent import futures
+from zipfile import ZipFile
 
 import pytest
 from jaraco import path
@@ -224,6 +225,95 @@ def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
         assert third_result == second_result
         assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
 
+    def test_build_with_pyproject_metadata(self, tmpdir_cwd):
+        if 'legacy' in self.backend_name:
+            pytest.skip("Legacy backend does not support 'pyproject.toml' metadata")
+
+        files = {
+            'setup.py': "__import__('setuptools').setup()",
+            'pyproject.toml': DALS("""
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "foo"
+                description = "This is a Python package"
+                dynamic = ["version", "license", "readme"]
+                classifiers = [
+                    "Development Status :: 5 - Production/Stable",
+                    "Intended Audience :: Developers"
+                ]
+                urls = {Homepage = "http://github.com"}
+                dependencies = [
+                    "appdirs",
+                ]
+
+                [project.optional-dependencies]
+                all = [
+                    "tomli>=1",
+                    "pyscaffold>=4,<5",
+                    'importlib; python_version == "2.6"',
+                ]
+
+                [project.scripts]
+                foo = "foo.cli:main"
+
+                [tool.setuptools]
+                package-dir = {"" = "src"}
+                packages = {find = {where = ["src"]}}
+
+                [tool.setuptools.dynamic]
+                version = {attr = "foo.__version__"}
+                license = "MIT"
+                license_files = ["LICENSE*"]
+                readme = {file = "README.rst"}
+                """),
+            "README.rst": "This is a ``README``",
+            "LICENSE.txt": "---- placeholder MIT license ----",
+            "src": {
+                "foo": {
+                    "__init__.py": "__version__ = '0.1'",
+                    "cli.py": "def main(): print('hello world')",
+                    "data.txt": "def main(): print('hello world')",
+                }
+            }
+        }
+        path.build(files)
+        build_backend = self.get_build_backend()
+        wheel_file = build_backend.build_wheel("temp")
+
+        with ZipFile(os.path.join("temp", wheel_file)) as zipfile:
+            wheel_contents = set(zipfile.namelist())
+            metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
+            license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
+            epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
+
+        assert "foo/data.txt" not in wheel_contents
+        assert wheel_contents == {
+            "foo/__init__.py",
+            "foo/cli.py",
+            "foo-0.1.dist-info/LICENSE.txt",
+            "foo-0.1.dist-info/METADATA",
+            "foo-0.1.dist-info/WHEEL",
+            "foo-0.1.dist-info/entry_points.txt",
+            "foo-0.1.dist-info/top_level.txt",
+            "foo-0.1.dist-info/RECORD",
+        }
+        assert license == "---- placeholder MIT license ----"
+        for line in (
+            "Summary: This is a Python package",
+            "License: MIT",
+            "Classifier: Intended Audience :: Developers",
+            "Requires: appdirs",
+            "Requires: tomli>=1; extra == 'all'",
+            "Requires: importlib; python_version == \"2.6\" and extra == 'all'",
+        ):
+            assert line in metadata
+
+        assert metadata.strip().endswith("This is a ``README``")
+        assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
+
     def test_build_sdist(self, build_backend):
         dist_dir = os.path.abspath('pip-sdist')
         os.makedirs(dist_dir)

From 11407927cb3af4ccada2bb48a4bdca210075b98f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 11 Dec 2021 22:15:27 +0000
Subject: [PATCH 43/55] Make include_package_data=True for `pyproject.toml`
 configs

There seems to be an opinion in the community that
`include_package_data=True` would be a better default and a
quality-of-life improvement.

To keep this change backward compatible, it relies on the fact that
`ini2toml` automatically backfills `include_package_data=False` for
converted `setup.cfg` files.
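
The expected behaviour, as a sketch (assuming a `pyproject.toml` that has a
`[project]` table and a `[tool.setuptools]` table which omits the flag):

    from setuptools.config.pyprojecttoml import read_configuration

    config = read_configuration("pyproject.toml", expand=False)
    # The default is backfilled while reading the file:
    assert config["tool"]["setuptools"]["include-package-data"] is True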
---
 setuptools/config/pyprojecttoml.py  | 6 ++++++
 setuptools/tests/test_build_meta.py | 6 +++++-
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index aafec14f46..6da7ccccc2 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -76,6 +76,12 @@ def read_configuration(filepath, expand=True, ignore_option_errors=False):
     if not asdict or not(project_table or tool_table):
         return {}  # User is not using pyproject to configure setuptools
 
+    # There is an overall sense in the community that making include_package_data=True
+    # the default would be an improvement.
+    # `ini2toml` backfills include_package_data=False when nothing is explicitly given,
+    # therefore setting a default here is backwards compatible.
+    tool_table.setdefault("include-package-data", True)
+
     with _ignore_errors(ignore_option_errors):
         validate(asdict, filepath)
 
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 8772ae0f80..1d69972176 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -269,6 +269,10 @@ def test_build_with_pyproject_metadata(self, tmpdir_cwd):
                 license_files = ["LICENSE*"]
                 readme = {file = "README.rst"}
                 """),
+            "MANIFEST.in": DALS("""
+                global-include *.py *.txt
+                global-exclude *.py[cod]
+                """),
             "README.rst": "This is a ``README``",
             "LICENSE.txt": "---- placeholder MIT license ----",
             "src": {
@@ -289,10 +293,10 @@ def test_build_with_pyproject_metadata(self, tmpdir_cwd):
             license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
             epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
 
-        assert "foo/data.txt" not in wheel_contents
         assert wheel_contents == {
             "foo/__init__.py",
             "foo/cli.py",
+            "foo/data.txt",  # include_package_data defaults to True
             "foo-0.1.dist-info/LICENSE.txt",
             "foo-0.1.dist-info/METADATA",
             "foo-0.1.dist-info/WHEEL",

From b63a56a1870e5a9361d9b0fa29e5879965588803 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 12 Dec 2021 00:34:08 +0000
Subject: [PATCH 44/55] Add news fragment

---
 changelog.d/2924.breaking.rst      |  2 ++
 changelog.d/2924.change.1.rst      | 14 ++++++++++++++
 changelog.d/2924.change.2.rst      |  5 +++++
 changelog.d/2924.change.3.rst      |  3 +++
 changelog.d/2924.deprecation.1.rst |  7 +++++++
 docs/conf.py                       |  4 ++++
 6 files changed, 35 insertions(+)
 create mode 100644 changelog.d/2924.breaking.rst
 create mode 100644 changelog.d/2924.change.1.rst
 create mode 100644 changelog.d/2924.change.2.rst
 create mode 100644 changelog.d/2924.change.3.rst
 create mode 100644 changelog.d/2924.deprecation.1.rst

diff --git a/changelog.d/2924.breaking.rst b/changelog.d/2924.breaking.rst
new file mode 100644
index 0000000000..323dfa289d
--- /dev/null
+++ b/changelog.d/2924.breaking.rst
@@ -0,0 +1,2 @@
+Removed public class ``SetupRequirementsError`` from the
+``setuptools.build_meta`` module.
diff --git a/changelog.d/2924.change.1.rst b/changelog.d/2924.change.1.rst
new file mode 100644
index 0000000000..ba8cbeef3e
--- /dev/null
+++ b/changelog.d/2924.change.1.rst
@@ -0,0 +1,14 @@
+Added **experimental** support for ``pyproject.toml`` configuration
+(as introduced by :pep:`621`). Configuration parameters not covered by
+standards are handled in the ``[tool.setuptools]`` sub-table.
+
+Existing ``setup.cfg`` configuration is now automatically converted into the
+``pyproject.toml`` equivalent before taking effect (as proposed in
+:issue:`1688`).
+During the transition period, if this conversion results in incompatible
+settings, setuptools will fall back to the previous behaviour.
+
+Please note that the legacy backend does not support ``pyproject.toml``
+configuration.
+
+-- by :user:`abravalheri`.
diff --git a/changelog.d/2924.change.2.rst b/changelog.d/2924.change.2.rst
new file mode 100644
index 0000000000..3cc29aee88
--- /dev/null
+++ b/changelog.d/2924.change.2.rst
@@ -0,0 +1,5 @@
+Added vendored dependencies for :pypi:`tomli`,
+:pypi:`validate-pyproject` and :pypi:`ini2toml`.
+
+These dependencies are used to read ``pyproject.toml``
+or automatically convert existing ``setup.cfg`` files.
diff --git a/changelog.d/2924.change.3.rst b/changelog.d/2924.change.3.rst
new file mode 100644
index 0000000000..3bfdb12cc0
--- /dev/null
+++ b/changelog.d/2924.change.3.rst
@@ -0,0 +1,3 @@
+Changed means of interaction between ``setuptools.build_meta`` and
+``setuptools.setup``. Instead of simply executing the script, the backend now
+relies on ``distutils.core.run_setup`` to obtain a distribution object.
diff --git a/changelog.d/2924.deprecation.1.rst b/changelog.d/2924.deprecation.1.rst
new file mode 100644
index 0000000000..b5c2f89127
--- /dev/null
+++ b/changelog.d/2924.deprecation.1.rst
@@ -0,0 +1,7 @@
+Deprecated ``setuptools.config.read_configuration``,
+``setuptools.config.parse_configuration`` and other functions or classes
+from ``setuptools.config``.
+
+This API is now replaced by ``setuptools.config.read`` and
+``setuptools.config.apply``, which *do not return* data in the same format
+(instead they are oriented toward the ``pyproject.toml`` format).
diff --git a/docs/conf.py b/docs/conf.py
index f6ccff0fcf..bf2eca0286 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -97,8 +97,12 @@
 # Add support for linking usernames
 github_url = 'https://github.com'
 github_sponsors_url = f'{github_url}/sponsors'
+repository = f'{github_url}/pypa/setuptools'
 extlinks = {
     'user': (f'{github_sponsors_url}/%s', '@'),  # noqa: WPS323
+    'issue': (f'{repository}/issues/%s', 'issue #%s'),  # noqa: WPS323
+    'pr': (f'{repository}/pull/%s', 'PR #%s'),  # noqa: WPS323
+    'pypi': ('https://pypi.org/project/%s', '%s'),  # noqa: WPS323
 }
 extensions += ['sphinx.ext.extlinks']
 

From 077c2be8e097b84d9a1095e3b73761795ef19c4d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 12 Dec 2021 00:39:10 +0000
Subject: [PATCH 45/55] Add 'dist_info' to the bootstrap entry-points

---
 bootstrap.egg-info/entry_points.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bootstrap.egg-info/entry_points.txt b/bootstrap.egg-info/entry_points.txt
index c00d1d3a02..fc4e17b2a8 100644
--- a/bootstrap.egg-info/entry_points.txt
+++ b/bootstrap.egg-info/entry_points.txt
@@ -2,6 +2,7 @@
 egg_info = setuptools.command.egg_info:egg_info
 build_py = setuptools.command.build_py:build_py
 sdist = setuptools.command.sdist:sdist
+dist_info = setuptools.command.dist_info:dist_info
 
 [distutils.setup_keywords]
 include_package_data = setuptools.dist:assert_bool

From 37586339a8e822a2585d62aa9f09a879cca2410d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 12 Dec 2021 00:43:56 +0000
Subject: [PATCH 46/55] Add 'develop' to the bootstrap entry-points

---
 bootstrap.egg-info/entry_points.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bootstrap.egg-info/entry_points.txt b/bootstrap.egg-info/entry_points.txt
index fc4e17b2a8..e7ca4b7b67 100644
--- a/bootstrap.egg-info/entry_points.txt
+++ b/bootstrap.egg-info/entry_points.txt
@@ -3,6 +3,7 @@ egg_info = setuptools.command.egg_info:egg_info
 build_py = setuptools.command.build_py:build_py
 sdist = setuptools.command.sdist:sdist
 dist_info = setuptools.command.dist_info:dist_info
+develop = setuptools.command.develop:develop
 
 [distutils.setup_keywords]
 include_package_data = setuptools.dist:assert_bool

From 67f7ef9b6805198353fb9ec93486da1c853b0fda Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 Dec 2021 15:07:30 +0000
Subject: [PATCH 47/55] Use finalize_requires to process optional-dependencies
 for editable installs

---
 setuptools/metadata.py | 58 +++++++++++++++++++++++++++++++++++++++---
 1 file changed, 55 insertions(+), 3 deletions(-)

diff --git a/setuptools/metadata.py b/setuptools/metadata.py
index 23e9bee0ac..8fdba1f1a5 100644
--- a/setuptools/metadata.py
+++ b/setuptools/metadata.py
@@ -5,10 +5,13 @@
 .. _core metadata: https://packaging.python.org/en/latest/specifications/core-metadata
 """
 import os
+import re
+import string
 from email.headerregistry import Address
 from functools import partial
 from itertools import chain
-from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Set, Union
+from typing import (TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Sequence, Set,
+                    Union)
 
 from setuptools.extern.packaging import version
 from setuptools.extern.packaging.requirements import Requirement
@@ -164,12 +167,14 @@ def _add_extra(dep: str, extra_name: str) -> str:
 
 
 def _optional_dependencies(val: dict, dest: dict, root_dir: _Path):
-    extra = set(dest.get("provides_extra", []))
+    from setuptools.extern.ordered_set import OrderedSet
+
+    extra = OrderedSet(dest.get("provides_extra", []))
     for key, deps in val.items():
         extra.add(key)
         cond_deps = [_add_extra(x, key) for x in deps]
         _dependencies(cond_deps, dest, root_dir)
-    dest["provides_extra"] = list(extra)
+    dest["provides_extra"] = extra
 
 
 PYPROJECT_CORRESPONDENCE: Dict[str, _CorrespFn] = {
@@ -301,6 +306,53 @@ def apply(metadata: dict, dist: "Distribution", _source: str = "pyproject.toml")
         else:
             setattr(metadata_obj, norm_key, value)
 
+    _ensure_editable_install_requirements(dist)
+
+
+def _ensure_editable_install_requirements(dist: "Distribution"):
+    """Ensure requirements work for editable installs"""
+    reqs, extras = _separate_extras(dist.metadata.requires or [])
+    install_reqs = getattr(dist, 'install_requires', ())
+    extra_reqs = getattr(dist, 'extras_require', {})
+    dist.extras_require = extra_reqs
+
+    # The egg-info writer needs `install_requires` to be set to generate `requires.txt`
+    dist.install_requires = _dedup_deps(chain(install_reqs, reqs))
+    for extra in {*extras.keys(), *extra_reqs.keys()}:
+        deps = _dedup_deps(chain(extra_reqs.get(extra, []), extras.get(extra, [])))
+        dist.extras_require[extra] = deps
+
+    dist._finalize_requires()
+    # ^-- markers need to go to the headers section of `*.egg-info/requires.txt`
+
+
+def _dedup_deps(deps: Iterable[str]) -> List[str]:
+    from pkg_resources import parse_requirements
+
+    return list({r.key: str(r) for r in parse_requirements(deps)}.values())
+
+
+def _separate_extras(requires: Sequence[str]):
+    # NOTE: this function only handles 'extra == ...' (enough for _add_extra)
+    base = []
+    extras = {}
+    for dep in requires:
+        matches = re.search(r"(extra\s*==\s*(['\"])(.*?)\2)", dep, re.M)
+        if matches:
+            extra, extra_marker = matches[3], matches[1]
+            extras.setdefault(extra, []).append(_remove_extra(dep, extra_marker))
+        else:
+            base.append(dep)
+
+    return base, extras
+
+
+def _remove_extra(dep: str, extra: str):
+    dep = dep.replace(extra, "")
+    # Fix operations
+    dep = re.sub(r"\b(and|or)\s+(and|or)\b", r"\2", dep)
+    return re.sub(r"\b(and|or)\s*$", "", dep.strip()).strip(string.whitespace + ";")
+
 
 def compare(metadata1: dict, metadata2: dict) -> Union[bool, int]:
     """Compare ``metadata1`` and ``metadata2`` and return:

From 574cb204de1b9a22f84141679e190e2c987f7362 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 Dec 2021 19:15:00 +0000
Subject: [PATCH 48/55] Fix test_bdist_deprecations

In `_distutils/dist/_parse_command_opts` an `issubclass` check is done
to verify that the given command is a subclass of `distutils.cmd.Command`.
This check will fail if the command is mocked (the mock object is not a
class and will raise a TypeError).
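
The failure mode can be reproduced directly (a quick illustration):

    from unittest import mock
    import distutils.cmd

    try:
        # A Mock instance is not a class, so issubclass() cannot inspect it
        issubclass(mock.Mock(), distutils.cmd.Command)
    except TypeError as ex:
        print(ex)  # issubclass() arg 1 must be a class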
---
 setuptools/tests/test_bdist_deprecations.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py
index 28482fd027..e550265160 100644
--- a/setuptools/tests/test_bdist_deprecations.py
+++ b/setuptools/tests/test_bdist_deprecations.py
@@ -1,7 +1,7 @@
 """develop tests
 """
-import mock
 import sys
+from unittest import mock
 
 import pytest
 
@@ -10,8 +10,8 @@
 
 
 @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
-@mock.patch('distutils.command.bdist_rpm.bdist_rpm')
-def test_bdist_rpm_warning(distutils_cmd):
+@mock.patch('distutils.command.bdist_rpm.bdist_rpm.run')
+def test_bdist_rpm_warning(distutils_cmd_run):
     dist = Distribution(
         dict(
             script_name='setup.py',
@@ -24,4 +24,4 @@ def test_bdist_rpm_warning(distutils_cmd):
     with pytest.warns(SetuptoolsDeprecationWarning):
         dist.run_commands()
 
-    distutils_cmd.run.assert_called_once()
+    distutils_cmd_run.assert_called_once()

From 30cdd31dc73204d44e1ad371c05b7892a4f0854b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 Dec 2021 20:04:36 +0000
Subject: [PATCH 49/55] Use uuid4 to improve tempfile parallel creation

---
 setuptools/build_meta.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 3c884eb775..bcfc1611ad 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -34,6 +34,7 @@
 import tempfile
 import warnings
 from itertools import chain
+from uuid import uuid4
 
 import setuptools
 import distutils
@@ -100,7 +101,8 @@ def _patch_distutils_exec():
 
     def _exec(code, global_vars):
         try:
-            _, tmp = tempfile.mkstemp(suffix="setup.py")
+            fid, tmp = tempfile.mkstemp(suffix=f"{uuid4()}-setup.py", text=False)
+            os.close(fid)  # close the low-level descriptor; only the name is used
             with open(tmp, "wb") as f:
                 f.write(code)
             with tokenize.open(tmp) as f:
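
The naming scheme, in isolation (a sketch): every temporary setup script gets
a unique uuid4-based suffix, so concurrent builds cannot collide on the same
file name:

    import os
    import tempfile
    from uuid import uuid4

    fid, tmp = tempfile.mkstemp(suffix=f"{uuid4()}-setup.py")
    os.close(fid)  # only the unique file name is needed, not the descriptor
    print(tmp)     # e.g. /tmp/tmpXXXXXXXX<uuid>-setup.py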

From 2c6b1444562cc44752a7114c4da21573095a9e06 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 Dec 2021 21:40:32 +0000
Subject: [PATCH 50/55] Adapt tests and code to run on Windows

---
 setuptools/config/expand.py                   |  7 ++++++-
 setuptools/tests/config/test_expand.py        | 14 +++++++-------
 setuptools/tests/config/test_pyprojecttoml.py |  3 ++-
 setuptools/tests/test_options.py              |  5 +++--
 4 files changed, 18 insertions(+), 11 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 06e52e458c..f40c625b2e 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -23,6 +23,7 @@
 from glob import iglob
 from itertools import chain
 from configparser import ConfigParser
+from pathlib import Path
 
 from distutils.errors import DistutilsOptionError
 
@@ -248,7 +249,11 @@ def find_packages(*, namespaces=False, root_dir=None, **kwargs):
 
 def _nest_url_style_path(parent, path):
     path = parent if path == "." else os.path.join(parent, path)
-    return path.replace(os.sep, "/").rstrip("/")
+    return _url_style_path(path)
+
+
+def _url_style_path(path):
+    return "/".join(Path(os.path.normpath(path)).parts) or "."
 
 
 def version(value):
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
index 11dc74aa3d..0e4c3e7dfd 100644
--- a/setuptools/tests/config/test_expand.py
+++ b/setuptools/tests/config/test_expand.py
@@ -16,13 +16,13 @@ def write_files(files, root_dir):
 
 def test_glob_relative(tmp_path):
     files = {
-        "dir1/dir2/dir3/file1.txt",
-        "dir1/dir2/file2.txt",
-        "dir1/file3.txt",
-        "a.ini",
-        "b.ini",
-        "dir1/c.ini",
-        "dir1/dir2/a.ini",
+        os.path.join("dir1", "dir2", "dir3", "file1.txt"),
+        os.path.join("dir1", "dir2", "file2.txt"),
+        os.path.join("dir1", "file3.txt"),
+        os.path.join("a.ini"),
+        os.path.join("b.ini"),
+        os.path.join("dir1", "c.ini"),
+        os.path.join("dir1", "dir2", "a.ini"),
     }
 
     write_files({k: "" for k in files}, tmp_path)
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index 088ca1a72f..1a3779d4ae 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -1,3 +1,4 @@
+import os
 from configparser import ConfigParser
 
 from setuptools.config.pyprojecttoml import read_configuration, expand_configuration
@@ -101,7 +102,7 @@ def test_read_configuration(tmp_path):
     assert "" in expanded["tool"]["setuptools"]["package-data"]
     assert "*" not in expanded["tool"]["setuptools"]["package-data"]
     assert expanded["tool"]["setuptools"]["data-files"] == [
-        ("data", ["files/file.txt"])
+        ("data", [os.path.join("files", "file.txt")])
     ]
 
 
diff --git a/setuptools/tests/test_options.py b/setuptools/tests/test_options.py
index 9c8d49956b..8086dc39f6 100644
--- a/setuptools/tests/test_options.py
+++ b/setuptools/tests/test_options.py
@@ -1,3 +1,4 @@
+import os
 from configparser import ConfigParser
 from unittest.mock import Mock
 
@@ -106,7 +107,7 @@ def _project_files(root_dir):
     "package_dir": {"": "src"},
     "packages": ["pkg", "other", "other.nested"],
     "package_data": {"": ["*.txt"]},
-    "data_files": [("data", ["files/file.txt"])],
+    "data_files": [("data", [os.path.join("files", "file.txt")])],
     "cmdclass": {"sdist": Mock(__qualname__="pkg.mod.CustomSdist")},
     "entry_points": {"console_scripts": ["exec = pkg.__main__:exec"]},
     "command_options": {
@@ -134,7 +135,7 @@ def test_apply(tmp_path):
     options.apply(opts, dist)
     assert dist.zip_safe is True
     assert dist.include_package_data is True
-    assert set(dist.data_files[0][1]) == {"files/file.txt"}
+    assert set(dist.data_files[0][1]) == {os.path.join("files", "file.txt")}
     cls = dist.cmdclass["sdist"]
     assert f"{cls.__module__}.{cls.__name__}" == "pkg.mod.CustomSdist"
     assert set(dist.entry_points["console_scripts"]) == {"exec = pkg.__main__:exec"}

From f765076a84d3f61c345bd3efe82fcfec5ec862d9 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 21 Dec 2021 03:09:37 +0000
Subject: [PATCH 51/55] Add 'install_scripts' to the bootstrap entry-points

---
 bootstrap.egg-info/entry_points.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bootstrap.egg-info/entry_points.txt b/bootstrap.egg-info/entry_points.txt
index e7ca4b7b67..e5a2523436 100644
--- a/bootstrap.egg-info/entry_points.txt
+++ b/bootstrap.egg-info/entry_points.txt
@@ -4,6 +4,7 @@ build_py = setuptools.command.build_py:build_py
 sdist = setuptools.command.sdist:sdist
 dist_info = setuptools.command.dist_info:dist_info
 develop = setuptools.command.develop:develop
+install_scripts = setuptools.command.install_scripts:install_scripts
 
 [distutils.setup_keywords]
 include_package_data = setuptools.dist:assert_bool

From 31793491d8af54ccba996b3569b6f63d37391190 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 21 Dec 2021 03:43:37 +0000
Subject: [PATCH 52/55] Add 'install_egg_info' and 'bdist_egg' to the bootstrap
 entry-points

---
 bootstrap.egg-info/entry_points.txt | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/bootstrap.egg-info/entry_points.txt b/bootstrap.egg-info/entry_points.txt
index e5a2523436..98414c7c1a 100644
--- a/bootstrap.egg-info/entry_points.txt
+++ b/bootstrap.egg-info/entry_points.txt
@@ -5,6 +5,8 @@ sdist = setuptools.command.sdist:sdist
 dist_info = setuptools.command.dist_info:dist_info
 develop = setuptools.command.develop:develop
 install_scripts = setuptools.command.install_scripts:install_scripts
+bdist_egg = setuptools.command.bdist_egg:bdist_egg
+install_egg_info = setuptools.command.install_egg_info:install_egg_info
 
 [distutils.setup_keywords]
 include_package_data = setuptools.dist:assert_bool

From fb3dc26a4b08aa34a5e62d4ff89485537d244d4e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 21 Dec 2021 15:43:39 +0000
Subject: [PATCH 53/55] [WIP] Attempt to debug failures with cygwin CI

For some reason it is not easy to replicate the cygwin failures pointed out
by the CI in a local environment, so this commit just adds some extra
debugging statements to provide more information about the problem.
---
 setuptools/config/expand.py | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index f40c625b2e..5de6726023 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -243,17 +243,29 @@ def find_packages(*, namespaces=False, root_dir=None, **kwargs):
     where = kwargs.pop('where', ['.'])
     if isinstance(where, str):
         where = [where]
-    target = (_nest_url_style_path(root_dir, path) for path in where)
+    target = [_nest_url_style_path(root_dir, path) for path in where]
+    from distutils import log
+    log.warn(f"find_packages cwd={os.getcwd()!r} root_dir={root_dir!r}")
+    for p in target:
+        log.warn(f"find_packages where={p!r} {kwargs!r}")
     return list(chain_iter(PackageFinder.find(x, **kwargs) for x in target))
 
 
 def _nest_url_style_path(parent, path):
+    from distutils import log
+
+    log.warn(f"_nest_url_style_path parent={parent!r} path={path!r}")
+
     path = parent if path == "." else os.path.join(parent, path)
     return _url_style_path(path)
 
 
 def _url_style_path(path):
-    return "/".join(Path(os.path.normpath(path)).parts) or "."
+    from distutils import log
+
+    parts = Path(os.path.normpath(path)).parts
+    log.warn(f"_url_style_path parts={parts!r}")
+    return "/".join(parts) or "."
 
 
 def version(value):

From 72904ab6b77c97628d8a3089f356b5209fae8d04 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 21 Dec 2021 16:26:06 +0000
Subject: [PATCH 54/55] [WIP] Attempt to fix problem with cygwin

---
 setuptools/config/expand.py | 18 +++++-------------
 1 file changed, 5 insertions(+), 13 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 5de6726023..9885e50136 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -23,7 +23,6 @@
 from glob import iglob
 from itertools import chain
 from configparser import ConfigParser
-from pathlib import Path
 
 from distutils.errors import DistutilsOptionError
 
@@ -243,7 +242,7 @@ def find_packages(*, namespaces=False, root_dir=None, **kwargs):
     where = kwargs.pop('where', ['.'])
     if isinstance(where, str):
         where = [where]
-    target = [_nest_url_style_path(root_dir, path) for path in where]
+    target = [_nest_path(root_dir, path) for path in where]
     from distutils import log
     log.warn(f"find_packages cwd={os.getcwd()!r} root_dir={root_dir!r}")
     for p in target:
@@ -251,21 +250,14 @@ def find_packages(*, namespaces=False, root_dir=None, **kwargs):
     return list(chain_iter(PackageFinder.find(x, **kwargs) for x in target))
 
 
-def _nest_url_style_path(parent, path):
+def _nest_path(parent, path):
     from distutils import log
 
-    log.warn(f"_nest_url_style_path parent={parent!r} path={path!r}")
+    log.warn(f"_nest_path parent={parent!r} path={path!r}"
+             f" norm={os.path.normpath(path)!r}")
 
     path = parent if path == "." else os.path.join(parent, path)
-    return _url_style_path(path)
-
-
-def _url_style_path(path):
-    from distutils import log
-
-    parts = Path(os.path.normpath(path)).parts
-    log.warn(f"_url_style_path parts={parts!r}")
-    return "/".join(parts) or "."
+    return os.path.normpath(path)
 
 
 def version(value):
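
The simplified helper behaves as in this standalone sketch: `where` entries
are nested under `root_dir` and normalised with the native separator, instead
of being converted to URL-style paths:

    import os

    def nest_path(parent, path):  # mirrors `_nest_path` above
        path = parent if path == "." else os.path.join(parent, path)
        return os.path.normpath(path)

    print(nest_path("/project", "."))    # /project
    print(nest_path("/project", "src"))  # /project/src (os.sep on Windows)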

From bfcafd52162df1a53b867c58858a13e9091d0aab Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 21 Dec 2021 17:53:46 +0000
Subject: [PATCH 55/55] Manually add missing license files for vendored
 projects

As pointed out in
https://github.com/pypa/setuptools/pull/2924#issuecomment-998968591,
license files are likely to be required for vendored packages.
+This change manually adds the missing license files for the dependencies
+introduced in PR 2924.
---
 setuptools/_vendor/ini2toml/LICENSE.txt | 362 ++++++++++++++++++++++++
 setuptools/_vendor/tomli/LICENSE        |  21 ++
 2 files changed, 383 insertions(+)
 create mode 100644 setuptools/_vendor/ini2toml/LICENSE.txt
 create mode 100644 setuptools/_vendor/tomli/LICENSE

diff --git a/setuptools/_vendor/ini2toml/LICENSE.txt b/setuptools/_vendor/ini2toml/LICENSE.txt
new file mode 100644
index 0000000000..7bad356140
--- /dev/null
+++ b/setuptools/_vendor/ini2toml/LICENSE.txt
@@ -0,0 +1,362 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+     means each individual or legal entity that creates, contributes to the
+     creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+     means the combination of the Contributions of others (if any) used by a
+     Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+     means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+     means Source Code Form to which the initial Contributor has attached the
+     notice in Exhibit A, the Executable Form of such Source Code Form, and
+     Modifications of such Source Code Form, in each case including portions
+     thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+     means
+
+     a. that the initial Contributor has attached the notice described in
+        Exhibit B to the Covered Software; or
+
+     b. that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the terms of
+        a Secondary License.
+
+1.6. "Executable Form"
+
+     means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+     means a work that combines Covered Software with other material, in a
+     separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+     means this document.
+
+1.9. "Licensable"
+
+     means having the right to grant, to the maximum extent possible, whether
+     at the time of the initial grant or subsequently, any and all of the
+     rights conveyed by this License.
+
+1.10. "Modifications"
+
+     means any of the following:
+
+     a. any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered Software; or
+
+     b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+      means any patent claim(s), including without limitation, method,
+      process, and apparatus claims, in any patent Licensable by such
+      Contributor that would be infringed, but for the grant of the License,
+      by the making, using, selling, offering for sale, having made, import,
+      or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+      means either the GNU General Public License, Version 2.0, the GNU Lesser
+      General Public License, Version 2.1, the GNU Affero General Public
+      License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+      means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+      means an individual or a legal entity exercising rights under this
+      License. For legal entities, "You" includes any entity that controls, is
+      controlled by, or is under common control with You. For purposes of this
+      definition, "control" means (a) the power, direct or indirect, to cause
+      the direction or management of such entity, whether by contract or
+      otherwise, or (b) ownership of more than fifty percent (50%) of the
+      outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+     Each Contributor hereby grants You a world-wide, royalty-free,
+     non-exclusive license:
+
+     a. under intellectual property rights (other than patent or trademark)
+        Licensable by such Contributor to use, reproduce, make available,
+        modify, display, perform, distribute, and otherwise exploit its
+        Contributions, either on an unmodified basis, with Modifications, or
+        as part of a Larger Work; and
+
+     b. under Patent Claims of such Contributor to make, use, sell, offer for
+        sale, have made, import, and otherwise transfer either its
+        Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+     The licenses granted in Section 2.1 with respect to any Contribution
+     become effective for each Contribution on the date the Contributor first
+     distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+     The licenses granted in this Section 2 are the only rights granted under
+     this License. No additional rights or licenses will be implied from the
+     distribution or licensing of Covered Software under this License.
+     Notwithstanding Section 2.1(b) above, no patent license is granted by a
+     Contributor:
+
+     a. for any code that a Contributor has removed from Covered Software; or
+
+     b. for infringements caused by: (i) Your and any other third party's
+        modifications of Covered Software, or (ii) the combination of its
+        Contributions with other software (except as part of its Contributor
+        Version); or
+
+     c. under Patent Claims infringed by Covered Software in the absence of
+        its Contributions.
+
+     This License does not grant any rights in the trademarks, service marks,
+     or logos of any Contributor (except as may be necessary to comply with
+     the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+     No Contributor makes additional grants as a result of Your choice to
+     distribute the Covered Software under a subsequent version of this
+     License (see Section 10.2) or under the terms of a Secondary License (if
+     permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+     Each Contributor represents that the Contributor believes its
+     Contributions are its original creation(s) or it has sufficient rights to
+     grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+     This License is not intended to limit any rights You have under
+     applicable copyright doctrines of fair use, fair dealing, or other
+     equivalents.
+
+2.7. Conditions
+
+     Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+     Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+     All distribution of Covered Software in Source Code Form, including any
+     Modifications that You create or to which You contribute, must be under
+     the terms of this License. You must inform recipients that the Source
+     Code Form of the Covered Software is governed by the terms of this
+     License, and how they can obtain a copy of this License. You may not
+     attempt to alter or restrict the recipients' rights in the Source Code
+     Form.
+
+3.2. Distribution of Executable Form
+
+     If You distribute Covered Software in Executable Form then:
+
+     a. such Covered Software must also be made available in Source Code Form,
+        as described in Section 3.1, and You must inform recipients of the
+        Executable Form how they can obtain a copy of such Source Code Form by
+        reasonable means in a timely manner, at a charge no more than the cost
+        of distribution to the recipient; and
+
+     b. You may distribute such Executable Form under the terms of this
+        License, or sublicense it under different terms, provided that the
+        license for the Executable Form does not attempt to limit or alter the
+        recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+     You may create and distribute a Larger Work under terms of Your choice,
+     provided that You also comply with the requirements of this License for
+     the Covered Software. If the Larger Work is a combination of Covered
+     Software with a work governed by one or more Secondary Licenses, and the
+     Covered Software is not Incompatible With Secondary Licenses, this
+     License permits You to additionally distribute such Covered Software
+     under the terms of such Secondary License(s), so that the recipient of
+     the Larger Work may, at their option, further distribute the Covered
+     Software under the terms of either this License or such Secondary
+     License(s).
+
+3.4. Notices
+
+     You may not remove or alter the substance of any license notices
+     (including copyright notices, patent notices, disclaimers of warranty, or
+     limitations of liability) contained within the Source Code Form of the
+     Covered Software, except that You may alter any license notices to the
+     extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+     You may choose to offer, and to charge a fee for, warranty, support,
+     indemnity or liability obligations to one or more recipients of Covered
+     Software. However, You may do so only on Your own behalf, and not on
+     behalf of any Contributor. You must make it absolutely clear that any
+     such warranty, support, indemnity, or liability obligation is offered by
+     You alone, and You hereby agree to indemnify every Contributor for any
+     liability incurred by such Contributor as a result of warranty, support,
+     indemnity or liability terms You offer. You may include additional
+     disclaimers of warranty and limitations of liability specific to any
+     jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+   If it is impossible for You to comply with any of the terms of this License
+   with respect to some or all of the Covered Software due to statute,
+   judicial order, or regulation then You must: (a) comply with the terms of
+   this License to the maximum extent possible; and (b) describe the
+   limitations and the code they affect. Such description must be placed in a
+   text file included with all distributions of the Covered Software under
+   this License. Except to the extent prohibited by statute or regulation,
+   such description must be sufficiently detailed for a recipient of ordinary
+   skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+     fail to comply with any of its terms. However, if You become compliant,
+     then the rights granted under this License from a particular Contributor
+     are reinstated (a) provisionally, unless and until such Contributor
+     explicitly and finally terminates Your grants, and (b) on an ongoing
+     basis, if such Contributor fails to notify You of the non-compliance by
+     some reasonable means prior to 60 days after You have come back into
+     compliance. Moreover, Your grants from a particular Contributor are
+     reinstated on an ongoing basis if such Contributor notifies You of the
+     non-compliance by some reasonable means, this is the first time You have
+     received notice of non-compliance with this License from such
+     Contributor, and You become compliant prior to 30 days after Your receipt
+     of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+     infringement claim (excluding declaratory judgment actions,
+     counter-claims, and cross-claims) alleging that a Contributor Version
+     directly or indirectly infringes any patent, then the rights granted to
+     You by any and all Contributors for the Covered Software under Section
+     2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+     license agreements (excluding distributors and resellers) which have been
+     validly granted by You or Your distributors under this License prior to
+     termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+   Covered Software is provided under this License on an "as is" basis,
+   without warranty of any kind, either expressed, implied, or statutory,
+   including, without limitation, warranties that the Covered Software is free
+   of defects, merchantable, fit for a particular purpose or non-infringing.
+   The entire risk as to the quality and performance of the Covered Software
+   is with You. Should any Covered Software prove defective in any respect,
+   You (not any Contributor) assume the cost of any necessary servicing,
+   repair, or correction. This disclaimer of warranty constitutes an essential
+   part of this License. No use of  any Covered Software is authorized under
+   this License except under this disclaimer.
+
+7. Limitation of Liability
+
+   Under no circumstances and under no legal theory, whether tort (including
+   negligence), contract, or otherwise, shall any Contributor, or anyone who
+   distributes Covered Software as permitted above, be liable to You for any
+   direct, indirect, special, incidental, or consequential damages of any
+   character including, without limitation, damages for lost profits, loss of
+   goodwill, work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses, even if such party shall have been
+   informed of the possibility of such damages. This limitation of liability
+   shall not apply to liability for death or personal injury resulting from
+   such party's negligence to the extent applicable law prohibits such
+   limitation. Some jurisdictions do not allow the exclusion or limitation of
+   incidental or consequential damages, so this exclusion and limitation may
+   not apply to You.
+
+8. Litigation
+
+   Any litigation relating to this License may be brought only in the courts
+   of a jurisdiction where the defendant maintains its principal place of
+   business and such litigation shall be governed by laws of that
+   jurisdiction, without reference to its conflict-of-law provisions. Nothing
+   in this Section shall prevent a party's ability to bring cross-claims or
+   counter-claims.
+
+9. Miscellaneous
+
+   This License represents the complete agreement concerning the subject
+   matter hereof. If any provision of this License is held to be
+   unenforceable, such provision shall be reformed only to the extent
+   necessary to make it enforceable. Any law or regulation which provides that
+   the language of a contract shall be construed against the drafter shall not
+   be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+      Mozilla Foundation is the license steward. Except as provided in Section
+      10.3, no one other than the license steward has the right to modify or
+      publish new versions of this License. Each version will be given a
+      distinguishing version number.
+
+10.2. Effect of New Versions
+
+      You may distribute the Covered Software under the terms of the version
+      of the License under which You originally received the Covered Software,
+      or under the terms of any subsequent version published by the license
+      steward.
+
+10.3. Modified Versions
+
+      If you create software not governed by this License, and you want to
+      create a new license for such software, you may create and use a
+      modified version of this License if you rename the license and remove
+      any references to the name of the license steward (except to note that
+      such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+      Licenses If You choose to distribute Source Code Form that is
+      Incompatible With Secondary Licenses under the terms of this version of
+      the License, the notice described in Exhibit B of this License must be
+      attached.
+
+Exhibit A - Source Code Form License Notice
+
+      This Source Code Form is subject to the
+      terms of the Mozilla Public License, v.
+      2.0. If a copy of the MPL was not
+      distributed with this file, You can
+      obtain one at
+      https://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+      This Source Code Form is "Incompatible
+      With Secondary Licenses", as defined by
+      the Mozilla Public License, v. 2.0.
diff --git a/setuptools/_vendor/tomli/LICENSE b/setuptools/_vendor/tomli/LICENSE
new file mode 100644
index 0000000000..e859590f88
--- /dev/null
+++ b/setuptools/_vendor/tomli/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2021 Taneli Hukkinen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.