diff --git a/.github/workflows/base.yml b/.github/workflows/base.yml index e8240bc..fcbbbea 100644 --- a/.github/workflows/base.yml +++ b/.github/workflows/base.yml @@ -12,24 +12,32 @@ on: pull_request: branches: - main + +defaults: + run: + shell: bash -l {0} + jobs: # pre-job to read nox tests matrix - see https://stackoverflow.com/q/66747359/7262247 list_nox_test_sessions: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v1 + - name: Checkout + uses: actions/checkout@v4.1.1 + + - name: Install python 3.9 + uses: actions/setup-python@v5.0.0 with: - python-version: 3.7 + python-version: 3.9 architecture: x64 - name: Install noxfile requirements - shell: bash -l {0} run: pip install -r noxfile-requirements.txt - - name: List 'tests' nox sessions + - name: List 'tests' nox sessions and required python versions id: set-matrix - run: echo "::set-output name=matrix::$(nox -s gha_list -- tests)" + run: echo "::set-output name=matrix::$(nox -s gha_list -- -s tests -v)" + outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} # save nox sessions list to outputs @@ -38,45 +46,72 @@ jobs: strategy: fail-fast: false matrix: + # see https://github.com/actions/setup-python/issues/544 + # os: [ ubuntu-20.04 ] os: [ ubuntu-latest ] # , macos-latest, windows-latest] # all nox sessions: manually > dynamically from previous job # nox_session: ["tests-2.7", "tests-3.7"] nox_session: ${{ fromJson(needs.list_nox_test_sessions.outputs.matrix) }} - name: ${{ matrix.os }} ${{ matrix.nox_session }} # ${{ matrix.name_suffix }} + name: ${{ matrix.os }} ${{ matrix.nox_session.python }} ${{ matrix.nox_session.session }} # ${{ matrix.name_suffix }} runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2 + - name: Checkout + uses: actions/checkout@v4.1.1 + + - name: Install python ${{ matrix.nox_session.python }} for tests + uses: MatteoH2O1999/setup-python@v3.0.0 # actions/setup-python@v5.0.0 + id: set-py + with: + python-version: ${{ matrix.nox_session.python }} + architecture: x64 + allow-build: info + cache-build: true - # Conda install - - name: Install conda v3.7 - uses: conda-incubator/setup-miniconda@v2 + - name: Install python 3.12 for nox + uses: actions/setup-python@v5.0.0 with: - # auto-update-conda: true - python-version: 3.7 - activate-environment: noxenv - - run: conda info - shell: bash -l {0} # so that conda works - - run: conda list - shell: bash -l {0} # so that conda works - - # Nox install + run + python-version: 3.12 + architecture: x64 + + - name: pin virtualenv==20.15.1 in old python versions + # pinned to keep compatibility with old versions, see https://github.com/MatteoH2O1999/setup-python/issues/28#issuecomment-1745613621 + if: contains(fromJson('["2.7", "3.5", "3.6"]'), matrix.nox_session.python ) + run: sed -i "s/virtualenv/virtualenv==20.15.1/g" noxfile-requirements.txt + - name: Install noxfile requirements - shell: bash -l {0} # so that conda works run: pip install -r noxfile-requirements.txt - - run: conda list - shell: bash -l {0} # so that conda works - - run: nox -s "${{ matrix.nox_session }}" - shell: bash -l {0} # so that conda works + + - name: Run nox session ${{ matrix.nox_session.session }} + run: nox -s "${{ matrix.nox_session.session }}" -v # Share ./docs/reports so that they can be deployed with doc in next job - name: Share reports with other jobs - # if: matrix.nox_session == '...': not needed, if empty wont be shared - uses: actions/upload-artifact@master + # if: matrix.nox_session == '...': not needed, if empty won't be 
shared + uses: actions/upload-artifact@v4.3.0 with: name: reports_dir path: ./docs/reports + build_doc: + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + steps: + - name: Checkout + uses: actions/checkout@v4.1.1 + + - name: Install python 3.9 for nox + uses: actions/setup-python@v5.0.0 + with: + python-version: 3.9 + architecture: x64 + + - name: Install noxfile requirements + run: pip install -r noxfile-requirements.txt + + - name: Build the doc including example gallery + run: nox -s docs -- build + publish_release: needs: run_all_tests runs-on: ubuntu-latest @@ -87,39 +122,30 @@ jobs: GITHUB_CONTEXT: ${{ toJSON(github) }} run: echo "$GITHUB_CONTEXT" - - uses: actions/checkout@v2 + - name: Checkout with no depth + uses: actions/checkout@v4.1.1 with: fetch-depth: 0 # so that gh-deploy works + - name: Install python 3.9 for nox + uses: actions/setup-python@v5.0.0 + with: + python-version: 3.9 + architecture: x64 + # 1) retrieve the reports generated previously - name: Retrieve reports - uses: actions/download-artifact@master + uses: actions/download-artifact@v4.1.1 with: name: reports_dir path: ./docs/reports - # Conda install - - name: Install conda v3.7 - uses: conda-incubator/setup-miniconda@v2 - with: - # auto-update-conda: true - python-version: 3.7 - activate-environment: noxenv - - run: conda info - shell: bash -l {0} # so that conda works - - run: conda list - shell: bash -l {0} # so that conda works - # Nox install - name: Install noxfile requirements - shell: bash -l {0} # so that conda works run: pip install -r noxfile-requirements.txt - - run: conda list - shell: bash -l {0} # so that conda works # 5) Run the flake8 report and badge - name: Run flake8 analysis and generate corresponding badge - shell: bash -l {0} # so that conda works run: nox -s flake8 # -------------- only on Ubuntu + MAIN PUSH (no pull request, no tag) ----------- @@ -127,7 +153,6 @@ jobs: # 5) Publish the doc and test reports - name: \[not on TAG\] Publish documentation, tests and coverage reports if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads') # startsWith(matrix.os,'ubuntu') - shell: bash -l {0} # so that conda works run: nox -s publish # 6) Publish coverage report @@ -142,7 +167,7 @@ jobs: EOF - name: \[not on TAG\] Publish coverage report if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads') - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v4.0.1 with: files: ./docs/reports/coverage/coverage.xml @@ -151,13 +176,12 @@ jobs: # 7) Create github release and build the wheel - name: \[TAG only\] Build wheel and create github release if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - shell: bash -l {0} # so that conda works run: nox -s release -- ${{ secrets.GITHUB_TOKEN }} # 8) Publish the wheel on PyPi - name: \[TAG only\] Deploy on PyPi if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@v1.8.11 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.gitignore b/.gitignore index 3f6fee9..aae5488 100644 --- a/.gitignore +++ b/.gitignore @@ -50,7 +50,7 @@ coverage.xml *.py,cover .hypothesis/ .pytest_cache/ -makefun/_version.py +src/makefun/_version.py # Translations *.mo diff --git a/ci_tools/nox_utils.py b/ci_tools/nox_utils.py index 4cb8d7d..b7aebfa 100644 --- a/ci_tools/nox_utils.py +++ b/ci_tools/nox_utils.py @@ -1,42 +1,48 @@ -from itertools import product - import asyncio -from 
collections import namedtuple -from inspect import signature, isfunction import logging -from pathlib import Path +import os +import re +from shlex import split import shutil import subprocess import sys -import os - -from typing import Sequence, Dict, Union, Iterable, Mapping, Any, IO, Tuple, Optional, List - -from makefun import wraps, remove_signature_parameters, add_signature_parameters +from collections import namedtuple +from inspect import isfunction, signature +from itertools import product +from pathlib import Path +from typing import IO, Any, Dict, Iterable, Mapping, Optional, Sequence, Tuple, Union import nox +from makefun import add_signature_parameters, remove_signature_parameters, wraps from nox.sessions import Session - nox_logger = logging.getLogger("nox") -PY27, PY35, PY36, PY37, PY38, PY39, PY310 = "2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10" +PY27 = "2.7" +PY35 = "3.5" +PY36 = "3.6" +PY37 = "3.7" +PY38 = "3.8" +PY39 = "3.9" +PY310 = "3.10" +PY311 = "3.11" +PY312 = "3.12" DONT_INSTALL = "dont_install" def power_session( - func=None, - envs=None, - grid_param_name="env", - python=None, - py=None, - reuse_venv=None, - name=None, - venv_backend=None, - venv_params=None, - logsdir=None, - **kwargs + func=None, + envs=None, + grid_param_name="env", + python=None, + py=None, + reuse_venv=None, + name=None, + venv_backend=None, + venv_params=None, + logsdir=None, + **kwargs ): """A nox.session on steroids @@ -59,25 +65,34 @@ def power_session( if func is not None: return power_session()(func) else: - def combined_decorator(f): - # replace Session with PowerSession - f = with_power_session(f) + def combined_decorator(f): # open a log file for the session, use it to stream the commands stdout and stderrs, # and possibly inject the log file in the session function if logsdir is not None: f = with_logfile(logs_dir=logsdir)(f) + # replace Session with PowerSession before it is passed to `with_logfile` + f = with_power_session(f) + # decorate with @nox.session and possibly @nox.parametrize to create the grid - return nox_session_with_grid(python=python, py=py, envs=envs, reuse_venv=reuse_venv, name=name, - grid_param_name=grid_param_name, venv_backend=venv_backend, - venv_params=venv_params, **kwargs)(f) + return nox_session_with_grid( + python=python, + py=py, + envs=envs, + reuse_venv=reuse_venv, + name=name, + grid_param_name=grid_param_name, + venv_backend=venv_backend, + venv_params=venv_params, + **kwargs + )(f) return combined_decorator def with_power_session(f=None): - """ A decorator to patch the session objects in order to add all methods from Session2""" + """A decorator to patch the session objects in order to add all methods from Session2""" if f is not None: return with_power_session()(f) @@ -86,7 +101,7 @@ def _decorator(f): @wraps(f) def _f_wrapper(**kwargs): # patch the session arg - PowerSession.patch(kwargs['session']) + PowerSession.patch(kwargs["session"]) # finally execute the session return f(**kwargs) @@ -103,10 +118,7 @@ class PowerSession(Session): # ------------ commandline runners ----------- - def run2(self, - command: Union[Iterable[str], str], - logfile: Union[bool, str, Path] = True, - **kwargs): + def run2(self, command: Union[Iterable[str], str], logfile: Union[bool, str, Path] = True, **kwargs): """ An improvement of session.run that is able to @@ -119,14 +131,11 @@ def run2(self, :return: """ if isinstance(command, str): - command = command.split(' ') + command = split(command) self.run(*command, logfile=logfile, **kwargs) - def run_multi(self, 
- cmds: str, - logfile: Union[bool, str, Path] = True, - **kwargs): + def run_multi(self, cmds: str, logfile: Union[bool, str, Path] = True, **kwargs): """ An improvement of session.run that is able to @@ -144,16 +153,16 @@ def run_multi(self, # ------------ requirements installers ----------- def install_reqs( - self, - # pre wired phases - setup=False, - install=False, - tests=False, - extras=(), - # custom phase - phase=None, - phase_reqs=None, - versions_dct=None + self, + # pre wired phases + setup=False, + install=False, + tests=False, + extras=(), + # custom phase + phase=None, + phase_reqs=None, + versions_dct=None, ): """ A high-level helper to install requirements from the various project files @@ -195,47 +204,77 @@ def install_reqs( # Read requirements from pyproject.toml toml_setup_reqs, toml_use_conda_for = read_pyproject_toml() if setup: - self.install_any("pyproject.toml#build-system", toml_setup_reqs, - use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + self.install_any( + "pyproject.toml#build-system", + toml_setup_reqs, + use_conda_for=toml_use_conda_for, + versions_dct=versions_dct, + ) # Read test requirements from setup.cfg setup_cfg = read_setuptools_cfg() if setup: - self.install_any("setup.cfg#setup_requires", setup_cfg.setup_requires, - use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + self.install_any( + "setup.cfg#setup_requires", + setup_cfg.setup_requires, + use_conda_for=toml_use_conda_for, + versions_dct=versions_dct, + ) if install: - self.install_any("setup.cfg#install_requires", setup_cfg.install_requires, - use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + self.install_any( + "setup.cfg#install_requires", + setup_cfg.install_requires, + use_conda_for=toml_use_conda_for, + versions_dct=versions_dct, + ) if tests: - self.install_any("setup.cfg#tests_requires", setup_cfg.tests_requires, - use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + self.install_any( + "setup.cfg#tests_requires", + setup_cfg.tests_requires, + use_conda_for=toml_use_conda_for, + versions_dct=versions_dct, + ) for extra in extras: - self.install_any("setup.cfg#extras_require#%s" % extra, setup_cfg.extras_require[extra], - use_conda_for=toml_use_conda_for, versions_dct=versions_dct) + self.install_any( + "setup.cfg#extras_require#%s" % extra, + setup_cfg.extras_require[extra], + use_conda_for=toml_use_conda_for, + versions_dct=versions_dct, + ) if phase is not None: - self.install_any(phase, phase_reqs, use_conda_for=toml_use_conda_for, versions_dct=versions_dct) - - def install_any(self, - phase_name: str, - pkgs: Sequence[str], - use_conda_for: Sequence[str] = (), - versions_dct: Dict[str, str] = None, - logfile: Union[bool, str, Path] = True, - ): + self.install_any( + phase, + phase_reqs, + use_conda_for=toml_use_conda_for, + versions_dct=versions_dct, + ) + + def uses_conda(self): + return isinstance(self.virtualenv, nox.virtualenv.CondaEnv) + + def install_any( + self, + phase_name: str, + pkgs: Sequence[str], + use_conda_for: Sequence[str] = (), + versions_dct: Dict[str, str] = None, + logfile: Union[bool, str, Path] = True, + ): """Install the `pkgs` provided with `session.install(*pkgs)`, except for those present in `use_conda_for`""" - nox_logger.debug("\nAbout to install *%s* requirements: %s.\n " - "Conda pkgs are %s" % (phase_name, pkgs, use_conda_for)) + nox_logger.debug( + "\nAbout to install *%s* requirements: %s.\n " "Conda pkgs are %s" % (phase_name, pkgs, use_conda_for) + ) # use the provided versions dictionary to 
update the versions if versions_dct is None: versions_dct = dict() - pkgs = [pkg + versions_dct.get(pkg, "") for pkg in pkgs if versions_dct.get(pkg, "") != DONT_INSTALL] + pkgs = [pkg + _get_suffix(pkg, versions_dct) for pkg in pkgs if versions_dct.get(pkg, "") != DONT_INSTALL] # install on conda... if the session uses conda backend - if not isinstance(self.virtualenv, nox.virtualenv.CondaEnv): + if not self.uses_conda(): conda_pkgs = [] else: conda_pkgs = [pkg_req for pkg_req in pkgs if any(get_req_pkg_name(pkg_req) == c for c in use_conda_for)] @@ -250,11 +289,7 @@ def install_any(self, nox_logger.info("[%s] Installing requirements with pip: %s" % (phase_name, pip_pkgs)) self.install2(*pip_pkgs, logfile=logfile) - def conda_install2(self, - *conda_pkgs, - logfile: Union[bool, str, Path] = True, - **kwargs - ): + def conda_install2(self, *conda_pkgs, logfile: Union[bool, str, Path] = True, **kwargs): """ Same as session.conda_install() but with support for `logfile`. @@ -264,11 +299,7 @@ def conda_install2(self, """ return self.conda_install(*conda_pkgs, logfile=logfile, **kwargs) - def install2(self, - *pip_pkgs, - logfile: Union[bool, str, Path] = True, - **kwargs - ): + def install2(self, *pip_pkgs, logfile: Union[bool, str, Path] = True, **kwargs): """ Same as session.install() but with support for `logfile`. @@ -280,7 +311,10 @@ def install2(self, def get_session_id(self): """Return the session id""" - return Path(self.bin).name + if self.uses_conda(): + return Path(self.bin).name + else: + return Path(self.bin).parent.name @classmethod def is_power_session(cls, session: Session): @@ -318,16 +352,23 @@ def read_pyproject_toml(): """ if os.path.exists("pyproject.toml"): import toml + nox_logger.debug("\nA `pyproject.toml` file exists. Loading it.") pyproject = toml.load("pyproject.toml") - requires = pyproject['build-system']['requires'] - conda_pkgs = pyproject['tool']['conda']['conda_packages'] + requires = pyproject["build-system"]["requires"] + try: + conda_pkgs = pyproject["tool"]["conda"]["conda_packages"] + except KeyError: + conda_pkgs = dict() return requires, conda_pkgs else: raise FileNotFoundError("No `pyproject.toml` file exists. 
No dependency will be installed ...") -SetupCfg = namedtuple('SetupCfg', ('setup_requires', 'install_requires', 'tests_requires', 'extras_require')) +SetupCfg = namedtuple( + "SetupCfg", + ("setup_requires", "install_requires", "tests_requires", "extras_require"), +) def read_setuptools_cfg(): @@ -336,12 +377,15 @@ def read_setuptools_cfg(): """ # see https://stackoverflow.com/a/30679041/7262247 from setuptools import Distribution + dist = Distribution() dist.parse_config_files() - return SetupCfg(setup_requires=dist.setup_requires, - install_requires=dist.install_requires, - tests_requires=dist.tests_require, - extras_require=dist.extras_require) + return SetupCfg( + setup_requires=dist.setup_requires, + install_requires=dist.install_requires, + tests_requires=dist.tests_require, + extras_require=dist.extras_require, + ) def get_req_pkg_name(r): @@ -351,17 +395,18 @@ def get_req_pkg_name(r): "funcsigs;python<'3.5'" will return "funcsigs" "pytest>=3" will return "pytest" """ - return r.replace('<', '=').replace('>', '=').replace(';', '=').split("=")[0] + return r.replace("<", "=").replace(">", "=").replace(";", "=").split("=")[0] # ------------- log related -def with_logfile(logs_dir: Path, - logfile_arg: str = "logfile", - logfile_handler_arg: str = "logfilehandler" - ): - """ A decorator to inject a logfile""" +def with_logfile( + logs_dir: Path, + logfile_arg: str = "logfile", + logfile_handler_arg: str = "logfilehandler", +): + """A decorator to inject a logfile""" def _decorator(f): # check the signature of f @@ -379,7 +424,7 @@ def _decorator(f): @wraps(f, new_sig=new_sig) def _f_wrapper(**kwargs): # find the session arg - session = kwargs['session'] # type: Session + session = kwargs["session"] # type: Session # add file handler to logger logfile = logs_dir / ("%s.log" % PowerSession.get_session_id(session)) @@ -418,8 +463,7 @@ def _f_wrapper(**kwargs): return _decorator -def log_to_file(file_path: Union[str, Path] - ): +def log_to_file(file_path: Union[str, Path]): """ Closes and removes all file handlers from the nox logger, and add a new one to the provided file path @@ -431,7 +475,7 @@ def log_to_file(file_path: Union[str, Path] if isinstance(h, logging.FileHandler): h.close() nox_logger.removeHandler(h) - fh = logging.FileHandler(str(file_path), mode='w') + fh = logging.FileHandler(str(file_path), mode="w") nox_logger.addHandler(fh) return fh @@ -469,16 +513,18 @@ def remove_file_logger(): # ------------ environment grid / parametrization related -def nox_session_with_grid(python = None, - py = None, - envs: Mapping[str, Mapping[str, Any]] = None, - reuse_venv: Optional[bool] = None, - name: Optional[str] = None, - venv_backend: Any = None, - venv_params: Any = None, - grid_param_name: str = None, - **kwargs - ): + +def nox_session_with_grid( + python=None, + py=None, + envs: Mapping[str, Mapping[str, Any]] = None, + reuse_venv: Optional[bool] = None, + name: Optional[str] = None, + venv_backend: Any = None, + venv_params: Any = None, + grid_param_name: str = None, + **kwargs +): """ Since nox is not yet capable to define a build matrix with python and parameters mixed in the same parametrize this implements it with a dirty hack. 
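(Editorial aside, not part of the diff.) The env-grid mechanism defined by `nox_session_with_grid` / `power_session(envs=...)` is consumed from the project's `noxfile.py` further down in this changeset: each ENVS key selects a python interpreter (keys may also be `(python, param)` tuples, in which case `grid_param_name` adds a second `nox.parametrize` dimension), and each value's keys are injected as keyword arguments of the session function. A minimal sketch, with abridged ENVS values and an assumed session body mirroring the `tests` session shown later:

from nox_utils import PowerSession, power_session

# Illustrative values only -- the real grid lives in noxfile.py (ENVS).
ENVS = {
    "3.12": {"coverage": False, "pkg_specs": {"pip": ">19"}},
    "2.7": {"coverage": False, "pkg_specs": {"pip": ">10"}},
}

@power_session(envs=ENVS)
def tests(session: PowerSession, coverage, pkg_specs):
    # `coverage` and `pkg_specs` are injected from the ENVS entry matching session.python
    session.install_reqs(setup=True, install=True, tests=True, versions_dct=pkg_specs)
    session.run2("python -m pytest --cache-clear -v tests/")
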
@@ -490,13 +536,22 @@ def nox_session_with_grid(python = None, """ if envs is None: # Fast track default to @nox.session - return nox.session(python=python, py=py, reuse_venv=reuse_venv, name=name, venv_backend=venv_backend, - venv_params=venv_params, **kwargs) + return nox.session( + python=python, + py=py, + reuse_venv=reuse_venv, + name=name, + venv_backend=venv_backend, + venv_params=venv_params, + **kwargs + ) else: # Current limitation : session param names can be 'python' or 'py' only if py is not None or python is not None: - raise ValueError("`python` session argument can not be provided both directly and through the " - "`env` with `session_param_names`") + raise ValueError( + "`python` session argument can not be provided both directly and through the " + "`env` with `session_param_names`" + ) # First examine the env and collect the parameter values for python all_python = [] @@ -530,8 +585,10 @@ def nox_session_with_grid(python = None, env_contents_names = set(env_params.keys()) else: if env_contents_names != set(env_params.keys()): - raise ValueError("Environment %r parameters %r does not match parameters in the first environment: %r" - % (env_id, env_contents_names, set(env_params.keys()))) + raise ValueError( + "Environment %r parameters %r does not match parameters in the first environment: %r" + % (env_id, env_contents_names, set(env_params.keys())) + ) if has_parameter and not grid_param_name: raise ValueError("You must provide a grid parameter name when the env keys are tuples.") @@ -541,7 +598,7 @@ def _decorator(f): for pyv, _param in product(all_python, all_params): if (pyv, _param) not in envs: # create a dummy folder to avoid creating a useless venv ? - env_dir = Path(".nox") / ("%s-%s-%s-%s" % (s_name, pyv.replace('.', '-'), grid_param_name, _param)) + env_dir = Path(".nox") / ("%s-%s-%s-%s" % (s_name, pyv.replace(".", "-"), grid_param_name, _param)) env_dir.mkdir(parents=True, exist_ok=True) # check the signature of f @@ -564,7 +621,7 @@ def _decorator(f): @wraps(f, new_sig=new_sig) def _f_wrapper(**kwargs): # find the session arg - session = kwargs['session'] # type: Session + session = kwargs["session"] # type: Session # get the versions to use for this environment try: @@ -576,7 +633,9 @@ def _f_wrapper(**kwargs): except KeyError: # Skip this session, it is a dummy one nox_logger.warning( - "Skipping configuration, this is not supported in python version %r" % session.python) + "Skipping configuration, %r is not meant to be executed in this session for python version %r" % + (grid_param if has_parameter else "this", session.python) + ) return # inject the parameters in the args: @@ -588,8 +647,13 @@ def _f_wrapper(**kwargs): if has_parameter: _f_wrapper = nox.parametrize(grid_param_name, all_params)(_f_wrapper) - _f_wrapper = nox.session(python=all_python, reuse_venv=reuse_venv, name=name, - venv_backend=venv_backend, venv_params=venv_params)(_f_wrapper) + _f_wrapper = nox.session( + python=all_python, + reuse_venv=reuse_venv, + name=name, + venv_backend=venv_backend, + venv_params=venv_params, + )(_f_wrapper) return _f_wrapper return _decorator @@ -598,8 +662,17 @@ def _f_wrapper(**kwargs): # ----------- other goodies -def rm_file(folder: Union[str, Path] - ): +def _get_suffix(pkg, versions_dct): + res = re.split("<|=|>|;", pkg.strip()) + prefix = "" + suffix = versions_dct.get(res[0], "") + if len(res) > 1 and len(suffix) > 0: + prefix = "," + + return prefix + suffix + + +def rm_file(folder: Union[str, Path]): """Since on windows Path.unlink throws 
permission error sometimes, os.remove is preferred.""" if isinstance(folder, str): folder = Path(folder) @@ -609,8 +682,7 @@ def rm_file(folder: Union[str, Path] # Folders.site.unlink() --> possible PermissionError -def rm_folder(folder: Union[str, Path] - ): +def rm_folder(folder: Union[str, Path]): """Since on windows Path.unlink throws permission error sometimes, shutil is preferred.""" if isinstance(folder, str): folder = Path(folder) @@ -624,6 +696,7 @@ def rm_folder(folder: Union[str, Path] import nox.popen as nox_popen_module + orig_nox_popen = nox_popen_module.popen @@ -707,20 +780,44 @@ def patched_popen( # define the async coroutines async def async_popen(): - process = await asyncio.create_subprocess_exec(*args, env=env, stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, **kwargs) + process = await asyncio.create_subprocess_exec( + *args, env=env, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, **kwargs + ) # bind the out and err streams - see https://stackoverflow.com/a/59041913/7262247 # to mimic nox behaviour we only use a single capturing list outlines = [] - await asyncio.wait([ - # process out is only redirected to STDOUT if not silent - _read_stream(process.stdout, lambda l: tee(l, sinklist=outlines, sinkstream=log_file_stream, - quiet=silent, verbosepipe=sys.stdout)), - # process err is always redirected to STDOUT (quiet=False) with a specific label - _read_stream(process.stderr, lambda l: tee(l, sinklist=outlines, sinkstream=log_file_stream, - quiet=False, verbosepipe=sys.stdout, label="ERR:")) - ]) + await asyncio.wait( + [ + asyncio.create_task( + # process out is only redirected to STDOUT if not silent + _read_stream( + process.stdout, + lambda l: tee( + l, + sinklist=outlines, + sinkstream=log_file_stream, + quiet=silent, + verbosepipe=sys.stdout, + ), + ) + ), + # process err is always redirected to STDOUT (quiet=False) with a specific label + asyncio.create_task( + _read_stream( + process.stderr, + lambda l: tee( + l, + sinklist=outlines, + sinkstream=log_file_stream, + quiet=False, + verbosepipe=sys.stdout, + label="ERR:", + ), + ), + ), + ] + ) return_code = await process.wait() # make sur the process has ended and retrieve its return code return return_code, outlines @@ -763,7 +860,7 @@ def tee(linebytes, sinklist, sinkstream, verbosepipe, quiet, label=""): append it to the sink, and if quiet=False, write it to pipe too. 
""" - line = linebytes.decode('utf-8').rstrip() + line = linebytes.decode("utf-8").rstrip() if sinklist is not None: sinklist.append(line) @@ -781,13 +878,14 @@ def patch_popen(): nox_popen_module.popen = patched_popen from nox.command import popen + if popen is not patched_popen: nox.command.popen = patched_popen # change event loop on windows # see https://stackoverflow.com/a/44639711/7262247 # and https://docs.python.org/3/library/asyncio-platforms.html#subprocess-support-on-windows - if 'win32' in sys.platform: + if "win32" in sys.platform: # Windows specific event-loop policy & cmd asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) # cmds = [['C:/Windows/system32/HOSTNAME.EXE']] diff --git a/noxfile-requirements.txt b/noxfile-requirements.txt index 6fb3e1c..3b4ab32 100644 --- a/noxfile-requirements.txt +++ b/noxfile-requirements.txt @@ -1,3 +1,4 @@ +virtualenv nox toml makefun diff --git a/noxfile.py b/noxfile.py index 12ffb3a..6068373 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,3 +1,4 @@ +import argparse from itertools import product from json import dumps import logging @@ -9,7 +10,7 @@ # add parent folder to python path so that we can import noxfile_utils.py # note that you need to "pip install -r noxfile-requiterements.txt" for this file to work. sys.path.append(str(Path(__file__).parent / "ci_tools")) -from nox_utils import PY27, PY37, PY36, PY35, PY38, PY39, PY310, power_session, rm_folder, rm_file, PowerSession # noqa +from nox_utils import PY27, PY37, PY36, PY35, PY38, PY39, PY310, PY311, PY312, power_session, rm_folder, rm_file, PowerSession, DONT_INSTALL # noqa pkg_name = "makefun" @@ -17,8 +18,9 @@ gh_repo = "python-makefun" ENVS = { - # python 3.10 is not available on conda yet - # PY310: {"coverage": False, "pkg_specs": {"pip": ">19"}}, + PY312: {"coverage": False, "pkg_specs": {"pip": ">19"}}, + PY311: {"coverage": False, "pkg_specs": {"pip": ">19"}}, + PY310: {"coverage": False, "pkg_specs": {"pip": ">19"}}, PY39: {"coverage": False, "pkg_specs": {"pip": ">19"}}, PY38: {"coverage": False, "pkg_specs": {"pip": ">19"}}, PY27: {"coverage": False, "pkg_specs": {"pip": ">10"}}, @@ -30,10 +32,11 @@ # set the default activated sessions, minimal for CI -nox.options.sessions = ["tests", "flake8"] # , "docs", "gh_pages" +nox.options.sessions = ["tests", "flake8", "docs"] # , "docs", "gh_pages" +nox.options.error_on_missing_interpreters = True nox.options.reuse_existing_virtualenvs = True # this can be done using -r # if platform.system() == "Windows": >> always use this for better control -nox.options.default_venv_backend = "conda" +nox.options.default_venv_backend = "virtualenv" # os.environ["NO_COLOR"] = "True" # nox.options.nocolor = True does not work # nox.options.verbose = True @@ -95,13 +98,13 @@ def tests(session: PowerSession, coverage, pkg_specs): # list all (conda list alone does not work correctly on github actions) # session.run2("conda list") - conda_prefix = Path(session.bin) - if conda_prefix.name == "bin": - conda_prefix = conda_prefix.parent - session.run2("conda list", env={"CONDA_PREFIX": str(conda_prefix), "CONDA_DEFAULT_ENV": session.get_session_id()}) + # conda_prefix = Path(session.bin) + # if conda_prefix.name == "bin": + # conda_prefix = conda_prefix.parent + # session.run2("conda list", env={"CONDA_PREFIX": str(conda_prefix), "CONDA_DEFAULT_ENV": session.get_session_id()}) # Fail if the assumed python version is not the actual one - session.run2("python ci_tools/check_python_version.py %s" % session.python) + 
session.run2(f"python ci_tools/check_python_version.py {session.python}") # check that it can be imported even from a different folder # Important: do not surround the command into double quotes as in the shell ! @@ -111,7 +114,6 @@ def tests(session: PowerSession, coverage, pkg_specs): if not coverage: # install self so that it is recognized by pytest session.run2("pip install . --no-deps") - # session.install(".", "--no-deps") # simple: pytest only session.run2("python -m pytest --cache-clear -v tests/") @@ -125,23 +127,24 @@ def tests(session: PowerSession, coverage, pkg_specs): versions_dct=pkg_specs) # --coverage + junit html reports - session.run2("coverage run --source src/{pkg_name} " - "-m pytest --cache-clear --junitxml={test_xml} --html={test_html} -v tests/" - "".format(pkg_name=pkg_name, test_xml=Folders.test_xml, test_html=Folders.test_html)) + session.run2(f"coverage run --source src/{pkg_name} " + f"-m pytest --cache-clear " + f'--junitxml="{Folders.test_xml}" --html="{Folders.test_html}" ' + f"-v tests/") session.run2("coverage report") - session.run2("coverage xml -o {covxml}".format(covxml=Folders.coverage_xml)) - session.run2("coverage html -d {dst}".format(dst=Folders.coverage_reports)) + session.run2(f'coverage xml -o "{Folders.coverage_xml}"') + session.run2(f'coverage html -d "{Folders.coverage_reports}"') # delete this intermediate file, it is not needed anymore rm_file(Folders.coverage_intermediate_file) # --generates the badge for the test results and fail build if less than x% tests pass nox_logger.info("Generating badge for tests coverage") # Use our own package to generate the badge - session.run2("genbadge tests -i %s -o %s -t 100" % (Folders.test_xml, Folders.test_badge)) - session.run2("genbadge coverage -i %s -o %s" % (Folders.coverage_xml, Folders.coverage_badge)) + session.run2(f'genbadge tests -i "{Folders.test_xml}" -o "{Folders.test_badge}" -t 100') + session.run2(f'genbadge coverage -i "{Folders.coverage_xml}" -o "{Folders.coverage_badge}"') -@power_session(python=PY38, logsdir=Folders.runlogs) +@power_session(python=PY39, logsdir=Folders.runlogs) def flake8(session: PowerSession): """Launch flake8 qualimetry.""" @@ -158,28 +161,28 @@ def flake8(session: PowerSession): session.run("flake8", pkg_name, "--exit-zero", "--format=html", "--htmldir", str(Folders.flake8_reports), "--statistics", "--tee", "--output-file", str(Folders.flake8_intermediate_file)) # generate our badge - session.run2("genbadge flake8 -i %s -o %s" % (Folders.flake8_intermediate_file, Folders.flake8_badge)) + session.run2(f'genbadge flake8 -i "{Folders.flake8_intermediate_file}" -o "{Folders.flake8_badge}"') rm_file(Folders.flake8_intermediate_file) -@power_session(python=[PY37]) +@power_session(python=[PY39]) def docs(session: PowerSession): - """Generates the doc and serves it on a local http server. Pass '-- build' to build statically instead.""" + """Generates the doc. Pass '-- serve' to serve it on a local http server instead.""" session.install_reqs(phase="docs", phase_reqs=["mkdocs-material", "mkdocs", "pymdown-extensions", "pygments"]) if session.posargs: - # use posargs instead of "serve" + # use posargs instead of "build" session.run2("mkdocs %s" % " ".join(session.posargs)) else: - session.run2("mkdocs serve") + session.run2("mkdocs build") -@power_session(python=[PY37]) +@power_session(python=[PY39]) def publish(session: PowerSession): """Deploy the docs+reports on github pages. 
Note: this rebuilds the docs""" - session.install_reqs(phase="mkdocs", phase_reqs=["mkdocs-material", "mkdocs", "pymdown-extensions", "pygments"]) + session.install_reqs(phase="publish", phase_reqs=["mkdocs-material", "mkdocs", "pymdown-extensions", "pygments"]) # possibly rebuild the docs in a static way (mkdocs serve does not build locally) session.run2("mkdocs build") @@ -200,7 +203,7 @@ def publish(session: PowerSession): # session.run2('codecov -t %s -f %s' % (codecov_token, Folders.coverage_xml)) -@power_session(python=[PY37]) +@power_session(python=[PY39]) def release(session: PowerSession): """Create a release on github corresponding to the latest tag""" @@ -249,10 +252,9 @@ def my_scheme(version_): # create the github release session.install_reqs(phase="release", phase_reqs=["click", "PyGithub"]) - session.run2("python ci_tools/github_release.py -s {gh_token} " - "--repo-slug {gh_org}/{gh_repo} -cf ./docs/changelog.md " - "-d https://{gh_org}.github.io/{gh_repo}/changelog {tag}" - "".format(gh_token=gh_token, gh_org=gh_org, gh_repo=gh_repo, tag=current_tag)) + session.run2(f"python ci_tools/github_release.py -s {gh_token} " + f"--repo-slug {gh_org}/{gh_repo} -cf ./docs/changelog.md " + f"-d https://{gh_org}.github.io/{gh_repo}/changelog {current_tag}") @nox.session(python=False) @@ -261,19 +263,36 @@ def gha_list(session): # see https://stackoverflow.com/q/66747359/7262247 + # The options + parser = argparse.ArgumentParser() + parser.add_argument("-s", "--session", help="The nox base session name") + parser.add_argument( + "-v", + "--with_version", + action="store_true", + default=False, + help="Return a list of lists where the first element is the python version and the second the nox session.", + ) + additional_args = parser.parse_args(session.posargs) + # get the desired base session to generate the list for - if len(session.posargs) != 1: - raise ValueError("This session has a mandatory argument: ") - session_func = globals()[session.posargs[0]] + session_func = globals()[additional_args.session] # list all sessions for this base session try: session_func.parametrize except AttributeError: - sessions_list = ["%s-%s" % (session_func.__name__, py) for py in session_func.python] + if additional_args.with_version: + sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}"} for py in session_func.python] + else: + sessions_list = [f"{session_func.__name__}-{py}" for py in session_func.python] else: - sessions_list = ["%s-%s(%s)" % (session_func.__name__, py, param) - for py, param in product(session_func.python, session_func.parametrize)] + if additional_args.with_version: + sessions_list = [{"python": py, "session": f"{session_func.__name__}-{py}({param})"} + for py, param in product(session_func.python, session_func.parametrize)] + else: + sessions_list = [f"{session_func.__name__}-{py}({param})" + for py, param in product(session_func.python, session_func.parametrize)] # print the list so that it can be caught by GHA. # Note that json.dumps is optional since this is a list of string. diff --git a/pyproject.toml b/pyproject.toml index 5116021..056a1e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,18 +2,9 @@ requires = [ "setuptools>=39.2", "setuptools_scm", + "wheel" ] build-backend = "setuptools.build_meta" # pip: no ! 
does not work in old python 2.7 and not recommended here # https://setuptools.readthedocs.io/en/latest/userguide/quickstart.html#basic-use - -[tool.conda] -# Declare that the following packages should be installed with conda instead of pip -# Note: this includes packages declared everywhere, here and in setup.cfg -conda_packages = [ - "setuptools", - "wheel", - "pip" -] -# pytest: not with conda ! does not work in old python 2.7 and 3.5 diff --git a/setup.cfg b/setup.cfg index 1b2eb49..fbbbcba 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,12 +27,14 @@ classifiers = Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 [options] # one day these will be able to come from requirement files, see https://github.com/pypa/setuptools/issues/1951. But will it be better ? setup_requires = setuptools_scm - pytest-runner install_requires = # note: do not use double quotes in these, this triggers a weird bug in PyCharm in debug mode only funcsigs;python_version<'3.3'
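
Closing illustration (not part of the diff): with the new `-s`/`-v` options, the `gha_list` session prints a JSON list of objects instead of bare session names. The workflow parses it with `fromJson` and addresses each entry as `matrix.nox_session.python` and `matrix.nox_session.session`. The exact entries depend on ENVS; a plausible, abridged output of `nox -s gha_list -- -s tests -v` is:

[
    {"python": "2.7", "session": "tests-2.7"},
    {"python": "3.9", "session": "tests-3.9"},
    {"python": "3.12", "session": "tests-3.12"},
]

Each matrix entry then drives one job: the listed python version is installed via setup-python, and the job runs `nox -s "<session>" -v` with that interpreter available.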