-rwxr-xr-x | .github/workflows/manylinux.sh | 7
-rw-r--r-- | MANIFEST.in | 1
-rw-r--r-- | Makefile | 14
-rw-r--r-- | data/share/bash-completion/completions/pkgcheck (renamed from completion/bash/pkgcheck) | 0
-rwxr-xr-x | data/share/pkgcheck/ci.py (renamed from data/ci.py) | 0
-rw-r--r-- | data/share/pkgcheck/perl-version.pl (renamed from data/perl-version.pl) | 0
-rw-r--r-- | data/share/pkgcheck/pkgcheck.conf (renamed from data/pkgcheck.conf) | 0
-rw-r--r-- | data/share/zsh/site-functions/_pkgcheck (renamed from completion/zsh/_pkgcheck) | 0
-rw-r--r-- | pyproject.toml | 72
-rw-r--r-- | requirements/ci.txt | 1
-rw-r--r-- | requirements/dev.txt | 8
-rw-r--r-- | requirements/dist.txt | 4
-rw-r--r-- | requirements/docs.txt | 1
-rw-r--r-- | requirements/install.txt | 7
-rw-r--r-- | requirements/pyproject.toml | 12
-rw-r--r-- | requirements/test.txt | 2
-rw-r--r-- | requirements/tox.txt | 3
-rw-r--r-- | setup.cfg | 4
-rw-r--r--[-rwxr-xr-x] | setup.py | 330
-rw-r--r-- | src/pkgcheck/const.py | 2
-rwxr-xr-x | src/pkgcheck/scripts/__init__.py | 8
-rw-r--r-- | tests/scripts/test_pkgcheck.py | 10
-rw-r--r-- | tox.ini | 25 |
23 files changed, 247 insertions, 264 deletions
diff --git a/.github/workflows/manylinux.sh b/.github/workflows/manylinux.sh
index d6eb4ec8..c56892aa 100755
--- a/.github/workflows/manylinux.sh
+++ b/.github/workflows/manylinux.sh
@@ -1,13 +1,12 @@
 #!/bin/sh
 
 # Mangle the manylinux docker image to successfully build and test wheels.
-set -e
-set +x
+set -ex
 
 # install git
-if type -P apk; then
+if command -v apk; then
     apk add --no-cache git bash py3-lxml
-elif type -P yum; then
+elif command -v yum; then
     yum update -y
     yum install -y libxslt-devel libxml2-devel python-devel
 else
diff --git a/MANIFEST.in b/MANIFEST.in
index cd152a18..5784287b 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,7 +2,6 @@ include LICENSE *.py *.rst
 include tox.ini pyproject.toml .coveragerc .pylintrc
 recursive-include bin *
 recursive-include contrib *
-recursive-include completion *
 recursive-include data *
 recursive-include doc *
 recursive-include requirements *
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..3eb6e2e4
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,14 @@
+PYTHON ?= python
+SPHINX_BUILD ?= $(PYTHON) -m sphinx.cmd.build
+
+.PHONY: man html
+man html:
+	$(SPHINX_BUILD) -a -b $@ doc build/sphinx/$@
+
+.PHONY: sdist wheel
+sdist wheel:
+	$(PYTHON) -m build --$@
+
+.PHONY: clean
+clean:
+	$(RM) -r build/sphinx doc/api doc/generated dist
diff --git a/completion/bash/pkgcheck b/data/share/bash-completion/completions/pkgcheck
index 8801391b..8801391b 100644
--- a/completion/bash/pkgcheck
+++ b/data/share/bash-completion/completions/pkgcheck
diff --git a/data/ci.py b/data/share/pkgcheck/ci.py
index c9f438cd..c9f438cd 100755
--- a/data/ci.py
+++ b/data/share/pkgcheck/ci.py
diff --git a/data/perl-version.pl b/data/share/pkgcheck/perl-version.pl
index 0065ebb6..0065ebb6 100644
--- a/data/perl-version.pl
+++ b/data/share/pkgcheck/perl-version.pl
diff --git a/data/pkgcheck.conf b/data/share/pkgcheck/pkgcheck.conf
index 2d592458..2d592458 100644
--- a/data/pkgcheck.conf
+++ b/data/share/pkgcheck/pkgcheck.conf
diff --git a/completion/zsh/_pkgcheck b/data/share/zsh/site-functions/_pkgcheck
index 0c1aee6e..0c1aee6e 100644
--- a/completion/zsh/_pkgcheck
+++ b/data/share/zsh/site-functions/_pkgcheck
diff --git a/pyproject.toml b/pyproject.toml
index 39d8f6a8..2f0ac255 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,12 +1,78 @@
 [build-system]
 requires = [
-    "wheel", "setuptools",
+    "setuptools >= 61.0.0",
+    "wheel",
+    # repeat all normal runtime dependencies here
     "chardet",
     "lazy-object-proxy",
     "lxml",
     "pathspec",
     "tree-sitter>=0.19.0",
-    "snakeoil~=0.9.10",
-    "pkgcore~=0.12.11",
+    "snakeoil~=0.10.2",
+    "pkgcore~=0.12.17",
 ]
 build-backend = "setuptools.build_meta"
+
+[project]
+name = "pkgcheck"
+description = "pkgcore-based QA utility for ebuild repos"
+readme = "README.rst"
+license = {file = "LICENSE"}
+requires-python = "~=3.9"
+authors = [
+    {name = "Tim Harder", email = "radhermit@gmail.com"},
+    {name = "Arthur Zamarin", email = "arthurzam@gentoo.org"},
+]
+maintainers = [
+    {name = "Arthur Zamarin", email = "arthurzam@gentoo.org"},
+]
+classifiers = [
+    "License :: OSI Approved :: BSD License",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+]
+dynamic = ["version"]
+
+dependencies = [
+    "chardet",
+    "lazy-object-proxy",
+    "lxml",
+    "pathspec",
+    "tree-sitter>=0.19.0",
+    "snakeoil~=0.10.2",
+    "pkgcore~=0.12.17",
+]
+
+[project.optional-dependencies]
+test = [
+    "pytest>=6.0",
+    "pytest-cov",
+    "requests",
+]
+doc = [
+    "sphinx",
+    "tomli; python_version < '3.11'"
+]
+network = [
+    "requests",
+]
+
+[project.urls]
+Homepage = "https://github.com/pkgcore/pkgcheck"
+Documentation = "https://pkgcore.github.io/pkgcheck/"
+Source = "https://github.com/pkgcore/pkgcheck"
+
+[project.scripts]
+pkgcheck = "pkgcheck.scripts.__init__:main"
+
+[tool.setuptools]
+zip-safe = false
+
+[tool.setuptools.dynamic]
+version = {attr = "pkgcheck.__version__"}
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "-vv -ra -l"
+testpaths = ["tests"]
diff --git a/requirements/ci.txt b/requirements/ci.txt
deleted file mode 100644
index c75c448b..00000000
--- a/requirements/ci.txt
+++ /dev/null
@@ -1 +0,0 @@
-pytest-cov
diff --git a/requirements/dev.txt b/requirements/dev.txt
deleted file mode 100644
index fb59aa65..00000000
--- a/requirements/dev.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-cython
-chardet
-lazy-object-proxy
-lxml
-pathspec
-tree-sitter>=0.19.0
-snakeoil @ https://github.com/pkgcore/snakeoil/archive/master.tar.gz
-pkgcore @ https://github.com/pkgcore/pkgcore/archive/master.tar.gz
diff --git a/requirements/dist.txt b/requirements/dist.txt
deleted file mode 100644
index ec031163..00000000
--- a/requirements/dist.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# deps for building sdist/wheels for pypi
--r install.txt
--r docs.txt
-wheel
diff --git a/requirements/docs.txt b/requirements/docs.txt
deleted file mode 100644
index 6966869c..00000000
--- a/requirements/docs.txt
+++ /dev/null
@@ -1 +0,0 @@
-sphinx
diff --git a/requirements/install.txt b/requirements/install.txt
deleted file mode 100644
index c1f38977..00000000
--- a/requirements/install.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-chardet
-lazy-object-proxy
-lxml
-pathspec
-tree-sitter>=0.19.0
-snakeoil~=0.10.1
-pkgcore~=0.12.15
diff --git a/requirements/pyproject.toml b/requirements/pyproject.toml
deleted file mode 100644
index 072a1b47..00000000
--- a/requirements/pyproject.toml
+++ /dev/null
@@ -1,12 +0,0 @@
-[build-system]
-requires = [
-    "wheel", "setuptools",
-    "chardet",
-    "lazy-object-proxy",
-    "lxml",
-    "pathspec",
-    "tree-sitter>=0.19.0",
-    "snakeoil~=0.10.1",
-    "pkgcore~=0.12.15",
-]
-build-backend = "setuptools.build_meta"
diff --git a/requirements/test.txt b/requirements/test.txt
deleted file mode 100644
index 547de5c5..00000000
--- a/requirements/test.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pytest
-requests
diff --git a/requirements/tox.txt b/requirements/tox.txt
deleted file mode 100644
index 4e49649d..00000000
--- a/requirements/tox.txt
+++ /dev/null
@@ -1,3 +0,0 @@
--r dev.txt
--r test.txt
--r ci.txt
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index e02618b3..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-[build_sphinx]
-source-dir = doc
-build-dir = build/sphinx
-all_files = 1
diff --git a/setup.py b/setup.py
old mode 100755
new mode 100644
--- a/setup.py
+++ b/setup.py
@@ -1,39 +1,64 @@
-#!/usr/bin/env python3
-
+import logging
 import os
 import sys
 from collections import defaultdict
-from itertools import chain
+from contextlib import contextmanager
+from pathlib import Path
 from textwrap import dedent
 
-from setuptools import setup
-
-from distutils import log
-from distutils.command import build_clib as dst_build_clib
-from distutils.command import install_data as dst_install_data
-from distutils.command import install_lib as dst_install_lib
-from distutils.util import byte_compile
-from snakeoil.dist import distutils_extensions as pkgdist
+from setuptools import setup, Command
+from setuptools.command.build import SubCommand, build as orig_build
+from setuptools.command.install import install as orig_install
+from setuptools.command.sdist import sdist as orig_sdist
+from wheel.bdist_wheel import bdist_wheel as orig_bdist_wheel
+
+
+use_system_tree_sitter_bash = bool(os.environ.get(
+    'USE_SYSTEM_TREE_SITTER_BASH', False))
+
 
-pkgdist_setup, pkgdist_cmds = pkgdist.setup()
+@contextmanager
+def sys_path():
+    orig_path = sys.path[:]
+    sys.path.insert(0, str(Path.cwd() / 'src'))
+    try:
+        yield
+    finally:
+        sys.path = orig_path
 
-DATA_INSTALL_OFFSET = 'share/pkgcheck'
-use_system_tree_sitter_bash = bool(os.environ.get('USE_SYSTEM_TREE_SITTER_BASH', False))
 
+class build_treesitter(Command, SubCommand):
+    description = 'build tree-sitter-bash library'
+
+    def initialize_options(self):
+        pass
 
-class install_lib(dst_install_lib.install_lib):
-    """Wrapper to install bash parsing library."""
+    def finalize_options(self):
+        pass
+
+    def get_source_files(self):
+        cwd = Path(__file__).parent / 'tree-sitter-bash/src'
+        return [
+            str(cwd / 'GNUmakefile'), str(cwd / 'tree_sitter/parser.h'),
+            str(cwd / 'parser.c'), str(cwd / 'scanner.cc'),
+        ]
+
+    library_path = Path(__file__).parent / 'src/pkgcheck/bash/lang.so'
 
     def run(self):
-        super().run()
         if not use_system_tree_sitter_bash:
-            build_clib = self.reinitialize_command('build_clib')
-            build_clib.ensure_finalized()
-            self.copy_tree(build_clib.build_clib, self.install_dir)
+            if not self.library_path.exists():
+                logging.info('building tree-sitter-bash library')
+                with sys_path():
+                    from pkgcheck.bash import build_library
+                    build_library(self.library_path, ['tree-sitter-bash'])
 
 
-class install(pkgdist.install):
-    """Install wrapper to generate and install pkgcheck-related files."""
+class build(orig_build):
+    sub_commands = orig_build.sub_commands + [('build_treesitter', None)]
+
+
+class install(orig_install):
 
     def finalize_options(self):
         """Force platlib install since non-python libraries are included."""
@@ -42,175 +67,128 @@ class install(pkgdist.install):
     def run(self):
         super().run()
-        target = self.install_data
-        root = self.root or '/'
-        if target.startswith(root):
-            target = os.path.join('/', os.path.relpath(target, root))
-        target = os.path.abspath(target)
-
-        if not self.dry_run:
-            # Install configuration data so the program can find its content,
-            # rather than assuming it is running from a tarball/git repo.
-            write_obj_lists(self.install_lib, target)
-
-
-def write_obj_lists(python_base, install_prefix):
-    """Generate config file of keyword, check, and other object lists."""
-    objects_path = os.path.join(python_base, pkgdist.MODULE_NAME, "_objects.py")
-    os.makedirs(os.path.dirname(objects_path), exist_ok=True)
-    log.info(f'writing config to {objects_path!r}')
-
-    wheel_install = (
-        install_prefix != os.path.abspath(sys.prefix)
-        and not install_prefix.startswith(pkgdist.REPODIR)
-    )
-
-    # hack to drop quotes on modules in generated files
-    class _kls:
-
-        def __init__(self, module):
-            self.module = module
-
-        def __repr__(self):
-            return self.module
-
-    with pkgdist.syspath(pkgdist.PACKAGEDIR):
-        from pkgcheck import objects
-
-    modules = defaultdict(set)
-    objs = defaultdict(list)
-    for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
-        for name, cls in getattr(objects, obj).items():
-            parent, module = cls.__module__.rsplit('.', 1)
-            modules[parent].add(module)
-            objs[obj].append((name, _kls(f'{module}.{name}')))
-
-    keywords = tuple(objs['KEYWORDS'])
-    checks = tuple(objs['CHECKS'])
-    reporters = tuple(objs['REPORTERS'])
-
-    with open(objects_path, 'w') as f:
-        os.chmod(objects_path, 0o644)
-        for k, v in sorted(modules.items()):
-            f.write(f"from {k} import {', '.join(sorted(v))}\n")
-        f.write(dedent(f"""\
-            KEYWORDS = {keywords}
-            CHECKS = {checks}
-            REPORTERS = {reporters}
-        """))
-
-    const_path = os.path.join(python_base, pkgdist.MODULE_NAME, "_const.py")
-    with open(const_path, 'w') as f:
-        os.chmod(const_path, 0o644)
-        # write install path constants to config
-        if wheel_install:
-            # write more dynamic _const file for wheel installs
-            f.write(dedent("""\
-                import os.path as osp
-                import sys
-                INSTALL_PREFIX = osp.abspath(sys.prefix)
-                DATA_PATH = osp.join(INSTALL_PREFIX, {!r})
-            """.format(DATA_INSTALL_OFFSET)))
-        else:
-            f.write("INSTALL_PREFIX=%r\n" % install_prefix)
-            f.write("DATA_PATH=%r\n" %
-                    os.path.join(install_prefix, DATA_INSTALL_OFFSET))
-        f.close()
+        self.write_obj_lists()
+        self.generate_files()
 
-    # byte compile generated modules
-    for path in (const_path, objects_path):
-        byte_compile([path], prefix=python_base)
-        byte_compile([path], optimize=1, prefix=python_base)
-        byte_compile([path], optimize=2, prefix=python_base)
+        self.copy_tree('data', self.install_data)
+
+        install_dir = Path(self.install_lib)
+        if not use_system_tree_sitter_bash:
+            self.reinitialize_command('build').ensure_finalized()
+            (dst := install_dir / 'pkgcheck/bash').mkdir(parents=True, exist_ok=True)
+            self.copy_file(build_treesitter.library_path, dst / 'lang.so',
+                           preserve_mode=True, preserve_times=False)
 
 
-class install_data(dst_install_data.install_data):
-    """Generate data files for install.
+    def write_obj_lists(self):
+        """Generate config file of keyword, check, and other object lists."""
+        (base_dir := Path(self.install_lib) / "pkgcheck").mkdir(parents=True, exist_ok=True)
+        objects_path = base_dir / "_objects.py"
+        const_path = base_dir / "_const.py"
+        verinfo_path = base_dir / "_verinfo.py"
 
-    Currently this includes keyword, check, and reporter name lists.
-    """
+        # hack to drop quotes on modules in generated files
+        class _kls:
 
-    def run(self):
-        self._generate_files()
-        super().run()
+            def __init__(self, module):
+                self.module = module
+
+            def __repr__(self):
+                return self.module
 
-    def _generate_files(self):
-        with pkgdist.syspath(pkgdist.PACKAGEDIR):
+        with sys_path():
+            from pkgcheck import objects
+
+        modules = defaultdict(set)
+        objs = defaultdict(list)
+        for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
+            for name, cls in getattr(objects, obj).items():
+                parent, module = cls.__module__.rsplit('.', 1)
+                modules[parent].add(module)
+                objs[obj].append((name, _kls(f'{module}.{name}')))
+
+        keywords = tuple(objs['KEYWORDS'])
+        checks = tuple(objs['CHECKS'])
+        reporters = tuple(objs['REPORTERS'])
+
+        logging.info(f'writing objects to {objects_path!r}')
+        with objects_path.open('w') as f:
+            objects_path.chmod(0o644)
+            for k, v in sorted(modules.items()):
+                f.write(f"from {k} import {', '.join(sorted(v))}\n")
+            f.write(dedent(f"""\
+                KEYWORDS = {keywords}
+                CHECKS = {checks}
+                REPORTERS = {reporters}
+            """))
+
+        logging.info(f'writing path constants to {const_path!r}')
+        with const_path.open('w') as f:
+            const_path.chmod(0o644)
+            f.write(dedent("""\
+                from os.path import abspath, exists, join
+                import sys
+                INSTALL_PREFIX = abspath(sys.prefix)
+                if not exists(join(INSTALL_PREFIX, 'lib/pkgcore')):
+                    INSTALL_PREFIX = abspath(sys.base_prefix)
+                DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcheck')
+            """))
+
+        logging.info("generating version info")
+        from snakeoil.version import get_git_version
+        verinfo_path.write_text(f"version_info={get_git_version(Path(__file__).parent)!r}")
+
+    def generate_files(self):
+        with sys_path():
             from pkgcheck import base, objects
             from pkgcheck.addons import caches
 
-        os.makedirs(os.path.join(pkgdist.REPODIR, '.generated'), exist_ok=True)
-        files = []
+        (dst := Path(self.install_data) / 'share/pkgcheck').mkdir(parents=True, exist_ok=True)
 
-        # generate available scopes
-        path = os.path.join(pkgdist.REPODIR, '.generated', 'scopes')
-        with open(path, 'w') as f:
-            f.write('\n'.join(base.scopes) + '\n')
-        files.append(os.path.join('.generated', 'scopes'))
+        logging.info('Generating available scopes')
+        (dst / 'scopes').write_text('\n'.join(base.scopes) + '\n')
 
-        # generate available cache types
-        path = os.path.join(pkgdist.REPODIR, '.generated', 'caches')
         cache_objs = caches.CachedAddon.caches.values()
-        with open(path, 'w') as f:
-            f.write('\n'.join(x.type for x in cache_objs))
-        files.append(os.path.join('.generated', 'caches'))
+        logging.info('Generating available cache types')
+        (dst / 'caches').write_text('\n'.join(x.type for x in cache_objs) + '\n')
 
-        # generate available object lists
         for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
-            log.info(f'Generating {obj.lower()} list')
-            path = os.path.join(pkgdist.REPODIR, '.generated', obj.lower())
-            with open(path, 'w') as f:
-                f.write('\n'.join(getattr(objects, obj)) + '\n')
-            files.append(os.path.join('.generated', obj.lower()))
-        self.data_files.append(('share/pkgcheck', files))
+            logging.info(f'Generating {obj.lower()} list')
+            (dst / obj.lower()).write_text('\n'.join(getattr(objects, obj)) + '\n')
 
 
-class build_clib(dst_build_clib.build_clib):
-    """Build bash parsing library."""
+class bdist_wheel(orig_bdist_wheel):
 
-    def run(self):
-        if not use_system_tree_sitter_bash:
-            with pkgdist.syspath(pkgdist.PACKAGEDIR):
-                from pkgcheck.bash import build_library
-                path = os.path.join(self.build_clib, 'pkgcheck', 'bash', 'lang.so')
-                build_library(path, ['tree-sitter-bash'])
-
-
-class build(pkgdist.build):
-    """Force build_clib to run to build bash parsing library."""
-
-    sub_commands = pkgdist.build.sub_commands[:]
-    sub_commands.append(('build_clib', None))
-
-
-setup(**dict(
-    pkgdist_setup,
-    license='BSD',
-    author='Tim Harder',
-    author_email='radhermit@gmail.com',
-    description='pkgcore-based QA utility for ebuild repos',
-    url='https://github.com/pkgcore/pkgcheck',
-    data_files=list(chain(
-        pkgdist.data_mapping('share/bash-completion/completions', 'completion/bash'),
-        pkgdist.data_mapping('share/zsh/site-functions', 'completion/zsh'),
-        pkgdist.data_mapping(DATA_INSTALL_OFFSET, 'data'),
-    )),
-    cmdclass=dict(
-        pkgdist_cmds,
-        install_data=install_data,
-        install_lib=install_lib,
-        install=install,
-        build_clib=build_clib,
-        build=build,
-    ),
-    classifiers=[
-        'License :: OSI Approved :: BSD License',
-        'Programming Language :: Python :: 3.9',
-        'Programming Language :: Python :: 3.10',
-        'Programming Language :: Python :: 3.11',
-    ],
-    extras_require={
-        'network': ['requests'],
-    },
-    distclass=pkgdist.BinaryDistribution,
-))
+    def finalize_options(self):
+        super().finalize_options()
+        self.root_is_pure = False  # Mark us as not a pure python package
+
+    def get_tag(self):
+        _, _, plat = super().get_tag()
+        # We don't contain any python source, nor any python extensions
+        return 'py3', 'none', plat
+
+
+class sdist(orig_sdist):
+
+    def make_release_tree(self, base_dir, files):
+        super().make_release_tree(base_dir, files)
+        base_dir = Path(base_dir)
+
+        if (man_page := Path(__file__).parent / 'build/sphinx/man/pkgcheck.1').exists():
+            (base_dir / 'man').mkdir(parents=True, exist_ok=True)
+            self.copy_file(man_page, base_dir / 'man/pkgcheck.1', preserve_mode=False, preserve_times=False)
+
+        logging.info("generating version info")
+        from snakeoil.version import get_git_version
+        (base_dir / 'src/pkgcheck/_verinfo.py').write_text(f"version_info={get_git_version(Path(__file__).parent)!r}")
+
+
+setup(
+    cmdclass={
+        'bdist_wheel': bdist_wheel,
+        'build': build,
+        'build_treesitter': build_treesitter,
+        'install': install,
+        'sdist': sdist,
+    }
+)
diff --git a/src/pkgcheck/const.py b/src/pkgcheck/const.py
index d2ca9ae0..7e440ce4 100644
--- a/src/pkgcheck/const.py
+++ b/src/pkgcheck/const.py
@@ -33,7 +33,7 @@ for xdg_var, var_name, fallback_dir in (
             os.environ.get(xdg_var, os.path.join(os.path.expanduser(fallback_dir), 'pkgcheck')))
 
 REPO_PATH = _GET_CONST('REPO_PATH', _reporoot)
-DATA_PATH = _GET_CONST('DATA_PATH', '%(REPO_PATH)s/data')
+DATA_PATH = _GET_CONST('DATA_PATH', '%(REPO_PATH)s/data/share/pkgcheck')
 
 USER_CACHE_DIR = getattr(_module, 'USER_CACHE_PATH')
 USER_CONF_FILE = os.path.join(getattr(_module, 'USER_CONFIG_PATH'), 'pkgcheck.conf')
diff --git a/src/pkgcheck/scripts/__init__.py b/src/pkgcheck/scripts/__init__.py
index ae5cb5f9..351cc7c9 100755
--- a/src/pkgcheck/scripts/__init__.py
+++ b/src/pkgcheck/scripts/__init__.py
@@ -38,9 +38,13 @@ def run(script_name):
     sys.exit(tool())
 
 
-if __name__ == '__main__':
+def main():
     # We're in a git repo or tarball so add the src dir to the system path.
     # Note that this assumes a certain module layout.
     src_dir = os.path.realpath(__file__).rsplit(os.path.sep, 3)[0]
     sys.path.insert(0, src_dir)
-    run(os.path.basename(__file__))
+    run(os.path.basename(sys.argv[0]))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tests/scripts/test_pkgcheck.py b/tests/scripts/test_pkgcheck.py
index 48d1a2f8..8478a746 100644
--- a/tests/scripts/test_pkgcheck.py
+++ b/tests/scripts/test_pkgcheck.py
@@ -50,13 +50,3 @@ class TestPkgcheck:
         assert excinfo.value.code == 0
         out, err = capsys.readouterr()
         assert out.startswith(project)
-
-    def test_installed(self):
-        """Verify tests are running in environment where generated modules exist."""
-        try:
-            importlib.import_module(f'{project}._verinfo')
-        except ImportError:
-            pytest.fail(
-                'not running against installed or released package\n'
-                '(use `setup.py test` when running from git)'
-            )
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 41b77fba..00000000
--- a/tox.ini
+++ /dev/null
@@ -1,25 +0,0 @@
-[tox]
-envlist = py38, py39
-[testenv]
-# force latest virtualenv/pip
-download = true
-deps = -rrequirements/tox.txt
-commands =
-    pytest --cov {posargs:-v}
-
-# build docs
-[testenv:docs]
-skip_install = true
-deps =
-    -rrequirements/dev.txt
-    -rrequirements/docs.txt
-commands =
-    python setup.py build_docs
-
-# build dist files
-[testenv:dist]
-skip_install = true
-deps = -rrequirements/dist.txt
-commands =
-    python setup.py sdist
-    python setup.py bdist_wheel