BDEPEND=test? ( dev-python/six[python_targets_python3_10(-)?,python_targets_python3_11(-)?,python_targets_python3_12(-)?] python_targets_python3_10? ( dev-python/unittest-or-fail[python_targets_python3_10(-)?,python_targets_python3_11(-)?] ) python_targets_python3_11? ( dev-python/unittest-or-fail[python_targets_python3_10(-)?,python_targets_python3_11(-)?] ) ) python_targets_python3_10? ( dev-lang/python:3.10 ) python_targets_python3_11? ( dev-lang/python:3.11 ) python_targets_python3_12? ( dev-lang/python:3.12 ) >=dev-python/setuptools-69.0.3[python_targets_python3_10(-)?,python_targets_python3_11(-)?,python_targets_python3_12(-)?]
DEFINED_PHASES=compile configure install prepare test
DESCRIPTION=Module to detect if a given HTTP User Agent is a web crawler
EAPI=8
HOMEPAGE=https://github.com/rory/robot-detection
IUSE=test python_targets_python3_10 python_targets_python3_11 python_targets_python3_12
KEYWORDS=~amd64
LICENSE=GPL-3
RDEPEND=dev-python/six[python_targets_python3_10(-)?,python_targets_python3_11(-)?,python_targets_python3_12(-)?] python_targets_python3_10? ( dev-lang/python:3.10 ) python_targets_python3_11? ( dev-lang/python:3.11 ) python_targets_python3_12? ( dev-lang/python:3.12 )
REQUIRED_USE=|| ( python_targets_python3_10 python_targets_python3_11 python_targets_python3_12 )
RESTRICT=!test? ( test )
SLOT=0
SRC_URI=https://github.com/rory/robot-detection/archive/v0.4.0.tar.gz -> robot-detection-0.4.0.tar.gz
_eclasses_=distutils-r1 85ccd3b54a6533fb120ee52b7c76a3df flag-o-matic f14aba975c94ccaa9f357a27e3b17ffe multibuild 4650a65187015567b4e041bb9bfdb364 multilib b2a329026f2e404e9e371097dda47f96 multiprocessing 1e32df7deee68372153dca65f4a7c21f ninja-utils 2df4e452cea39a9ec8fb543ce059f8d6 out-of-source-utils dbf9e34ee8964084651e25907fa8f52c python-r1 2def7a4ce233dfa344f613025c619a23 python-utils-r1 42c5abe4a656a4993a06a4fc61dbdd12 toolchain-funcs fa554cc3cff825d21dfe3f24841e29cf
_md5_=fadcb028073ab17db0bb105931c574d3