Add logging to CLI

2022-06-18 05:56:58 +03:00
parent af76973273
commit bdcf5aa366
5 changed files with 32 additions and 7 deletions

View File

@@ -30,7 +30,7 @@ loguru = "^0.6.0"
 [tool.poetry.dev-dependencies]
 
 [tool.poetry.scripts]
-traps = "traps.cli:cli"
+traps = "traps.cli:main"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]

View File

@@ -1,4 +1,4 @@
-from .cli import cli
+from .cli import main
 
 if __name__ == "__main__":
-    cli()
+    main()

View File

@@ -26,6 +26,7 @@ def cli(verbose: bool):
     else:
         loglevel = "INFO"
 
+    # Remove the default logger if it exists.
     try:
         logger.remove(0)
     except ValueError:
@@ -35,7 +36,8 @@ def cli(verbose: bool):
         sys.stderr,
         level=loglevel,
         format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>"
-        "{level: <8}</level> | <level>{message}</level>"
+        "{level: <8}</level> | <level>{message}</level>",
+        filter=lambda record: record["extra"].get("name") == "traps-logger"
     )
@@ -50,9 +52,13 @@ def install(directory: pathlib.Path, amount: int):
 @cli.command("version", help="Print version and exit.")
 def version():
-    print(f"traps {traps.__version__}")
+    click.echo(f"{traps.__name__} {traps.__version__}")
     sys.exit(0)
 
 
+def main():
+    cli.main(windows_expand_args=False)
+
+
 if __name__ == "__main__":
-    cli()
+    main()
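
Note on the filter added above: it only lets through records whose extra dict carries name == "traps-logger", which is exactly what the bind() call in the last file of this commit attaches. A minimal sketch of that pairing, using only standard loguru calls (the standalone script and its messages are illustrative, not part of the commit):

    import sys

    import loguru

    # Drop loguru's default stderr sink, as the CLI does in its try/except
    # around logger.remove(0).
    loguru.logger.remove()

    # Re-add stderr, but only accept records bound with the project's marker name.
    loguru.logger.add(
        sys.stderr,
        level="INFO",
        filter=lambda record: record["extra"].get("name") == "traps-logger",
    )

    logger = loguru.logger.bind(name="traps-logger")
    logger.info("shown: extra['name'] matches the filter")
    loguru.logger.info("hidden: no 'name' bound, so the filter rejects it")

The new main() wrapper, for its part, calls cli.main(windows_expand_args=False), presumably to opt out of Click 8's automatic glob expansion of * arguments on Windows; main is also what the [tool.poetry.scripts] entry and __main__.py now point at.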

View File

@@ -8,6 +8,7 @@ import requests
 from click import BadParameter
 
 from traps.utils import filename_from_url
+from traps.utils import logger
 
 __all__ = ["get"]
 API_URL = "https://safebooru.org/index.php"
@@ -19,6 +20,7 @@ def _fetch_urls(n: int = 1) -> List[str]:
         raise BadParameter("you can't download more than 5000 files at a time")
     if n < 1:
         raise BadParameter("you can't download a negative number of files")
+
     used_offsets = []
     urls = []
@@ -43,6 +45,7 @@ def _fetch_urls(n: int = 1) -> List[str]:
             for _ in range(limit)
         ]
 
+    logger.info("Fetching urls")
     if n > 100:
         with ThreadPoolExecutor(max_workers=16) as p:
             for i in p.map(lambda _: fetch(100), range(n // 100)):
@@ -50,23 +53,35 @@ def _fetch_urls(n: int = 1) -> List[str]:
         n %= 100
     if n < 100:
         urls += fetch(n)
+    logger.info("Done")
     return urls
 
 
 def _download(directory: Path, url: str) -> None:
+    logger.debug(f"Downloading {url}")
     resp = requests.get(url, stream=True)
     if not resp.ok:
+        logger.warning(f"Couldn't download {url}: HTTP error {resp.status_code}")
         return
     filename = filename_from_url(url)
     with open(directory / filename, "wb") as f:
         for part in resp.iter_content(1024):
             if not part:
                 break
             f.write(part)
+        else:
+            logger.success(f"Downloaded {url}")
 
 
 def get(directory: Union[str, Path] = "traps", amount: int = 1) -> None:
     if not isinstance(directory, Path):
         directory = Path(directory)
-    directory.mkdir(exist_ok=True)
+    if not directory.exists():
+        logger.debug(f"Creating {directory.name} directory")
+        directory.mkdir()
+        logger.debug("Done")
     urls = _fetch_urls(amount)
+    logger.info("Downloading traps")
     with ThreadPoolExecutor(max_workers=16) as p:
         p.map(lambda url: _download(directory, url), urls)
+    logger.success(f"Downloaded {amount} traps")
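
The logger.success() call added to _download hangs off the loop's else clause: a Python for/else runs the else block only when the loop finishes without hitting break, so the success line is logged only for files whose response stream was written out in full. A stripped-down sketch of that control flow (stream() and the byte chunks are made up for illustration):

    def stream(chunks) -> bool:
        # Same shape as the loop in _download: `else` runs only if no
        # empty chunk triggered the early `break`.
        for part in chunks:
            if not part:
                break
        else:
            return True  # where _download would call logger.success(...)
        return False

    assert stream([b"a", b"b"]) is True        # completed -> success path
    assert stream([b"a", b"", b"c"]) is False  # broke early -> no success log

logger.success() itself is a real loguru level (SUCCESS, between INFO and WARNING), so it respects the sink's level and filter like any other record.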

View File

@@ -1,6 +1,10 @@
 import pathlib
 import urllib.parse
 
+import loguru
+
+logger = loguru.logger.bind(name="traps-logger")
+
 
 def filename_from_url(url: str) -> str:
     path = urllib.parse.urlparse(url).path
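
Binding the logger once at import time is what lets the rest of the package log without any per-module setup: every record produced through this logger carries extra == {"name": "traps-logger"}, and the sink the CLI installs later (with the matching filter) decides whether to show it. A tiny sketch of what bind() actually stores, using a throwaway callable sink (the sink and messages are illustrative, not part of the commit):

    import loguru

    bound = loguru.logger.bind(name="traps-logger")

    def show_extra(message):
        # Callable sinks receive a Message whose .record holds the log record dict.
        print(message.record["extra"])

    loguru.logger.add(show_extra)
    bound.info("hello")          # prints {'name': 'traps-logger'}
    loguru.logger.info("hello")  # prints {}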