update to ruff formatter and fix py3.8 compatibility

Ivan Schaller 2024-02-01 13:59:45 +01:00
parent 45dca15d39
commit ea1eab403d
47 changed files with 432 additions and 870 deletions
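Most of the Python changes in this commit swap builtin generic annotations (dict[...], list[...]) back to typing.Dict / typing.List, because subscripting the builtins only works on Python 3.9+. A minimal sketch of the pattern, illustrative only and not taken from the diff itself:

from typing import Dict, List

# Python 3.8 compatible: annotate with typing.Dict / typing.List
chapter_pages: Dict[str, List[str]] = {"1": ["001.png", "002.png"]}

# Python 3.9+ only (the style the code moves away from):
# chapter_pages: dict[str, list[str]] = {"1": ["001.png", "002.png"]}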

.envrc

@ -1 +0,0 @@
use asdf


@ -0,0 +1,30 @@
name: build package and container
on:
push:
tags:
- "v*.*.*"
pull_request:
branches: [main, master]
jobs:
build-container:
uses: actions/workflows/.gitea/workflows/build_container.yml@master
with:
registry: git.44net.ch
image-name: 44net/invh-server
context: .
dockerfile: docker/Dockerfile
platforms: linux/amd64,linux/arm64
secrets:
cr-username: ${{ secrets.CR_USERNAME }}
cr-password: ${{ secrets.CR_PASSWORD }}
build-pypackage:
uses: actions/workflows/.gitea/workflows/release_pypackage.yml@master
with:
repository: https://git.44net.ch/api/packages/44net/pypi
secrets:
username: actions-bot
token: ${{ secrets.PACKAGE_TOKEN }}


@ -0,0 +1,33 @@
name: check code
on:
push:
branches: [main, master]
pull_request:
branches: [main, master]
jobs:
check-code:
uses: actions/workflows/.gitea/workflows/check_python_hatch.yml@master
with:
run-tests: true
scan-code:
uses: actions/workflows/.gitea/workflows/sonarqube_python.yml@master
needs: [check-code]
if: gitea.event_name != 'pull_request'
with:
run-coverage: true
secrets:
sonar-host: ${{ secrets.SONARQUBE_HOST }}
sonar-token: ${{ secrets.SONARQUBE_TOKEN }}
check-docs:
runs-on: python311
steps:
- name: "build docs"
run: |
python3 -m pip install mkdocs
cd docs || exit 1
mkdocs build --strict


@ -0,0 +1,56 @@
name: create release
on:
push:
tags:
- "v*.*.*"
pull_request:
branches: [main, master]
jobs:
release-pypackage:
runs-on: python311
env:
HATCH_INDEX_REPO: main
HATCH_INDEX_USER: __token__
HATCH_INDEX_AUTH: ${{ secrets.PYPI_TOKEN }}
steps:
- name: checkout code
uses: actions/checkout@v3
- name: setup go
uses: actions/setup-go@v4
with:
go-version: '>=1.20'
- name: install hatch
run: pip install -U hatch hatchling
- name: build package
run: hatch build --clean
- name: read changelog
id: changelog
uses: juliangruber/read-file-action@v1
with:
path: ./CHANGELOG.md
- name: create gitea release
uses: https://gitea.com/actions/release-action@main
if: gitea.event_name != 'pull_request'
with:
title: ${{ gitea.ref_name }}
body: ${{ steps.changelog.outputs.content }}
files: |-
dist/**
- name: create github release
uses: softprops/action-gh-release@v1
if: gitea.event_name != 'pull_request'
with:
token: ${{ secrets.GH_TOKEN }}
title: ${{ gitea.ref_name }}
body: ${{ steps.changelog.outputs.content }}
files: |-
dist/**


@ -1,5 +1,4 @@
-python 3.9.13 3.10.5 3.8.13
shellcheck 0.9.0
shfmt 3.7.0
-direnv 2.32.2
just 1.23.0
+lefthook 1.4.6


@ -1,36 +0,0 @@
#########################################
# build and publish docker images amd64 #
#########################################
# branch: master
# event: tag
platform: linux/amd64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
event: tag
pipeline:
# build and publish docker image for amd64 - x86
build-amd64:
image: plugins/docker
pull: true
when:
event: tag
settings:
repo: olofvndrhr/manga-dlp
platforms: linux/amd64
dockerfile: docker/Dockerfile.amd64
auto_tag: true
auto_tag_suffix: linux-amd64
build_args: BUILD_VERSION=${CI_COMMIT_TAG}
username:
from_secret: cr-dhub-username
password:
from_secret: cr-dhub-key


@ -1,36 +0,0 @@
#########################################
# build and publish docker images arm64 #
#########################################
# branch: master
# event: tag
platform: linux/arm64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
event: tag
pipeline:
# build and publish docker image for arm64
build-arm64:
image: plugins/docker
pull: true
when:
event: tag
settings:
repo: olofvndrhr/manga-dlp
platforms: linux/arm64
dockerfile: docker/Dockerfile.arm64
auto_tag: true
auto_tag_suffix: linux-arm64
build_args: BUILD_VERSION=${CI_COMMIT_TAG}
username:
from_secret: cr-dhub-username
password:
from_secret: cr-dhub-key


@ -1,36 +0,0 @@
###########################
# publish docker manifest #
###########################
# branch: master
# event: tag
platform: linux/amd64
depends_on:
- publish_docker_amd64
- publish_docker_arm64
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
event: tag
tag: "*[!-dev]"
pipeline:
# publish docker manifest for automatic multi arch pulls
publish-manifest:
image: plugins/manifest
pull: true
when:
event: tag
tag: "*[!-dev]"
settings:
spec: docker/manifest.tmpl
auto_tag: true
ignore_missing: true
username:
from_secret: cr-dhub-username
password:
from_secret: cr-dhub-key


@ -1,77 +0,0 @@
###################
# publish release #
###################
# branch: master
# event: tag
platform: linux/amd64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
event: tag
pipeline:
# build wheel and dist
build-pypi:
image: cr.44net.ch/ci-plugins/tests
pull: true
when:
event: tag
commands:
- python3 -m hatch build --clean
# create release-notes
create-release-notes:
image: cr.44net.ch/baseimages/debian-base
pull: true
when:
event: tag
commands:
- bash get_release_notes.sh ${CI_COMMIT_TAG%%-dev}
# publish release on github (github.com/olofvndrhr/manga-dlp)
publish-release-github:
image: woodpeckerci/plugin-github-release
pull: true
when:
event: tag
settings:
api_key:
from_secret: github-olofvndrhr-token
files: dist/*
title: ${CI_COMMIT_TAG}
note: RELEASENOTES.md
# publish release on gitea (git.44net.ch/olofvndrhr/manga-dlp)
publish-release-gitea:
image: woodpeckerci/plugin-gitea-release
pull: true
when:
event: tag
settings:
api_key:
from_secret: gitea-olofvndrhr-token
base_url: https://git.44net.ch
files: dist/*
title: ${CI_COMMIT_TAG}
note: RELEASENOTES.md
# release pypi
release-pypi:
image: cr.44net.ch/ci-plugins/tests
pull: true
when:
event: tag
secrets:
- source: pypi_username
target: HATCH_INDEX_USER
- source: pypi_token
target: HATCH_INDEX_AUTH
commands:
- python3 -m hatch publish --no-prompt --yes


@ -1,35 +0,0 @@
##################################
# test build docker images amd64 #
##################################
# branch: master
# event: pull_request
platform: linux/amd64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
branch: master
event: pull_request
pipeline:
# build docker image for amd64 - x86
test-build-amd64:
image: plugins/docker
pull: true
when:
branch: master
event: pull_request
settings:
dry_run: true
repo: olofvndrhr/manga-dlp
platforms: linux/amd64
dockerfile: docker/Dockerfile.amd64
auto_tag: true
auto_tag_suffix: linux-amd64-test
build_args: BUILD_VERSION=test


@ -1,35 +0,0 @@
##################################
# test build docker images arm64 #
##################################
# branch: master
# event: pull_request
platform: linux/arm64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
branch: master
event: pull_request
pipeline:
# build docker image for arm64
test-build-arm64:
image: plugins/docker
pull: true
when:
branch: master
event: pull_request
settings:
dry_run: true
repo: olofvndrhr/manga-dlp
platforms: linux/arm64
dockerfile: docker/Dockerfile.arm64
auto_tag: true
auto_tag_suffix: linux-arm64-test
build_args: BUILD_VERSION=test


@ -1,40 +0,0 @@
################
# test release #
################
# branch: master
# event: pull_request
platform: linux/amd64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
branch: master
event: pull_request
pipeline:
# build wheel and dist
test-build-pypi:
image: cr.44net.ch/ci-plugins/tests
pull: true
when:
branch: master
event: pull_request
commands:
- just test_build
# create release-notes
test-create-release-notes:
image: cr.44net.ch/baseimages/debian-base
pull: true
when:
branch: master
event: pull_request
commands:
- bash get_release_notes.sh latest
- cat RELEASENOTES.md


@ -1,29 +0,0 @@
##################
# test tox amd64 #
##################
# branch: master
# event: pull_request
platform: linux/amd64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
branch: master
event: pull_request
pipeline:
# test code with different python versions - amd64
test-tox-amd64:
image: cr.44net.ch/ci-plugins/multipy
pull: true
when:
branch: master
event: pull_request
commands:
- just test_tox


@ -1,32 +0,0 @@
##################
# test tox arm64 #
##################
# branch: master
# event: pull_request
platform: linux/arm64
depends_on:
- tests
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
when:
branch: master
event: pull_request
pipeline:
# test code with different python versions - arm64
test-tox-arm64:
image: cr.44net.ch/ci-plugins/multipy
pull: true
when:
branch: master
event: pull_request
commands:
- grep -v img2pdf contrib/requirements_dev.txt > contrib/requirements_dev_arm64.txt
- rm -f contrib/requirements_dev.txt
- mv contrib/requirements_dev_arm64.txt contrib/requirements_dev.txt
- just test_tox


@ -1,83 +0,0 @@
##############################
# code testing and analysis #
#############################
# branch: all
# event: all
platform: linux/amd64
clone:
git:
image: woodpeckerci/plugin-git:v1.6.0
pipeline:
# check code style - shell
test-shfmt:
image: cr.44net.ch/ci-plugins/tests
pull: true
commands:
- just test_shfmt
# check code style - python
test-black:
image: cr.44net.ch/ci-plugins/tests
pull: true
commands:
- just test_black
# check static typing - python
test-pyright:
image: cr.44net.ch/ci-plugins/tests
pull: true
commands:
- just install_deps
- just test_pyright
# ruff test - python
test-ruff:
image: cr.44net.ch/ci-plugins/tests
pull: true
commands:
- just test_ruff
# test mkdocs generation
test-mkdocs:
image: cr.44net.ch/ci-plugins/tests
pull: true
commands:
- python3 -m pip install mkdocs
- cd docs || exit 1
- python3 -m mkdocs build --strict
# test code with pytest - python
test-tox-pytest:
when:
event: [ push ]
image: cr.44net.ch/ci-plugins/tests
pull: true
commands:
- just test_pytest
# generate coverage report - python
test-tox-coverage:
when:
branch: master
event: [ pull_request ]
image: cr.44net.ch/ci-plugins/tests
pull: true
commands:
- just test_coverage
# analyse code with sonarqube and upload it
sonarqube-analysis:
when:
branch: master
event: [ pull_request ]
image: cr.44net.ch/ci-plugins/sonar-scanner
pull: true
settings:
sonar_host: https://sonarqube.44net.ch
sonar_token:
from_secret: sq-44net-token
usingProperties: true


@ -1,6 +1,6 @@
MIT License

-Copyright (c) 2021-2023 Ivan Schaller
+Copyright (c) 2021-present Ivan Schaller <ivan@schaller.sh>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal


@ -1,13 +1 @@
-include *.json
-include *.md
-include *.properties
-include *.py
-include *.txt
-include *.yml
-include *.xml
-recursive-include contrib *.py
-recursive-include mangadlp *.py
-recursive-include mangadlp *.xml
-recursive-include tests *.py
-recursive-include tests *.xml
-recursive-include tests *.txt
+graft src


@ -1,6 +1,7 @@
from typing import Dict, List, Union

-from mangadlp.types import ChapterData,ComicInfo
+from mangadlp.models import ChapterData, ComicInfo

# api template for manga-dlp
@ -39,13 +40,13 @ class YourAPI:
        self.manga_uuid = "abc"
        self.manga_title = "abc"
        self.chapter_list = ["1", "2", "2.1", "5", "10"]
-        self.manga_chapter_data: Dict[str, ChapterData] = {  # example data
+        self.manga_chapter_data: dict[str, ChapterData] = {  # example data
            "1": {
                "uuid": "abc",
                "volume": "1",
                "chapter": "1",
                "name": "test",
-                "pages" 2,
+                "pages": 2,
            },
            "2": {
                "uuid": "abc",
@ -56,7 +57,7 @ class YourAPI:
            },
        }
        # or with --forcevol
-        self.manga_chapter_data: Dict[str, ChapterData] = {
+        self.manga_chapter_data: dict[str, ChapterData] = {
            "1:1": {
                "uuid": "abc",
                "volume": "1",
@ -71,7 +72,7 @ class YourAPI:
            },
        }

-    def get_chapter_images(self, chapter: str, wait_time: float) -> List[str]:
+    def get_chapter_images(self, chapter: str, wait_time: float) -> list[str]:
        """Get chapter images as a list (full links).

        Args:
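As a reading aid for the template above: each entry of manga_chapter_data carries the uuid/volume/chapter/name/pages fields shown in the example data. A rough sketch of such a record as a TypedDict follows; the real ChapterData lives in mangadlp.models and may be defined differently.

from typing import Dict, TypedDict  # TypedDict is available from Python 3.8


class ChapterDataSketch(TypedDict):
    # illustrative stand-in for mangadlp.models.ChapterData
    uuid: str
    volume: str
    chapter: str
    name: str
    pages: int


manga_chapter_data: Dict[str, ChapterDataSketch] = {
    "1": {"uuid": "abc", "volume": "1", "chapter": "1", "name": "test", "pages": 2},
}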

docker/Dockerfile (new file)

@ -0,0 +1,39 @@
FROM git.44net.ch/44net-services/python311:11 AS builder
COPY pyproject.toml README.md /build/
COPY src /build/src
WORKDIR /build
RUN \
echo "**** building package ****" \
&& pip3 install hatch hatchling \
&& python3 -m hatch build --clean
FROM git.44net.ch/44net-services/debian-s6:11
LABEL maintainer="Ivan Schaller" \
description="A CLI manga downloader"
ENV PATH="/opt/python3/bin:${PATH}"
COPY --from=builder /opt/python3 /opt/python3
COPY --from=builder /build/dist/*.whl /build/dist/
COPY docker/rootfs /
RUN \
echo "**** creating folders ****" \
&& mkdir -p /app \
&& echo "**** updating pip ****" \
&& python3 -m pip install --upgrade pip setuptools wheel \
&& echo "**** install python packages ****" \
&& python3 -m pip install /build/dist/*.whl
RUN \
echo "**** cleanup ****" \
&& apt-get purge --auto-remove -y \
&& apt-get clean \
&& rm -rf \
/tmp/* \
/var/lib/apt/lists/* \
/var/tmp/*
WORKDIR /app


@ -1,50 +0,0 @@
FROM cr.44net.ch/baseimages/debian-s6:11.6-linux-amd64
# set version label
ARG BUILD_VERSION
ENV IMAGE_VERSION=${BUILD_VERSION}
LABEL version="${BUILD_VERSION}"
LABEL maintainer="Ivan Schaller"
LABEL description="A CLI manga downloader"
# install packages
RUN \
echo "**** install base packages ****" \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
python3 \
python3-pip
# prepare app
RUN \
echo "**** creating folders ****" \
&& mkdir -p /app \
&& echo "**** updating pip ****" \
&& python3 -m pip install --upgrade pip
# cleanup installation
RUN \
echo "**** cleanup ****" \
&& apt-get purge --auto-remove -y \
&& apt-get clean \
&& rm -rf \
/tmp/* \
/var/lib/apt/lists/* \
/var/tmp/*
# copy files to container
COPY docker/rootfs /
COPY mangadlp/ /app/mangadlp/
COPY \
manga-dlp.py \
requirements.txt \
LICENSE \
/app/
# install requirements
RUN pip install -r /app/requirements.txt
WORKDIR /app


@ -1,52 +0,0 @@
FROM cr.44net.ch/baseimages/debian-s6:11.6-linux-arm64
# set version label
ARG BUILD_VERSION
ENV IMAGE_VERSION=${BUILD_VERSION}
LABEL version="${BUILD_VERSION}"
LABEL maintainer="Ivan Schaller"
LABEL description="A CLI manga downloader"
# install packages
RUN \
echo "**** install base packages ****" \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
python3 \
python3-pip
# prepare app
RUN \
echo "**** creating folders ****" \
&& mkdir -p /app \
&& echo "**** updating pip ****" \
&& python3 -m pip install --upgrade pip
# cleanup installation
RUN \
echo "**** cleanup ****" \
&& apt-get purge --auto-remove -y \
&& apt-get clean \
&& rm -rf \
/tmp/* \
/var/lib/apt/lists/* \
/var/tmp/*
# copy files to container
COPY docker/rootfs /
COPY mangadlp/ /app/mangadlp/
COPY \
manga-dlp.py \
requirements.txt \
LICENSE \
/app/
# install requirements (without img2pdf)
RUN grep -v img2pdf /app/requirements.txt > /app/requirements-arm64.txt
RUN pip install -r /app/requirements-arm64.txt
WORKDIR /app


@ -1,20 +0,0 @@
image: olofvndrhr/manga-dlp:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}dev{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
- "latest"
{{/if}}
manifests:
-
image: olofvndrhr/manga-dlp:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{else}}dev-{{/if}}linux-amd64
platform:
architecture: amd64
os: linux
-
image: olofvndrhr/manga-dlp:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{else}}dev-{{/if}}linux-arm64
platform:
architecture: arm64
os: linux
variant: v8

justfile

@ -3,21 +3,8 @@
default: show_receipts default: show_receipts
set shell := ["bash", "-uc"] set shell := ["bash", "-uc"]
set dotenv-load := true set dotenv-load
#set export
# aliases
alias s := show_receipts
alias i := show_system_info
alias p := prepare_workspace
alias l := lint
alias t := tests
alias f := tests_full
# variables
export asdf_version := "v0.10.2"
# default recipe to display help information
show_receipts: show_receipts:
@just --list @just --list
@ -25,42 +12,14 @@ show_system_info:
@echo "==================================" @echo "=================================="
@echo "os : {{os()}}" @echo "os : {{os()}}"
@echo "arch: {{arch()}}" @echo "arch: {{arch()}}"
@echo "home: ${HOME}" @echo "justfile dir: {{justfile_directory()}}"
@echo "project dir: {{justfile_directory()}}" @echo "invocation dir: {{invocation_directory()}}"
@echo "running dir: `pwd -P`"
@echo "==================================" @echo "=================================="
check_asdf: setup:
@if ! asdf --version; then \ @asdf install
just install_asdf \ @lefthook install
;else \
echo "asdf already installed" \
;fi
just install_asdf_bins
install_asdf:
@echo "installing asdf"
@echo "asdf version: ${asdf_version}"
@git clone https://github.com/asdf-vm/asdf.git ~/.asdf --branch "${asdf_version}"
@echo "adding asdf to .bashrc"
@if ! grep -q ".asdf/asdf.sh" "${HOME}/.bashrc"; then \
echo -e '\n# source asdf' >> "${HOME}/.bashrc" \
;echo 'source "${HOME}/.asdf/asdf.sh"' >> "${HOME}/.bashrc" \
;echo -e 'source "${HOME}/.asdf/completions/asdf.bash"\n' >> "${HOME}/.bashrc" \
;fi
@echo "to load asdf either restart your shell or do: 'source \${HOME}/.bashrc'"
setup_asdf:
@echo "installing asdf bins"
# add plugins
@if ! asdf plugin add python; then :; fi
@if ! asdf plugin add shfmt; then :; fi
@if ! asdf plugin add shellcheck; then :; fi
@if ! asdf plugin add just https://github.com/franklad/asdf-just; then :; fi
@if ! asdf plugin add direnv; then :; fi
# install bins
@if ! asdf install; then :; fi
# setup direnv
@if ! asdf direnv setup --shell bash --version latest; then :; fi
create_venv: create_venv:
@echo "creating venv" @echo "creating venv"
@ -69,81 +28,48 @@ create_venv:
install_deps: install_deps:
@echo "installing dependencies" @echo "installing dependencies"
@pip3 install -r requirements.txt @python3 -m hatch dep show requirements --project-only > /tmp/requirements.txt
@pip3 install -r /tmp/requirements.txt
install_deps_dev: install_deps_dev:
@echo "installing dependencies" @echo "installing dev dependencies"
@pip3 install -r contrib/requirements_dev.txt @python3 -m hatch dep show requirements --project-only > /tmp/requirements.txt
@python3 -m hatch dep show requirements --env-only >> /tmp/requirements.txt
@pip3 install -r /tmp/requirements.txt
create_reqs: create_reqs:
@echo "creating requirements" @echo "creating requirements"
@pipreqs --savepath requirements.txt --mode gt --force mangadlp/ @pipreqs --force --savepath requirements.txt src/mangadlp/
test_shfmt: test_shfmt:
@find . -type f \( -name "**.sh" -and -not -path "./.**" -and -not -path "./venv**" \) -exec shfmt -d -i 4 -bn -ci -sr "{}" \+; @find . -type f \( -name "**.sh" -and -not -path "./.**" -and -not -path "./venv**" \) -exec shfmt -d -i 4 -bn -ci -sr "{}" \+;
test_black: format_shfmt:
@python3 -m black --check --diff mangadlp/ @find . -type f \( -name "**.sh" -and -not -path "./.**" -and -not -path "./venv**" \) -exec shfmt -w -i 4 -bn -ci -sr "{}" \+;
test_pyright:
@python3 -m pyright mangadlp/
test_ruff:
@python3 -m ruff --diff mangadlp/
test_ci_conf:
@woodpecker-cli lint .woodpecker/
test_pytest:
@python3 -m tox -e basic
test_coverage:
@python3 -m tox -e coverage
test_tox:
@python3 -m tox
test_build:
@python3 -m hatch build --clean
test_docker_build:
@docker build . -f docker/Dockerfile.amd64 -t manga-dlp:test
# install dependecies and set everything up
prepare_workspace:
just show_system_info
just check_asdf
just setup_asdf
just create_venv
lint: lint:
just show_system_info just show_system_info
-just test_ci_conf
just test_shfmt just test_shfmt
just test_black @hatch run lint:style
just test_pyright @hatch run lint:typing
just test_ruff
@echo -e "\n\033[0;32m=== ALL DONE ===\033[0m\n"
tests: format:
just show_system_info just show_system_info
-just test_ci_conf just format_shfmt
just test_shfmt @hatch run lint:fmt
just test_black
just test_pyright
just test_ruff
just test_pytest
@echo -e "\n\033[0;32m=== ALL DONE ===\033[0m\n"
tests_full: check:
just show_system_info just format
-just test_ci_conf just lint
just test_shfmt
just test_black test:
just test_pyright @hatch run default:test
just test_ruff
just test_build coverage:
just test_tox @hatch run default:cov
just test_coverage
just test_docker_build build:
@echo -e "\n\033[0;32m=== ALL DONE ===\033[0m\n" @hatch build --clean
run loglevel *flags:
@hatch run mangadlp --loglevel {{loglevel}} {{flags}}


@ -2,5 +2,6 @@ import sys
import mangadlp.cli

if __name__ == "__main__":
    sys.exit(mangadlp.cli.main())  # pylint: disable=no-value-for-parameter


@ -1,6 +0,0 @@
import sys
import mangadlp.cli
if __name__ == "__main__":
sys.exit(mangadlp.cli.main()) # pylint: disable=no-value-for-parameter


@ -1,9 +1,8 @@
[build-system] [build-system]
requires = ["hatchling>=1.11.0"] requires = ["hatchling>=1.18", "hatch-regex-commit>=0.0.3"]
build-backend = "hatchling.build" build-backend = "hatchling.build"
[project] [project]
dynamic = ["version"]
name = "manga-dlp" name = "manga-dlp"
description = "A cli manga downloader" description = "A cli manga downloader"
readme = "README.md" readme = "README.md"
@ -18,6 +17,7 @@ classifiers = [
"Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
] ]
dependencies = [ dependencies = [
"requests>=2.28.0", "requests>=2.28.0",
@ -25,115 +25,153 @@ dependencies = [
"click>=8.1.3", "click>=8.1.3",
"click-option-group>=0.5.5", "click-option-group>=0.5.5",
"xmltodict>=0.13.0", "xmltodict>=0.13.0",
"img2pdf>=0.4.4",
"pytz==2022.1",
] ]
[project.urls] [project.urls]
Homepage = "https://github.com/olofvndrhr/manga-dlp" Homepage = "https://github.com/olofvndrhr/manga-dlp"
History = "https://github.com/olofvndrhr/manga-dlp/commits/master" History = "https://github.com/olofvndrhr/manga-dlp/commits/master"
Tracker = "https://github.com/olofvndrhr/manga-dlp/issues" Tracker = "https://github.com/olofvndrhr/manga-dlp/issues"
Source = "https://github.com/olofvndrhr/manga-dlp" Source = "https://github.com/olofvndrhr/manga-dlp"
[project.scripts] [project.scripts]
mangadlp = "mangadlp.cli:main" mangadlp = "mangadlp.cli:main"
manga-dlp = "mangadlp.cli:main" manga-dlp = "mangadlp.cli:main"
[tool.hatch.version] [tool.hatch.version]
path = "mangadlp/__about__.py" source = "regex_commit"
path = "src/mangadlp/__about__.py"
tag_sign = false
[tool.hatch.build] [tool.hatch.build]
ignore-vcs = true ignore-vcs = true
[tool.hatch.build.targets.sdist] [tool.hatch.build.targets.sdist]
packages = ["mangadlp"] packages = ["src/mangadlp"]
[tool.hatch.build.targets.wheel] [tool.hatch.build.targets.wheel]
packages = ["mangadlp"] packages = ["src/mangadlp"]
### envs
[tool.hatch.envs.default] [tool.hatch.envs.default]
dependencies = [ dependencies = [
"requests>=2.28.0", "pytest==7.4.3",
"loguru>=0.6.0", "coverage==7.3.2",
"click>=8.1.3",
"click-option-group>=0.5.5",
"xmltodict>=0.13.0",
"xmlschema>=2.2.1",
"img2pdf>=0.4.4",
"hatch>=1.6.0",
"hatchling>=1.11.0",
"pytest>=7.0.0",
"coverage>=6.3.1",
"black>=22.1.0",
"mypy>=0.940",
"tox>=3.24.5",
"ruff>=0.0.247",
] ]
# black [tool.hatch.envs.default.scripts]
test = "pytest {args:tests}"
test-cov = ["coverage erase", "coverage run -m pytest {args:tests}"]
cov-report = ["- coverage combine", "coverage report", "coverage xml"]
cov = ["test-cov", "cov-report"]
[tool.black] [[tool.hatch.envs.lint.matrix]]
line-length = 100 python = ["3.8", "3.9", "3.10", "3.11"]
target-version = ["py39"]
# pyright [tool.hatch.envs.lint]
detached = true
dependencies = [
"mypy==1.7.1",
"ruff==0.1.7",
]
[tool.pyright] [tool.hatch.envs.lint.scripts]
typeCheckingMode = "strict" typing = "mypy --non-interactive --install-types {args:src/mangadlp}"
pythonVersion = "3.9" style = ["ruff check --diff {args:src/mangadlp}", "ruff format --check --diff {args:src/mangadlp}"]
reportUnnecessaryTypeIgnoreComment = true fmt = ["ruff format {args:src/mangadlp}", "ruff check --fix {args:src/mangadlp}", "style"]
reportShadowedImports = true all = ["style", "typing"]
reportUnusedExpression = true
reportMatchNotExhaustive = true
# venvPath = "."
# venv = "venv"
# ruff ### ruff
[tool.ruff] [tool.ruff]
target-version = "py39" target-version = "py38"
select = [
"E", # pycodetyle err
"W", # pycodetyle warn
"D", # pydocstyle
"C90", # mccabe
"I", # isort
"PLE", # pylint err
"PLW", # pylint warn
"PLC", # pylint convention
"PLR", # pylint refactor
"F", # pyflakes
"RUF", # ruff specific
]
line-length = 100 line-length = 100
indent-width = 4
fix = true fix = true
show-fixes = true show-fixes = true
format = "grouped"
ignore-init-module-imports = true ignore-init-module-imports = true
respect-gitignore = true respect-gitignore = true
ignore = ["E501", "D103", "D100", "D102", "PLR2004", "D403"] src = ["src", "tests"]
#unfixable = ["F401"]
exclude = [ exclude = [
".direnv", ".direnv",
".git", ".git",
".mypy_cache", ".mypy_cache",
".ruff_cache", ".ruff_cache",
".svn", ".svn",
".tox",
".nox",
".venv", ".venv",
"venv", "venv",
"__pypackages__", "__pypackages__",
"build", "build",
"dist", "dist",
"node_modules",
"venv", "venv",
] ]
[tool.ruff.lint]
select = [
"A",
"ARG",
"B",
"C",
"DTZ",
"E",
"EM",
"F",
"FBT",
"I",
"ICN",
"ISC",
"N",
"PLC",
"PLE",
"PLR",
"PLW",
"Q",
"RUF",
"S",
"T",
"TID",
"UP",
"W",
"YTT",
]
ignore = ["E501", "D103", "D100", "D102", "PLR2004", "D403", "ISC001", "FBT001", "FBT002", "FBT003", "W505"]
unfixable = ["F401"]
[tool.ruff.format]
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "lf"
[tool.ruff.per-file-ignores] [tool.ruff.per-file-ignores]
"__init__.py" = ["D104"] "__init__.py" = ["D104"]
"__about__.py" = ["D104", "F841"] "__about__.py" = ["D104", "F841"]
"tests/**/*" = ["PLR2004", "S101", "TID252"]
[tool.ruff.pyupgrade]
keep-runtime-typing = true
[tool.ruff.isort]
lines-after-imports = 2
known-first-party = ["mangadlp"]
[tool.ruff.flake8-tidy-imports]
ban-relative-imports = "all"
[tool.ruff.pylint] [tool.ruff.pylint]
max-args = 10 max-branches = 24
max-returns = 12
max-statements = 100
max-args = 15
allow-magic-value-types = ["str", "bytes", "complex", "float", "int"]
[tool.ruff.mccabe] [tool.ruff.mccabe]
max-complexity = 10 max-complexity = 15
[tool.ruff.pydocstyle] [tool.ruff.pydocstyle]
convention = "google" convention = "google"
@ -141,17 +179,48 @@ convention = "google"
[tool.ruff.pycodestyle] [tool.ruff.pycodestyle]
max-doc-length = 100 max-doc-length = 100
# pytest ### mypy
[tool.mypy]
#plugins = ["pydantic.mypy"]
follow_imports = "silent"
warn_redundant_casts = true
warn_unused_ignores = true
disallow_any_generics = true
check_untyped_defs = true
no_implicit_reexport = true
ignore_missing_imports = true
warn_return_any = true
pretty = true
show_column_numbers = true
show_error_codes = true
show_error_context = true
#[tool.pydantic-mypy]
#init_forbid_extra = true
#init_typed = true
#warn_required_dynamic_aliases = true
### pytest
[tool.pytest.ini_options] [tool.pytest.ini_options]
pythonpath = ["."] pythonpath = ["src"]
addopts = "--color=yes --exitfirst --verbose -ra"
#addopts = "--color=yes --exitfirst --verbose -ra --capture=tee-sys"
filterwarnings = [
'ignore:Jupyter is migrating its paths to use standard platformdirs:DeprecationWarning',
]
# coverage ### coverage
[tool.coverage.run] [tool.coverage.run]
source = ["mangadlp"] source_pkgs = ["mangadlp", "tests"]
branch = true branch = true
command_line = "-m pytest --exitfirst" parallel = true
omit = ["src/mangadlp/__about__.py"]
[tool.coverage.paths]
testproj = ["src/mangadlp", "*/mangadlp/src/mangadlp"]
tests = ["tests", "*/mangadlp/tests"]
[tool.coverage.report] [tool.coverage.report]
# Regexes for lines to exclude from consideration # Regexes for lines to exclude from consideration
@ -169,5 +238,7 @@ exclude_lines = [
"if __name__ == .__main__.:", "if __name__ == .__main__.:",
# Don't complain about abstract methods, they aren't run: # Don't complain about abstract methods, they aren't run:
"@(abc.)?abstractmethod", "@(abc.)?abstractmethod",
"no cov",
"if TYPE_CHECKING:",
] ]
ignore_errors = true # ignore_errors = true


@ -1,6 +1,4 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
-  "extends": [
-    "local>44net/renovate"
-  ]
+  "extends": ["local>44net/renovate"]
}


@ -5,8 +5,8 @@ sonar.links.scm=https://github.com/olofvndrhr/manga-dlp
sonar.links.issue=https://github.com/olofvndrhr/manga-dlp/issues
sonar.links.ci=https://ci.44net.ch/olofvndrhr/manga-dlp
#
-sonar.sources=mangadlp
-sonar.tests=tests
-sonar.exclusions=docker/**,contrib/**
sonar.python.version=3.9
+sonar.sources=src/mangadlp
+sonar.tests=tests
+#sonar.exclusions=
sonar.python.coverage.reportPaths=coverage.xml

src/mangadlp/__main__.py (new file)

@ -0,0 +1,7 @@
import sys
import mangadlp.cli
if __name__ == "__main__":
sys.exit(mangadlp.cli.main())


@ -6,7 +6,7 @@ import requests
from loguru import logger as log

from mangadlp import utils
-from mangadlp.types import ChapterData, ComicInfo
+from mangadlp.models import ChapterData, ComicInfo


class Mangadex:
@ -22,7 +22,7 @ class Mangadex:
    Attributes:
        api_name (str): Name of the API
        manga_uuid (str): UUID of the manga, without the url part
-        manga_data (dict): Infos of the manga. Name, title etc
+        manga_data (dict): Infos of the manga. Name, title etc.
        manga_title (str): The title of the manga, sanitized for all file systems
        manga_chapter_data (dict): All chapter data of the manga. Volumes, chapters, chapter uuids and chapter names
        chapter_list (list): A list of all available chapters for the language
@ -65,7 +65,7 @ class Mangadex:
                log.error("No valid UUID found")
                raise exc

-        return uuid  # pyright:ignore
+        return uuid

    # make initial request
    def get_manga_data(self) -> Dict[str, Any]:
@ -84,9 +84,9 @
            else:
                break

-        response_body: Dict[str, Dict[str, Any]] = response.json()  # pyright:ignore
+        response_body: Dict[str, Dict[str, Any]] = response.json()
        # check if manga exists
-        if response_body["result"] != "ok":  # type:ignore
+        if response_body["result"] != "ok":
            log.error("Manga not found")
            raise KeyError
@ -98,30 +98,35 @
        attributes = self.manga_data["attributes"]
        # try to get the title in requested language
        try:
-            title = attributes["title"][self.language]
+            found_title = attributes["title"][self.language]
+            title = utils.fix_name(found_title)
        except KeyError:
            log.info("Manga title not found in requested language. Trying alt titles")
        else:
            log.debug(f"Language={self.language}, Title='{title}'")
-            return utils.fix_name(title)
+            return title  # type: ignore

        # search in alt titles
        try:
            log.debug(f"Alt titles: {attributes['altTitles']}")
            for item in attributes["altTitles"]:
                if item.get(self.language):
-                    alt_title = item
+                    alt_title_item = item
                    break
-            title = alt_title[self.language]  # pyright:ignore
+            found_title = alt_title_item[self.language]
        except (KeyError, UnboundLocalError):
            log.warning("Manga title also not found in alt titles. Falling back to english title")
        else:
-            log.debug(f"Language={self.language}, Alt-title='{title}'")
-            return utils.fix_name(title)
+            title = utils.fix_name(found_title)
+            log.debug(f"Language={self.language}, Alt-title='{found_title}'")
+            return title  # type: ignore

-        title = attributes["title"]["en"]
+        found_title = attributes["title"]["en"]
+        title = utils.fix_name(found_title)
        log.debug(f"Language=en, Fallback-title='{title}'")
-        return utils.fix_name(title)
+        return title  # type: ignore

    # check if chapters are available in requested language
    def check_chapter_lang(self) -> int:
@ -149,7 +154,7 @ class Mangadex:
        # check for chapters in specified lang
        total_chapters = self.check_chapter_lang()
-        chapter_data: dict[str, ChapterData] = {}
+        chapter_data: Dict[str, ChapterData] = {}
        last_volume, last_chapter = ("", "")
        offset = 0
        while offset < total_chapters:  # if more than 500 chapters
@ -233,8 +238,8 @ class Mangadex:
        if api_error:
            return []

-        chapter_hash = api_data["chapter"]["hash"]  # pyright:ignore
-        chapter_img_data = api_data["chapter"]["data"]  # pyright:ignore
+        chapter_hash = api_data["chapter"]["hash"]
+        chapter_img_data = api_data["chapter"]["data"]

        # get list of image urls
        image_urls: List[str] = []


@ -10,7 +10,7 @@ from mangadlp.api.mangadex import Mangadex
from mangadlp.cache import CacheDB
from mangadlp.hooks import run_hook
from mangadlp.metadata import write_metadata
-from mangadlp.types import ChapterData
+from mangadlp.models import ChapterData
from mangadlp.utils import get_file_format
@ -73,7 +73,7 @ class MangaDLP:
        add_metadata: Flag to toggle creation & inclusion of metadata
    """

-    def __init__(  # pylint: disable=too-many-locals
+    def __init__(  # noqa
        self,
        url_uuid: str,
        language: str = "en",
@ -159,7 +159,7 @ class MangaDLP:
            raise ValueError

    # once called per manga
-    def get_manga(self) -> None:
+    def get_manga(self) -> None:  # noqa
        print_divider = "========================================="
        # show infos
        log.info(f"{print_divider}")
@ -218,10 +218,10 @ class MangaDLP:
        )

        # get chapters
-        skipped_chapters: list[Any] = []
-        error_chapters: list[Any] = []
+        skipped_chapters: List[Any] = []
+        error_chapters: List[Any] = []
        for chapter in chapters_to_download:
-            if self.cache_path and chapter in cached_chapters:  # pyright:ignore
+            if self.cache_path and chapter in cached_chapters:
                log.info(f"Chapter '{chapter}' is in cache. Skipping download")
                continue
@ -235,7 +235,7 @ class MangaDLP:
                    skipped_chapters.append(chapter)
                    # update cache
                    if self.cache_path:
-                        cache.add_chapter(chapter)  # pyright:ignore
+                        cache.add_chapter(chapter)
                    continue
            except Exception:
                # skip download/packing due to an error
@ -266,7 +266,7 @ class MangaDLP:
            # update cache
            if self.cache_path:
-                cache.add_chapter(chapter)  # pyright:ignore
+                cache.add_chapter(chapter)

            # start chapter post hook
            run_hook(
@ -429,7 +429,7 @ class MangaDLP:
        # check if image folder is existing
        if not chapter_path.exists():
            log.error(f"Image folder: {chapter_path} does not exist")
-            raise IOError
+            raise OSError

        if self.file_format == ".pdf":
            utils.make_pdf(chapter_path)
        else:


@ -4,7 +4,7 @@ from typing import List, Union
from loguru import logger as log

-from mangadlp.types import CacheData, CacheKeyData
+from mangadlp.models import CacheData, CacheKeyData


class CacheDB:
@ -29,11 +29,11 @ class CacheDB:
            self.db_data[self.db_key] = {}
        self.db_uuid_data: CacheKeyData = self.db_data[self.db_key]

-        if not self.db_uuid_data.get("name"):  # pyright:ignore
-            self.db_uuid_data.update({"name": self.name})  # pyright:ignore
+        if not self.db_uuid_data.get("name"):
+            self.db_uuid_data.update({"name": self.name})
            self._write_db()

-        self.db_uuid_chapters: List[str] = self.db_uuid_data.get("chapters") or []  # type:ignore
+        self.db_uuid_chapters: List[str] = self.db_uuid_data.get("chapters") or []

    def _prepare_db(self) -> None:
        if self.db_path.exists():
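For context, CacheDB keeps a JSON file keyed by manga UUID, where each entry stores a "name" and a "chapters" list (the keys used above). A simplified sketch of that bookkeeping, assuming this file layout; the helper name is hypothetical:

import json
from pathlib import Path
from typing import Any, Dict, List


def add_chapter(db_path: Path, uuid: str, name: str, chapter: str) -> None:
    # hypothetical helper mirroring the cache idea; layout: {uuid: {"name": ..., "chapters": [...]}}
    db_data: Dict[str, Any] = {}
    if db_path.exists():
        db_data = json.loads(db_path.read_text(encoding="utf8"))
    entry = db_data.setdefault(uuid, {"name": name, "chapters": []})
    chapters: List[str] = entry.setdefault("chapters", [])
    if chapter not in chapters:
        chapters.append(chapter)
    db_path.write_text(json.dumps(db_data, indent=4), encoding="utf8")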


@ -26,7 +26,8 @@ def readin_list(_ctx: click.Context, _param: str, value: str) -> List[str]:
        url_str = list_file.read_text(encoding="utf-8")
        url_list = url_str.splitlines()
    except Exception as exc:
-        raise click.BadParameter("Can't get links from the file") from exc
+        msg = f"Reading in file '{list_file}'"
+        raise click.BadParameter(msg) from exc

    # filter empty lines and remove them
    filtered_list = list(filter(len, url_list))
@ -39,8 +40,8 @ def readin_list(_ctx: click.Context, _param: str, value: str) -> List[str]:
@click.help_option()
@click.version_option(version=__version__, package_name="manga-dlp")
# manga selection
-@optgroup.group("source", cls=RequiredMutuallyExclusiveOptionGroup)  # type: ignore
-@optgroup.option(  # type: ignore
+@optgroup.group("source", cls=RequiredMutuallyExclusiveOptionGroup)
+@optgroup.option(
    "-u",
    "--url",
    "--uuid",
@ -50,7 +51,7 @ def readin_list(_ctx: click.Context, _param: str, value: str) -> List[str]:
    show_default=True,
    help="URL or UUID of the manga",
)
-@optgroup.option(  # type: ignore
+@optgroup.option(
    "--read",
    "read_mangas",
    is_eager=True,
@ -61,8 +62,8 @ def readin_list(_ctx: click.Context, _param: str, value: str) -> List[str]:
    help="Path of file with manga links to download. One per line",
)
# logging options
-@optgroup.group("verbosity", cls=MutuallyExclusiveOptionGroup)  # type: ignore
-@optgroup.option(  # type: ignore
+@optgroup.group("verbosity", cls=MutuallyExclusiveOptionGroup)
+@optgroup.option(
    "--loglevel",
    "verbosity",
    type=int,
@ -70,7 +71,7 @ def readin_list(_ctx: click.Context, _param: str, value: str) -> List[str]:
    show_default=True,
    help="Custom log level",
)
-@optgroup.option(  # type: ignore
+@optgroup.option(
    "--warn",
    "verbosity",
    flag_value=30,
@ -78,7 +79,7 @ def readin_list(_ctx: click.Context, _param: str, value: str) -> List[str]:
    show_default=True,
    help="Only log warnings and higher",
)
-@optgroup.option(  # type: ignore
+@optgroup.option(
    "--debug",
    "verbosity",
    flag_value=10,
@ -231,7 +232,7 @ def readin_list(_ctx: click.Context, _param: str, value: str) -> List[str]:
def main(ctx: click.Context, **kwargs: Any) -> None:
    """Script to download mangas from various sites."""
    url_uuid: str = kwargs.pop("url_uuid")
-    read_mangas: list[str] = kwargs.pop("read_mangas")
+    read_mangas: List[str] = kwargs.pop("read_mangas")
    verbosity: int = kwargs.pop("verbosity")

    # set log level to INFO if not set
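The decorators above come from click-option-group; with the stricter lint setup the trailing "# type: ignore" comments could simply be dropped. A stripped-down sketch of the same option-group pattern (option set shortened, names illustrative):

import click
from click_option_group import RequiredMutuallyExclusiveOptionGroup, optgroup


@click.command()
@optgroup.group("source", cls=RequiredMutuallyExclusiveOptionGroup)
@optgroup.option("-u", "--url", "--uuid", "url_uuid", default="", help="URL or UUID of the manga")
@optgroup.option("--read", "read_mangas", default="", help="Path of a file with manga links")
def main(url_uuid: str, read_mangas: str) -> None:
    """Exactly one of --url/--read must be given."""
    click.echo(url_uuid or read_mangas)


if __name__ == "__main__":
    main()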


@ -48,8 +48,8 @@ def download_chapter(
    # write image
    try:
        with image_path.open("wb") as file:
-            r.raw.decode_content = True  # pyright:ignore
-            shutil.copyfileobj(r.raw, file)  # pyright:ignore
+            r.raw.decode_content = True
+            shutil.copyfileobj(r.raw, file)
    except Exception as exc:
        log.error("Can't write file")
        raise exc
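For reference, the two lines losing their pyright comments stream the HTTP response body straight to disk. A self-contained sketch of that download step, with placeholder URL and path:

import shutil
from pathlib import Path

import requests


def save_image(url: str, image_path: Path) -> None:
    """Stream an image to disk without loading it fully into memory."""
    with requests.get(url, stream=True, timeout=10) as r:
        r.raise_for_status()
        r.raw.decode_content = True  # transparently decompress gzip/deflate
        with image_path.open("wb") as file:
            shutil.copyfileobj(r.raw, file)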


@ -31,7 +31,7 @@ def run_hook(command: str, hook_type: str, **kwargs: Any) -> int:
    # running command
    log.info(f"Hook '{hook_type}' - running command: '{command}'")
-    proc = subprocess.run(command_list, check=False, timeout=15, encoding="utf8")
+    proc = subprocess.run(command_list, check=False, timeout=15, encoding="utf8")  # noqa
    exit_code = proc.returncode

    if exit_code == 0:


@ -1,9 +1,10 @@
import logging
import sys
-from typing import Any
+from typing import Any, Dict

from loguru import logger

LOGURU_FMT = "{time:%Y-%m-%dT%H:%M:%S%z} | <level>[{level: <7}]</level> [{name: <10}] [{function: <20}]: {message}"
@ -20,7 +21,7 @ class InterceptHandler(logging.Handler):
        # Find caller from where originated the logged message
        frame, depth = logging.currentframe(), 2
-        while frame.f_code.co_filename == logging.__file__:  # pyright:ignore
+        while frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back  # type: ignore
            depth += 1
@ -29,7 +30,7 @
# init logger with format and log level
def prepare_logger(loglevel: int = 20) -> None:
-    stdout_handler: dict[str, Any] = {
+    stdout_handler: Dict[str, Any] = {
        "sink": sys.stdout,
        "level": loglevel,
        "format": LOGURU_FMT,


@ -4,7 +4,8 @@ from typing import Any, Dict, List, Tuple, Union
import xmltodict
from loguru import logger as log

-from mangadlp.types import ComicInfo
+from mangadlp.models import ComicInfo

METADATA_FILENAME = "ComicInfo.xml"
METADATA_TEMPLATE = Path("mangadlp/metadata/ComicInfo_v2.0.xml")
@ -64,7 +65,7 @ METADATA_TYPES: Dict[str, Tuple[Any, Union[str, int, None], List[Union[str, int,
def validate_metadata(metadata_in: ComicInfo) -> Dict[str, ComicInfo]:
    log.info("Validating metadata")

-    metadata_valid: dict[str, ComicInfo] = {"ComicInfo": {}}
+    metadata_valid: Dict[str, ComicInfo] = {"ComicInfo": {}}
    for key, value in METADATA_TYPES.items():
        metadata_type, metadata_default, metadata_validation = value
@ -75,7 +76,7 @ def validate_metadata(metadata_in: ComicInfo) -> Dict[str, ComicInfo]:
        # check if metadata key is available
        try:
-            md_to_check: Union[str, int, None] = metadata_in[key]  # pyright:ignore
+            md_to_check: Union[str, int, None] = metadata_in[key]
        except KeyError:
            continue
        # check if provided metadata item is empty
@ -83,9 +84,7 @ def validate_metadata(metadata_in: ComicInfo) -> Dict[str, ComicInfo]:
            continue
        # check if metadata type is correct
-        log.debug(
-            f"Key:{key} -> value={type(md_to_check)} -> check={metadata_type}"  # pyright:ignore
-        )
+        log.debug(f"Key:{key} -> value={type(md_to_check)} -> check={metadata_type}")
        if not isinstance(md_to_check, metadata_type):
            log.warning(f"Metadata has wrong type: {key}:{metadata_type} -> {md_to_check}")
            continue
@ -103,7 +102,7 @ def validate_metadata(metadata_in: ComicInfo) -> Dict[str, ComicInfo]:
def write_metadata(chapter_path: Path, metadata: ComicInfo) -> None:
-    if metadata["Format"] == "pdf":  # pyright:ignore
+    if metadata["Format"] == "pdf":
        log.warning("Can't add metadata for pdf format. Skipping")
        return
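METADATA_TYPES maps each ComicInfo field to a (type, default, allowed values) tuple, and validate_metadata drops anything that does not fit. A toy version of that check with a made-up two-field spec; the real table is larger and its defaults differ:

from typing import Any, Dict, List, Tuple

# field -> (expected type, default value, allowed values; empty list = anything allowed)
SPEC: Dict[str, Tuple[type, Any, List[Any]]] = {
    "Title": (str, None, []),
    "CommunityRating": (int, None, [1, 2, 3, 4, 5]),
}


def validate(metadata_in: Dict[str, Any]) -> Dict[str, Any]:
    valid: Dict[str, Any] = {}
    for key, (expected_type, default, allowed) in SPEC.items():
        value = metadata_in.get(key, default)
        if value is None or not isinstance(value, expected_type):
            continue  # missing or wrong type -> drop
        if allowed and value not in allowed:
            continue  # outside the allowed values -> drop
        valid[key] = value
    return valid


print(validate({"Title": "title1", "CommunityRating": 10}))  # -> {'Title': 'title1'}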


@ -4,6 +4,7 @@ from pathlib import Path
from typing import Any, List
from zipfile import ZipFile

+import pytz
from loguru import logger as log
@ -24,17 +25,17 @ def make_archive(chapter_path: Path, file_format: str) -> None:
def make_pdf(chapter_path: Path) -> None:
    try:
-        import img2pdf  # pylint: disable=import-outside-toplevel # pyright:ignore
+        import img2pdf  # pylint: disable=import-outside-toplevel
    except Exception as exc:
        log.error("Cant import img2pdf. Please install it first")
        raise exc

    pdf_path = Path(f"{chapter_path}.pdf")
-    images: list[str] = []
+    images: List[str] = []
    for file in chapter_path.iterdir():
        images.append(str(file))

    try:
-        pdf_path.write_bytes(img2pdf.convert(images))  # pyright:ignore
+        pdf_path.write_bytes(img2pdf.convert(images))
    except Exception as exc:
        log.error("Can't create '.pdf' archive")
        raise exc
@ -43,13 +44,13 @ def make_pdf(chapter_path: Path) -> None:
# create a list of chapters
def get_chapter_list(chapters: str, available_chapters: List[str]) -> List[str]:
    # check if there are available chapter
-    chapter_list: list[str] = []
+    chapter_list: List[str] = []
    for chapter in chapters.split(","):
        # check if chapter list is with volumes and ranges (forcevol)
        if "-" in chapter and ":" in chapter:
            # split chapters and volumes apart for list generation
-            lower_num_fv: list[str] = chapter.split("-")[0].split(":")
-            upper_num_fv: list[str] = chapter.split("-")[1].split(":")
+            lower_num_fv: List[str] = chapter.split("-")[0].split(":")
+            upper_num_fv: List[str] = chapter.split("-")[1].split(":")
            vol_fv: str = lower_num_fv[0]
            chap_beg_fv: int = int(lower_num_fv[1])
            chap_end_fv: int = int(upper_num_fv[1])
@ -70,7 +71,7 @ def get_chapter_list(chapters: str, available_chapters: List[str]) -> List[str]:
            # select all chapters from the volume --> 1: == 1:1,1:2,1:3...
            if vol_num and not chap_num:
                regex: Any = re.compile(f"{vol_num}:[0-9]{{1,4}}")
-                vol_list: list[str] = [n for n in available_chapters if regex.match(n)]
+                vol_list: List[str] = [n for n in available_chapters if regex.match(n)]
                chapter_list.extend(vol_list)
            else:
                chapter_list.append(chapter)
@ -160,7 +161,7 @ def get_file_format(file_format: str) -> str:
def progress_bar(progress: float, total: float) -> None:
-    time = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
+    time = datetime.now(tz=pytz.timezone("Europe/Zurich")).strftime("%Y-%m-%dT%H:%M:%S")
    percent = int(progress / (int(total) / 100))
    bar_length = 50
    bar_progress = int(progress / (int(total) / bar_length))
@ -168,9 +169,9 @ def progress_bar(progress: float, total: float) -> None:
    whitespace_texture = " " * (bar_length - bar_progress)
    if progress == total:
        full_bar = "" * bar_length
-        print(f"\r{time}{' '*6}| [BAR ] ❙{full_bar}❙ 100%", end="\n")
+        print(f"\r{time}{' '*6}| [BAR ] ❙{full_bar}❙ 100%", end="\n")  # noqa
    else:
-        print(
+        print(  # noqa
            f"\r{time}{' '*6}| [BAR ] ❙{bar_texture}{whitespace_texture}{percent}%",
            end="\r",
        )
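get_chapter_list above expands comma-separated chapter selections, including volume-prefixed ranges such as "1:1-1:3" when --forcevol is used. A standalone sketch of just that range expansion, with an illustrative helper name:

from typing import List


def expand_forcevol_range(selection: str) -> List[str]:
    """Expand 'vol:chap_start-vol:chap_end' into ['vol:chap_start', ..., 'vol:chap_end']."""
    lower_num, upper_num = (part.split(":") for part in selection.split("-"))
    vol = lower_num[0]
    chap_beg, chap_end = int(lower_num[1]), int(upper_num[1])
    return [f"{vol}:{chap}" for chap in range(chap_beg, chap_end + 1)]


print(expand_forcevol_range("1:1-1:3"))  # ['1:1', '1:2', '1:3']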


@ -52,7 +52,7 @@ def test_no_volume():
def test_readin_list():
    list_file = "tests/test_list.txt"
-    test_list = mdlpinput.readin_list(None, None, list_file)  # pyright:ignore
+    test_list = mdlpinput.readin_list(None, None, list_file)

    assert test_list == [
        "https://mangadex.org/title/a96676e5-8ae2-425e-b549-7f15dd34a6d8/komi-san-wa-komyushou-desu",


@ -34,7 +34,7 @@ def test_metadata_creation():
"Format": "cbz", "Format": "cbz",
} }
write_metadata(metadata_path, metadata) # pyright:ignore write_metadata(metadata_path, metadata)
assert metadata_file.exists() assert metadata_file.exists()
read_in_metadata = metadata_file.read_text(encoding="utf8") read_in_metadata = metadata_file.read_text(encoding="utf8")
@ -60,7 +60,7 @@ def test_metadata_validation():
"Format": "cbz", "Format": "cbz",
} }
valid_metadata = validate_metadata(metadata) # pyright:ignore valid_metadata = validate_metadata(metadata)
assert valid_metadata["ComicInfo"] == { assert valid_metadata["ComicInfo"] == {
"Title": "title1", "Title": "title1",
@ -83,7 +83,7 @@ def test_metadata_validation_values():
"CommunityRating": 4, "CommunityRating": 4,
} }
valid_metadata = validate_metadata(metadata) # pyright:ignore valid_metadata = validate_metadata(metadata)
assert valid_metadata["ComicInfo"] == { assert valid_metadata["ComicInfo"] == {
"Notes": "Downloaded with https://github.com/olofvndrhr/manga-dlp", "Notes": "Downloaded with https://github.com/olofvndrhr/manga-dlp",
@ -102,7 +102,7 @@ def test_metadata_validation_values2():
"CommunityRating": 10, # invalid "CommunityRating": 10, # invalid
} }
valid_metadata = validate_metadata(metadata) # pyright:ignore valid_metadata = validate_metadata(metadata)
assert valid_metadata["ComicInfo"] == { assert valid_metadata["ComicInfo"] == {
"Notes": "Downloaded with https://github.com/olofvndrhr/manga-dlp", "Notes": "Downloaded with https://github.com/olofvndrhr/manga-dlp",


@ -389,11 +389,11 @@ def test_chapter_metadata():
    forcevol = False
    test = Mangadex(url_uuid, language, forcevol)
    chapter_metadata = test.create_metadata("1")
-    manga_name = chapter_metadata["Series"]  # pyright:ignore
-    chapter_name = chapter_metadata["Title"]  # pyright:ignore
-    chapter_num = chapter_metadata["Number"]  # pyright:ignore
-    chapter_volume = chapter_metadata["Volume"]  # pyright:ignore
-    chapter_url = chapter_metadata["Web"]  # pyright:ignore
+    manga_name = chapter_metadata["Series"]
+    chapter_name = chapter_metadata["Title"]
+    chapter_num = chapter_metadata["Number"]
+    chapter_volume = chapter_metadata["Volume"]
+    chapter_url = chapter_metadata["Web"]

    assert (manga_name, chapter_name, chapter_volume, chapter_num, chapter_url) == (
        "Komi-san wa Komyushou Desu",

tox.ini

@ -1,26 +0,0 @@
[tox]
envlist = py38, py39, py310
isolated_build = True
[testenv]
deps =
-rcontrib/requirements_dev.txt
commands =
pytest --verbose --exitfirst --basetemp="{envtmpdir}" {posargs}
[testenv:basic]
deps =
-rcontrib/requirements_dev.txt
commands =
pytest --verbose --exitfirst --basetemp="{envtmpdir}" {posargs}
[testenv:coverage]
deps =
-rcontrib/requirements_dev.txt
commands =
coverage erase
coverage run
coverage xml -i