From a360cba3f146769ea6ca54c2b98e59b9c7fc0374 Mon Sep 17 00:00:00 2001 From: qx6ghqkz <93668667+qx6ghqkz@users.noreply.github.com> Date: Thu, 16 Jan 2025 22:23:47 +0000 Subject: [PATCH] test: refactor codebase, extend configuration file support --- .github/workflows/dependabot-auto-merge.yaml | 16 +- .github/workflows/docker-image.yaml | 11 +- .pre-commit-config.yaml | 11 +- Dockerfile | 4 + README.md | 20 +- docs/config.toml | 798 +++++++++++++++++ docs/config.yaml | 851 +++++++++++++++++++ gallery_dl_server/__init__.py | 67 +- gallery_dl_server/__main__.py | 2 + gallery_dl_server/config.py | 261 +++--- gallery_dl_server/download.py | 127 ++- gallery_dl_server/output.py | 168 +++- gallery_dl_server/utils.py | 37 +- gallery_dl_server/version.py | 3 + pyproject.toml | 9 + requirements.txt | 1 + templates/index.html | 72 +- 17 files changed, 2165 insertions(+), 293 deletions(-) create mode 100644 docs/config.toml create mode 100644 docs/config.yaml create mode 100644 gallery_dl_server/version.py create mode 100644 pyproject.toml diff --git a/.github/workflows/dependabot-auto-merge.yaml b/.github/workflows/dependabot-auto-merge.yaml index 949c7b5..5a415a2 100644 --- a/.github/workflows/dependabot-auto-merge.yaml +++ b/.github/workflows/dependabot-auto-merge.yaml @@ -8,17 +8,17 @@ permissions: jobs: dependabot: - name: 'Dependabot' + name: Dependabot runs-on: ubuntu-latest - if: ${{ github.actor == 'dependabot[bot]' && github.event_name == 'pull_request'}} + if: ${{ github.actor == 'dependabot[bot]' && github.event_name == 'pull_request' }} steps: - - name: Dependabot metadata - id: metadata - uses: dependabot/fetch-metadata@v2.2.0 + - name: Fetch Dependabot metadata + id: dependabot-metadata + uses: dependabot/fetch-metadata@v2 with: - github-token: '${{ secrets.PERSONAL_TOKEN }}' + github-token: ${{ secrets.PERSONAL_TOKEN }} - name: Enable auto-merge for Dependabot PRs run: gh pr merge --auto --rebase "$PR_URL" env: - PR_URL: ${{github.event.pull_request.html_url}} - GITHUB_TOKEN: ${{secrets.PERSONAL_TOKEN}} + PR_URL: ${{ github.event.pull_request.html_url }} + GITHUB_TOKEN: ${{ secrets.PERSONAL_TOKEN }} diff --git a/.github/workflows/docker-image.yaml b/.github/workflows/docker-image.yaml index 5f8045e..76f898d 100644 --- a/.github/workflows/docker-image.yaml +++ b/.github/workflows/docker-image.yaml @@ -58,7 +58,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: . 
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6 @@ -67,3 +67,12 @@ jobs: labels: ${{ steps.meta.outputs.labels }} cache-from: type=gha cache-to: type=gha,mode=max + + - name: Update Docker Hub description + uses: peter-evans/dockerhub-description@v4 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + repository: ${{ github.repository }} + short-description: ${{ github.event.repository.description }} + enable-url-completion: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9f3f2a5..43275de 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,21 +1,24 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-added-large-files - id: check-ast - id: check-case-conflict + - id: check-merge-conflict - id: check-json - types: [file] # override `types: [json]` + types: [file] files: \.(json|conf)$ - id: check-yaml + - id: check-toml - id: detect-private-key - id: end-of-file-fixer - id: requirements-txt-fixer - id: trailing-whitespace + - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.10 + rev: v0.9.2 hooks: - id: ruff - args: [--fix-only] + args: [--fix] - id: ruff-format diff --git a/Dockerfile b/Dockerfile index edecc3a..d4b053a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,9 @@ FROM python:3.12-alpine +LABEL org.opencontainers.image.source=https://github.com/qx6ghqkz/gallery-dl-server +LABEL org.opencontainers.image.description="Docker image for gallery-dl-server, a simple web and REST interface designed for downloading media using gallery-dl and yt-dlp. It serves as middleware, allowing users to supply URLs to the server through a user-friendly web UI and API. The server processes requests to fetch media from a wide range of sources and allows users to monitor progress through real-time logging." +LABEL org.opencontainers.image.licenses=MIT + RUN apk add --no-cache \ bash \ ffmpeg \ diff --git a/README.md b/README.md index 511d6be..71268c2 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ services: ### Python -If you have Python 3.9 or above installed and on your PATH, you can simply run the server using the command line. Clone this repository and install the required dependencies located in `requirements.txt` in a virtual environment. +If you have Python 3.12 or later installed and on your PATH, you can simply run the server using the command line. Clone this repository and install the required dependencies located in `requirements.txt` in a virtual environment. Run the command below in the root folder while inside the virtual environment. On Windows, replace `python3` with `python`. @@ -128,22 +128,30 @@ services: ## Configuration -Configuration of gallery-dl is as documented in the [official documentation](https://github.com/mikf/gallery-dl#configuration). +Configuration of gallery-dl is as documented in the [official documentation](https://github.com/mikf/gallery-dl#configuration). A configuration file is **required.** -A configuration file is **required.** If running outside of Docker, the [default locations](https://github.com/mikf/gallery-dl#locations) will be used to search for a configuration file. When running *with* Docker, the configuration file must be mounted inside the Docker container in one of the locations where gallery-dl-server will search for the config file. 
+If run outside of Docker, the [default locations](https://github.com/mikf/gallery-dl#locations) will be used to search for a configuration file. If run as an executable, the current directory will also be searched for a valid configuration file. + +Additionally, YAML and TOML configuration files are supported at any of the pre-defined locations. + +When run with Docker, the configuration file must be mounted inside the `/config` directory inside the container. ### Locations - `/config/gallery-dl.conf` +- `/config/gallery-dl.{yaml, yml}` +- `/config/gallery-dl.toml` - `/config/config.json` +- `/config/config.{yaml, yml}` +- `/config/config.toml` -A [default configuration file](docs/gallery-dl.conf) for use with gallery-dl-server has been provided and will automatically be placed in the directory mounted to `/config` if no valid config file exists in that location. +A [default configuration file](docs/gallery-dl.conf) for use with gallery-dl-server has been provided and will automatically be placed in the directory mounted to `/config` if no valid configuration file exists in that location. For more information on configuration file options, see [gallery-dl/docs/configuration.rst](https://github.com/mikf/gallery-dl/blob/master/docs/configuration.rst). -Any additional locations specified in the configuration file must also exist inside the Docker container. For example, if you specify a cookies file location, make sure that location is accessible from within the Docker container. +Any additional locations specified in the configuration file must also exist inside the Docker container. For example, if you specify a cookies file location, ensure that location is accessible from within the Docker container. -It is recommended you place any additional files such as archive, cache and cookies files inside the same directory mounted to `/config` along with the config file. +It is recommended you place any additional files such as archive, cache and cookies files inside the same directory mounted to `/config` along with the configuration file. 
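For example, a minimal Docker Compose service for this setup might look like the sketch below. The image tag, host port, and host-side directory names are illustrative assumptions; only the `/config` mount point and the `/gallery-dl` download directory come from this repository's documented defaults.

```yaml
services:
  gallery-dl-server:
    # Image tag is an assumption -- use the image you actually build or pull.
    image: qx6ghqkz/gallery-dl-server:latest
    ports:
      - "9080:9080"  # Host port is an assumption; map it to whatever port the server listens on.
    volumes:
      - ./config:/config          # Must contain gallery-dl.conf, config.yaml, config.toml, or config.json.
      - ./downloads:/gallery-dl   # Matches the default base-directory in docs/config.toml.
```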
## Usage diff --git a/docs/config.toml b/docs/config.toml new file mode 100644 index 0000000..862cb22 --- /dev/null +++ b/docs/config.toml @@ -0,0 +1,798 @@ +# gallery-dl-server default configuration file 2.0.1 + +# full documentation at +# https://gdl-org.github.io/docs/configuration.html + +# =============================================================== +# ==== General Extractor Options ========================== +# (these can be set as site-specific extractor options as well) + +[extractor] +base-directory = "/gallery-dl/" +skip = true +user-agent = "auto" +referer = true +headers = { } +tls12 = true +proxy-env = true +retries = 4 +retry-codes = [ ] +timeout = 30 +verify = true +download = true +fallback = true +archive-pragma = [ ] +archive-event = [ "file" ] +archive-mode = "file" +cookies-update = true +image-unique = false +chapter-unique = false +keywords = { } +keywords-eval = false +parent-directory = false +parent-metadata = false +parent-skip = false +path-restrict = "auto" +path-replace = "_" +path-remove = "\\u0000-\\u001f\\u007f" +path-strip = "auto" +path-extended = true +sleep = 0 +sleep-request = 0 +sleep-extractor = 0 +sleep-429 = 60 +actions = [ ] +netrc = false + +[extractor.extension-map] +jpeg = "jpg" +jpe = "jpg" +jfif = "jpg" +jif = "jpg" +jfi = "jpg" + +[[extractor.postprocessors]] +name = "metadata" +mode = "tags" +whitelist = [ "danbooru", "gelbooru" ] + +# =============================================================== +# ==== Site-specific Extractor Options ==================== + +[extractor.ao3] +username = "" +password = "" +sleep-request = "0.5-1.5" +formats = [ "pdf" ] + +[extractor.artstation] +external = false +previews = false +videos = true + +[extractor.artstation.search] +pro-first = true + +[extractor.aryion] +username = "" +password = "" +recursive = true + +[extractor.bbc] +width = 1_920 + +[extractor.behance] +sleep-request = "2.0-4.0" +modules = [ "image", "video", "mediacollection", "embed" ] + +[extractor.bilibili] +sleep-request = "2.0-4.0" + +[extractor.bluesky] +username = "" +password = "" +include = [ "media" ] +metadata = false +quoted = false +reposts = false +videos = true + +[extractor.bluesky.post] +depth = 0 + +[extractor.boosty] +allowed = true +bought = false +metadata = false +videos = true + +[extractor.bunkr] +tlds = false + +[extractor.cien] +sleep-request = "1.0-2.0" +files = [ "image", "video", "download", "gallery" ] + +[extractor.civitai] +sleep-request = "0.5-1.5" +api = "trpc" +files = [ "image" ] +include = [ "user-models", "user-posts" ] +metadata = false +nsfw = true +quality = "original=true" + +[extractor.cohost] +asks = true +avatar = false +background = false +pinned = false +replies = true +shares = true + +[extractor.coomerparty] +username = "" +password = "" +announcements = false +comments = false +dms = false +duplicates = false +favorites = "artist" +files = [ "attachments", "file", "inline" ] +metadata = false +revisions = false +order-revisions = "desc" + +[extractor.cyberdrop] + +[extractor.deviantart] +auto-watch = false +auto-unwatch = false +comments = false +comments-avatars = false +extra = false +flat = true +folders = false +group = true +include = "gallery" +intermediary = true +journals = "html" +jwt = false +mature = true +metadata = false +original = true +pagination = "api" +previews = false +public = true +quality = 100 +wait-min = 0 + +[extractor.deviantart.avatar] + +[extractor.exhentai] +username = "" +password = "" +sleep-request = "3.0-6.0" +domain = "auto" +gp = "resized" +metadata = 
false +original = true +tags = false +fallback-retries = 2 +directory = [ "{category}", "{title}" ] +filename = "{num:>03}.{extension}" +postprocessors = [ "cbz" ] + +[extractor.fanbox] +comments = false +embeds = true +metadata = false + +[extractor.flickr] +sleep-request = "1.0-2.0" +contexts = false +exif = false +metadata = false +videos = true + +[extractor.furaffinity] +descriptions = "text" +external = false +include = [ "gallery" ] +layout = "auto" + +[extractor.gelbooru] + +[extractor.gelbooru.favorite] +order-posts = "desc" + +[extractor.generic] +enabled = false + +[extractor.gofile] +recursive = false + +[extractor.hentaifoundry] +include = [ "pictures" ] + +[extractor.hitomi] +format = "webp" + +[extractor.idolcomplex] +username = "" +password = "" +referer = false +sleep-request = "3.0-6.0" + +[extractor.imagechest] + +[extractor.imagefap] +sleep-request = "2.0-4.0" + +[extractor.imgbb] +username = "" +password = "" + +[extractor.imgur] +mp4 = true + +[extractor.inkbunny] +username = "" +password = "" +orderby = "create_datetime" + +[extractor.instagram] +sleep-request = "6.0-12.0" +api = "rest" +cursor = true +include = "posts" +metadata = false +order-files = "asc" +order-posts = "asc" +previews = false +videos = true + +[extractor.itaku] +sleep-request = "0.5-1.5" +videos = true + +[extractor.kemonoparty] +username = "" +password = "" +announcements = false +comments = false +dms = false +duplicates = false +favorites = "artist" +files = [ "attachments", "file", "inline" ] +metadata = false +revisions = false +order-revisions = "desc" + +[extractor.khinsider] +format = "mp3" + +[extractor.koharu] +username = "" +password = "" +sleep-request = "0.5-1.5" +cbz = true +format = [ "0", "1600", "1280", "980", "780" ] +tags = false + +[extractor.luscious] +gif = false + +[extractor.mangadex] +username = "" +password = "" +api-server = "https://api.mangadex.org" +ratings = [ "safe", "suggestive", "erotica", "pornographic" ] +postprocessors = [ "cbz" ] + +[extractor.mangoxo] +username = "" +password = "" + +[extractor.newgrounds] +username = "" +password = "" +sleep-request = "0.5-1.5" +flash = true +format = "original" +include = [ "art" ] + +[extractor.nhentai] +directory = [ "{category}", "{title}" ] +filename = "{num:>03}.{extension}" +postprocessors = [ "cbz" ] + +[extractor.nsfwalbum] +referer = false + +[extractor.oauth] +browser = true +cache = true +host = "localhost" +port = 6_414 + +[extractor.paheal] +metadata = false + +[extractor.patreon] +files = [ + "images", + "image_large", + "attachments", + "postfile", + "content" +] + +[extractor.pillowfort] +username = "" +password = "" +external = false +inline = true +reblogs = false + +[extractor.pinterest] +domain = "auto" +sections = true +stories = true +videos = true + +[extractor.pixeldrain] + +[extractor.pixiv] +captions = false +comments = false +include = [ "artworks" ] +metadata = false +metadata-bookmark = false +sanity = true +tags = "japanese" +ugoira = true +postprocessors = [ "ugoira-copy" ] +covers = false +embeds = false +full-series = false + +[extractor.pixiv.directory] +"page_count > 1" = [ "{category}", "{user[id]} {user[account]}", "{id} {title}" ] +"" = [ "{category}", "{user[id]} {user[account]}" ] + +[extractor.pixiv.filename] +"page_count > 1" = "{num:>03}.{extension}" +"" = "{id} {title}.{extension}" + +[extractor.plurk] +sleep-request = "0.5-1.5" +comments = false + +[extractor.poipiku] +sleep-request = "0.5-1.5" + +[extractor.pornpics] +sleep-request = "0.5-1.5" + +[extractor.readcomiconline] 
+sleep-request = "3.0-6.0" +captcha = "stop" +quality = "auto" + +[extractor.reddit] +comments = 0 +morecomments = false +embeds = true +date-min = 0 +date-max = 253_402_210_800 +date-format = "%Y-%m-%dT%H:%M:%S" +previews = true +recursion = 0 +videos = true +whitelist = [ "imgur", "redgifs" ] +parent-directory = true +parent-metadata = "_reddit" + +[extractor."reddit>imgur"] +directory = [ ] +filename = "{_reddit[id]} {_reddit[title]} {id}.{extension}" + +[extractor."reddit>redgifs"] +directory = [ ] +filename = "{_reddit[id]} {_reddit[title]} {id}.{extension}" + +[extractor.redgifs] +format = [ "hd", "sd", "gif" ] + +[extractor.rule34xyz] +format = [ "10", "40", "41", "2" ] + +[extractor.sankaku] +username = "" +password = "" +id-format = "numeric" +refresh = false +tags = false + +[extractor.sankakucomplex] +embeds = false +videos = true + +[extractor.scrolller] +username = "" +password = "" +sleep-request = "0.5-1.5" + +[extractor.skeb] +article = false +sent-requests = false +thumbnails = false + +[extractor.skeb.search] + +[extractor.smugmug] +videos = true + +[extractor.soundgasm] +sleep-request = "0.5-1.5" +directory = [ "{category}", "{user}" ] + +[extractor.steamgriddb] +animated = true +epilepsy = true +humor = true +dimensions = "all" +file-types = "all" +languages = "all," +nsfw = true +sort = "score_desc" +static = true +styles = "all" +untagged = true +download-fake-png = true + +[extractor.seiga] +username = "" +password = "" + +[extractor.subscribestar] +username = "" +password = "" + +[extractor.tapas] +username = "" +password = "" + +[extractor.tsumino] +username = "" +password = "" + +[extractor.tumblr] +avatar = false +date-min = 0 +external = false +inline = true +offset = 0 +original = true +pagination = "offset" +posts = "all" +ratelimit = "abort" +reblogs = true +fallback-delay = 120 +fallback-retries = 2 + +[extractor.tumblrgallery] +referer = false + +[extractor.twitter] +username = "" +username-alt = "" +password = "" +ads = false +cards = false +cards-blacklist = [ ] +csrf = "cookies" +cursor = true +expand = false +include = [ "timeline" ] +locked = "abort" +logout = true +pinned = false +quoted = false +ratelimit = "wait" +relogin = true +replies = true +retweets = false +size = [ "orig", "4096x4096", "large", "medium", "small" ] +text-tweets = false +tweet-endpoint = "auto" +transform = true +twitpic = false +unavailable = false +unique = true +users = "user" +videos = true + +[extractor.twitter.timeline] +strategy = "auto" + +[extractor.twitter.tweet] +conversations = false + +[extractor.twitter.directory] +"quote_id != 0" = [ "{category!c}", "{quote_by}", "quote-retweets" ] +"retweet_id != 0" = [ "{category!c}", "{user[name]}", "retweets" ] +"" = [ "{category!c}", "{user[name]}" ] + +[[extractor.twitter.postprocessors]] +name = "metadata" +event = "post" +filename = "twitter_{author[name]}_{tweet_id}_main.json" + +[extractor.unsplash] +format = "raw" + +[extractor.urlgalleries] +sleep-request = "0.5-1.5" + +[extractor.vipergirls] +username = "" +password = "" +sleep-request = "0.5" +domain = "vipergirls.to" +like = false + +[extractor.vk] +sleep-request = "0.5-1.5" +offset = 0 + +[extractor.vsco] +include = [ "gallery" ] +videos = true + +[extractor.wallhaven] +include = [ "uploads" ] +metadata = false + +[extractor.weasyl] +metadata = false + +[extractor.weibo] +sleep-request = "1.0-2.0" +gifs = true +include = [ "feed" ] +livephoto = true +retweets = false +videos = true + +[extractor.ytdl] +cmdline-args = [ + "--ignore-errors", + "--no-cache-dir", + 
"--no-playlist", + "--merge-output-format", + "mp4", + "--convert-thumbnails", + "jpg", + "--embed-thumbnail", + "--sub-langs", + "all", + "--sub-format", + "srt/best", + "--convert-subs", + "srt", + "--embed-subs", + "--embed-metadata" +] +enabled = true +format = "bestvideo*+bestaudio/best" +generic = true +logging = true +module = "yt_dlp" +directory = [ "{subcategory!l}" ] +filename = "{title} [{id}].{extension}" + +[extractor.zerochan] +username = "" +password = "" +sleep-request = "0.5-1.5" +metadata = false +pagination = "api" +redirects = false + +# =============================================================== +# ==== Base-Extractor and Instance Options ================ + +[extractor.blogger] +videos = true + +[extractor.Danbooru] +sleep-request = "0.5-1.5" +external = false +metadata = false +threshold = "auto" +ugoira = false + +[extractor.danbooru] +username = "" +password = "" + +[extractor.atfbooru] +username = "" +password = "" + +[extractor.aibooru] +username = "" +password = "" + +[extractor.booruvar] +username = "" +password = "" + +[extractor.E621] +sleep-request = "0.5-1.5" +metadata = false +threshold = "auto" + +[extractor.e621] +username = "" +password = "" + +[extractor.e926] +username = "" +password = "" + +[extractor.e6ai] +username = "" +password = "" + +[extractor.foolfuuka] +sleep-request = "0.5-1.5" + +[extractor.archivedmoe] +referer = false + +[extractor.mastodon] +cards = false +reblogs = false +replies = true +text-posts = false + +[extractor.misskey] +renotes = false +replies = true + +[extractor.Nijie] +sleep-request = "2.0-4.0" +include = [ "illustration", "doujin" ] + +[extractor.nijie] +username = "" +password = "" + +[extractor.horne] +username = "" +password = "" + +[extractor.nitter] +quoted = false +retweets = false +videos = true + +[extractor.philomena] +sleep-request = "0.5-1.5" +svg = true +filter = 2 + +[extractor.derpibooru] +filter = 56_027 + +[extractor.ponybooru] +filter = 3 + +[extractor.twibooru] +sleep-request = "6.0-6.1" + +[extractor.postmill] +save-link-post-body = false + +[extractor.reactor] +sleep-request = "3.0-6.0" +gif = false + +[extractor.wikimedia] +sleep-request = "1.0-2.0" +limit = 50 + +[extractor.booru] +tags = false +notes = false +url = "file_url" + +# =================================================================== +# ==== Downloader Options ===================================== + +[downloader] +mtime = true +part = true +progress = 3 +retries = 4 +timeout = 30 +verify = true + +[downloader.http] +adjust-extensions = true +chunk-size = 32_768 +consume-content = false +enabled = true +retry-codes = [ 404, 429, 430 ] +validate = true + +[downloader.ytdl] +enabled = true +forward-cookies = true +logging = true +module = "yt_dlp" + +# =================================================================== +# ==== Output Options ========================================= + +[output] +ansi = true +fallback = true +mode = "auto" +private = false +progress = true +shorten = true +skip = true +log = "[{name}] {message}" + +[output.colors] +success = "1;32" +skip = "2" +debug = "0;37" +info = "1;37" +warning = "1;33" +error = "1;31" + +# =================================================================== +# ==== Postprocessor Options ================================== + +[postprocessor.cbz] +name = "zip" +compression = "store" +extension = "cbz" +keep-files = false +mode = "default" +filter = "extension not in ('zip', 'rar')" + +[postprocessor.ugoira-mp4] +name = "ugoira" +extension = "mp4" +ffmpeg-args = [ + "-c:v", + 
"libx264", + "-an", + "-b:v", + "4M", + "-preset", + "veryslow" +] +ffmpeg-twopass = true +libx264-prevent-odd = true + +[postprocessor.ugoira-gif] +name = "ugoira" +extension = "gif" +ffmpeg-args = [ + "-filter_complex", + "[0:v] split [a][b];[a] palettegen [p];[b][p] paletteuse" +] + +[postprocessor.ugoira-copy] +name = "ugoira" +extension = "mkv" +ffmpeg-args = [ "-c", "copy" ] +libx264-prevent-odd = false +repeat-last-frame = false diff --git a/docs/config.yaml b/docs/config.yaml new file mode 100644 index 0000000..5fb5d97 --- /dev/null +++ b/docs/config.yaml @@ -0,0 +1,851 @@ +# gallery-dl-server default configuration file 2.0.1 + +# full documentation at +# https://gdl-org.github.io/docs/configuration.html + +# =============================================================== +# ==== General Extractor Options ========================== +# (these can be set as site-specific extractor options as well) + +extractor: + base-directory: /gallery-dl/ + skip: true + skip-filter: null + user-agent: auto + referer: true + headers: {} + ciphers: null + tls12: true + browser: null + proxy: null + proxy-env: true + source-address: null + retries: 4 + retry-codes: [] + timeout: 30 + verify: true + download: true + fallback: true + archive: null + archive-format: null + archive-prefix: null + archive-pragma: [] + archive-event: + - file + archive-mode: file + cookies: null + cookies-select: null + cookies-update: true + image-filter: null + image-range: null + image-unique: false + chapter-filter: null + chapter-range: null + chapter-unique: false + keywords: {} + keywords-eval: false + keywords-default: null + parent-directory: false + parent-metadata: false + parent-skip: false + path-restrict: auto + path-replace: _ + path-remove: \u0000-\u001f\u007f + path-strip: auto + path-extended: true + metadata-extractor: null + metadata-http: null + metadata-parent: null + metadata-path: null + metadata-url: null + metadata-version: null + sleep: 0 + sleep-request: 0 + sleep-extractor: 0 + sleep-429: 60 + actions: [] + input: null + netrc: false + extension-map: + jpeg: jpg + jpe: jpg + jfif: jpg + jif: jpg + jfi: jpg + postprocessors: + - name: metadata + mode: tags + whitelist: + - danbooru + - gelbooru + + # =============================================================== + # ==== Site-specific Extractor Options ==================== + + ao3: + username: "" + password: "" + sleep-request: 0.5-1.5 + formats: + - pdf + artstation: + external: false + max-posts: null + previews: false + videos: true + search: + pro-first: true + aryion: + username: "" + password: "" + recursive: true + bbc: + width: 1920 + behance: + sleep-request: 2.0-4.0 + modules: + - image + - video + - mediacollection + - embed + bilibili: + sleep-request: 2.0-4.0 + bluesky: + username: "" + password: "" + include: + - media + metadata: false + quoted: false + reposts: false + videos: true + post: + depth: 0 + boosty: + allowed: true + bought: false + metadata: false + videos: true + bunkr: + tlds: false + cien: + sleep-request: 1.0-2.0 + files: + - image + - video + - download + - gallery + civitai: + api-key: null + sleep-request: 0.5-1.5 + api: trpc + files: + - image + include: + - user-models + - user-posts + metadata: false + nsfw: true + quality: original=true + cohost: + asks: true + avatar: false + background: false + pinned: false + replies: true + shares: true + coomerparty: + username: "" + password: "" + announcements: false + comments: false + dms: false + duplicates: false + favorites: artist + files: + - attachments + - 
file + - inline + max-posts: null + metadata: false + revisions: false + order-revisions: desc + cyberdrop: + domain: null + deviantart: + client-id: null + client-secret: null + refresh-token: null + auto-watch: false + auto-unwatch: false + comments: false + comments-avatars: false + extra: false + flat: true + folders: false + group: true + include: gallery + intermediary: true + journals: html + jwt: false + mature: true + metadata: false + original: true + pagination: api + previews: false + public: true + quality: 100 + wait-min: 0 + avatar: + formats: null + exhentai: + username: "" + password: "" + cookies: null + sleep-request: 3.0-6.0 + domain: auto + fav: null + gp: resized + limits: null + metadata: false + original: true + source: null + tags: false + fallback-retries: 2 + directory: + - "{category}" + - "{title}" + filename: "{num:>03}.{extension}" + postprocessors: + - cbz + fanbox: + cookies: null + comments: false + embeds: true + metadata: false + flickr: + access-token: null + access-token-secret: null + sleep-request: 1.0-2.0 + contexts: false + exif: false + metadata: false + size-max: null + videos: true + furaffinity: + cookies: null + descriptions: text + external: false + include: + - gallery + layout: auto + gelbooru: + api-key: null + user-id: null + favorite: + order-posts: desc + generic: + enabled: false + gofile: + api-token: null + website-token: null + recursive: false + hentaifoundry: + include: + - pictures + hitomi: + format: webp + idolcomplex: + username: "" + password: "" + referer: false + sleep-request: 3.0-6.0 + imagechest: + access-token: null + imagefap: + sleep-request: 2.0-4.0 + imgbb: + username: "" + password: "" + imgur: + client-id: null + mp4: true + inkbunny: + username: "" + password: "" + orderby: create_datetime + instagram: + cookies: null + sleep-request: 6.0-12.0 + api: rest + cursor: true + include: posts + max-posts: null + metadata: false + order-files: asc + order-posts: asc + previews: false + videos: true + itaku: + sleep-request: 0.5-1.5 + videos: true + kemonoparty: + username: "" + password: "" + announcements: false + comments: false + dms: false + duplicates: false + favorites: artist + files: + - attachments + - file + - inline + max-posts: null + metadata: false + revisions: false + order-revisions: desc + khinsider: + format: mp3 + koharu: + username: "" + password: "" + sleep-request: 0.5-1.5 + cbz: true + format: + - "0" + - "1600" + - "1280" + - "980" + - "780" + tags: false + luscious: + gif: false + mangadex: + username: "" + password: "" + api-server: https://api.mangadex.org + api-parameters: null + lang: null + ratings: + - safe + - suggestive + - erotica + - pornographic + postprocessors: + - cbz + mangoxo: + username: "" + password: "" + newgrounds: + username: "" + password: "" + sleep-request: 0.5-1.5 + flash: true + format: original + include: + - art + nhentai: + directory: + - "{category}" + - "{title}" + filename: "{num:>03}.{extension}" + postprocessors: + - cbz + nsfwalbum: + referer: false + oauth: + browser: true + cache: true + host: localhost + port: 6414 + paheal: + metadata: false + patreon: + cookies: null + files: + - images + - image_large + - attachments + - postfile + - content + pillowfort: + username: "" + password: "" + external: false + inline: true + reblogs: false + pinterest: + domain: auto + sections: true + stories: true + videos: true + pixeldrain: + api-key: null + pixiv: + refresh-token: null + cookies: null + captions: false + comments: false + include: + - artworks + 
max-posts: null + metadata: false + metadata-bookmark: false + sanity: true + tags: japanese + ugoira: true + postprocessors: + - ugoira-copy + covers: false + embeds: false + full-series: false + directory: + page_count > 1: + - "{category}" + - "{user[id]} {user[account]}" + - "{id} {title}" + "": + - "{category}" + - "{user[id]} {user[account]}" + filename: + page_count > 1: "{num:>03}.{extension}" + "": "{id} {title}.{extension}" + plurk: + sleep-request: 0.5-1.5 + comments: false + poipiku: + sleep-request: 0.5-1.5 + pornpics: + sleep-request: 0.5-1.5 + readcomiconline: + sleep-request: 3.0-6.0 + captcha: stop + quality: auto + reddit: + client-id: null + user-agent: null + refresh-token: null + comments: 0 + morecomments: false + embeds: true + date-min: 0 + date-max: 253402210800 + date-format: "%Y-%m-%dT%H:%M:%S" + id-min: null + id-max: null + previews: true + recursion: 0 + videos: true + whitelist: + - imgur + - redgifs + parent-directory: true + parent-metadata: _reddit + reddit>imgur: + directory: [] + filename: "{_reddit[id]} {_reddit[title]} {id}.{extension}" + reddit>redgifs: + directory: [] + filename: "{_reddit[id]} {_reddit[title]} {id}.{extension}" + redgifs: + format: + - hd + - sd + - gif + rule34xyz: + format: + - "10" + - "40" + - "41" + - "2" + sankaku: + username: "" + password: "" + id-format: numeric + refresh: false + tags: false + sankakucomplex: + embeds: false + videos: true + scrolller: + username: "" + password: "" + sleep-request: 0.5-1.5 + skeb: + article: false + sent-requests: false + thumbnails: false + search: + filters: null + smugmug: + access-token: null + access-token-secret: null + videos: true + soundgasm: + sleep-request: 0.5-1.5 + directory: + - "{category}" + - "{user}" + steamgriddb: + animated: true + epilepsy: true + humor: true + dimensions: all + file-types: all + languages: all, + nsfw: true + sort: score_desc + static: true + styles: all + untagged: true + download-fake-png: true + seiga: + username: "" + password: "" + cookies: null + subscribestar: + username: "" + password: "" + tapas: + username: "" + password: "" + tsumino: + username: "" + password: "" + tumblr: + access-token: null + access-token-secret: null + avatar: false + date-min: 0 + date-max: null + external: false + inline: true + offset: 0 + original: true + pagination: offset + posts: all + ratelimit: abort + reblogs: true + fallback-delay: 120 + fallback-retries: 2 + tumblrgallery: + referer: false + twitter: + username: "" + username-alt: "" + password: "" + cookies: null + ads: false + cards: false + cards-blacklist: [] + csrf: cookies + cursor: true + expand: false + include: + - timeline + locked: abort + logout: true + pinned: false + quoted: false + ratelimit: wait + relogin: true + replies: true + retweets: false + size: + - orig + - 4096x4096 + - large + - medium + - small + text-tweets: false + tweet-endpoint: auto + transform: true + twitpic: false + unavailable: false + unique: true + users: user + videos: true + timeline: + strategy: auto + tweet: + conversations: false + directory: + quote_id != 0: + - "{category!c}" + - "{quote_by}" + - quote-retweets + retweet_id != 0: + - "{category!c}" + - "{user[name]}" + - retweets + "": + - "{category!c}" + - "{user[name]}" + postprocessors: + - name: metadata + event: post + filename: twitter_{author[name]}_{tweet_id}_main.json + unsplash: + format: raw + urlgalleries: + sleep-request: 0.5-1.5 + vipergirls: + username: "" + password: "" + sleep-request: "0.5" + domain: vipergirls.to + like: false + vk: + 
sleep-request: 0.5-1.5 + offset: 0 + vsco: + include: + - gallery + videos: true + wallhaven: + api-key: null + include: + - uploads + metadata: false + weasyl: + api-key: null + metadata: false + weibo: + sleep-request: 1.0-2.0 + gifs: true + include: + - feed + livephoto: true + retweets: false + videos: true + ytdl: + cmdline-args: + - --ignore-errors + - --no-cache-dir + - --no-playlist + - --merge-output-format + - mp4 + - --convert-thumbnails + - jpg + - --embed-thumbnail + - --sub-langs + - all + - --sub-format + - srt/best + - --convert-subs + - srt + - --embed-subs + - --embed-metadata + config-file: null + enabled: true + format: bestvideo*+bestaudio/best + generic: true + logging: true + module: yt_dlp + raw-options: null + directory: + - "{subcategory!l}" + filename: "{title} [{id}].{extension}" + zerochan: + username: "" + password: "" + sleep-request: 0.5-1.5 + metadata: false + pagination: api + redirects: false + + # =============================================================== + # ==== Base-Extractor and Instance Options ================ + + blogger: + api-key: null + videos: true + Danbooru: + sleep-request: 0.5-1.5 + external: false + metadata: false + threshold: auto + ugoira: false + danbooru: + username: "" + password: "" + atfbooru: + username: "" + password: "" + aibooru: + username: "" + password: "" + booruvar: + username: "" + password: "" + E621: + sleep-request: 0.5-1.5 + metadata: false + threshold: auto + e621: + username: "" + password: "" + e926: + username: "" + password: "" + e6ai: + username: "" + password: "" + foolfuuka: + sleep-request: 0.5-1.5 + archivedmoe: + referer: false + mastodon: + access-token: null + cards: false + reblogs: false + replies: true + text-posts: false + misskey: + access-token: null + renotes: false + replies: true + Nijie: + sleep-request: 2.0-4.0 + include: + - illustration + - doujin + nijie: + username: "" + password: "" + horne: + username: "" + password: "" + nitter: + quoted: false + retweets: false + videos: true + philomena: + api-key: null + sleep-request: 0.5-1.5 + svg: true + filter: 2 + derpibooru: + filter: 56027 + ponybooru: + filter: 3 + twibooru: + sleep-request: 6.0-6.1 + postmill: + save-link-post-body: false + reactor: + sleep-request: 3.0-6.0 + gif: false + wikimedia: + sleep-request: 1.0-2.0 + limit: 50 + booru: + tags: false + notes: false + url: file_url + +# =================================================================== +# ==== Downloader Options ===================================== + +downloader: + filesize-min: null + filesize-max: null + mtime: true + part: true + part-directory: null + progress: 3 + proxy: null + rate: null + retries: 4 + timeout: 30 + verify: true + http: + adjust-extensions: true + chunk-size: 32768 + consume-content: false + enabled: true + headers: null + retry-codes: + - 404 + - 429 + - 430 + validate: true + ytdl: + cmdline-args: null + config-file: null + enabled: true + format: null + forward-cookies: true + logging: true + module: yt_dlp + outtmpl: null + raw-options: null + +# =================================================================== +# ==== Output Options ========================================= + +output: + ansi: true + fallback: true + mode: auto + private: false + progress: true + shorten: true + skip: true + stdin: null + stdout: null + stderr: null + log: "[{name}] {message}" + logfile: null + errorfile: null + unsupportedfile: null + colors: + success: 1;32 + skip: "2" + debug: 0;37 + info: 1;37 + warning: 1;33 + error: 1;31 +cache: + file: null + 
+# =================================================================== +# ==== Postprocessor Options ================================== + +postprocessor: + cbz: + name: zip + compression: store + extension: cbz + files: null + keep-files: false + mode: default + filter: extension not in ('zip', 'rar') + ugoira-mp4: + name: ugoira + extension: mp4 + ffmpeg-args: + - -c:v + - libx264 + - -an + - -b:v + - 4M + - -preset + - veryslow + ffmpeg-twopass: true + libx264-prevent-odd: true + ugoira-gif: + name: ugoira + extension: gif + ffmpeg-args: + - -filter_complex + - "[0:v] split [a][b];[a] palettegen [p];[b][p] paletteuse" + ugoira-copy: + name: ugoira + extension: mkv + ffmpeg-args: + - -c + - copy + libx264-prevent-odd: false + repeat-last-frame: false diff --git a/gallery_dl_server/__init__.py b/gallery_dl_server/__init__.py index 8544f6e..23f1fbc 100644 --- a/gallery_dl_server/__init__.py +++ b/gallery_dl_server/__init__.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + import os import multiprocessing import queue @@ -6,21 +8,21 @@ from contextlib import asynccontextmanager -import aiofiles - from starlette.applications import Starlette from starlette.background import BackgroundTask +from starlette.datastructures import UploadFile from starlette.responses import RedirectResponse, JSONResponse, StreamingResponse +from starlette.requests import Request from starlette.routing import Route, Mount from starlette.staticfiles import StaticFiles from starlette.status import HTTP_303_SEE_OTHER from starlette.templating import Jinja2Templates -from gallery_dl import version as gdl_version -from yt_dlp import version as ydl_version +import aiofiles +import gallery_dl.version +import yt_dlp.version -from . import output, download -from .utils import resource_path +from . 
import download, output, utils, version log_file = output.LOG_FILE @@ -31,24 +33,23 @@ blank_sent = False -async def redirect(request): +async def redirect(request: Request): return RedirectResponse(url="/gallery-dl") -async def dl_queue_list(request): - templates = Jinja2Templates(directory=resource_path("templates")) - +async def dl_queue_list(request: Request): return templates.TemplateResponse( "index.html", { "request": request, - "gallerydl_version": gdl_version.__version__, - "ytdlp_version": ydl_version.__version__, + "app_version": version.__version__, + "gallery_dl_version": gallery_dl.version.__version__, + "yt_dlp_version": yt_dlp.version.__version__, }, ) -async def q_put(request): +async def q_put(request: Request): global blank_sent if not blank_sent: @@ -56,9 +57,20 @@ async def q_put(request): blank_sent = True form = await request.form() - url = form.get("url").strip() + + url = form.get("url") ui = form.get("ui") - options = {"video-options": form.get("video-opts")} + video_opts = form.get("video-opts") + + data = [url, ui, video_opts] + data = [None if isinstance(value, UploadFile) else value for value in data] + + url, ui, video_opts = data + + if not video_opts: + video_opts = "none-selected" + + options = {"video-options": video_opts} if not url: log.error("No URL provided.") @@ -70,22 +82,20 @@ async def q_put(request): return RedirectResponse(url="/gallery-dl", status_code=HTTP_303_SEE_OTHER) - task = BackgroundTask(download_task, url, options) + task = BackgroundTask(download_task, url.strip(), options) log.info("Added URL to the download queue: %s", url) if not ui: - return JSONResponse( - {"success": True, "url": url, "options": options}, background=task - ) + return JSONResponse({"success": True, "url": url, "options": options}, background=task) return RedirectResponse( url="/gallery-dl?added=" + url, status_code=HTTP_303_SEE_OTHER, background=task ) -async def log_route(request): - async def file_iterator(file_path): +async def log_route(request: Request): + async def file_iterator(file_path: str): async with aiofiles.open(file_path, mode="r", encoding="utf-8") as file: while True: chunk = await file.read(64 * 1024) @@ -97,21 +107,20 @@ async def file_iterator(file_path): @asynccontextmanager -async def lifespan(app): +async def lifespan(app: Starlette): + output.stdout_write(f"\033[32mINFO\033[0m: Starting {type(app).__name__} application.") yield - if os.path.isdir("/config"): + if utils.CONTAINER and os.path.isdir("/config"): if os.path.isfile(log_file) and os.path.getsize(log_file) > 0: dst_dir = "/config/logs" os.makedirs(dst_dir, exist_ok=True) - dst = os.path.join( - dst_dir, "app_" + time.strftime("%Y-%m-%d_%H-%M-%S") + ".log" - ) + dst = os.path.join(dst_dir, "app_" + time.strftime("%Y-%m-%d_%H-%M-%S") + ".log") shutil.copy2(log_file, dst) -def download_task(url, options): +def download_task(url: str, options: dict[str, str]): """Initiate download as a subprocess and log output.""" log_queue = multiprocessing.Queue() return_status = multiprocessing.Queue() @@ -151,12 +160,14 @@ def download_task(url, options): log.error("Download job failed with exit code: %s", exit_code) +templates = Jinja2Templates(directory=utils.resource_path("templates")) + routes = [ Route("/", endpoint=redirect, methods=["GET"]), Route("/gallery-dl", endpoint=dl_queue_list, methods=["GET"]), Route("/gallery-dl/q", endpoint=q_put, methods=["POST"]), Route("/gallery-dl/logs", endpoint=log_route, methods=["GET"]), - Mount("/icons", 
app=StaticFiles(directory=resource_path("icons")), name="icons"), + Mount("/icons", app=StaticFiles(directory=utils.resource_path("icons")), name="icons"), ] app = Starlette(debug=True, routes=routes, lifespan=lifespan) diff --git a/gallery_dl_server/__main__.py b/gallery_dl_server/__main__.py index 38a67cc..12061d3 100644 --- a/gallery_dl_server/__main__.py +++ b/gallery_dl_server/__main__.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + import os import multiprocessing diff --git a/gallery_dl_server/config.py b/gallery_dl_server/config.py index 84f54ec..138dba4 100644 --- a/gallery_dl_server/config.py +++ b/gallery_dl_server/config.py @@ -1,27 +1,33 @@ +# -*- coding: utf-8 -*- + import os +import sys import logging -from collections.abc import MutableMapping +from typing import Any + +import tomllib as toml +import yaml from gallery_dl import config -from . import output +from . import output, utils log = output.initialise_logging(__name__) -_config = config._config -_files = config._files +_config: dict[str, Any] = config._config +_files: list[str] = config._files -def clear(): +def clear(conf: dict[str, Any] = _config): """Clear loaded configuration.""" - config.clear() + conf.clear() def get_default_configs(): """Return default gallery-dl configuration file locations.""" - if os.name == "nt": + if utils.WINDOWS: _default_configs = [ "%APPDATA%\\gallery-dl\\config.json", "%USERPROFILE%\\gallery-dl\\config.json", @@ -36,139 +42,184 @@ def get_default_configs(): "${HOME}/.gallery-dl.conf", ] + if utils.EXECUTABLE: + _default_configs.extend( + utils.join_paths( + os.path.dirname(sys.executable), + "gallery-dl.conf", + "config.json", + ) + ) + return _default_configs -def load(_configs): +def get_new_configs(_configs: list[str], exts: list[str]) -> list[str]: + """Return list of original paths and paths with new extensions.""" + _new_configs = [] + + for path in _configs: + _new_configs.append(path) + + for ext in exts: + base_path = path.rsplit(".", 1)[0] + _new_configs.append(base_path + ext) + + return _new_configs + + +def load(_configs: list[str]): """Load configuration files.""" - exit_code = None - loads = 0 + exit_codes = [] + messages = [] + loaded = 0 + + new_exts = [".yaml", ".yml", ".toml"] - if os.name == "nt": - _configs = get_default_configs() + if utils.CONTAINER: + _configs = get_new_configs(_configs, new_exts) else: - _configs = _configs + get_default_configs() + _configs = get_new_configs(get_default_configs(), new_exts) - if config.log.level <= logging.ERROR: - config.log.setLevel(logging.CRITICAL) + log_buffer = output.StringLogger() for path in _configs: try: - config.load([path], strict=True) + if path.endswith((".conf", ".json")): + config.load([path], strict=True) + if path.endswith((".yaml", ".yml")): + config.load([path], strict=True, loads=yaml.safe_load) + if path.endswith(".toml"): + config.load([path], strict=True, loads=toml.loads) except SystemExit as e: - if not exit_code: - exit_code = e.code + exit_codes.append(e.code) + if e.code == 2: + messages.append(log_buffer.get_logs().split(output.LOG_SEPARATOR)[-1]) else: - loads += 1 + loaded += 1 - if loads > 0: - log.info(f"Loaded gallery-dl configuration file(s): {_files}") - elif exit_code: - log.error(f"Unable to load configuration file: Exit code {exit_code}") + log_buffer.close() - if exit_code == 1: - log.info(f"Valid configuration file locations: {_configs}") + if loaded > 0: + log.info(f"Loaded gallery-dl configuration file(s): [{output.join(_files)}]") + else: + if 2 not in exit_codes: + 
log.error("Loading configuration files failed with exit code: 1") + log.info(f"Valid configuration file locations: [{output.join(_configs)}]") + else: + log.error("Loading configuration files failed with exit code: 2") + for message in messages: + log.log_multiline(logging.ERROR, message) -def add(dict=None, conf=_config, fixed=False, **kwargs): - """Add entries to a nested dictionary.""" - if dict: - for k, v in dict.items(): - if isinstance(v, MutableMapping): - if k in conf.keys() or not fixed: + if loaded == 0: + raise SystemExit(1) + + +def get(path: list[str], default: Any = None, conf: dict[str, Any] = _config): + """Get a value from a nested dictionary or return a default value.""" + if isinstance(path, (list, tuple)): + try: + for p in path: + conf = conf[p] + return conf + except Exception: + return default + + +def add( + _dict: dict[str, Any] | None = None, conf: dict[str, Any] = _config, fixed=False, **kwargs: Any +): + """Add entries to a nested dictionary or list.""" + if _dict: + for k, v in _dict.items(): + if k in conf.keys() or not fixed: + if isinstance(v, dict): conf[k] = add(v, conf.get(k) or {}, fixed=fixed)[0] - elif isinstance(v, list): - if k in conf.keys() or not fixed: + elif isinstance(v, list): for i in v: - if not isinstance(i, MutableMapping): + if not isinstance(i, dict): if i not in conf.get(k, []) or not str(i).startswith("-"): conf[k] = conf.get(k, []) + [i] else: if i not in conf.get(k, []): conf[k] = conf.get(k, []) + [i] - else: - if k in conf.keys() or not fixed: + else: conf[k] = v - while isinstance(d := list(dict.values())[0], MutableMapping): - dict = d + while isinstance(d := list(_dict.values())[0], dict): + _dict = d if kwargs: for key, val in kwargs.items(): for k, v in conf.items(): - if isinstance(v, MutableMapping): + if k == key and key in conf.keys(): + conf[k] = val + elif isinstance(v, dict): conf[k] = add(conf=v, fixed=fixed, **{key: val})[0] - else: - if k == key and key in conf.keys(): - conf[k] = val - if dict: - return (conf, [dict, kwargs]) - else: - return (conf, [kwargs]) - return (conf, [dict]) + return (conf, [_dict] if not kwargs else [kwargs] if not _dict else [_dict, kwargs]) -def remove(path, item=None, key=None, value=None): - """Remove entries from a nested dictionary.""" - entries = [] - removed = [] +def remove( + path: dict[str, Any] | list, item: str | None = None, key: str | None = None, value: Any = None +): + """Remove entries from a nested dictionary or list.""" + entries_removed = [] - if isinstance(path, list): - _list = path + if isinstance(path, dict) and key: + entries_removed.extend(remove_from_dict(path, key, value)) + elif isinstance(path, list) and (item or key): + entries_removed.extend(remove_from_list(path, item, key, value)) - for entry in _list: - if item: - if entry == item: - if value: - try: - entry_index = _list.index(entry) - entry_next = _list[entry_index + 1] - except IndexError: - if "any" == value: - entries.append(entry) - else: - if "any" == value: - entries.extend([entry, entry_next]) - elif entry_next == value: - entries.extend([entry, entry_next]) - else: - entries.append(entry) - elif key: - if value: - if entry.get(key) == value: - entries.append(entry) - else: - if entry.get(key): - entries.append(entry) + return entries_removed - for entry in entries: - try: - _list.remove(entry) - except Exception as e: - log.error(f"Exception: {e}") - else: - removed.append(entry) - - if isinstance(path, dict): - _dict = path - - if key: - if value: - for k, v in _dict.items(): - if k == key and 
v == value: - entries.append(k) - else: - for k in _dict.keys(): - if k == key: - entries.append(k) - - for entry in entries: + +def remove_from_dict(_dict: dict[str, Any], key: str, value: Any): + """Remove keys from a nested dictionary.""" + keys_to_remove = [] + + for k, v in _dict.items(): + if k == key and (value is None or v == value): + keys_to_remove.append(k) + + keys_removed = [] + for k in keys_to_remove: + try: + v = _dict.pop(k) + keys_removed.append({k: v}) + except Exception as e: + log.error(f"Exception: {type(e).__name__}", exc_info=True) + + return keys_removed + + +def remove_from_list(_list: list, item: str | None, key: str | None, value: Any): + """Remove elements from a nested list.""" + elements_to_remove = [] + + for element in _list: + if isinstance(element, dict): + if key and (element.get(key) == value or not value and element.get(key)): + elements_to_remove.append(element) + elif item and element == item: try: - val = _dict.pop(entry) - except Exception as e: - log.error(f"Exception: {e}") - else: - removed.append({entry: val}) + element_index = _list.index(element) + element_next = _list[element_index + 1] if element_index + 1 < len(_list) else None + if value == "any" or (element_next == value): + elements_to_remove.append(element) + if element_next: + elements_to_remove.append(element_next) + except IndexError: + if value == "any": + elements_to_remove.append(element) + + elements_removed = [] + for element in elements_to_remove: + try: + _list.remove(element) + elements_removed.append(element) + except Exception as e: + log.error(f"Exception: {type(e).__name__}", exc_info=True) - return removed + return elements_removed diff --git a/gallery_dl_server/download.py b/gallery_dl_server/download.py index 20ce240..a35f68c 100644 --- a/gallery_dl_server/download.py +++ b/gallery_dl_server/download.py @@ -1,3 +1,8 @@ +# -*- coding: utf-8 -*- + +from multiprocessing import Queue +from itertools import chain + from gallery_dl import job, exception from . 
import config, output @@ -6,7 +11,7 @@ log = output.initialise_logging(__name__) -def run(url, options, log_queue, return_status): +def run(url: str, options: dict[str, str], log_queue: Queue, return_status: Queue): """Set gallery-dl configuration, set up logging and run download job.""" config.clear() @@ -41,104 +46,72 @@ def run(url, options, log_queue, return_status): log.error(f"Exception: {e.__module__}.{type(e).__name__}: {e}") except Exception as e: status = -1 - log.error(f"Exception: {e}") + log.error(f"Exception: {type(e).__name__}: {e}") return_status.put(status) -def config_update(options): +def config_update(options: dict[str, str]): """Update loaded configuration with request options.""" entries_added = [] entries_removed = [] requested_format = options.get("video-options", "none-selected") - if requested_format == "download-video": - try: - cmdline_args = ( - config._config.get("extractor", {}) - .get("ytdl", {}) - .get("cmdline-args", []) - ) - except AttributeError: - pass - else: - entries_removed.extend( - config.remove(cmdline_args, item="--extract-audio") - + config.remove(cmdline_args, item="-x") - ) + if requested_format == "none-selected": + return (entries_added, entries_removed) - try: - raw_options = ( - config._config.get("extractor", {}) - .get("ytdl", {}) - .get("raw-options", {}) - ) - except AttributeError: - pass - else: - entries_removed.extend( - config.remove(raw_options, key="writethumbnail", value=False) - ) + cmdline_args = config.get(["extractor", "ytdl", "cmdline-args"]) + raw_options = config.get(["extractor", "ytdl", "raw-options"]) + postprocessors = config.get(["postprocessors"], conf=raw_options) - try: - postprocessors = ( - config._config.get("extractor", {}) - .get("ytdl", {}) - .get("raw-options", {}) - .get("postprocessors", []) - ) - except AttributeError: - pass - else: - entries_removed.extend( - config.remove(postprocessors, key="key", value="FFmpegExtractAudio") + if requested_format == "download-video": + entries_removed.extend( + chain( + config.remove(cmdline_args, item="--extract-audio"), + config.remove(cmdline_args, item="-x"), + config.remove(raw_options, key="writethumbnail", value=False), + config.remove(postprocessors, key="key", value="FFmpegExtractAudio"), ) + ) if requested_format == "extract-audio": entries_added.extend( - config.add( - { - "extractor": { - "ytdl": { - "cmdline-args": [ - "--extract-audio", - ] + chain( + config.add( + { + "extractor": { + "ytdl": { + "cmdline-args": [ + "--extract-audio", + ] + } } } - } - )[1] - + config.add( - { - "extractor": { - "ytdl": { - "raw-options": { - "writethumbnail": False, - "postprocessors": [ - { - "key": "FFmpegExtractAudio", - "preferredcodec": "best", - "preferredquality": 320, - } - ], + )[1], + config.add( + { + "extractor": { + "ytdl": { + "raw-options": { + "writethumbnail": False, + "postprocessors": [ + { + "key": "FFmpegExtractAudio", + "preferredcodec": "best", + "preferredquality": 320, + } + ], + } } } } - } - )[1] + )[1], + ) ) - try: - cmdline_args = ( - config._config.get("extractor", {}) - .get("ytdl", {}) - .get("cmdline-args", []) - ) - except AttributeError: - pass - else: - entries_removed.extend( - config.remove(cmdline_args, item="--merge-output-format", value="any") - ) + entries_removed.extend( + config.remove(cmdline_args, item="--merge-output-format", value="any") + ) return (entries_added, entries_removed) diff --git a/gallery_dl_server/output.py b/gallery_dl_server/output.py index 9be8c6d..4e4b49b 100644 --- a/gallery_dl_server/output.py +++ 
b/gallery_dl_server/output.py @@ -1,52 +1,62 @@ +# -*- coding: utf-8 -*- + import os import sys import logging -import pickle import re +import pickle +import io + +from multiprocessing import Queue +from typing import TextIO, Any from gallery_dl import output, job +from . import utils -LOG_FILE = os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs", "app.log") + +LOG_FILE = os.path.join(utils.dirname_parent(__file__), "logs", "app.log") LOG_LEVEL = logging.INFO LOG_FORMAT = "%(asctime)s [%(levelname)s] %(message)s" -LOG_FORMAT_DEBUG = ( - "%(asctime)s [%(name)s] [%(filename)s:%(lineno)d] [%(levelname)s] %(message)s" -) +LOG_FORMAT_DEBUG = "%(asctime)s [%(name)s] [%(filename)s:%(lineno)d] [%(levelname)s] %(message)s" LOG_FORMAT_DATE = "%Y-%m-%d %H:%M:%S" +LOG_SEPARATOR = "/sep/" def initialise_logging( - name="gallery-dl-server", stream=sys.stdout, file=LOG_FILE, level=LOG_LEVEL + name=utils.get_package_name(), stream=sys.stdout, file=LOG_FILE, level=LOG_LEVEL ): """Set up basic logging functionality for gallery-dl-server.""" - logger = logging.getLogger(name) + logger = Logger(name) if not logger.hasHandlers(): formatter = Formatter(LOG_FORMAT, LOG_FORMAT_DATE) - handler_console = logging.StreamHandler(stream) - handler_console.setFormatter(formatter) - + handler_console = setup_stream_handler(stream, formatter) logger.addHandler(handler_console) if file: - os.makedirs(os.path.dirname(file), exist_ok=True) - - handler_file = logging.FileHandler( - file, mode="a", encoding="utf-8", delay=False - ) - handler_file.setFormatter(formatter) - + handler_file = setup_file_handler(file, formatter) logger.addHandler(handler_file) logger.setLevel(level) - logger.propagate = False return logger +class Logger(logging.Logger): + """Custom logger which has a method to log multi-line messages.""" + + def __init__(self, name, level=logging.NOTSET): + super().__init__(name, level) + + def log_multiline(self, level: int, message: str): + """Log each line of a multi-line message separately.""" + for line in message.split("\n"): + self.log(level, line) + + class Formatter(logging.Formatter): """Custom formatter which removes ANSI escape sequences.""" @@ -57,25 +67,44 @@ def format(self, record): return remove_ansi_escape_sequences(message) -def remove_ansi_escape_sequences(text): +def remove_ansi_escape_sequences(text: str): """Remove ANSI escape sequences from the given text.""" ansi_escape_pattern = re.compile(r"\x1B\[[0-?9;]*[mGKH]") return ansi_escape_pattern.sub("", text) -def get_logger(name): +def setup_stream_handler(stream: TextIO | Any, formatter: logging.Formatter): + """Set up a console handler for logging.""" + handler = logging.StreamHandler(stream) + handler.setFormatter(formatter) + + return handler + + +def setup_file_handler(file: str, formatter: logging.Formatter): + """Set up a file handler for logging.""" + os.makedirs(os.path.dirname(file), exist_ok=True) + + handler = logging.FileHandler(file, mode="a", encoding="utf-8", delay=False) + handler.setFormatter(formatter) + + return handler + + +def get_logger(name: str | None = None): """Return a logger with the specified name.""" return logging.getLogger(name) def get_blank_logger(name="blank", stream=sys.stdout, level=logging.INFO): """Return a basic logger with no formatter.""" - logger = logging.getLogger(name) - handler = logging.StreamHandler(stream) + logger = Logger(name) - logger.addHandler(handler) - logger.setLevel(level) + if not logger.hasHandlers(): + handler = logging.StreamHandler(stream) + 
+        logger.addHandler(handler)
+        logger.setLevel(level)
 
     logger.propagate = False
 
     return logger
@@ -91,7 +120,7 @@ def setup_logging(level=LOG_LEVEL):
     handler = output.setup_logging_handler("unsupportedfile", fmt="{message}")
 
     if handler:
-        ulog = logging.getLogger("unsupportedfile")
+        ulog = logging.getLogger("unsupported")
         ulog.addHandler(handler)
         ulog.propagate = False
 
@@ -100,7 +129,7 @@
     return logger
 
 
-def capture_logs(log_queue):
+def capture_logs(log_queue: Queue):
     """Send logs that reach the root logger to a queue."""
     root = logging.getLogger()
     queue_handler = QueueHandler(log_queue)
@@ -119,11 +148,11 @@
 class QueueHandler(logging.Handler):
     """Custom logging handler that sends log messages to a queue."""
 
-    def __init__(self, queue):
+    def __init__(self, queue: Queue):
         super().__init__()
         self.queue = queue
 
-    def emit(self, record):
+    def emit(self, record: logging.LogRecord):
         record.msg = remove_ansi_escape_sequences(self.format(record))
         record.args = ()
         record_dict = record_to_dict(record)
@@ -131,7 +160,7 @@ def emit(self, record):
         self.queue.put(record_dict)
 
 
-def record_to_dict(record):
+def record_to_dict(record: logging.LogRecord):
     """Convert a log record into a dictionary."""
     record_dict = record.__dict__.copy()
     record_dict["level"] = record.levelno
@@ -141,7 +170,7 @@
     return record_dict
 
 
-def sanitise_dict(record_dict):
+def sanitise_dict(record_dict: dict[str, Any]):
     """Remove non-serialisable values from a dictionary."""
     keys_to_remove = []
 
@@ -153,7 +182,7 @@
         record_dict.pop(key)
 
 
-def is_serialisable(value):
+def is_serialisable(value: Any):
     """Check if a value can be serialised."""
     try:
         pickle.dumps(value)
@@ -162,18 +191,18 @@
         return False
 
 
-def dict_to_record(record_dict):
+def dict_to_record(record_dict: dict[str, Any]):
     """Convert a dictionary back into a log record."""
     return logging.LogRecord(**record_dict)
 
 
-def stdout_write(s):
+def stdout_write(s: str, /):
     """Write directly to stdout."""
     sys.stdout.write(s + "\n")
     sys.stdout.flush()
 
 
-def stderr_write(s):
+def stderr_write(s: str, /):
     """Write directly to stderr."""
     sys.stderr.write(s + "\n")
     sys.stderr.flush()
@@ -199,12 +228,13 @@ def __init__(self, level=logging.INFO):
         self.level = level
         self.logger = initialise_logging(__name__)
 
-    def write(self, msg):
+    def write(self, msg: str):
+        """Prepare and then log messages."""
         if not msg.strip():
             return
 
         if msg.startswith("# "):
-            msg = f"File already exists or its ID is in a download archive: {msg.removeprefix('# ')}"
+            msg = f"File already exists or its ID is in a download archive: {msg[2:]}"
            self.level = logging.WARNING
 
        self.logger.log(self.level, msg.strip())
@@ -216,8 +246,72 @@ def flush(self):
 class NullWriter:
     """Suppress writes to stdout or stderr."""
 
-    def write(self, msg):
+    def write(self, msg: str):
         pass
 
     def flush(self):
         pass
+
+
+class StringLogger:
+    """Add StringHandler to the root logger and get logs."""
+
+    def __init__(self, level=LOG_LEVEL):
+        self.root = logging.getLogger()
+
+        self.handler = StringHandler()
+        self.handler.setLevel(level)
+
+        self.root.addHandler(self.handler)
+
+    def get_logs(self):
+        """Return logs captured by StringHandler."""
+        return self.handler.get_logs()
+
+    def close(self):
+        """Remove StringHandler from the root logger and close buffer."""
+        self.root.removeHandler(self.handler)
+        self.handler.close()
+
+
+class StringHandler(logging.Handler):
+    """Capture log messages and write them to a string object."""
+
+    def __init__(self):
+        super().__init__()
+        self.buffer = io.StringIO()
+        self.terminator = LOG_SEPARATOR
+
+    def emit(self, record):
+        msg = self.format(record)
+        self.buffer.write(msg + self.terminator)
+
+    def get_logs(self):
+        """Retrieve logs from the string object."""
+        logs = self.buffer.getvalue()
+        return logs.rstrip(self.terminator)
+
+    def close(self):
+        super().close()
+        self.buffer.close()
+
+
+def last_line(message: str, string: str, case_sensitive=True):
+    """Get the last line containing a string in a message."""
+    lines = message.split("\n")
+
+    for line in reversed(lines):
+        if (string in line) if case_sensitive else (string.lower() in line.lower()):
+            return line
+
+    return message
+
+
+def join(_list: list[str]):
+    """Join a list of strings with commas and single quotes."""
+    formatted_list = []
+
+    for item in _list:
+        formatted_list.append(f"'{item}'")
+
+    return ", ".join(formatted_list)
diff --git a/gallery_dl_server/utils.py b/gallery_dl_server/utils.py
index b0c0e75..e22a02b 100644
--- a/gallery_dl_server/utils.py
+++ b/gallery_dl_server/utils.py
@@ -1,10 +1,39 @@
+# -*- coding: utf-8 -*-
+
 import os
 import sys
 
+WINDOWS = os.name == "nt"
+DOCKER = os.path.isfile("/.dockerenv")
+KUBERNETES = os.environ.get("KUBERNETES_SERVICE_HOST") is not None
+EXECUTABLE = bool(getattr(sys, "frozen", False))
+_MEIPASS_PATH = getattr(sys, "_MEIPASS", None)
+
+CONTAINER = DOCKER or KUBERNETES
+_MEIPASS = _MEIPASS_PATH is not None
+PYINSTALLER = EXECUTABLE and _MEIPASS
+
 
-def resource_path(relative_path):
-    """Get absolute path to resource for frozen PyInstaller executable."""
-    if getattr(sys, "frozen", False):
-        return os.path.join(getattr(sys, "_MEIPASS", ""), relative_path)
+def resource_path(relative_path: str):
+    """Return absolute path to resource for frozen PyInstaller executable."""
+    if PYINSTALLER:
+        assert _MEIPASS_PATH
+        return os.path.join(_MEIPASS_PATH, relative_path)
     else:
         return relative_path
+
+
+def dirname_parent(path: str):
+    """Return grandparent directory of the given path."""
+    return os.path.dirname(os.path.dirname(path))
+
+
+def join_paths(base_path: str, /, *paths: str):
+    """Join each path with the base path and return a list."""
+    return [os.path.join(base_path, path) for path in paths]
+
+
+def get_package_name():
+    """Return the name of the package."""
+    fallback = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
+    return __package__ if __package__ else fallback
diff --git a/gallery_dl_server/version.py b/gallery_dl_server/version.py
new file mode 100644
index 0000000..c23edcc
--- /dev/null
+++ b/gallery_dl_server/version.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+__version__ = "0.6.0-preview"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..e39fdb1
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,9 @@
+[tool.ruff]
+line-length = 100
+indent-width = 4
+target-version = "py312"
+
+[tool.ruff.format]
+quote-style = "double"
+indent-style = "space"
+line-ending = "auto"
diff --git a/requirements.txt b/requirements.txt
index ec2ba7d..9d8e57e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,6 +2,7 @@ aiofiles==24.1.0
 gallery_dl==1.28.3
 Jinja2==3.1.5
 python-multipart==0.0.20
+PyYAML==6.0.2
 requests==2.32.3
 starlette==0.45.2
 uvicorn==0.34.0; platform_machine != 'x86_64'
diff --git a/templates/index.html b/templates/index.html
index ff36dc2..81be4b0 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -1,7 +1,6 @@
 [hunk content not recoverable: the HTML markup was stripped during extraction; one line is removed near the top of index.html]
@@ -74,12 +73,11 @@
       text-overflow: ellipsis;
     }
 
-    #btn-logs {
-      margin: 24px auto 12px auto;
+    select optgroup{
+      font-style: normal;
     }
 
     #box {
-      margin: 12px auto 12px auto;
       resize: vertical;
       overflow-y: auto;
       min-height: 95px;
@@ -108,9 +106,8 @@
     }
 
     footer > p {
-      margin: 8px auto 24px auto;
       font-size: 16px;
-      line-height: 24px;
+      line-height: 16px;
     }
 
@@ -118,26 +115,33 @@
 [hunk content not recoverable: the HTML markup was stripped during extraction; the surviving fragments show the page heading and intro text being reworked ("Enter a URL to download the contents to the server. The URL can be from any supported site listed by gallery-dl or yt-dlp.") with target="_blank" rel="noopener noreferrer" added to the gallery-dl and yt-dlp links, followed by further changes to the download form markup]