Merge branch 'dev' into issue1316

Nick Sweeting, 1 year ago
commit ef856e8051

50 changed files with 1464 additions and 1689 deletions
  1. .dockerignore (+5 -0)
  2. .github/FUNDING.yml (+2 -3)
  3. .github/workflows/docker.yml (+19 -13)
  4. .gitignore (+2 -4)
  5. Dockerfile (+48 -43)
  6. README.md (+4 -1)
  7. archivebox/__init__.py (+6 -0)
  8. archivebox/api/__init__.py (+1 -0)
  9. archivebox/api/apps.py (+2 -0)
  10. archivebox/api/archive.py (+0 -184)
  11. archivebox/api/auth.py (+92 -33)
  12. archivebox/api/migrations/0001_initial.py (+7 -6)
  13. archivebox/api/migrations/0002_alter_apitoken_options.py (+17 -0)
  14. archivebox/api/models.py (+48 -15)
  15. archivebox/api/tests.py (+11 -8)
  16. archivebox/api/urls.py (+17 -0)
  17. archivebox/api/v1_api.py (+111 -0)
  18. archivebox/api/v1_auth.py (+52 -0)
  19. archivebox/api/v1_cli.py (+234 -0)
  20. archivebox/api/v1_core.py (+210 -0)
  21. archivebox/config.py (+44 -41)
  22. archivebox/core/admin.py (+27 -0)
  23. archivebox/core/apps.py (+18 -0)
  24. archivebox/core/auth.py (+3 -2)
  25. archivebox/core/auth_ldap.py (+0 -2)
  26. archivebox/core/settings.py (+67 -0)
  27. archivebox/core/urls.py (+9 -14)
  28. archivebox/core/views.py (+135 -4)
  29. archivebox/index/schema.py (+1 -0)
  30. archivebox/logging_util.py (+2 -2)
  31. archivebox/main.py (+3 -9)
  32. archivebox/manage.py (+1 -1)
  33. archivebox/parsers/__init__.py (+0 -53)
  34. archivebox/parsers/generic_html.py (+88 -5)
  35. archivebox/parsers/generic_json.py (+5 -13)
  36. archivebox/parsers/generic_jsonl.py (+0 -2)
  37. archivebox/parsers/generic_txt.py (+2 -15)
  38. archivebox/templates/core/navigation.html (+2 -1)
  39. archivebox/util.py (+67 -11)
  40. bin/build_docker.sh (+12 -12)
  41. bin/docker_entrypoint.sh (+1 -0)
  42. bin/lint.sh (+1 -1)
  43. bin/lock_pkgs.sh (+2 -2)
  44. bin/setup.sh (+2 -2)
  45. docker-compose.yml (+14 -14)
  46. docs (+1 -1)
  47. package-lock.json (+35 -15)
  48. pdm.lock (+0 -1128)
  49. pyproject.toml (+20 -20)
  50. requirements.txt (+14 -9)

+ 5 - 0
.dockerignore

@@ -17,6 +17,11 @@ venv/
 .venv-old/
 .docker-venv/
 node_modules/
+chrome/
+chromeprofile/
+
+pdm.dev.lock
+pdm.lock

 docs/
 build/

+ 2 - 3
.github/FUNDING.yml

@@ -1,3 +1,2 @@
-github: pirate
-patreon: theSquashSH
-custom: ["https://hcb.hackclub.com/donations/start/archivebox", "https://paypal.me/NicholasSweeting"]
+github: ["ArchiveBox", "pirate"]
+custom: ["https://donate.archivebox.io", "https://paypal.me/NicholasSweeting"]

+ 19 - 13
.github/workflows/docker.yml

@@ -11,7 +11,7 @@ on:

env:
  DOCKER_IMAGE: archivebox-ci
-      
+
jobs:
  buildx:
    runs-on: ubuntu-latest
@@ -24,21 +24,21 @@ jobs:

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
-      
+
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3
        with:
          version: latest
          install: true
-          platforms: linux/amd64,linux/arm64,linux/arm/v7
-      
+          platforms: linux/amd64,linux/arm64
+
      - name: Builder instance name
        run: echo ${{ steps.buildx.outputs.name }}
-      
+
      - name: Available platforms
        run: echo ${{ steps.buildx.outputs.platforms }}
-      
+
      - name: Cache Docker layers
        uses: actions/cache@v3
        with:
@@ -51,21 +51,27 @@ jobs:
        uses: docker/login-action@v3
        if: github.event_name != 'pull_request'
        with:
-           username: ${{ secrets.DOCKER_USERNAME }}
-           password: ${{ secrets.DOCKER_PASSWORD }}
-      
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+
      - name: Collect Docker tags
+        # https://github.com/docker/metadata-action
        id: docker_meta
        uses: docker/metadata-action@v5
        with:
          images: archivebox/archivebox,nikisweeting/archivebox
          tags: |
+              # :stable
              type=ref,event=branch
+              # :0.7.3
              type=semver,pattern={{version}}
+              # :0.7
              type=semver,pattern={{major}}.{{minor}}
+              # :sha-463ea54
              type=sha
-              type=raw,value=latest,enable={{is_default_branch}}
-      
+              # :latest
+              type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'stable') }}
+
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v5
@@ -77,7 +83,7 @@ jobs:
          tags: ${{ steps.docker_meta.outputs.tags }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache-new
-          platforms: linux/amd64,linux/arm64,linux/arm/v7
+          platforms: linux/amd64,linux/arm64

      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}
@@ -88,7 +94,7 @@ jobs:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          repository: archivebox/archivebox
-       
+
      # This ugly bit is necessary if you don't want your cache to grow forever
      # until it hits GitHub's limit of 5GB.
      # Temp fix

+ 2 - 4
.gitignore

@@ -13,8 +13,9 @@ venv/
node_modules/

# Ignore dev lockfiles (should always be built fresh)
-requirements-dev.txt
+pdm.lock
pdm.dev.lock
+requirements-dev.txt

# Packaging artifacts
.pdm-python
@@ -26,9 +27,6 @@ dist/

# Data folders
data/
-data1/
-data2/
-data3/
data*/
output/


+ 48 - 43
Dockerfile

@@ -37,7 +37,7 @@ LABEL name="archivebox" \
    com.docker.extension.detailed-description='See here for detailed documentation: https://wiki.archivebox.io' \
    com.docker.extension.changelog='See here for release notes: https://github.com/ArchiveBox/ArchiveBox/releases' \
    com.docker.extension.categories='database,utility-tools'
-    
+
ARG TARGETPLATFORM
ARG TARGETOS
ARG TARGETARCH
@@ -87,7 +87,9 @@ COPY --chown=root:root --chmod=755 package.json "$CODE_DIR/"
RUN grep '"version": ' "${CODE_DIR}/package.json" | awk -F'"' '{print $4}' > /VERSION.txt

# Force apt to leave downloaded binaries in /var/cache/apt (massively speeds up Docker builds)
-RUN echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \
+RUN echo 'Binary::apt::APT::Keep-Downloaded-Packages "1";' > /etc/apt/apt.conf.d/99keep-cache \
+    && echo 'APT::Install-Recommends "0";' > /etc/apt/apt.conf.d/99no-intall-recommends \
+    && echo 'APT::Install-Suggests "0";' > /etc/apt/apt.conf.d/99no-intall-suggests \
    && rm -f /etc/apt/apt.conf.d/docker-clean

# Print debug info about build and save it to disk, for human eyes only, not used by anything else
@@ -120,10 +122,10 @@ RUN echo "[*] Setting up $ARCHIVEBOX_USER user uid=${DEFAULT_PUID}..." \
# Install system apt dependencies (adding backports to access more recent apt updates)
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$TARGETVARIANT \
    echo "[+] Installing APT base system dependencies for $TARGETPLATFORM..." \
-    && echo 'deb https://deb.debian.org/debian bookworm-backports main contrib non-free' >> /etc/apt/sources.list.d/backports.list \
+    && echo 'deb https://deb.debian.org/debian bookworm-backports main contrib non-free' > /etc/apt/sources.list.d/backports.list \
    && mkdir -p /etc/apt/keyrings \
    && apt-get update -qq \
-    && apt-get install -qq -y -t bookworm-backports --no-install-recommends \
+    && apt-get install -qq -y -t bookworm-backports \
        # 1. packaging dependencies
        apt-transport-https ca-certificates apt-utils gnupg2 curl wget \
        # 2. docker and init system dependencies
@@ -134,27 +136,13 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$T

######### Language Environments ####################################

-# Install Node environment
-RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$TARGETVARIANT --mount=type=cache,target=/root/.npm,sharing=locked,id=npm-$TARGETARCH$TARGETVARIANT \
-    echo "[+] Installing Node $NODE_VERSION environment in $NODE_MODULES..." \
-    && echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" >> /etc/apt/sources.list.d/nodejs.list \
-    && curl -fsSL "https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key" | gpg --dearmor | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg \
-    && apt-get update -qq \
-    && apt-get install -qq -y -t bookworm-backports --no-install-recommends \
-        nodejs libatomic1 python3-minimal \
-    && rm -rf /var/lib/apt/lists/* \
-    # Update NPM to latest version
-    && npm i -g npm --cache /root/.npm \
-    # Save version info
-    && ( \
-        which node && node --version \
-        && which npm && npm --version \
-        && echo -e '\n\n' \
-    ) | tee -a /VERSION.txt
-
# Install Python environment
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$TARGETVARIANT --mount=type=cache,target=/root/.cache/pip,sharing=locked,id=pip-$TARGETARCH$TARGETVARIANT \
    echo "[+] Setting up Python $PYTHON_VERSION runtime..." \
+    # && apt-get update -qq \
+    # && apt-get install -qq -y -t bookworm-backports --no-upgrade \
+    #     python${PYTHON_VERSION} python${PYTHON_VERSION}-minimal python3-pip \
+    # && rm -rf /var/lib/apt/lists/* \
    # tell PDM to allow using global system python site packages
    # && rm /usr/lib/python3*/EXTERNALLY-MANAGED \
    # create global virtual environment GLOBAL_VENV to use (better than using pip install --global)
@@ -171,13 +159,34 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$T
        && echo -e '\n\n' \
    ) | tee -a /VERSION.txt

+
+# Install Node environment
+RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$TARGETVARIANT --mount=type=cache,target=/root/.npm,sharing=locked,id=npm-$TARGETARCH$TARGETVARIANT \
+    echo "[+] Installing Node $NODE_VERSION environment in $NODE_MODULES..." \
+    && echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" >> /etc/apt/sources.list.d/nodejs.list \
+    && curl -fsSL "https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key" | gpg --dearmor | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg \
+    && apt-get update -qq \
+    && apt-get install -qq -y -t bookworm-backports --no-upgrade libatomic1 \
+    && apt-get install -y -t bookworm-backports --no-upgrade \
+        nodejs \
+    && rm -rf /var/lib/apt/lists/* \
+    # Update NPM to latest version
+    && npm i -g npm --cache /root/.npm \
+    # Save version info
+    && ( \
+        which node && node --version \
+        && which npm && npm --version \
+        && echo -e '\n\n' \
+    ) | tee -a /VERSION.txt
+
+
######### Extractor Dependencies ##################################

# Install apt dependencies
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$TARGETVARIANT --mount=type=cache,target=/root/.cache/pip,sharing=locked,id=pip-$TARGETARCH$TARGETVARIANT \
    echo "[+] Installing APT extractor dependencies globally using apt..." \
    && apt-get update -qq \
-    && apt-get install -qq -y -t bookworm-backports --no-install-recommends \
+    && apt-get install -qq -y -t bookworm-backports \
        curl wget git yt-dlp ffmpeg ripgrep \
        # Packages we have also needed in the past:
        # youtube-dl wget2 aria2 python3-pyxattr rtmpdump libfribidi-bin mpv \
@@ -196,25 +205,21 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$T
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$TARGETVARIANT --mount=type=cache,target=/root/.cache/pip,sharing=locked,id=pip-$TARGETARCH$TARGETVARIANT --mount=type=cache,target=/root/.cache/ms-playwright,sharing=locked,id=browsers-$TARGETARCH$TARGETVARIANT \
    echo "[+] Installing Browser binary dependencies to $PLAYWRIGHT_BROWSERS_PATH..." \
    && apt-get update -qq \
-    && apt-get install -qq -y -t bookworm-backports --no-install-recommends \
+    && apt-get install -qq -y -t bookworm-backports \
        fontconfig fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-khmeros fonts-kacst fonts-symbola fonts-noto fonts-freefont-ttf \
+        at-spi2-common fonts-liberation fonts-noto-color-emoji fonts-tlwg-loma-otf fonts-unifont libatk-bridge2.0-0 libatk1.0-0 libatspi2.0-0 libavahi-client3 \
+        libavahi-common-data libavahi-common3 libcups2 libfontenc1 libice6 libnspr4 libnss3 libsm6 libunwind8 \
+        libxaw7 libxcomposite1 libxdamage1 libxfont2 \
+        libxkbfile1 libxmu6 libxpm4 libxt6 x11-xkb-utils xfonts-encodings \
+        # xfonts-scalable xfonts-utils xserver-common xvfb \
        # chrome can run without dbus/upower technically, it complains about missing dbus but should run ok anyway
        # libxss1 dbus dbus-x11 upower \
    # && service dbus start \
-    && if [[ "$TARGETPLATFORM" == *amd64* || "$TARGETPLATFORM" == *arm64* ]]; then \
-        # install Chromium using playwright
-        pip install playwright \
-        && cp -r /root/.cache/ms-playwright "$PLAYWRIGHT_BROWSERS_PATH" \
-        && playwright install --with-deps chromium \
-        && export CHROME_BINARY="$(python -c 'from playwright.sync_api import sync_playwright; print(sync_playwright().start().chromium.executable_path)')"; \
-    else \
-        # fall back to installing Chromium via apt-get on platforms not supported by playwright (e.g. risc, ARMv7, etc.)
-        # apt-get install -qq -y -t bookworm-backports --no-install-recommends \
-        #     chromium \
-        # && export CHROME_BINARY="$(which chromium)"; \
-        echo 'armv7 no longer supported in versions after v0.7.3' \
-        exit 1; \
-    fi \
+    # install Chromium using playwright
+    && pip install playwright \
+    && cp -r /root/.cache/ms-playwright "$PLAYWRIGHT_BROWSERS_PATH" \
+    && playwright install chromium \
+    && export CHROME_BINARY="$(python -c 'from playwright.sync_api import sync_playwright; print(sync_playwright().start().chromium.executable_path)')" \
    && rm -rf /var/lib/apt/lists/* \
    && ln -s "$CHROME_BINARY" /usr/bin/chromium-browser \
    && mkdir -p "/home/${ARCHIVEBOX_USER}/.config/chromium/Crash Reports/pending/" \
@@ -247,8 +252,8 @@ COPY --chown=root:root --chmod=755 "./pyproject.toml" "requirements.txt" "$CODE_
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$TARGETVARIANT --mount=type=cache,target=/root/.cache/pip,sharing=locked,id=pip-$TARGETARCH$TARGETVARIANT \
    echo "[+] Installing PIP ArchiveBox dependencies from requirements.txt for ${TARGETPLATFORM}..." \
    && apt-get update -qq \
-    && apt-get install -qq -y -t bookworm-backports --no-install-recommends \
-        build-essential \
+    && apt-get install -qq -y -t bookworm-backports \
+        # build-essential \
        libssl-dev libldap2-dev libsasl2-dev \
        python3-ldap python3-msgpack python3-mutagen python3-regex python3-pycryptodome procps \
    # && ln -s "$GLOBAL_VENV" "$APP_VENV" \
@@ -258,8 +263,8 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$T
    # && pdm export -o requirements.txt --without-hashes \
    # && source $GLOBAL_VENV/bin/activate \
    && pip install -r requirements.txt \
-    && apt-get purge -y \
-        build-essential \
+    # && apt-get purge -y \
+        # build-essential \
    && apt-get autoremove -y \
    && rm -rf /var/lib/apt/lists/*

@@ -269,7 +274,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=apt-$TARGETARCH$T
    echo "[*] Installing PIP ArchiveBox package from $CODE_DIR..." \
    # && apt-get update -qq \
    # install C compiler to build deps on platforms that dont have 32-bit wheels available on pypi
-    # && apt-get install -qq -y -t bookworm-backports --no-install-recommends \
+    # && apt-get install -qq -y -t bookworm-backports \
    #     build-essential  \
    # INSTALL ARCHIVEBOX python package globally from CODE_DIR, with all optional dependencies
    && pip install -e "$CODE_DIR"[sonic,ldap] \

+ 4 - 1
README.md

@@ -407,7 +407,7 @@ See <a href="#%EF%B8%8F-cli-usage">below</a> for usage examples using the CLI, W
> *Warning: These are contributed by external volunteers and may lag behind the official `pip` channel.*

<ul>
-<li>TrueNAS: <a href="https://truecharts.org/charts/incubator/archivebox/">Official ArchiveBox TrueChart</a> / <a href="https://dev.to/finloop/setting-up-archivebox-on-truenas-scale-1788">Custom App Guide</a></li>
+<li>TrueNAS: <a href="https://truecharts.org/charts/stable/archivebox/">Official ArchiveBox TrueChart</a> / <a href="https://dev.to/finloop/setting-up-archivebox-on-truenas-scale-1788">Custom App Guide</a></li>
<li><a href="https://unraid.net/community/apps?q=archivebox#r">UnRaid</a></li>
<li><a href="https://github.com/YunoHost-Apps/archivebox_ynh">Yunohost</a></li>
<li><a href="https://www.cloudron.io/store/io.archivebox.cloudronapp.html">Cloudron</a></li>
@@ -445,6 +445,9 @@ Other providers of paid ArchiveBox hosting (not officially endorsed):<br/>
<li><a href="https://fly.io/">
 <img src="https://img.shields.io/badge/Unmanaged_App-Fly.io-%239a2de6.svg?style=flat" height="22px"/>
</a> (USD $10-50+/mo, <a href="https://fly.io/docs/hands-on/start/">instructions</a>)</li>
+<li><a href="https://railway.app/template/2Vvhmy">
+ <img src="https://img.shields.io/badge/Unmanaged_App-Railway-%23A11BE6.svg?style=flat" height="22px"/>
+</a> (USD $0-5+/mo)</li>
<li><a href="https://aws.amazon.com/marketplace/pp/Linnovate-Open-Source-Innovation-Support-For-Archi/B08RVW6MJ2"><img src="https://img.shields.io/badge/Unmanaged_VPS-AWS-%23ee8135.svg?style=flat" height="22px"/></a> (USD $60-200+/mo)</li>
<li><a href="https://azuremarketplace.microsoft.com/en-us/marketplace/apps/meanio.archivebox?ocid=gtmrewards_whatsnewblog_archivebox_vol118"><img src="https://img.shields.io/badge/Unmanaged_VPS-Azure-%237cb300.svg?style=flat" height="22px"/></a> (USD $60-200+/mo)</li>
<br/>

+ 6 - 0
archivebox/__init__.py

@@ -1 +1,7 @@
__package__ = 'archivebox'
+
+
+# monkey patch django timezone to add back utc (it was removed in Django 5.0)
+import datetime
+from django.utils import timezone
+timezone.utc = datetime.timezone.utc
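For context: Django 5.0 removed the `django.utils.timezone.utc` alias (code written for Django <5 still references it). A minimal sketch of what the shim above restores, assuming Django 5.x is installed:

    import datetime
    from django.utils import timezone

    timezone.utc = datetime.timezone.utc           # same monkey patch as in the diff above
    # legacy call sites like this keep working on Django 5.0:
    now = datetime.datetime.now(tz=timezone.utc)
    assert timezone.utc is datetime.timezone.utc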

+ 1 - 0
archivebox/api/__init__.py

@@ -0,0 +1 @@
+__package__ = 'archivebox.api'

+ 2 - 0
archivebox/api/apps.py

@@ -1,3 +1,5 @@
+__package__ = 'archivebox.api'
+
from django.apps import AppConfig


+ 0 - 184
archivebox/api/archive.py

@@ -1,184 +0,0 @@
-# archivebox_api.py
-from typing import List, Optional
-from enum import Enum
-from pydantic import BaseModel
-from ninja import Router
-from main import (
-    add,
-    remove,
-    update,
-    list_all,
-    ONLY_NEW,
-)  # Assuming these functions are defined in main.py
-
-
-# Schemas
-
-class StatusChoices(str, Enum):
-    indexed = 'indexed'
-    archived = 'archived'
-    unarchived = 'unarchived'
-    present = 'present'
-    valid = 'valid'
-    invalid = 'invalid'
-    duplicate = 'duplicate'
-    orphaned = 'orphaned'
-    corrupted = 'corrupted'
-    unrecognized = 'unrecognized'
-
-
-class AddURLSchema(BaseModel):
-    urls: List[str]
-    tag: str = ""
-    depth: int = 0
-    update: bool = not ONLY_NEW  # Default to the opposite of ONLY_NEW
-    update_all: bool = False
-    index_only: bool = False
-    overwrite: bool = False
-    init: bool = False
-    extractors: str = ""
-    parser: str = "auto"
-
-
-class RemoveURLSchema(BaseModel):
-    yes: bool = False
-    delete: bool = False
-    before: Optional[float] = None
-    after: Optional[float] = None
-    filter_type: str = "exact"
-    filter_patterns: Optional[List[str]] = None
-
-
-class UpdateSchema(BaseModel):
-    resume: Optional[float] = None
-    only_new: Optional[bool] = None
-    index_only: Optional[bool] = False
-    overwrite: Optional[bool] = False
-    before: Optional[float] = None
-    after: Optional[float] = None
-    status: Optional[StatusChoices] = None
-    filter_type: Optional[str] = 'exact'
-    filter_patterns: Optional[List[str]] = None
-    extractors: Optional[str] = ""
-
-
-class ListAllSchema(BaseModel):
-    filter_patterns: Optional[List[str]] = None
-    filter_type: str = 'exact'
-    status: Optional[StatusChoices] = None
-    after: Optional[float] = None
-    before: Optional[float] = None
-    sort: Optional[str] = None
-    csv: Optional[str] = None
-    json: bool = False
-    html: bool = False
-    with_headers: bool = False
-
-
-# API Router
-router = Router()
-
-
[email protected]("/add", response={200: dict})
-def api_add(request, payload: AddURLSchema):
-    try:
-        result = add(
-            urls=payload.urls,
-            tag=payload.tag,
-            depth=payload.depth,
-            update=payload.update,
-            update_all=payload.update_all,
-            index_only=payload.index_only,
-            overwrite=payload.overwrite,
-            init=payload.init,
-            extractors=payload.extractors,
-            parser=payload.parser,
-        )
-        # Currently the add function returns a list of ALL items in the DB, ideally only return new items
-        return {
-            "status": "success",
-            "message": "URLs added successfully.",
-            "result": str(result),
-        }
-    except Exception as e:
-        # Handle exceptions raised by the add function or during processing
-        return {"status": "error", "message": str(e)}
-
-
[email protected]("/remove", response={200: dict})
-def api_remove(request, payload: RemoveURLSchema):
-    try:
-        result = remove(
-            yes=payload.yes,
-            delete=payload.delete,
-            before=payload.before,
-            after=payload.after,
-            filter_type=payload.filter_type,
-            filter_patterns=payload.filter_patterns,
-        )
-        return {
-            "status": "success",
-            "message": "URLs removed successfully.",
-            "result": result,
-        }
-    except Exception as e:
-        # Handle exceptions raised by the remove function or during processing
-        return {"status": "error", "message": str(e)}
-
-
[email protected]("/update", response={200: dict})
-def api_update(request, payload: UpdateSchema):
-    try:
-        result = update(
-            resume=payload.resume,
-            only_new=payload.only_new,
-            index_only=payload.index_only,
-            overwrite=payload.overwrite,
-            before=payload.before,
-            after=payload.after,
-            status=payload.status,
-            filter_type=payload.filter_type,
-            filter_patterns=payload.filter_patterns,
-            extractors=payload.extractors,
-        )
-        return {
-            "status": "success",
-            "message": "Archive updated successfully.",
-            "result": result,
-        }
-    except Exception as e:
-        # Handle exceptions raised by the update function or during processing
-        return {"status": "error", "message": str(e)}
-
-
[email protected]("/list_all", response={200: dict})
-def api_list_all(request, payload: ListAllSchema):
-    try:
-        result = list_all(
-            filter_patterns=payload.filter_patterns,
-            filter_type=payload.filter_type,
-            status=payload.status,
-            after=payload.after,
-            before=payload.before,
-            sort=payload.sort,
-            csv=payload.csv,
-            json=payload.json,
-            html=payload.html,
-            with_headers=payload.with_headers,
-        )
-        # TODO: This is kind of bad, make the format a choice field
-        if payload.json:
-            return {"status": "success", "format": "json", "data": result}
-        elif payload.html:
-            return {"status": "success", "format": "html", "data": result}
-        elif payload.csv:
-            return {"status": "success", "format": "csv", "data": result}
-        else:
-            return {
-                "status": "success",
-                "message": "List generated successfully.",
-                "data": result,
-            }
-    except Exception as e:
-        # Handle exceptions raised by the list_all function or during processing
-        return {"status": "error", "message": str(e)}

+ 92 - 33
archivebox/api/auth.py

@@ -1,48 +1,107 @@
+__package__ = 'archivebox.api'
+
+from typing import Optional
+
+from django.http import HttpRequest
+from django.contrib.auth import login
from django.contrib.auth import authenticate
-from ninja import Form, Router, Schema
-from ninja.security import HttpBearer
+from django.contrib.auth.models import AbstractBaseUser

-from api.models import Token
+from ninja.security import HttpBearer, APIKeyQuery, APIKeyHeader, HttpBasicAuth, django_auth_superuser

-router = Router()

+def auth_using_token(token, request: Optional[HttpRequest]=None) -> Optional[AbstractBaseUser]:
+    """Given an API token string, check if a corresponding non-expired APIToken exists, and return its user"""
+    from api.models import APIToken        # lazy import model to avoid loading it at urls.py import time
+    
+    user = None

-class GlobalAuth(HttpBearer):
-    def authenticate(self, request, token):
+    submitted_empty_form = token in ('string', '', None)
+    if submitted_empty_form:
+        user = request.user       # see if user is authed via django session and use that as the default
+    else:
        try:
-            return Token.objects.get(token=token).user
-        except Token.DoesNotExist:
+            token = APIToken.objects.get(token=token)
+            if token.is_valid():
+                user = token.user
+        except APIToken.DoesNotExist:
            pass

+    if not user:
+        print('[❌] Failed to authenticate API user using API Key:', request)

-class AuthSchema(Schema):
-    email: str
-    password: str
-
+    return None

-@router.post("/authenticate", auth=None)  # overriding global auth
-def get_token(request, auth_data: AuthSchema):
-    user = authenticate(username=auth_data.email, password=auth_data.password)
-    if user:
-        # Assuming a user can have multiple tokens and you want to create a new one every time
-        new_token = Token.objects.create(user=user)
-        return {"token": new_token.token, "expires": new_token.expiry_as_iso8601}
+def auth_using_password(username, password, request: Optional[HttpRequest]=None) -> Optional[AbstractBaseUser]:
+    """Given a username and password, check if they are valid and return the corresponding user"""
+    user = None
+    
+    submitted_empty_form = (username, password) in (('string', 'string'), ('', ''), (None, None))
+    if submitted_empty_form:
+        user = request.user       # see if user is authed via django session and use that as the default
    else:
-        return {"error": "Invalid credentials"}
+        user = authenticate(
+            username=username,
+            password=password,
+        )
+
+    if not user:
+        print('[❌] Failed to authenticate API user using API Key:', request)
+
+    return user
+
+
+### Base Auth Types
+
+class APITokenAuthCheck:
+    """The base class for authentication methods that use an api.models.APIToken"""
+    def authenticate(self, request: HttpRequest, key: Optional[str]=None) -> Optional[AbstractBaseUser]:
+        user = auth_using_token(
+            token=key,
+            request=request,
+        )
+        if user is not None:
+            login(request, user, backend='django.contrib.auth.backends.ModelBackend')
+        return user
+
+class UserPassAuthCheck:
+    """The base class for authentication methods that use a username & password"""
+    def authenticate(self, request: HttpRequest, username: Optional[str]=None, password: Optional[str]=None) -> Optional[AbstractBaseUser]:
+        user = auth_using_password(
+            username=username,
+            password=password,
+            request=request,
+        )
+        if user is not None:
+            login(request, user, backend='django.contrib.auth.backends.ModelBackend')
+        return user
+
+
+### Django-Ninja-Provided Auth Methods
+
+class UsernameAndPasswordAuth(UserPassAuthCheck, HttpBasicAuth):
+    """Allow authenticating by passing username & password via HTTP Basic Authentication (not recommended)"""
+    pass
+
+class QueryParamTokenAuth(APITokenAuthCheck, APIKeyQuery):
+    """Allow authenticating by passing api_key=xyz as a GET/POST query parameter"""
+    param_name = "api_key"
+
+class HeaderTokenAuth(APITokenAuthCheck, APIKeyHeader):
+    """Allow authenticating by passing X-API-Key=xyz as a request header"""
+    param_name = "X-API-Key"

+class BearerTokenAuth(APITokenAuthCheck, HttpBearer):
+    """Allow authenticating by passing Bearer=xyz as a request header"""
+    pass

-class TokenValidationSchema(Schema):
-    token: str

+### Enabled Auth Methods

-@router.post("/validate_token", auth=None) # No authentication required for this endpoint
-def validate_token(request, token_data: TokenValidationSchema):
-    try:
-        # Attempt to authenticate using the provided token
-        user = GlobalAuth().authenticate(request, token_data.token)
-        if user:
-            return {"status": "valid"}
-        else:
-            return {"status": "invalid"}
-    except Token.DoesNotExist:
-        return {"status": "invalid"}
+API_AUTH_METHODS = [
+    QueryParamTokenAuth(), 
+    HeaderTokenAuth(),
+    BearerTokenAuth(),
+    django_auth_superuser,
+    UsernameAndPasswordAuth(),
+]
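A rough sketch of how a client could exercise the enabled auth methods above; the server URL and token value are hypothetical placeholders, and the endpoint used (`/cli/list`) comes from the v1_cli router added later in this commit:

    import requests  # assumes the requests library is available

    token = "0123456789abcdef0123456789abcdef"    # hypothetical 32-char APIToken string
    base = "http://127.0.0.1:8000/api/v1"         # hypothetical local dev server

    # Three equivalent ways to pass the token, matching the classes above:
    r1 = requests.post(base + "/cli/list", json={}, params={"api_key": token})                     # QueryParamTokenAuth
    r2 = requests.post(base + "/cli/list", json={}, headers={"X-API-Key": token})                  # HeaderTokenAuth
    r3 = requests.post(base + "/cli/list", json={}, headers={"Authorization": f"Bearer {token}"})  # BearerTokenAuth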

+ 7 - 6
archivebox/api/migrations/0001_initial.py

@@ -1,9 +1,10 @@
-# Generated by Django 3.1.14 on 2024-04-09 18:52
+# Generated by Django 4.2.11 on 2024-04-25 04:19

import api.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
+import uuid


class Migration(migrations.Migration):
@@ -16,13 +17,13 @@ class Migration(migrations.Migration):

    operations = [
        migrations.CreateModel(
-            name='Token',
+            name='APIToken',
            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('token', models.CharField(default=auth.models.hex_uuid, max_length=32, unique=True)),
+                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
+                ('token', models.CharField(default=api.models.generate_secret_token, max_length=32, unique=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
-                ('expiry', models.DateTimeField(blank=True, null=True)),
-                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tokens', to=settings.AUTH_USER_MODEL)),
+                ('expires', models.DateTimeField(blank=True, null=True)),
+                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]

+ 17 - 0
archivebox/api/migrations/0002_alter_apitoken_options.py

@@ -0,0 +1,17 @@
+# Generated by Django 5.0.4 on 2024-04-26 05:28
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('api', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name='apitoken',
+            options={'verbose_name': 'API Key', 'verbose_name_plural': 'API Keys'},
+        ),
+    ]

+ 48 - 15
archivebox/api/models.py

@@ -1,30 +1,63 @@
+__package__ = 'archivebox.api'
+
import uuid
+import secrets
from datetime import timedelta

from django.conf import settings
from django.db import models
from django.utils import timezone
-from django.utils.translation import gettext_lazy as _

-def hex_uuid():
-    return uuid.uuid4().hex
+from django_stubs_ext.db.models import TypedModelMeta
+
+
+def generate_secret_token() -> str:
+    # returns cryptographically secure string with len() == 32
+    return secrets.token_hex(16)


-class Token(models.Model):
-    user = models.ForeignKey(
-        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="tokens"
-    )
-    token = models.CharField(max_length=32, default=hex_uuid, unique=True)
+class APIToken(models.Model):
+    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
+
+    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
+    token = models.CharField(max_length=32, default=generate_secret_token, unique=True)
+    
    created = models.DateTimeField(auto_now_add=True)
-    expiry = models.DateTimeField(null=True, blank=True)
+    expires = models.DateTimeField(null=True, blank=True)
+
+    class Meta(TypedModelMeta):
+        verbose_name = "API Key"
+        verbose_name_plural = "API Keys"
+
+    def __str__(self) -> str:
+        return self.token
+
+    def __repr__(self) -> str:
+        return f'<APIToken user={self.user.username} token=************{self.token[-4:]}>'
+
+    def __json__(self) -> dict:
+        return {
+            "TYPE":             "APIToken",    
+            "id":               str(self.id),
+            "user_id":          str(self.user.id),
+            "user_username":    self.user.username,
+            "token":            self.token,
+            "created":          self.created.isoformat(),
+            "expires":          self.expires_as_iso8601,
+        }

    @property
-    def expiry_as_iso8601(self):
+    def expires_as_iso8601(self):
        """Returns the expiry date of the token in ISO 8601 format or a date 100 years in the future if none."""
-        expiry_date = (
-            self.expiry if self.expiry else timezone.now() + timedelta(days=365 * 100)
-        )
+        expiry_date = self.expires or (timezone.now() + timedelta(days=365 * 100))
+
        return expiry_date.isoformat()

-    def __str__(self):
-        return self.token
+    def is_valid(self, for_date=None):
+        for_date = for_date or timezone.now()
+
+        if self.expires and self.expires < for_date:
+            return False
+
+        return True
+
+ 11 - 8
archivebox/api/tests.py

@@ -1,27 +1,30 @@
+__package__ = 'archivebox.api'
+
 from django.test import TestCase
 from django.test import TestCase
 from ninja.testing import TestClient
 from ninja.testing import TestClient
-from archivebox.api.archive import router as archive_router
 
 
-class ArchiveBoxAPITestCase(TestCase):
+from .routes_cli import router
+
+class ArchiveBoxCLIAPITestCase(TestCase):
     def setUp(self):
     def setUp(self):
-        self.client = TestClient(archive_router)
+        self.client = TestClient(router)
 
 
     def test_add_endpoint(self):
     def test_add_endpoint(self):
-        response = self.client.post("/add", json={"urls": ["http://example.com"], "tag": "test"})
+        response = self.client.post("/add", json={"urls": ["http://example.com"], "tag": "testTag1,testTag2"})
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json()["status"], "success")
+        self.assertTrue(response.json()["success"])
 
 
     def test_remove_endpoint(self):
     def test_remove_endpoint(self):
         response = self.client.post("/remove", json={"filter_patterns": ["http://example.com"]})
         response = self.client.post("/remove", json={"filter_patterns": ["http://example.com"]})
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json()["status"], "success")
+        self.assertTrue(response.json()["success"])
 
 
     def test_update_endpoint(self):
     def test_update_endpoint(self):
         response = self.client.post("/update", json={})
         response = self.client.post("/update", json={})
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json()["status"], "success")
+        self.assertTrue(response.json()["success"])
 
 
     def test_list_all_endpoint(self):
     def test_list_all_endpoint(self):
         response = self.client.post("/list_all", json={})
         response = self.client.post("/list_all", json={})
         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.status_code, 200)
-        self.assertTrue("success" in response.json()["status"])
+        self.assertTrue(response.json()["success"])

+ 17 - 0
archivebox/api/urls.py

@@ -0,0 +1,17 @@
+__package__ = 'archivebox.api'
+
+from django.urls import path
+from django.views.generic.base import RedirectView
+
+from .v1_api import urls as v1_api_urls
+
+urlpatterns = [
+    path("",                 RedirectView.as_view(url='/api/v1')),
+
+    path("v1/",              v1_api_urls),
+    path("v1",               RedirectView.as_view(url='/api/v1/docs')),
+
+    # ... v2 can be added here ...
+    # path("v2/",              v2_api_urls),
+    # path("v2",               RedirectView.as_view(url='/api/v2/docs')),
+]

+ 111 - 0
archivebox/api/v1_api.py

@@ -0,0 +1,111 @@
+__package__ = 'archivebox.api'
+
+
+from io import StringIO
+from traceback import format_exception
+from contextlib import redirect_stdout, redirect_stderr
+
+from django.http import HttpRequest, HttpResponse
+from django.core.exceptions import ObjectDoesNotExist, EmptyResultSet, PermissionDenied
+
+from ninja import NinjaAPI, Swagger
+
+# TODO: explore adding https://eadwincode.github.io/django-ninja-extra/
+
+from api.auth import API_AUTH_METHODS
+from ..config import VERSION, COMMIT_HASH
+
+
+COMMIT_HASH = COMMIT_HASH or 'unknown'
+
+html_description=f'''
+<h3>Welcome to your ArchiveBox server's REST API <code>[v1 ALPHA]</code> homepage!</h3>
+<br/>
+<i><b>WARNING: This API is still in an early development stage and may change!</b></i>
+<br/>
+<ul>
+<li>⬅️ Manage your server: <a href="/admin/api/"><b>Setup API Keys</b></a>, <a href="/admin/">Go to your Server Admin UI</a>, <a href="/">Go to your Snapshots list</a> 
+<li>💬 Ask questions and get help here: <a href="https://zulip.archivebox.io">ArchiveBox Chat Forum</a></li>
+<li>🐞 Report API bugs here: <a href="https://github.com/ArchiveBox/ArchiveBox/issues">Github Issues</a></li>
+<li>📚 ArchiveBox Documentation: <a href="https://github.com/ArchiveBox/ArchiveBox/wiki">Github Wiki</a></li>
+<li>📜 See the API source code: <a href="https://github.com/ArchiveBox/ArchiveBox/blob/dev/archivebox/api"><code>archivebox/api/</code></a></li>
+</ul>
+<small>Served by ArchiveBox v{VERSION} (<a href="https://github.com/ArchiveBox/ArchiveBox/commit/{COMMIT_HASH}"><code>{COMMIT_HASH[:8]}</code></a>), API powered by <a href="https://django-ninja.dev/"><code>django-ninja</code></a>.</small>
+'''
+
+
+def register_urls(api: NinjaAPI) -> NinjaAPI:
+    api.add_router('/auth/',     'api.v1_auth.router')
+    api.add_router('/core/',     'api.v1_core.router')
+    api.add_router('/cli/',      'api.v1_cli.router')
+    return api
+
+
+class NinjaAPIWithIOCapture(NinjaAPI):    
+    def create_temporal_response(self, request: HttpRequest) -> HttpResponse:
+        stdout, stderr = StringIO(), StringIO()
+
+        with redirect_stderr(stderr):
+            with redirect_stdout(stdout):
+                request.stdout = stdout
+                request.stderr = stderr
+
+                response = super().create_temporal_response(request)
+
+        print('RESPONDING NOW', response)
+
+        return response
+
+
+api = NinjaAPIWithIOCapture(
+    title='ArchiveBox API',
+    description=html_description,
+    version='1.0.0',
+    csrf=False,
+    auth=API_AUTH_METHODS,
+    urls_namespace="api",
+    docs=Swagger(settings={"persistAuthorization": True}),
+    # docs_decorator=login_required,
+    # renderer=ORJSONRenderer(),
+)
+api = register_urls(api)
+urls = api.urls
+
+
[email protected]_handler(Exception)
+def generic_exception_handler(request, err):
+    status = 503
+    if isinstance(err, (ObjectDoesNotExist, EmptyResultSet, PermissionDenied)):
+        status = 404
+
+    print(''.join(format_exception(err)))
+
+    return api.create_response(
+        request,
+        {
+            "succeeded": False,
+            "message": f'{err.__class__.__name__}: {err}',
+            "errors": [
+                ''.join(format_exception(err)),
+                # or send simpler parent-only traceback:
+                # *([str(err.__context__)] if getattr(err, '__context__', None) else []),
+            ],
+        },
+        status=status,
+    )
+
+
+
+# import orjson
+# from ninja.renderers import BaseRenderer
+# class ORJSONRenderer(BaseRenderer):
+#     media_type = "application/json"
+#     def render(self, request, data, *, response_status):
+#         return {
+#             "success": True,
+#             "errors": [],
+#             "result": data,
+#             "stdout": ansi_to_html(stdout.getvalue().strip()),
+#             "stderr": ansi_to_html(stderr.getvalue().strip()),
+#         }
+#         return orjson.dumps(data)

+ 52 - 0
archivebox/api/v1_auth.py

@@ -0,0 +1,52 @@
+__package__ = 'archivebox.api'
+
+from typing import Optional
+
+from ninja import Router, Schema
+
+from api.models import APIToken
+from api.auth import auth_using_token, auth_using_password
+
+
+router = Router(tags=['Authentication'])
+
+
+class PasswordAuthSchema(Schema):
+    """Schema for a /get_api_token request"""
+    username: Optional[str] = None
+    password: Optional[str] = None
+
+
[email protected]("/get_api_token", auth=None, summary='Generate an API token for a given username & password (or currently logged-in user)')             # auth=None because they are not authed yet
+def get_api_token(request, auth_data: PasswordAuthSchema):
+    user = auth_using_password(
+        username=auth_data.username,
+        password=auth_data.password,
+        request=request,
+    )
+
+    if user:
+        # TODO: support multiple tokens in the future, for now we just have one per user
+        api_token, created = APIToken.objects.get_or_create(user=user)
+
+        return api_token.__json__()
+    
+    return {"success": False, "errors": ["Invalid credentials"]}
+
+
+
+class TokenAuthSchema(Schema):
+    """Schema for a /check_api_token request"""
+    token: str
+
+
[email protected]("/check_api_token", auth=None, summary='Validate an API token to make sure its valid and non-expired')        # auth=None because they are not authed yet
+def check_api_token(request, token_data: TokenAuthSchema):
+    user = auth_using_token(
+        token=token_data.token,
+        request=request,
+    )
+    if user:
+        return {"success": True, "user_id": str(user.id)}
+    
+    return {"success": False, "user_id": None}

+ 234 - 0
archivebox/api/v1_cli.py

@@ -0,0 +1,234 @@
+__package__ = 'archivebox.api'
+
+from typing import List, Dict, Any, Optional
+from enum import Enum
+
+from ninja import Router, Schema
+
+from ..main import (
+    add,
+    remove,
+    update,
+    list_all,
+    schedule,
+)
+from ..util import ansi_to_html
+from ..config import ONLY_NEW
+
+
+# router for API that exposes archivebox cli subcommands as REST endpoints
+router = Router(tags=['ArchiveBox CLI Sub-Commands'])
+
+
+# Schemas
+
+JSONType = List[Any] | Dict[str, Any] | bool | int | str | None
+
+class CLICommandResponseSchema(Schema):
+    success: bool
+    errors: List[str]
+    result: JSONType
+    stdout: str
+    stderr: str
+
+class FilterTypeChoices(str, Enum):
+    exact = 'exact'
+    substring = 'substring'
+    regex = 'regex'
+    domain = 'domain'
+    tag = 'tag'
+    timestamp = 'timestamp'
+
+class StatusChoices(str, Enum):
+    indexed = 'indexed'
+    archived = 'archived'
+    unarchived = 'unarchived'
+    present = 'present'
+    valid = 'valid'
+    invalid = 'invalid'
+    duplicate = 'duplicate'
+    orphaned = 'orphaned'
+    corrupted = 'corrupted'
+    unrecognized = 'unrecognized'
+
+
+class AddCommandSchema(Schema):
+    urls: List[str]
+    tag: str = ""
+    depth: int = 0
+    update: bool = not ONLY_NEW  # Default to the opposite of ONLY_NEW
+    update_all: bool = False
+    index_only: bool = False
+    overwrite: bool = False
+    init: bool = False
+    extractors: str = ""
+    parser: str = "auto"
+
+class UpdateCommandSchema(Schema):
+    resume: Optional[float] = 0
+    only_new: bool = ONLY_NEW
+    index_only: bool = False
+    overwrite: bool = False
+    after: Optional[float] = 0
+    before: Optional[float] = 999999999999999
+    status: Optional[StatusChoices] = StatusChoices.unarchived
+    filter_type: Optional[str] = FilterTypeChoices.substring
+    filter_patterns: Optional[List[str]] = ['https://example.com']
+    extractors: Optional[str] = ""
+
+class ScheduleCommandSchema(Schema):
+    import_path: Optional[str] = None
+    add: bool = False
+    every: Optional[str] = None
+    tag: str = ''
+    depth: int = 0
+    overwrite: bool = False
+    update: bool = not ONLY_NEW
+    clear: bool = False
+
+class ListCommandSchema(Schema):
+    filter_patterns: Optional[List[str]] = ['https://example.com']
+    filter_type: str = FilterTypeChoices.substring
+    status: Optional[StatusChoices] = StatusChoices.indexed
+    after: Optional[float] = 0
+    before: Optional[float] = 999999999999999
+    sort: str = 'added'
+    as_json: bool = True
+    as_html: bool = False
+    as_csv: str | bool = 'timestamp,url'
+    with_headers: bool = False
+
+class RemoveCommandSchema(Schema):
+    delete: bool = True
+    after: Optional[float] = 0
+    before: Optional[float] = 999999999999999
+    filter_type: str = FilterTypeChoices.exact
+    filter_patterns: Optional[List[str]] = ['https://example.com']
+
+
+
+
+
[email protected]("/add", response=CLICommandResponseSchema, summary='archivebox add [args] [urls]')
+def cli_add(request, args: AddCommandSchema):
+    result = add(
+        urls=args.urls,
+        tag=args.tag,
+        depth=args.depth,
+        update=args.update,
+        update_all=args.update_all,
+        index_only=args.index_only,
+        overwrite=args.overwrite,
+        init=args.init,
+        extractors=args.extractors,
+        parser=args.parser,
+    )
+
+    return {
+        "success": True,
+        "errors": [],
+        "result": result,
+        "stdout": ansi_to_html(request.stdout.getvalue().strip()),
+        "stderr": ansi_to_html(request.stderr.getvalue().strip()),
+    }
+
+
[email protected]("/update", response=CLICommandResponseSchema, summary='archivebox update [args] [filter_patterns]')
+def cli_update(request, args: UpdateCommandSchema):
+    result = update(
+        resume=args.resume,
+        only_new=args.only_new,
+        index_only=args.index_only,
+        overwrite=args.overwrite,
+        before=args.before,
+        after=args.after,
+        status=args.status,
+        filter_type=args.filter_type,
+        filter_patterns=args.filter_patterns,
+        extractors=args.extractors,
+    )
+    return {
+        "success": True,
+        "errors": [],
+        "result": result,
+        "stdout": ansi_to_html(request.stdout.getvalue().strip()),
+        "stderr": ansi_to_html(request.stderr.getvalue().strip()),
+    }
+
+
[email protected]("/schedule", response=CLICommandResponseSchema, summary='archivebox schedule [args] [import_path]')
+def cli_schedule(request, args: ScheduleCommandSchema):
+    result = schedule(
+        import_path=args.import_path,
+        add=args.add,
+        show=args.show,
+        clear=args.clear,
+        every=args.every,
+        tag=args.tag,
+        depth=args.depth,
+        overwrite=args.overwrite,
+        update=args.update,
+    )
+
+    return {
+        "success": True,
+        "errors": [],
+        "result": result,
+        "stdout": ansi_to_html(request.stdout.getvalue().strip()),
+        "stderr": ansi_to_html(request.stderr.getvalue().strip()),
+    }
+
+
+
[email protected]("/list", response=CLICommandResponseSchema, summary='archivebox list [args] [filter_patterns]')
+def cli_list(request, args: ListCommandSchema):
+    result = list_all(
+        filter_patterns=args.filter_patterns,
+        filter_type=args.filter_type,
+        status=args.status,
+        after=args.after,
+        before=args.before,
+        sort=args.sort,
+        csv=args.as_csv,
+        json=args.as_json,
+        html=args.as_html,
+        with_headers=args.with_headers,
+    )
+
+    result_format = 'txt'
+    if args.as_json:
+        result_format = "json"
+    elif args.as_html:
+        result_format = "html"
+    elif args.as_csv:
+        result_format = "csv"
+
+    return {
+        "success": True,
+        "errors": [],
+        "result": result,
+        "result_format": result_format,
+        "stdout": ansi_to_html(request.stdout.getvalue().strip()),
+        "stderr": ansi_to_html(request.stderr.getvalue().strip()),
+    }
+    
+
+
+@router.post("/remove", response=CLICommandResponseSchema, summary='archivebox remove [args] [filter_patterns]')
+def cli_remove(request, args: RemoveCommandSchema):
+    result = remove(
+        yes=True,            # no way to interactively ask for confirmation via API, so we force yes
+        delete=args.delete,
+        before=args.before,
+        after=args.after,
+        filter_type=args.filter_type,
+        filter_patterns=args.filter_patterns,
+    )
+    return {
+        "success": True,
+        "errors": [],
+        "result": result,
+        "stdout": ansi_to_html(request.stdout.getvalue().strip()),
+        "stderr": ansi_to_html(request.stderr.getvalue().strip()),
+    }
+    

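The five endpoints above expose the core CLI verbs over HTTP. A minimal client sketch (not part of this commit; the `/api/v1/cli/` mount point and bearer-token header are assumptions based on how v1_api.py and v1_auth.py wire things up, so check the interactive docs served at `/api` on a running instance):

```python
# Hypothetical client for the new CLI endpoints; URL prefix and auth header are assumptions.
import requests

resp = requests.post(
    'http://localhost:8000/api/v1/cli/add',
    json={'urls': ['https://example.com'], 'tag': 'demo', 'depth': 0},
    headers={'Authorization': 'Bearer YOUR_API_TOKEN'},   # placeholder token
    timeout=120,
)
resp.raise_for_status()
result = resp.json()
print(result['success'], result['errors'])
print(result['stdout'])   # CLI output, ANSI colors already converted by ansi_to_html()
```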
+ 210 - 0
archivebox/api/v1_core.py

@@ -0,0 +1,210 @@
+__package__ = 'archivebox.api'
+
+from uuid import UUID
+from typing import List, Optional
+from datetime import datetime
+
+from django.shortcuts import get_object_or_404
+
+from ninja import Router, Schema, FilterSchema, Field, Query
+from ninja.pagination import paginate
+
+from core.models import Snapshot, ArchiveResult, Tag
+
+
+router = Router(tags=['Core Models'])
+
+
+
+
+### ArchiveResult #########################################################################
+
+class ArchiveResultSchema(Schema):
+    id: UUID
+
+    snapshot_id: UUID
+    snapshot_url: str
+    snapshot_tags: str
+
+    extractor: str
+    cmd: List[str]
+    pwd: str
+    cmd_version: str
+    output: str
+    status: str
+
+    created: datetime
+
+    @staticmethod
+    def resolve_id(obj):
+        return obj.uuid
+
+    @staticmethod
+    def resolve_created(obj):
+        return obj.start_ts
+
+    @staticmethod
+    def resolve_snapshot_url(obj):
+        return obj.snapshot.url
+
+    @staticmethod
+    def resolve_snapshot_tags(obj):
+        return obj.snapshot.tags_str()
+
+
+class ArchiveResultFilterSchema(FilterSchema):
+    id: Optional[UUID] = Field(None, q='uuid')
+
+    search: Optional[str] = Field(None, q=['snapshot__url__icontains', 'snapshot__title__icontains', 'snapshot__tags__name__icontains', 'extractor', 'output__icontains'])
+    snapshot_id: Optional[UUID] = Field(None, q='snapshot_id')
+    snapshot_url: Optional[str] = Field(None, q='snapshot__url')
+    snapshot_tag: Optional[str] = Field(None, q='snapshot__tags__name')
+    
+    status: Optional[str] = Field(None, q='status')
+    output: Optional[str] = Field(None, q='output__icontains')
+    extractor: Optional[str] = Field(None, q='extractor__icontains')
+    cmd: Optional[str] = Field(None, q='cmd__0__icontains')
+    pwd: Optional[str] = Field(None, q='pwd__icontains')
+    cmd_version: Optional[str] = Field(None, q='cmd_version')
+
+    created: Optional[datetime] = Field(None, q='updated')
+    created__gte: Optional[datetime] = Field(None, q='updated__gte')
+    created__lt: Optional[datetime] = Field(None, q='updated__lt')
+
+
+@router.get("/archiveresults", response=List[ArchiveResultSchema])
+@paginate
+def list_archiveresults(request, filters: ArchiveResultFilterSchema = Query(...)):
+    qs = ArchiveResult.objects.all()
+    results = filters.filter(qs)
+    return results
+
+
+@router.get("/archiveresult/{archiveresult_id}", response=ArchiveResultSchema)
+def get_archiveresult(request, archiveresult_id: str):
+    archiveresult = get_object_or_404(ArchiveResult, id=archiveresult_id)
+    return archiveresult
+
+
+# @router.post("/archiveresult", response=ArchiveResultSchema)
+# def create_archiveresult(request, payload: ArchiveResultSchema):
+#     archiveresult = ArchiveResult.objects.create(**payload.dict())
+#     return archiveresult
+#
+# @router.put("/archiveresult/{archiveresult_id}", response=ArchiveResultSchema)
+# def update_archiveresult(request, archiveresult_id: str, payload: ArchiveResultSchema):
+#     archiveresult = get_object_or_404(ArchiveResult, id=archiveresult_id)
+#   
+#     for attr, value in payload.dict().items():
+#         setattr(archiveresult, attr, value)
+#     archiveresult.save()
+#
+#     return archiveresult
+#
+# @router.delete("/archiveresult/{archiveresult_id}")
+# def delete_archiveresult(request, archiveresult_id: str):
+#     archiveresult = get_object_or_404(ArchiveResult, id=archiveresult_id)
+#     archiveresult.delete()
+#     return {"success": True}
+
+
+
+
+
+### Snapshot #########################################################################
+
+
+class SnapshotSchema(Schema):
+    id: UUID
+
+    url: str
+    tags: str
+    title: Optional[str]
+    timestamp: str
+    bookmarked: datetime
+    added: datetime
+    updated: datetime
+    archive_path: str
+
+    archiveresults: List[ArchiveResultSchema]
+
+    # @staticmethod
+    # def resolve_id(obj):
+    #     return str(obj.id)
+
+    @staticmethod
+    def resolve_tags(obj):
+        return obj.tags_str()
+
+    @staticmethod
+    def resolve_archiveresults(obj, context):
+        if context['request'].with_archiveresults:
+            return obj.archiveresult_set.all().distinct()
+        return ArchiveResult.objects.none()
+
+
+class SnapshotFilterSchema(FilterSchema):
+    id: Optional[UUID] = Field(None, q='id')
+
+    search: Optional[str] = Field(None, q=['url__icontains', 'title__icontains', 'tags__name__icontains'])
+    url: Optional[str] = Field(None, q='url')
+    tag: Optional[str] = Field(None, q='tags__name')
+    title: Optional[str] = Field(None, q='title__icontains')
+    
+    timestamp: Optional[str] = Field(None, q='timestamp__startswith')
+    
+    added: Optional[datetime] = Field(None, q='added')
+    added__gte: Optional[datetime] = Field(None, q='added__gte')
+    added__lt: Optional[datetime] = Field(None, q='added__lt')
+
+
+@router.get("/snapshots", response=List[SnapshotSchema])
+@paginate
+def list_snapshots(request, filters: SnapshotFilterSchema = Query(...), with_archiveresults: bool=True):
+    request.with_archiveresults = with_archiveresults
+
+    qs = Snapshot.objects.all()
+    results = filters.filter(qs)
+    return results
+
+@router.get("/snapshot/{snapshot_id}", response=SnapshotSchema)
+def get_snapshot(request, snapshot_id: str, with_archiveresults: bool=True):
+    request.with_archiveresults = with_archiveresults
+    snapshot = get_object_or_404(Snapshot, id=snapshot_id)
+    return snapshot
+
+
+# @router.post("/snapshot", response=SnapshotSchema)
+# def create_snapshot(request, payload: SnapshotSchema):
+#     snapshot = Snapshot.objects.create(**payload.dict())
+#     return snapshot
+#
+# @router.put("/snapshot/{snapshot_id}", response=SnapshotSchema)
+# def update_snapshot(request, snapshot_id: str, payload: SnapshotSchema):
+#     snapshot = get_object_or_404(Snapshot, id=snapshot_id)
+#
+#     for attr, value in payload.dict().items():
+#         setattr(snapshot, attr, value)
+#     snapshot.save()
+#
+#     return snapshot
+#
+# @router.delete("/snapshot/{snapshot_id}")
+# def delete_snapshot(request, snapshot_id: str):
+#     snapshot = get_object_or_404(Snapshot, id=snapshot_id)
+#     snapshot.delete()
+#     return {"success": True}
+
+
+
+### Tag #########################################################################
+
+
+class TagSchema(Schema):
+    name: str
+    slug: str
+
+
+@router.get("/tags", response=List[TagSchema])
+def list_tags(request):
+    return Tag.objects.all()

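A matching read-side sketch for the model endpoints above (again hypothetical: the `/api/v1/core/` prefix and auth header depend on v1_api.py and your deployment). Note that django-ninja's `@paginate` wraps list responses in an object with `items` and `count` keys:

```python
import requests

resp = requests.get(
    'http://localhost:8000/api/v1/core/snapshots',
    params={'search': 'example.com', 'with_archiveresults': 'false', 'limit': 10},
    headers={'Authorization': 'Bearer YOUR_API_TOKEN'},   # placeholder token
)
page = resp.json()
print(page['count'], 'matching snapshots')
for snapshot in page['items']:
    print(snapshot['added'], snapshot['url'], snapshot['tags'])
```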
+ 44 - 41
archivebox/config.py

@@ -112,7 +112,7 @@ CONFIG_SCHEMA: Dict[str, ConfigDefaultDict] = {
         'LDAP_FIRSTNAME_ATTR':       {'type': str,   'default': None},
         'LDAP_LASTNAME_ATTR':        {'type': str,   'default': None},
         'LDAP_EMAIL_ATTR':           {'type': str,   'default': None},
-        'LDAP_CREATE_SUPERUSER':      {'type': bool,  'default': False},
+        'LDAP_CREATE_SUPERUSER':     {'type': bool,  'default': False},
     },
 
     'ARCHIVE_METHOD_TOGGLES': {
@@ -265,7 +265,7 @@ CONFIG_ALIASES = {
         for key, default in section.items()
             for alias in default.get('aliases', ())
 }
-USER_CONFIG = {key for section in CONFIG_SCHEMA.values() for key in section.keys()}
+USER_CONFIG = {key: section[key] for section in CONFIG_SCHEMA.values() for key in section.keys()}
 
 def get_real_name(key: str) -> str:
     """get the current canonical name for a given deprecated config key"""
@@ -282,6 +282,7 @@ ARCHIVE_DIR_NAME = 'archive'
 SOURCES_DIR_NAME = 'sources'
 LOGS_DIR_NAME = 'logs'
 PERSONAS_DIR_NAME = 'personas'
+CRONTABS_DIR_NAME = 'crontabs'
 SQL_INDEX_FILENAME = 'index.sqlite3'
 JSON_INDEX_FILENAME = 'index.json'
 HTML_INDEX_FILENAME = 'index.html'
 HTML_INDEX_FILENAME = 'index.html'
@@ -355,7 +356,7 @@ ALLOWED_IN_OUTPUT_DIR = {
     'static',
     'sonic',
     'search.sqlite3',
-    'crontabs',
+    CRONTABS_DIR_NAME,
     ARCHIVE_DIR_NAME,
     SOURCES_DIR_NAME,
     LOGS_DIR_NAME,
@@ -598,7 +599,6 @@ DYNAMIC_CONFIG_SCHEMA: ConfigDefaultDict = {
 
     'DEPENDENCIES':             {'default': lambda c: get_dependency_info(c)},
     'CODE_LOCATIONS':           {'default': lambda c: get_code_locations(c)},
-    'EXTERNAL_LOCATIONS':       {'default': lambda c: get_external_locations(c)},
     'DATA_LOCATIONS':           {'default': lambda c: get_data_locations(c)},
     'CHROME_OPTIONS':           {'default': lambda c: get_chrome_info(c)},
     'CHROME_EXTRA_ARGS':        {'default': lambda c: c['CHROME_EXTRA_ARGS'] or []},
@@ -985,11 +985,6 @@ def get_code_locations(config: ConfigDict) -> SimpleConfigValueDict:
             'enabled': True,
             'is_valid': (config['TEMPLATES_DIR'] / 'static').exists(),
         },
-        'CUSTOM_TEMPLATES_DIR': {
-            'path': config['CUSTOM_TEMPLATES_DIR'] and Path(config['CUSTOM_TEMPLATES_DIR']).resolve(),
-            'enabled': bool(config['CUSTOM_TEMPLATES_DIR']),
-            'is_valid': config['CUSTOM_TEMPLATES_DIR'] and Path(config['CUSTOM_TEMPLATES_DIR']).exists(),
-        },
         # 'NODE_MODULES_DIR': {
         #     'path': ,
         #     'enabled': ,
@@ -997,29 +992,42 @@ def get_code_locations(config: ConfigDict) -> SimpleConfigValueDict:
         # },
     }
 
-def get_external_locations(config: ConfigDict) -> ConfigValue:
-    abspath = lambda path: None if path is None else Path(path).resolve()
-    return {
-        'CHROME_USER_DATA_DIR': {
-            'path': abspath(config['CHROME_USER_DATA_DIR']),
-            'enabled': config['USE_CHROME'] and config['CHROME_USER_DATA_DIR'],
-            'is_valid': False if config['CHROME_USER_DATA_DIR'] is None else (Path(config['CHROME_USER_DATA_DIR']) / 'Default').exists(),
-        },
-        'COOKIES_FILE': {
-            'path': abspath(config['COOKIES_FILE']),
-            'enabled': config['USE_WGET'] and config['COOKIES_FILE'],
-            'is_valid': False if config['COOKIES_FILE'] is None else Path(config['COOKIES_FILE']).exists(),
-        },
-    }
-
 def get_data_locations(config: ConfigDict) -> ConfigValue:
 def get_data_locations(config: ConfigDict) -> ConfigValue:
     return {
+        # 'CHROME_USER_DATA_DIR': {
+        #     'path': os.path.abspath(config['CHROME_USER_DATA_DIR']),
+        #     'enabled': config['USE_CHROME'] and config['CHROME_USER_DATA_DIR'],
+        #     'is_valid': False if config['CHROME_USER_DATA_DIR'] is None else (Path(config['CHROME_USER_DATA_DIR']) / 'Default').exists(),
+        # },
+        # 'COOKIES_FILE': {
+        #     'path': os.path.abspath(config['COOKIES_FILE']),
+        #     'enabled': config['USE_WGET'] and config['COOKIES_FILE'],
+        #     'is_valid': False if config['COOKIES_FILE'] is None else Path(config['COOKIES_FILE']).exists(),
+        # },
         'OUTPUT_DIR': {
             'path': config['OUTPUT_DIR'].resolve(),
             'enabled': True,
             'is_valid': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).exists(),
             'is_mount': os.path.ismount(config['OUTPUT_DIR'].resolve()),
         },
+        'CONFIG_FILE': {
+            'path': config['CONFIG_FILE'].resolve(),
+            'enabled': True,
+            'is_valid': config['CONFIG_FILE'].exists(),
+        },
+        'SQL_INDEX': {
+            'path': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).resolve(),
+            'enabled': True,
+            'is_valid': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).exists(),
+            'is_mount': os.path.ismount((config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).resolve()),
+        },
+        'ARCHIVE_DIR': {
+            'path': config['ARCHIVE_DIR'].resolve(),
+            'enabled': True,
+            'is_valid': config['ARCHIVE_DIR'].exists(),
+            'is_mount': os.path.ismount(config['ARCHIVE_DIR'].resolve()),
+        },
         'SOURCES_DIR': {
             'path': config['SOURCES_DIR'].resolve(),
             'enabled': True,
@@ -1030,28 +1038,22 @@ def get_data_locations(config: ConfigDict) -> ConfigValue:
             'enabled': True,
             'is_valid': config['LOGS_DIR'].exists(),
         },
+        'CUSTOM_TEMPLATES_DIR': {
+            'path': config['CUSTOM_TEMPLATES_DIR'] and Path(config['CUSTOM_TEMPLATES_DIR']).resolve(),
+            'enabled': bool(config['CUSTOM_TEMPLATES_DIR']),
+            'is_valid': config['CUSTOM_TEMPLATES_DIR'] and Path(config['CUSTOM_TEMPLATES_DIR']).exists(),
+        },
         'PERSONAS_DIR': {
             'path': config['PERSONAS_DIR'].resolve(),
             'enabled': True,
             'is_valid': config['PERSONAS_DIR'].exists(),
         },
-        'ARCHIVE_DIR': {
-            'path': config['ARCHIVE_DIR'].resolve(),
-            'enabled': True,
-            'is_valid': config['ARCHIVE_DIR'].exists(),
-            'is_mount': os.path.ismount(config['ARCHIVE_DIR'].resolve()),
-        },
-        'CONFIG_FILE': {
-            'path': config['CONFIG_FILE'].resolve(),
-            'enabled': True,
-            'is_valid': config['CONFIG_FILE'].exists(),
-        },
-        'SQL_INDEX': {
-            'path': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).resolve(),
-            'enabled': True,
-            'is_valid': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).exists(),
-            'is_mount': os.path.ismount((config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).resolve()),
-        },
+        # managed by bin/docker_entrypoint.sh and python-crontab:
+        # 'CRONTABS_DIR': {
+        #     'path': config['CRONTABS_DIR'].resolve(),
+        #     'enabled': True,
+        #     'is_valid': config['CRONTABS_DIR'].exists(),
+        # },
     }
 
 def get_dependency_info(config: ConfigDict) -> ConfigValue:
@@ -1366,6 +1368,7 @@ def check_data_folder(out_dir: Union[str, Path, None]=None, config: ConfigDict=C
         stderr('        archivebox init')
         raise SystemExit(2)
 
+
 def check_migrations(out_dir: Union[str, Path, None]=None, config: ConfigDict=CONFIG):
     output_dir = out_dir or config['OUTPUT_DIR']
     from .index.sql import list_migrations

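Note the USER_CONFIG change above: it was a set of key names and is now a dict mapping each key to its schema entry, which is what lets the new admin config views (in core/views.py below) look up each option's type, default, and aliases. A quick illustration of the difference, assuming CONFIG_SCHEMA as defined in this file:

```python
# Before: membership tests only. After: full schema introspection per key.
USER_CONFIG = {
    key: section[key]
    for section in CONFIG_SCHEMA.values()
    for key in section.keys()
}

entry = USER_CONFIG['LDAP_CREATE_SUPERUSER']
print(entry['type'])              # <class 'bool'>
print(entry['default'])           # False
print(entry.get('aliases', ()))   # any deprecated spellings of this key
```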
+ 27 - 0
archivebox/core/admin.py

@@ -14,12 +14,17 @@ from django.shortcuts import render, redirect
 from django.contrib.auth import get_user_model
 from django import forms
 
+
+from signal_webhooks.apps import DjangoSignalWebhooksConfig
+from signal_webhooks.admin import WebhookAdmin, WebhookModel
+
 from ..util import htmldecode, urldecode, ansi_to_html
 
 from core.models import Snapshot, ArchiveResult, Tag
 from core.forms import AddLinkForm
 
 from core.mixins import SearchResultsAdminMixin
+from api.models import APIToken
 
 from index.html import snapshot_icons
 from logging_util import printable_filesize
@@ -98,10 +103,32 @@ class ArchiveBoxAdmin(admin.AdminSite):
 
         return render(template_name='add.html', request=request, context=context)
 
+
+# monkey patch django-signals-webhooks to change how it shows up in Admin UI
+DjangoSignalWebhooksConfig.verbose_name = 'API'
+WebhookModel._meta.get_field('name').help_text = 'Give your webhook a descriptive name (e.g. Notify ACME Slack channel of any new ArchiveResults).'
+WebhookModel._meta.get_field('signal').help_text = 'The type of event the webhook should fire for (e.g. Create, Update, Delete).'
+WebhookModel._meta.get_field('ref').help_text = 'Dot import notation of the model the webhook should fire for (e.g. core.models.Snapshot or core.models.ArchiveResult).'
+WebhookModel._meta.get_field('endpoint').help_text = 'External URL to POST the webhook notification to (e.g. https://someapp.example.com/webhook/some-webhook-receiver).'
+WebhookModel._meta.app_label = 'api'
+
+
 archivebox_admin = ArchiveBoxAdmin()
 archivebox_admin.register(get_user_model())
+archivebox_admin.register(APIToken)
+archivebox_admin.register(WebhookModel, WebhookAdmin)
 archivebox_admin.disable_action('delete_selected')
 
+
+# patch admin with methods to add data views
+from admin_data_views.admin import get_app_list, admin_data_index_view, get_admin_data_urls, get_urls
+
+archivebox_admin.get_app_list = get_app_list.__get__(archivebox_admin, ArchiveBoxAdmin)
+archivebox_admin.admin_data_index_view = admin_data_index_view.__get__(archivebox_admin, ArchiveBoxAdmin)
+archivebox_admin.get_admin_data_urls = get_admin_data_urls.__get__(archivebox_admin, ArchiveBoxAdmin)
+archivebox_admin.get_urls = get_urls(archivebox_admin.get_urls).__get__(archivebox_admin, ArchiveBoxAdmin)
+
+
 class ArchiveResultInline(admin.TabularInline):
     model = ArchiveResult
 

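The `get_app_list.__get__(archivebox_admin, ArchiveBoxAdmin)` calls above use Python's descriptor protocol to graft admin_data_views' standalone functions onto the already-constructed admin site as bound methods. A self-contained illustration of the idiom (names here are invented for the example):

```python
class Greeter:
    def __init__(self, name):
        self.name = name

def shout_hello(self):                 # a plain function, defined outside the class
    return f'HELLO, {self.name.upper()}!'

g = Greeter('archivebox')
g.shout_hello = shout_hello.__get__(g, Greeter)   # bind it to this one instance
print(g.shout_hello())                 # -> HELLO, ARCHIVEBOX!
```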
+ 18 - 0
archivebox/core/apps.py

@@ -1,3 +1,5 @@
+__package__ = 'archivebox.core'
+
 from django.apps import AppConfig
 
 
@@ -5,6 +7,22 @@ class CoreConfig(AppConfig):
     name = 'core'
 
     def ready(self):
+        # register our custom admin as the primary django admin
+        from django.contrib import admin
+        from django.contrib.admin import sites
+        from core.admin import archivebox_admin
+
+        admin.site = archivebox_admin
+        sites.site = archivebox_admin
+
+
+        # register signal handlers
         from .auth import register_signals
 
         register_signals()
+
+
+
+# from django.contrib.admin.apps import AdminConfig
+# class CoreAdminConfig(AdminConfig):
+#     default_site = "core.admin.get_admin_site"

+ 3 - 2
archivebox/core/auth.py

@@ -1,5 +1,6 @@
-import os
-from django.conf import settings
+__package__ = 'archivebox.core'
+
+
 from ..config import (
     LDAP
 )

+ 0 - 2
archivebox/core/auth_ldap.py

@@ -1,10 +1,8 @@
-from django.conf import settings
 from ..config import (
     LDAP_CREATE_SUPERUSER
 )
 
 def create_user(sender, user=None, ldap_user=None, **kwargs):
-
     if not user.id and LDAP_CREATE_SUPERUSER:
         user.is_superuser = True
 

+ 67 - 0
archivebox/core/settings.py

@@ -18,6 +18,7 @@ from ..config import (
     CUSTOM_TEMPLATES_DIR,
     SQL_INDEX_FILENAME,
     OUTPUT_DIR,
+    ARCHIVE_DIR,
     LOGS_DIR,
     TIMEZONE,
 
@@ -63,6 +64,9 @@ INSTALLED_APPS = [
     'core',
     'api',
 
+    'admin_data_views',
+
+    'signal_webhooks',
     'django_extensions',
 ]
 
@@ -173,6 +177,17 @@ if DEBUG_TOOLBAR:
     ]
     MIDDLEWARE = [*MIDDLEWARE, 'debug_toolbar.middleware.DebugToolbarMiddleware']
 
+
+# https://github.com/bensi94/Django-Requests-Tracker (improved version of django-debug-toolbar)
+# Must delete archivebox/templates/admin to use because it relies on some things we override
+# visit /__requests_tracker__/ to access
+DEBUG_REQUESTS_TRACKER = False
+if DEBUG_REQUESTS_TRACKER:
+    INSTALLED_APPS += ["requests_tracker"]
+    MIDDLEWARE += ["requests_tracker.middleware.requests_tracker_middleware"]
+    INTERNAL_IPS = ["127.0.0.1", "10.0.2.2", "0.0.0.0", "*"]
+
+
 ################################################################################
 ### Staticfile and Template Settings
 ################################################################################
@@ -242,6 +257,29 @@ CACHES = {
 EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
 
 
+STORAGES = {
+    "default": {
+        "BACKEND": "django.core.files.storage.FileSystemStorage",
+    },
+    "staticfiles": {
+        "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
+    },
+    "archive": {
+        "BACKEND": "django.core.files.storage.FileSystemStorage",
+        "OPTIONS": {
+            "base_url": "/archive/",
+            "location": ARCHIVE_DIR,
+        },
+    },
+    # "personas": {
+    #     "BACKEND": "django.core.files.storage.FileSystemStorage",
+    #     "OPTIONS": {
+    #         "base_url": "/personas/",
+    #         "location": PERSONAS_DIR,
+    #     },
+    # },
+}
+
 ################################################################################
 ### Security Settings
 ################################################################################
@@ -368,3 +406,32 @@ LOGGING = {
         }
     },
 }
+
+
+# Add default webhook configuration to the User model
+SIGNAL_WEBHOOKS = {
+    "HOOKS": {
+        "django.contrib.auth.models.User": ...,
+        "core.models.Snapshot": ...,
+        "core.models.ArchiveResult": ...,
+        "core.models.Tag": ...,
+        "api.models.APIToken": ...,
+    },
+}
+
+
+ADMIN_DATA_VIEWS = {
+    "NAME": "configuration",
+    "URLS": [
+        {
+            "route": "live/",
+            "view": "core.views.live_config_list_view",
+            "name": "live",
+            "items": {
+                "route": "<str:key>/",
+                "view": "core.views.live_config_value_view",
+                "name": "live_config_value",
+            },
+        },
+    ],
+}

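The STORAGES dict added above registers a named "archive" filesystem backend rooted at ARCHIVE_DIR. A hypothetical usage sketch (requires Django 4.2+, where named backends are exposed via the storages registry; the snapshot path below is invented):

```python
from django.core.files.storage import storages

archive_storage = storages['archive']               # FileSystemStorage over ARCHIVE_DIR
ts = '1718125312.0'                                 # hypothetical snapshot timestamp dir
if archive_storage.exists(f'{ts}/index.json'):
    with archive_storage.open(f'{ts}/index.json') as f:
        print(f.read(200))
print(archive_storage.url(f'{ts}/screenshot.png'))  # -> /archive/1718125312.0/screenshot.png
```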
+ 9 - 14
archivebox/core/urls.py

@@ -1,4 +1,4 @@
-from .admin import archivebox_admin
+__package__ = 'archivebox.core'
 
 from django.urls import path, include
 from django.views import static
@@ -6,14 +6,9 @@ from django.contrib.staticfiles.urls import staticfiles_urlpatterns
 from django.conf import settings
 from django.views.generic.base import RedirectView
 
-from core.views import HomepageView, SnapshotView, PublicIndexView, AddView, HealthCheckView
-
-from ninja import NinjaAPI
-from api.auth import GlobalAuth
+from .admin import archivebox_admin
+from .views import HomepageView, SnapshotView, PublicIndexView, AddView, HealthCheckView
 
-api = NinjaAPI(auth=GlobalAuth())
-api.add_router("/auth/", "api.auth.router")
-api.add_router("/archive/", "api.archive.router")
 
 # GLOBAL_CONTEXT doesn't work as-is, disabled for now: https://github.com/ArchiveBox/ArchiveBox/discussions/1306
 # from config import VERSION, VERSIONS_AVAILABLE, CAN_UPGRADE
@@ -43,10 +38,10 @@ urlpatterns = [
     path('accounts/', include('django.contrib.auth.urls')),
     path('admin/', archivebox_admin.urls),
     
-    path("api/", api.urls),
+    path("api/",      include('api.urls')),
 
     path('health/', HealthCheckView.as_view(), name='healthcheck'),
-    path('error/', lambda _: 1/0),
+    path('error/', lambda *_: 1/0),
 
     # path('jet_api/', include('jet_django.urls')),  Enable to use https://www.jetadmin.io/integrations/django
 
@@ -57,10 +52,10 @@ urlpatterns = [
 urlpatterns += staticfiles_urlpatterns()
 
 if settings.DEBUG_TOOLBAR:
-    import debug_toolbar
-    urlpatterns += [
-        path('__debug__/', include(debug_toolbar.urls)),
-    ]
+    urlpatterns += [path('__debug__/', include("debug_toolbar.urls"))]
+
+if settings.DEBUG_REQUESTS_TRACKER:
+    urlpatterns += [path("__requests_tracker__/", include("requests_tracker.urls"))]
 
 
 # # Proposed FUTURE URLs spec

+ 135 - 4
archivebox/core/views.py

@@ -1,10 +1,12 @@
 __package__ = 'archivebox.core'
 
+from typing import Callable
+
 from io import StringIO
 from contextlib import redirect_stdout
 
 from django.shortcuts import render, redirect
-from django.http import HttpResponse, Http404
+from django.http import HttpRequest, HttpResponse, Http404
 from django.utils.html import format_html, mark_safe
 from django.views import View, static
 from django.views.generic.list import ListView
@@ -14,6 +16,10 @@ from django.contrib.auth.mixins import UserPassesTestMixin
 from django.views.decorators.csrf import csrf_exempt
 from django.utils.decorators import method_decorator
 
+from admin_data_views.typing import TableContext, ItemContext
+from admin_data_views.utils import render_with_table_view, render_with_item_view, ItemLink
+
+
 from core.models import Snapshot
 from core.forms import AddLinkForm
 
@@ -26,6 +32,10 @@ from ..config import (
     COMMIT_HASH,
     FOOTER_INFO,
     SNAPSHOTS_PER_PAGE,
+    CONFIG,
+    CONFIG_SCHEMA,
+    DYNAMIC_CONFIG_SCHEMA,
+    USER_CONFIG,
 )
 )
 from ..main import add
 from ..util import base_url, ansi_to_html
                             '<center><br/><br/><br/>'
                             '<center><br/><br/><br/>'
                             f'Snapshot <a href="/archive/{snapshot.timestamp}/index.html" target="_top"><b><code>[{snapshot.timestamp}]</code></b></a> exists in DB, but resource <b><code>{snapshot.timestamp}/'
                             '{}'
-                            'Maybe this resource type is not availabe for this Snapshot,<br/>or the archiving process has not completed yet?<br/>'
-                            f'<pre><code># run this cmd to finish archiving this Snapshot<br/>archivebox update -t timestamp {snapshot.timestamp}</code></pre><br/><br/>'
+                            f'</code></b> does not exist in the <a href="/archive/{snapshot.timestamp}/" target="_top">snapshot dir</a> yet.<br/><br/>'
+                            'It\'s possible that this resource type is not available for the Snapshot,<br/>or that the archiving process has not completed yet.<br/>'
+                            f'<pre><code># if interrupted, run this cmd to finish archiving this Snapshot<br/>archivebox update -t timestamp {snapshot.timestamp}</code></pre><br/><br/>'
                             '<div class="text-align: left; width: 100%; max-width: 400px">'
                             '<div class="text-align: left; width: 100%; max-width: 400px">'
                             '<i><b>Next steps:</i></b><br/>'
                             f'- list all the <a href="/archive/{snapshot.timestamp}/" target="_top">Snapshot files <code>.*</code></a><br/>'
             content_type='text/plain',
             content_type='text/plain',
             status=200
         )
+
+def find_config_section(key: str) -> str:
+    matching_sections = [
+        name for name, opts in CONFIG_SCHEMA.items() if key in opts
+    ]
+    section = matching_sections[0] if matching_sections else 'DYNAMIC'
+    return section
+
+def find_config_default(key: str) -> str:
+    default_val = USER_CONFIG.get(key, {}).get('default', lambda: None)
+    if isinstance(default_val, Callable):
+        return None
+    else:
+        default_val = repr(default_val)
+    return default_val
+
+def find_config_type(key: str) -> str:
+    if key in USER_CONFIG:
+        return USER_CONFIG[key]['type'].__name__
+    elif key in DYNAMIC_CONFIG_SCHEMA:
+        return type(CONFIG[key]).__name__
+    return 'str'
+
+def key_is_safe(key: str) -> bool:
+    for term in ('key', 'password', 'secret', 'token'):
+        if term in key.lower():
+            return False
+    return True
+
+@render_with_table_view
+def live_config_list_view(request: HttpRequest, **kwargs) -> TableContext:
+
+    assert request.user.is_superuser, 'Must be a superuser to view configuration settings.'
+
+    rows = {
+        "Section": [],
+        "Key": [],
+        "Type": [],
+        "Value": [],
+        "Default": [],
+        # "Documentation": [],
+        "Aliases": [],
+    }
+
+    for section in CONFIG_SCHEMA.keys():
+        for key in CONFIG_SCHEMA[section].keys():
+            rows['Section'].append(section.replace('_', ' ').title().replace(' Config', ''))
+            rows['Key'].append(ItemLink(key, key=key))
+            rows['Type'].append(mark_safe(f'<code>{find_config_type(key)}</code>'))
+            rows['Value'].append(mark_safe(f'<code>{CONFIG[key]}</code>') if key_is_safe(key) else '******** (redacted)')
+            rows['Default'].append(mark_safe(f'<a href="https://github.com/search?q=repo%3AArchiveBox%2FArchiveBox+path%3Aconfig.py+%27{key}%27&type=code"><code style="text-decoration: underline">{find_config_default(key) or 'See here...'}</code></a>'))
+            # rows['Documentation'].append(mark_safe(f'Wiki: <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#{key.lower()}">{key}</a>'))
+            rows['Aliases'].append(', '.join(CONFIG_SCHEMA[section][key].get('aliases', [])))
+
+    section = 'DYNAMIC'
+    for key in DYNAMIC_CONFIG_SCHEMA.keys():
+        rows['Section'].append(section.replace('_', ' ').title().replace(' Config', ''))
+        rows['Key'].append(ItemLink(key, key=key))
+        rows['Type'].append(mark_safe(f'<code>{find_config_type(key)}</code>'))
+        rows['Value'].append(mark_safe(f'<code>{CONFIG[key]}</code>') if key_is_safe(key) else '******** (redacted)')
+        rows['Default'].append(mark_safe(f'<a href="https://github.com/search?q=repo%3AArchiveBox%2FArchiveBox+path%3Aconfig.py+%27{key}%27&type=code"><code style="text-decoration: underline">{find_config_default(key) or 'See here...'}</code></a>'))
+        # rows['Documentation'].append(mark_safe(f'Wiki: <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#{key.lower()}">{key}</a>'))
+        rows['Aliases'].append(ItemLink(key, key=key) if key in USER_CONFIG else '')
+
+    return TableContext(
+        title="Computed Configuration Values",
+        table=rows,
+    )
+
+@render_with_item_view
+def live_config_value_view(request: HttpRequest, key: str, **kwargs) -> ItemContext:
+
+    assert request.user.is_superuser, 'Must be a superuser to view configuration settings.'
+
+    aliases = USER_CONFIG.get(key, {}).get("aliases", [])
+
+    return ItemContext(
+        slug=key,
+        title=key,
+        data=[
+            {
+                "name": mark_safe(f'data / ArchiveBox.conf &nbsp; [{find_config_section(key)}]  &nbsp; <b><code style="color: lightgray">{key}</code></b>' if key in USER_CONFIG else f'[DYNAMIC CONFIG]   &nbsp; <b><code style="color: lightgray">{key}</code></b> &nbsp; <small>(calculated at runtime)</small>'),
+                "description": None,
+                "fields": {
+                    'Key': key,
+                    'Type': find_config_type(key),
+                    'Value': CONFIG[key] if key_is_safe(key) else '********',
+                },
+                "help_texts": {
+                    'Key': mark_safe(f'''
+                        <a href="https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#{key.lower()}">Documentation</a>  &nbsp;
+                        <span style="display: {"inline" if aliases else "none"}">
+                            Aliases: {", ".join(aliases)}
+                        </span>
+                    '''),
+                    'Type': mark_safe(f'''
+                        <a href="https://github.com/search?q=repo%3AArchiveBox%2FArchiveBox+path%3Aconfig.py+%27{key}%27&type=code">
+                            See full definition in <code>archivebox/config.py</code>...
+                        </a>
+                    '''),
+                    'Value': mark_safe(f'''
+                        {'<b style="color: red">Value is redacted for your security. (Passwords, secrets, API tokens, etc. cannot be viewed in the Web UI)</b><br/><br/>' if not key_is_safe(key) else ''}
+                        Default: <a href="https://github.com/search?q=repo%3AArchiveBox%2FArchiveBox+path%3Aconfig.py+%27{key}%27&type=code">
+                            <code>{find_config_default(key) or 'See here...'}</code>
+                        </a>
+                        <br/><br/>
+                        <p style="display: {"block" if key in USER_CONFIG else "none"}">
+                            <i>To change this value, edit <code>data/ArchiveBox.conf</code> or run:</i>
+                            <br/><br/>
+                            <code>archivebox config --set {key}="{
+                                val.strip("'")
+                                if (val := find_config_default(key)) else
+                                (repr(CONFIG[key] if key_is_safe(key) else '********')).strip("'")
+                            }"</code>
+                        </p>
+                    '''),
+                },
+            },
+        ],
+    )

+ 1 - 0
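A quick illustration of how `key_is_safe()` above gates what the live config views will render (values, not keys, are what get redacted):

```python
for key in ('TIMEOUT', 'LDAP_PASSWORD', 'API_TOKEN', 'SECRET_KEY'):
    print(key, '->', 'shown' if key_is_safe(key) else '******** (redacted)')
# TIMEOUT -> shown
# LDAP_PASSWORD -> ******** (redacted)   ('password' is a blocked term)
# API_TOKEN -> ******** (redacted)       ('token' is a blocked term)
# SECRET_KEY -> ******** (redacted)      ('secret' and 'key' both match)
```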
archivebox/index/schema.py

@@ -4,6 +4,7 @@ WARNING: THIS FILE IS ALL LEGACY CODE TO BE REMOVED.
 
 DO NOT ADD ANY NEW FEATURES TO THIS FILE, NEW CODE GOES HERE: core/models.py
 
+These are the old types we used to use before ArchiveBox v0.4 (before we switched to Django).
 """
 
 __package__ = 'archivebox.index'

+ 2 - 2
archivebox/logging_util.py

@@ -494,12 +494,12 @@ def log_removal_started(links: List["Link"], yes: bool, delete: bool):
     if delete:
         file_counts = [link.num_outputs for link in links if Path(link.link_dir).exists()]
         print(
-            f'    {len(links)} Links will be de-listed from the main index, and their archived content folders will be deleted from disk.\n' +
+            f'    {len(links)} Links will be de-listed from the main index, and their archived content folders will be deleted from disk.\n'
             f'    ({len(file_counts)} data folders with {sum(file_counts)} archived files will be deleted!)'
         )
     else:
         print(
-            '    Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n' +
+            '    Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n'
             '    (Pass --delete if you also want to permanently delete the data folders)'
         )
 

+ 3 - 9
archivebox/main.py

@@ -104,7 +104,6 @@ from .config import (
     COMMIT_HASH,
     BUILD_TIME,
     CODE_LOCATIONS,
-    EXTERNAL_LOCATIONS,
     DATA_LOCATIONS,
     DEPENDENCIES,
     CHROME_BINARY,
@@ -231,7 +230,7 @@ def version(quiet: bool=False,
         p = platform.uname()
         print(
             'ArchiveBox v{}'.format(get_version(CONFIG)),
-            *((f'COMMIT_HASH={COMMIT_HASH[:7]}',) if COMMIT_HASH else ()),
+            f'COMMIT_HASH={COMMIT_HASH[:7] if COMMIT_HASH else "unknown"}',
             f'BUILD_TIME={BUILD_TIME}',
         )
         print(
@@ -272,11 +271,6 @@ def version(quiet: bool=False,
         for name, path in CODE_LOCATIONS.items():
             print(printable_folder_status(name, path))
 
-        print()
-        print('{white}[i] Secrets locations:{reset}'.format(**ANSI))
-        for name, path in EXTERNAL_LOCATIONS.items():
-            print(printable_folder_status(name, path))
-
         print()
         if DATA_LOCATIONS['OUTPUT_DIR']['is_valid']:
             print('{white}[i] Data locations:{reset}'.format(**ANSI))
@@ -695,7 +689,7 @@ def add(urls: Union[str, List[str]],
     if CAN_UPGRADE:
         hint(f"There's a new version of ArchiveBox available! Your current version is {VERSION}. You can upgrade to {VERSIONS_AVAILABLE['recommended_version']['tag_name']} ({VERSIONS_AVAILABLE['recommended_version']['html_url']}). For more on how to upgrade: https://github.com/ArchiveBox/ArchiveBox/wiki/Upgrading-or-Merging-Archives\n")
 
-    return all_links
+    return new_links
 
 @enforce_types
 def remove(filter_str: Optional[str]=None,
@@ -1362,7 +1356,7 @@ def manage(args: Optional[List[str]]=None, out_dir: Path=OUTPUT_DIR) -> None:
     if (args and "createsuperuser" in args) and (IN_DOCKER and not IS_TTY):
         stderr('[!] Warning: you need to pass -it to use interactive commands in docker', color='lightyellow')
         stderr('    docker run -it archivebox manage {}'.format(' '.join(args or ['...'])), color='lightyellow')
-        stderr()
+        stderr('')
 
     execute_from_command_line([f'{ARCHIVEBOX_BINARY} manage', *(args or ['help'])])
 

+ 1 - 1
archivebox/manage.py

@@ -7,7 +7,7 @@ if __name__ == '__main__':
     # versions of ./manage.py commands whenever possible. When that's not possible
     # (e.g. makemigrations), you can comment out this check temporarily
 
-    if not ('makemigrations' in sys.argv or 'migrate' in sys.argv):
+    if not ('makemigrations' in sys.argv or 'migrate' in sys.argv or 'startapp' in sys.argv):
         print("[X] Don't run ./manage.py directly (unless you are a developer running makemigrations):")
         print("[X] Don't run ./manage.py directly (unless you are a developer running makemigrations):")
         print()
         print()
         print('    Hint: Use these archivebox CLI commands instead of the ./manage.py equivalents:')
         print('    Hint: Use these archivebox CLI commands instead of the ./manage.py equivalents:')

+ 0 - 53
archivebox/parsers/__init__.py

@@ -7,7 +7,6 @@ For examples of supported import formats see tests/.
 
 __package__ = 'archivebox.parsers'
 
-import re
 from io import StringIO
 
 from typing import IO, Tuple, List, Optional
@@ -28,7 +27,6 @@ from ..util import (
     htmldecode,
     download_url,
     enforce_types,
-    URL_REGEX,
 )
 from ..index.schema import Link
 from ..logging_util import TimedProgress, log_source_saved
@@ -202,54 +200,3 @@ def save_file_as_source(path: str, timeout: int=TIMEOUT, filename: str='{ts}-{ba
     log_source_saved(source_file=source_path)
 
     return source_path
-
-
-# Check that plain text regex URL parsing works as expected
-#   this is last-line-of-defense to make sure the URL_REGEX isn't
-#   misbehaving due to some OS-level or environment level quirks (e.g. bad regex lib)
-#   the consequences of bad URL parsing could be disastrous and lead to many
-#   incorrect/badly parsed links being added to the archive, so this is worth the cost of checking
-_test_url_strs = {
-    'example.com': 0,
-    '/example.com': 0,
-    '//example.com': 0,
-    ':/example.com': 0,
-    '://example.com': 0,
-    'htt://example8.com': 0,
-    '/htt://example.com': 0,
-    'https://example': 1,
-    'https://localhost/2345': 1,
-    'https://localhost:1234/123': 1,
-    '://': 0,
-    'https://': 0,
-    'http://': 0,
-    'ftp://': 0,
-    'ftp://example.com': 0,
-    'https://example.com': 1,
-    'https://example.com/': 1,
-    'https://a.example.com': 1,
-    'https://a.example.com/': 1,
-    'https://a.example.com/what/is/happening.html': 1,
-    'https://a.example.com/what/ís/happening.html': 1,
-    'https://a.example.com/what/is/happening.html?what=1&2%20b#höw-about-this=1a': 1,
-    'https://a.example.com/what/is/happéning/?what=1&2%20b#how-aboüt-this=1a': 1,
-    'HTtpS://a.example.com/what/is/happening/?what=1&2%20b#how-about-this=1af&2f%20b': 1,
-    'https://example.com/?what=1#how-about-this=1&2%20baf': 1,
-    'https://example.com?what=1#how-about-this=1&2%20baf': 1,
-    '<test>http://example7.com</test>': 1,
-    'https://<test>': 0,
-    'https://[test]': 0,
-    'http://"test"': 0,
-    'http://\'test\'': 0,
-    '[https://example8.com/what/is/this.php?what=1]': 1,
-    '[and http://example9.com?what=1&other=3#and-thing=2]': 1,
-    '<what>https://example10.com#and-thing=2 "</about>': 1,
-    'abc<this["https://example11.com/what/is#and-thing=2?whoami=23&where=1"]that>def': 1,
-    'sdflkf[what](https://example12.com/who/what.php?whoami=1#whatami=2)?am=hi': 1,
-    '<or>http://examplehttp://15.badc</that>': 2,
-    'https://a.example.com/one.html?url=http://example.com/inside/of/another?=http://': 2,
-    '[https://a.example.com/one.html?url=http://example.com/inside/of/another?=](http://a.example.com)': 3,
-}
-for url_str, num_urls in _test_url_strs.items():
-    assert len(re.findall(URL_REGEX, url_str)) == num_urls, (
-        f'{url_str} does not contain {num_urls} urls')

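The import-time URL_REGEX self-test removed above ran on every `archivebox.parsers` import. A hedged sketch of the same last-line-of-defense checks as a standalone pytest module instead, assuming the new `find_all_urls()` helper in archivebox/util.py (added on this branch) yields the matched URLs:

```python
import pytest
from archivebox.util import find_all_urls

@pytest.mark.parametrize('text,expected_count', [
    ('example.com', 0),                        # bare domain without scheme: not archivable
    ('ftp://example.com', 0),                  # non-http(s) schemes are ignored
    ('https://example.com/', 1),
    ('<test>http://example7.com</test>', 1),   # URLs are extracted out of markup
])
def test_url_detection(text, expected_count):
    assert len(list(find_all_urls(text))) == expected_count
```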
+ 88 - 5
archivebox/parsers/generic_html.py

@@ -10,7 +10,7 @@ from ..index.schema import Link
 from ..util import (
     htmldecode,
     enforce_types,
-    URL_REGEX,
+    find_all_urls,
 )
 from html.parser import HTMLParser
 from urllib.parse import urljoin
@@ -40,10 +40,22 @@ def parse_generic_html_export(html_file: IO[str], root_url: Optional[str]=None,
         parser.feed(line)
         for url in parser.urls:
             if root_url:
-                # resolve relative urls /home.html -> https://example.com/home.html
-                url = urljoin(root_url, url)
-            
-            for archivable_url in re.findall(URL_REGEX, url):
+                url_is_absolute = (url.lower().startswith('http://') or url.lower().startswith('https://'))
+                # url = https://abc.com                       => True
+                # url = /page.php?next=https://example.com    => False
+
+                if not url_is_absolute:                       # resolve it by joining it with root_url
+                    relative_path = url
+
+                    url = urljoin(root_url, relative_path)    # https://example.com/somepage.html + /home.html
+                                                              # => https://example.com/home.html
+
+                    # special case to handle bug around // handling, crucial for urls that contain sub-urls
+                    # e.g. https://web.archive.org/web/https://example.com
+                    if did_urljoin_misbehave(root_url, relative_path, url):
+                        url = fix_urljoin_bug(url)
+
+            for archivable_url in find_all_urls(url):
                 yield Link(
                     url=htmldecode(archivable_url),
                     timestamp=str(datetime.now(timezone.utc).timestamp()),
@@ -56,3 +68,74 @@ def parse_generic_html_export(html_file: IO[str], root_url: Optional[str]=None,
 KEY = 'html'
 NAME = 'Generic HTML'
 PARSER = parse_generic_html_export
+
+
+#### WORKAROUND CODE FOR https://github.com/python/cpython/issues/96015 ####
+
+def did_urljoin_misbehave(root_url: str, relative_path: str, final_url: str) -> bool:
+    """
+    Handle urljoin edge case bug where multiple slashes get turned into a single slash:
+    - https://github.com/python/cpython/issues/96015
+    - https://github.com/ArchiveBox/ArchiveBox/issues/1411
+
+    This workaround only fixes the most common case of a sub-URL inside an outer URL, e.g.:
+       https://web.archive.org/web/https://example.com/some/inner/url
+
+    But there are other valid URLs containing // that are not fixed by this workaround, e.g.:
+       https://example.com/drives/C//some/file
+    """
+
+    # if relative path is actually an absolute url, cut off its own scheme so we check the path component only
+    relative_path = relative_path.lower()
+    if relative_path.startswith('http://') or relative_path.startswith('https://'):
+        relative_path = relative_path.split('://', 1)[-1]
+
+    # TODO: properly fix all double // getting stripped by urljoin, not just ://
+    original_path_had_suburl = '://' in relative_path
+    original_root_had_suburl = '://' in root_url[8:]     # ignore first 8 chars because root always starts with https://
+    final_joined_has_suburl = '://' in final_url[8:]     # ignore first 8 chars because final always starts with https://
+
+    urljoin_broke_suburls = (
+        (original_root_had_suburl or original_path_had_suburl)
+        and not final_joined_has_suburl
+    )
+    return urljoin_broke_suburls
+
+
+def fix_urljoin_bug(url: str, nesting_limit=5):
+    """
+    recursively replace broken suburls .../http:/... with .../http://...
+
+    basically equivalent to this for 99.9% of cases:
+      url = url.replace('/http:/',  '/http://')
+      url = url.replace('/https:/', '/https://')
+    except this handles:
+        other schemes besides http/https     (e.g. https://example.com/link/git+ssh://github.com/example)
+        other preceding separators besides / (e.g. https://example.com/login/?next=https://example.com/home)
+        fixing multiple suburls recursively
+    """
+    input_url = url
+    for _ in range(nesting_limit):
+        url = re.sub(
+            r'(?P<root>.+?)'                             # https://web.archive.org/web
+            + r'(?P<separator>[-=/_&+%$#@!*\(\\])'       # /
+            + r'(?P<subscheme>[a-zA-Z0-9+_-]{1,32}?):/'  # http:/
+            + r'(?P<suburl>[^/\\]+)',                    # example.com
+            r"\1\2\3://\4",
+            input_url,
+            flags=re.IGNORECASE | re.UNICODE,  # pass as keyword: re.sub's 4th positional arg is count, not flags
+        )
+        if url == input_url:
+            break                                        # nothing left to replace, all suburls are fixed
+        input_url = url
+
+    return url
+
+
+# sanity check to make sure workaround code works as expected and doesnt introduce *more* bugs
+assert did_urljoin_misbehave('https://web.archive.org/web/https://example.com', 'abc.html', 'https://web.archive.org/web/https:/example.com/abc.html') == True
+assert did_urljoin_misbehave('http://example.com', 'https://web.archive.org/web/http://example.com/abc.html', 'https://web.archive.org/web/http:/example.com/abc.html') == True
+assert fix_urljoin_bug('https:/example.com') == 'https:/example.com'   # should not modify original url's scheme, only sub-urls
+assert fix_urljoin_bug('https://web.archive.org/web/https:/example.com/abc.html') == 'https://web.archive.org/web/https://example.com/abc.html'
+assert fix_urljoin_bug('http://example.com/link/git+ssh:/github.com/example?next=ftp:/example.com') == 'http://example.com/link/git+ssh://github.com/example?next=ftp://example.com'
+

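Tying the two helpers together, reusing the inputs the asserts above already exercise (on affected Python versions, urljoin() collapses the `//` inside the nested sub-URL):

```python
from urllib.parse import urljoin

root = 'https://web.archive.org/web/https://example.com'
joined = urljoin(root, 'abc.html')
# on affected versions: 'https://web.archive.org/web/https:/example.com/abc.html'

if did_urljoin_misbehave(root, 'abc.html', joined):
    joined = fix_urljoin_bug(joined)
print(joined)  # 'https://web.archive.org/web/https://example.com/abc.html'
```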
+ 5 - 13
archivebox/parsers/generic_json.py

@@ -72,21 +72,13 @@ def parse_generic_json_export(json_file: IO[str], **_kwargs) -> Iterable[Link]:
 
     json_file.seek(0)
 
-    try:
-        links = json.load(json_file)
-        if type(links) != list:
-            raise Exception('JSON parser expects list of objects, maybe this is JSONL?')
-    except json.decoder.JSONDecodeError:
-        # sometimes the first line is a comment or other junk, so try without
-        json_file.seek(0)
-        first_line = json_file.readline()
-        #print('      > Trying JSON parser without first line: "', first_line.strip(), '"', sep= '')
-        links = json.load(json_file)
-        # we may fail again, which means we really don't know what to do
-
+    links = json.load(json_file)
+    if type(links) != list:
+        raise Exception('JSON parser expects list of objects, maybe this is JSONL?')
+    
     for link in links:
         if link:
-            yield jsonObjectToLink(link,json_file.name)
+            yield jsonObjectToLink(link, json_file.name)
 
 KEY = 'json'
 NAME = 'Generic JSON'

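This change makes the JSON parser strict about its input shape now that line-delimited records have their own parser (generic_jsonl.py, below). An illustrative sketch of the boundary being drawn:

```python
import json
from io import StringIO

array_export = StringIO('[{"url": "https://example.com"}]')
links = json.load(array_export)
assert type(links) == list                  # accepted by parse_generic_json_export

jsonl_export = '{"url": "https://example.com"}\n{"url": "https://example.org"}\n'
try:
    json.load(StringIO(jsonl_export))       # fails: two documents in one stream
except json.JSONDecodeError:
    rows = [json.loads(line) for line in jsonl_export.splitlines()]  # JSONL handling
    assert len(rows) == 2
```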
+ 0 - 2
archivebox/parsers/generic_jsonl.py

@@ -3,11 +3,9 @@ __package__ = 'archivebox.parsers'
 import json
 
 from typing import IO, Iterable
-from datetime import datetime, timezone
 
 from ..index.schema import Link
 from ..util import (
-    htmldecode,
     enforce_types,
     enforce_types,
 )
 )

+ 2 - 15
archivebox/parsers/generic_txt.py

@@ -1,8 +1,6 @@
 __package__ = 'archivebox.parsers'
 __description__ = 'Plain Text'

-import re
-
 from typing import IO, Iterable
 from datetime import datetime, timezone
 from pathlib import Path
@@ -11,7 +9,7 @@ from ..index.schema import Link
 from ..util import (
     htmldecode,
     enforce_types,
-    URL_REGEX
+    find_all_urls,
 )


@@ -39,7 +37,7 @@ def parse_generic_txt_export(text_file: IO[str], **_kwargs) -> Iterable[Link]:
             pass

         # otherwise look for anything that looks like a URL in the line
-        for url in re.findall(URL_REGEX, line):
+        for url in find_all_urls(line):
             yield Link(
                 url=htmldecode(url),
                 timestamp=str(datetime.now(timezone.utc).timestamp()),
@@ -48,17 +46,6 @@ def parse_generic_txt_export(text_file: IO[str], **_kwargs) -> Iterable[Link]:
                 sources=[text_file.name],
             )

-            # look inside the URL for any sub-urls, e.g. for archive.org links
-            # https://web.archive.org/web/20200531203453/https://www.reddit.com/r/socialism/comments/gu24ke/nypd_officers_claim_they_are_protecting_the_rule/fsfq0sw/
-            # -> https://www.reddit.com/r/socialism/comments/gu24ke/nypd_officers_claim_they_are_protecting_the_rule/fsfq0sw/
-            for sub_url in re.findall(URL_REGEX, line[1:]):
-                yield Link(
-                    url=htmldecode(sub_url),
-                    timestamp=str(datetime.now(timezone.utc).timestamp()),
-                    title=None,
-                    tags=None,
-                    sources=[text_file.name],
-                )

 KEY = 'txt'
 NAME = 'Generic TXT'

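The second findall pass removed above is not lost functionality: find_all_urls() surfaces nested sub-URLs on its own. A rough sketch of the expected behavior, based on the archive.org example in the removed comment and the test cases added to util.py below:

    from archivebox.util import find_all_urls

    line = 'https://web.archive.org/web/20200531203453/https://example.com/page'
    urls = list(find_all_urls(line))
    # expected to contain both the outer web.archive.org URL and the nested
    # https://example.com/page sub-URL, with no separate second pass needed
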
+ 2 - 1
archivebox/templates/core/navigation.html

@@ -6,6 +6,7 @@
     <a href="/admin/core/tag/">Tags</a> |
     <a href="/admin/core/tag/">Tags</a> |
     <a href="/admin/core/archiveresult/?o=-1">Log</a> &nbsp; &nbsp;
     <a href="/admin/core/archiveresult/?o=-1">Log</a> &nbsp; &nbsp;
     <a href="{% url 'Docs' %}" target="_blank" rel="noopener noreferrer">Docs</a> | 
     <a href="{% url 'Docs' %}" target="_blank" rel="noopener noreferrer">Docs</a> | 
+    <a href="/api">API</a> | 
     <a href="{% url 'public-index' %}">Public</a> | 
     <a href="{% url 'public-index' %}">Public</a> | 
     <a href="/admin/">Admin</a>
     <a href="/admin/">Admin</a>
      &nbsp; &nbsp;
      &nbsp; &nbsp;
@@ -16,7 +17,7 @@
         {% endblock %}
         {% block userlinks %}
             {% if user.has_usable_password %}
-                <a href="{% url 'admin:password_change' %}">Account</a> /
+                <a href="{% url 'admin:password_change' %}" title="Change your account password">Account</a> /
             {% endif %}
             <a href="{% url 'admin:logout' %}">{% trans 'Log out' %}</a>
         {% endblock %}

+ 67 - 11
archivebox/util.py

@@ -62,12 +62,12 @@ COLOR_REGEX = re.compile(r'\[(?P<arg_1>\d+)(;(?P<arg_2>\d+)(;(?P<arg_3>\d+))?)?m

 # https://mathiasbynens.be/demo/url-regex
 URL_REGEX = re.compile(
-    r'(?=('                           +
-    r'http[s]?://'                    +  # start matching from allowed schemes
-    r'(?:[a-zA-Z]|[0-9]'              +  # followed by allowed alphanum characters
-    r'|[-_$@.&+!*\(\),]'              +  #   or allowed symbols (keep hyphen first to match literal hyphen)
-    r'|[^\u0000-\u007F])+'            +  #   or allowed unicode bytes
-    r'[^\]\[<>"\'\s]+'                +  # stop parsing at these symbols
+    r'(?=('                          
+    r'http[s]?://'                     # start matching from allowed schemes
+    r'(?:[a-zA-Z]|[0-9]'               # followed by allowed alphanum characters
+    r'|[-_$@.&+!*\(\),]'               #   or allowed symbols (keep hyphen first to match literal hyphen)
+    r'|[^\u0000-\u007F])+'             #   or allowed unicode bytes
+    r'[^\]\[<>"\'\s]+'                 # stop parsing at these symbols
     r'))',
     re.IGNORECASE | re.UNICODE,
 )
@@ -90,6 +90,11 @@ def fix_url_from_markdown(url_str: str) -> str:
     helpful to fix URLs parsed from markdown e.g.
       input:  https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def).somemoretext
       result: https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def
+
+    IMPORTANT ASSUMPTION: valid urls won't have unbalanced or incorrectly nested parentheses
+    e.g. this will fail if the user actually wants to ingest a url like 'https://example.com/some_wei)(rd_url'
+         in that case it will return https://example.com/some_wei (truncated at the first unbalanced paren)
+    This assumption holds 99.9999% of the time, and for the rare edge case the user can use the url_list parser.
     """
     """
     trimmed_url = url_str
     trimmed_url = url_str
 
 
@@ -353,7 +358,8 @@ def chrome_cleanup():
     if IN_DOCKER and lexists("/home/archivebox/.config/chromium/SingletonLock"):
         remove_file("/home/archivebox/.config/chromium/SingletonLock")

-def ansi_to_html(text):
+@enforce_types
+def ansi_to_html(text: str) -> str:
     """
     """
     Based on: https://stackoverflow.com/questions/19212665/python-converting-ansi-color-codes-to-html
     Based on: https://stackoverflow.com/questions/19212665/python-converting-ansi-color-codes-to-html
     """
     """
@@ -439,11 +445,14 @@ class ExtendedEncoder(pyjson.JSONEncoder):


 ### URL PARSING TESTS / ASSERTIONS
-# they run at runtime because I like having them inline in this file,
-# I like the peace of mind knowing it's enforced at runtime across all OS's (in case the regex engine ever has any weird locale-specific quirks),
-# and these assertions are basically instant, so not a big performance cost to do it on startup
-assert fix_url_from_markdown('/a(b)c).x(y)z') == '/a(b)c'
+# Check that plain text regex URL parsing works as expected
+#   this is last-line-of-defense to make sure the URL_REGEX isn't
+#   misbehaving due to some OS-level or environment level quirks (e.g. regex engine / cpython / locale differences)
+#   the consequences of bad URL parsing could be disastrous and lead to many
+#   incorrect/badly parsed links being added to the archive, so this is worth the cost of checking
+
+assert fix_url_from_markdown('http://example.com/a(b)c).x(y)z') == 'http://example.com/a(b)c'
 assert fix_url_from_markdown('https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def).link(with)_trailingtext') == 'https://wikipedia.org/en/some_article_(Disambiguation).html?abc=def'

 URL_REGEX_TESTS = [
@@ -482,3 +491,50 @@ URL_REGEX_TESTS = [
 for urls_str, expected_url_matches in URL_REGEX_TESTS:
     url_matches = list(find_all_urls(urls_str))
     assert url_matches == expected_url_matches, 'FAILED URL_REGEX CHECK!'
+
+
+# More test cases
+_test_url_strs = {
+    'example.com': 0,
+    '/example.com': 0,
+    '//example.com': 0,
+    ':/example.com': 0,
+    '://example.com': 0,
+    'htt://example8.com': 0,
+    '/htt://example.com': 0,
+    'https://example': 1,
+    'https://localhost/2345': 1,
+    'https://localhost:1234/123': 1,
+    '://': 0,
+    'https://': 0,
+    'http://': 0,
+    'ftp://': 0,
+    'ftp://example.com': 0,
+    'https://example.com': 1,
+    'https://example.com/': 1,
+    'https://a.example.com': 1,
+    'https://a.example.com/': 1,
+    'https://a.example.com/what/is/happening.html': 1,
+    'https://a.example.com/what/ís/happening.html': 1,
+    'https://a.example.com/what/is/happening.html?what=1&2%20b#höw-about-this=1a': 1,
+    'https://a.example.com/what/is/happéning/?what=1&2%20b#how-aboüt-this=1a': 1,
+    'HTtpS://a.example.com/what/is/happening/?what=1&2%20b#how-about-this=1af&2f%20b': 1,
+    'https://example.com/?what=1#how-about-this=1&2%20baf': 1,
+    'https://example.com?what=1#how-about-this=1&2%20baf': 1,
+    '<test>http://example7.com</test>': 1,
+    'https://<test>': 0,
+    'https://[test]': 0,
+    'http://"test"': 0,
+    'http://\'test\'': 0,
+    '[https://example8.com/what/is/this.php?what=1]': 1,
+    '[and http://example9.com?what=1&other=3#and-thing=2]': 1,
+    '<what>https://example10.com#and-thing=2 "</about>': 1,
+    'abc<this["https://example11.com/what/is#and-thing=2?whoami=23&where=1"]that>def': 1,
+    'sdflkf[what](https://example12.com/who/what.php?whoami=1#whatami=2)?am=hi': 1,
+    '<or>http://examplehttp://15.badc</that>': 2,
+    'https://a.example.com/one.html?url=http://example.com/inside/of/another?=http://': 2,
+    '[https://a.example.com/one.html?url=http://example.com/inside/of/another?=](http://a.example.com)': 3,
+}
+for url_str, num_urls in _test_url_strs.items():
+    assert len(list(find_all_urls(url_str))) == num_urls, (
+        f'{url_str} does not contain {num_urls} urls')

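The (?=(...)) wrapper in URL_REGEX above is what makes those nested-URL counts possible: the lookahead match is zero-width, so re.finditer can report matches whose spans overlap instead of consuming the outer URL and skipping the inner one. An illustrative sketch using the URL_REGEX defined in this file:

    import re

    nested = 'https://web.archive.org/web/https://example.com/page'
    matches = [m.group(1) for m in re.finditer(URL_REGEX, nested)]
    # finditer re-attempts the pattern at every position, so both the outer
    # archive.org URL and the nested https://example.com/page are reported
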
+ 12 - 12
bin/build_docker.sh

@@ -18,7 +18,7 @@ which docker > /dev/null || exit 1
 which jq > /dev/null || exit 1
 # which pdm > /dev/null || exit 1

-SUPPORTED_PLATFORMS="linux/amd64,linux/arm64,linux/arm/v7"
+SUPPORTED_PLATFORMS="linux/amd64,linux/arm64"

 TAG_NAME="${1:-$(git rev-parse --abbrev-ref HEAD)}"
 VERSION="$(jq -r '.version' < "$REPO_DIR/package.json")"
@@ -80,20 +80,14 @@ echo "[+] Building archivebox:$VERSION docker image..."
 # docker build . --no-cache -t archivebox-dev \
 # replace --load with --push to deploy
 docker buildx build --platform "$SELECTED_PLATFORMS" --load . \
-               -t archivebox/archivebox \
                -t archivebox/archivebox:$TAG_NAME \
-               -t archivebox/archivebox:$VERSION \
-               -t archivebox/archivebox:$SHORT_VERSION \
                -t archivebox/archivebox:$GIT_SHA \
-               -t archivebox/archivebox:latest \
-               -t nikisweeting/archivebox \
                -t nikisweeting/archivebox:$TAG_NAME \
-               -t nikisweeting/archivebox:$VERSION \
-               -t nikisweeting/archivebox:$SHORT_VERSION \
                -t nikisweeting/archivebox:$GIT_SHA \
-               -t nikisweeting/archivebox:latest \
                -t ghcr.io/archivebox/archivebox/archivebox:$TAG_NAME \
-               -t ghcr.io/archivebox/archivebox/archivebox:$VERSION \
-               -t ghcr.io/archivebox/archivebox/archivebox:$SHORT_VERSION \
-               -t ghcr.io/archivebox/archivebox/archivebox:$GIT_SHA \
-               -t ghcr.io/archivebox/archivebox/archivebox:latest
+               -t ghcr.io/archivebox/archivebox/archivebox:$GIT_SHA
+# NOTE: the release-only tags (:latest, :$VERSION, :$SHORT_VERSION, and the bare
+# repo names) are intentionally dropped for dev builds. They are noted here
+# rather than commented out inline, because a comment line inside a
+# \-continued command terminates the command early and the remaining -t
+# arguments would then run as separate (broken) shell commands.

+ 1 - 0
bin/docker_entrypoint.sh

@@ -18,6 +18,7 @@
 # https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
 # set -o xtrace
 # set -o nounset
+shopt -s nullglob
 set -o errexit
 set -o errtrace
 set -o pipefail

+ 1 - 1
bin/lint.sh

@@ -15,7 +15,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && cd .. && pwd )"
 source "$DIR/.venv/bin/activate"
 source "$DIR/.venv/bin/activate"
 
 
 echo "[*] Running flake8..."
 echo "[*] Running flake8..."
-cd archivebox
+cd "$DIR/archivebox"
 flake8 . && echo "√ No errors found."

 echo

+ 2 - 2
bin/lock_pkgs.sh

@@ -48,7 +48,7 @@ echo

 echo "[+] Generating dev & prod requirements.txt & pdm.lock from pyproject.toml..."
 pip install --upgrade pip setuptools
-pdm self update
+pdm self update >/dev/null 2>&1 || true
 pdm venv create 3.12
 echo
 echo "pyproject.toml:    archivebox $(grep 'version = ' pyproject.toml | awk '{print $3}' | jq -r)"
@@ -73,7 +73,7 @@ cp ./pdm.dev.lock ./pip_dist/
 cp ./requirements-dev.txt ./pip_dist/

 echo
-echo "[+]] Generating package-lock.json from package.json..."
+echo "[+] Generating package-lock.json from package.json..."
 npm install -g npm
 echo
 echo "package.json:    archivebox $(jq -r '.version' package.json)"

+ 2 - 2
bin/setup.sh

@@ -27,9 +27,9 @@ if (which docker-compose > /dev/null && docker pull archivebox/archivebox:latest
     if [ -f "./index.sqlite3" ]; then
     if [ -f "./index.sqlite3" ]; then
         mv -i ~/archivebox/* ~/archivebox/data/
         mv -i ~/archivebox/* ~/archivebox/data/
     fi
     fi
-    curl -fsSL 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/main/docker-compose.yml' > docker-compose.yml
+    curl -fsSL 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/stable/docker-compose.yml' > docker-compose.yml
     mkdir -p ./etc
-    curl -fsSL 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/main/etc/sonic.cfg' > ./etc/sonic.cfg
+    curl -fsSL 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/stable/etc/sonic.cfg' > ./etc/sonic.cfg
     docker compose run --rm archivebox init --setup
     echo
     echo "[+] Starting ArchiveBox server using: docker compose up -d..."

+ 14 - 14
docker-compose.yml

@@ -48,17 +48,17 @@ services:
     #   $ docker compose restart archivebox_scheduler

     archivebox_scheduler:
-       image: archivebox/archivebox:latest
-       command: schedule --foreground --update --every=day
-       environment:
-           - TIMEOUT=120                       # use a higher timeout than the main container to give slow tasks more time when retrying
-           # - PUID=502                        # set to your host user's UID & GID if you encounter permissions issues
-           # - PGID=20
-       volumes:
-           - ./data:/data
-       # cpus: 2                               # uncomment / edit these values to limit scheduler container resource consumption
-       # mem_limit: 2048m
-       # restart: always
+        image: archivebox/archivebox:latest
+        command: schedule --foreground --update --every=day
+        environment:
+            - TIMEOUT=120                       # use a higher timeout than the main container to give slow tasks more time when retrying
+            # - PUID=502                        # set to your host user's UID & GID if you encounter permissions issues
+            # - PGID=20
+        volumes:
+            - ./data:/data
+        # cpus: 2                               # uncomment / edit these values to limit scheduler container resource consumption
+        # mem_limit: 2048m
+        # restart: always


     ### This runs the optional Sonic full-text search backend (much faster than default rg backend).
@@ -72,7 +72,7 @@ services:
             # not needed after the first run / if you already have ./etc/sonic.cfg present
             dockerfile_inline: |
                 FROM quay.io/curl/curl:latest AS config_downloader
-                RUN curl -fsSL 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/main/etc/sonic.cfg' > /tmp/sonic.cfg
+                RUN curl -fsSL 'https://raw.githubusercontent.com/ArchiveBox/ArchiveBox/stable/etc/sonic.cfg' > /tmp/sonic.cfg
                 FROM valeriansaliou/sonic:latest
                 COPY --from=config_downloader /tmp/sonic.cfg /etc/sonic.cfg
         expose:
@@ -99,7 +99,7 @@ services:
             # restricted to access from localhost by default because it has no authentication
             - 127.0.0.1:8080:8080

-    
+
     ### Example: Put Nginx in front of the ArchiveBox server for SSL termination and static file serving.
     # You can also use any other ingress provider for SSL like Apache, Caddy, Traefik, Cloudflare Tunnels, etc.

@@ -173,7 +173,7 @@ services:

     ### Example: run all your ArchiveBox traffic through a WireGuard VPN tunnel to avoid IP blocks.
     # You can also use any other VPN that works at the docker IP level, e.g. Tailscale, OpenVPN, etc.
-    
+
     # wireguard:
     #   image: linuxserver/wireguard:latest
     #   network_mode: 'service:archivebox'

+ 1 - 1
docs

@@ -1 +1 @@
-Subproject commit a1b69c51ba9b249c0b2a6efd141dbb792fc36ad2
+Subproject commit f23abba9773b67ad9f2fd04d6f2e8e056dfa6521

+ 35 - 15
package-lock.json

@@ -25,9 +25,9 @@
       }
     },
     "node_modules/@babel/runtime-corejs2": {
-      "version": "7.24.4",
-      "resolved": "https://registry.npmjs.org/@babel/runtime-corejs2/-/runtime-corejs2-7.24.4.tgz",
-      "integrity": "sha512-ZCKqyUKt/Coimg+3Kafu43yNetgYnTXzNbEGAgxc81J5sI0qFNbQ613w7PNny+SmijAmGVroL0GDvx5rG/JI5Q==",
+      "version": "7.24.5",
+      "resolved": "https://registry.npmjs.org/@babel/runtime-corejs2/-/runtime-corejs2-7.24.5.tgz",
+      "integrity": "sha512-cC9jiO6s/IN+xwCHYy1AGrcFJ4bwgIwb8HX1KaoEpRsznLlO4x9eBP6AX7RIeMSWlQqEj2WHox637OS8cDq6Ew==",
       "dependencies": {
       "dependencies": {
         "core-js": "^2.6.12",
         "core-js": "^2.6.12",
         "regenerator-runtime": "^0.14.0"
         "regenerator-runtime": "^0.14.0"
@@ -203,9 +203,9 @@
       "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA=="
       "integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA=="
     },
     },
     "node_modules/@types/node": {
     "node_modules/@types/node": {
-      "version": "20.12.7",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz",
-      "integrity": "sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==",
+      "version": "20.12.8",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.8.tgz",
+      "integrity": "sha512-NU0rJLJnshZWdE/097cdCBbyW1h4hEg0xpovcoAQYHl8dnEyp/NAOiE45pvc+Bd1Dt+2r94v2eGFpQJ4R7g+2w==",
       "optional": true,
       "optional": true,
       "dependencies": {
       "dependencies": {
         "undici-types": "~5.26.4"
         "undici-types": "~5.26.4"
@@ -713,9 +713,9 @@
       "integrity": "sha512-3VdM/SXBZX2omc9JF9nOPCtDaYQ67BGp5CoLpIQlO2KCAPETs8TcDHacF26jXadGbvUteZzRTeos2fhID5+ucQ=="
       "integrity": "sha512-3VdM/SXBZX2omc9JF9nOPCtDaYQ67BGp5CoLpIQlO2KCAPETs8TcDHacF26jXadGbvUteZzRTeos2fhID5+ucQ=="
     },
     },
     "node_modules/dompurify": {
     "node_modules/dompurify": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.0.tgz",
-      "integrity": "sha512-yoU4rhgPKCo+p5UrWWWNKiIq+ToGqmVVhk0PmMYBK4kRsR3/qhemNFL8f6CFmBd4gMwm3F4T7HBoydP5uY07fA=="
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.2.tgz",
+      "integrity": "sha512-hLGGBI1tw5N8qTELr3blKjAML/LY4ANxksbS612UiJyDfyf/2D092Pvm+S7pmeTGJRqvlJkFzBoHBQKgQlOQVg=="
     },
     "node_modules/domutils": {
       "version": "1.5.1",
@@ -1655,6 +1655,26 @@
         "node": ">=18"
         "node": ">=18"
       }
       }
     },
     },
+    "node_modules/puppeteer-core/node_modules/ws": {
+      "version": "8.16.0",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz",
+      "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==",
+      "engines": {
+        "node": ">=10.0.0"
+      },
+      "peerDependencies": {
+        "bufferutil": "^4.0.1",
+        "utf-8-validate": ">=5.0.2"
+      },
+      "peerDependenciesMeta": {
+        "bufferutil": {
+          "optional": true
+        },
+        "utf-8-validate": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/qs": {
     "node_modules/qs": {
       "version": "6.5.3",
       "version": "6.5.3",
       "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz",
       "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz",
@@ -2071,9 +2091,9 @@
       }
     },
     "node_modules/tough-cookie": {
-      "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
-      "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
+      "version": "4.1.4",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz",
+      "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==",
       "dependencies": {
       "dependencies": {
         "psl": "^1.1.33",
         "psl": "^1.1.33",
         "punycode": "^2.1.1",
         "punycode": "^2.1.1",
@@ -2276,9 +2296,9 @@
       "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
       "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
     },
     },
     "node_modules/ws": {
     "node_modules/ws": {
-      "version": "8.16.0",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz",
-      "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==",
+      "version": "8.17.0",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.0.tgz",
+      "integrity": "sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow==",
       "engines": {
       "engines": {
         "node": ">=10.0.0"
         "node": ">=10.0.0"
       },
       },

+ 0 - 1128
pdm.lock

@@ -1,1128 +0,0 @@
-# This file is @generated by PDM.
-# It is not intended for manual editing.
-
-[metadata]
-groups = ["default", "ldap", "sonic"]
-strategy = ["cross_platform", "inherit_metadata"]
-lock_version = "4.4.1"
-content_hash = "sha256:b7dd7f385f9511475f0778131d62b1405e3f4ea2732be447036e6f7e03199596"
-
-[[package]]
-name = "annotated-types"
-version = "0.6.0"
-requires_python = ">=3.8"
-summary = "Reusable constraint types to use with typing.Annotated"
-groups = ["default"]
-files = [
-    {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
-    {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
-]
-
-[[package]]
-name = "asgiref"
-version = "3.8.1"
-requires_python = ">=3.8"
-summary = "ASGI specs, helper code, and adapters"
-groups = ["default", "ldap"]
-dependencies = [
-    "typing-extensions>=4; python_version < \"3.11\"",
-]
-files = [
-    {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"},
-    {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
-]
-
-[[package]]
-name = "asttokens"
-version = "2.4.1"
-summary = "Annotate AST trees with source code positions"
-groups = ["default"]
-dependencies = [
-    "six>=1.12.0",
-]
-files = [
-    {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
-    {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
-]
-
-[[package]]
-name = "brotli"
-version = "1.1.0"
-summary = "Python bindings for the Brotli compression library"
-groups = ["default"]
-marker = "implementation_name == \"cpython\""
-files = [
-    {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"},
-    {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"},
-    {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"},
-    {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"},
-    {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"},
-    {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"},
-    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"},
-    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
-    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
-    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
-    {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
-    {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
-    {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
-    {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"},
-    {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"},
-    {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"},
-    {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"},
-    {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"},
-    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"},
-    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
-    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
-    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
-    {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
-    {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
-    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
-    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
-    {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
-    {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"},
-    {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"},
-    {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"},
-    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"},
-    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
-    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
-    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
-    {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
-    {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
-    {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
-]
-
-[[package]]
-name = "brotlicffi"
-version = "1.1.0.0"
-requires_python = ">=3.7"
-summary = "Python CFFI bindings to the Brotli library"
-groups = ["default"]
-marker = "implementation_name != \"cpython\""
-dependencies = [
-    "cffi>=1.0.0",
-]
-files = [
-    {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"},
-    {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"},
-    {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814"},
-    {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820"},
-    {file = "brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb"},
-    {file = "brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613"},
-    {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca"},
-    {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391"},
-    {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8"},
-    {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35"},
-    {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d"},
-    {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:246f1d1a90279bb6069de3de8d75a8856e073b8ff0b09dcca18ccc14cec85979"},
-    {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc4bc5d82bc56ebd8b514fb8350cfac4627d6b0743382e46d033976a5f80fab6"},
-    {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c26ecb14386a44b118ce36e546ce307f4810bc9598a6e6cb4f7fca725ae7e6"},
-    {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca72968ae4eaf6470498d5c2887073f7efe3b1e7d7ec8be11a06a79cc810e990"},
-    {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:add0de5b9ad9e9aa293c3aa4e9deb2b61e99ad6c1634e01d01d98c03e6a354cc"},
-    {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b6068e0f3769992d6b622a1cd2e7835eae3cf8d9da123d7f51ca9c1e9c333e5"},
-    {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8557a8559509b61e65083f8782329188a250102372576093c88930c875a69838"},
-    {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a7ae37e5d79c5bdfb5b4b99f2715a6035e6c5bf538c3746abc8e26694f92f33"},
-    {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391151ec86bb1c683835980f4816272a87eaddc46bb91cbf44f62228b84d8cca"},
-    {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f3711be9290f0453de8eed5275d93d286abe26b08ab4a35d7452caa1fef532f"},
-    {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a807d760763e398bbf2c6394ae9da5815901aa93ee0a37bca5efe78d4ee3171"},
-    {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa8ca0623b26c94fccc3a1fdd895be1743b838f3917300506d04aa3346fd2a14"},
-    {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3de0cf28a53a3238b252aca9fed1593e9d36c1d116748013339f0949bfc84112"},
-    {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6be5ec0e88a4925c91f3dea2bb0013b3a2accda6f77238f76a34a1ea532a1cb0"},
-    {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d9eb71bb1085d996244439154387266fd23d6ad37161f6f52f1cd41dd95a3808"},
-    {file = "brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13"},
-]
-
-[[package]]
-name = "certifi"
-version = "2024.2.2"
-requires_python = ">=3.6"
-summary = "Python package for providing Mozilla's CA Bundle."
-groups = ["default"]
-files = [
-    {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
-    {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
-]
-
-[[package]]
-name = "cffi"
-version = "1.16.0"
-requires_python = ">=3.8"
-summary = "Foreign Function Interface for Python calling C code."
-groups = ["default"]
-marker = "implementation_name != \"cpython\""
-dependencies = [
-    "pycparser",
-]
-files = [
-    {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
-    {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
-    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
-    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
-    {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
-    {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
-    {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
-    {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
-    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
-    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
-    {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
-    {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
-    {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
-    {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
-    {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
-    {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
-    {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
-    {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
-]
-
-[[package]]
-name = "charset-normalizer"
-version = "3.3.2"
-requires_python = ">=3.7.0"
-summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-groups = ["default"]
-files = [
-    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
-    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
-]
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-summary = "Cross-platform colored terminal text."
-groups = ["default"]
-marker = "sys_platform == \"win32\""
-files = [
-    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
-    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-
-[[package]]
-name = "croniter"
-version = "2.0.5"
-requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6"
-summary = "croniter provides iteration for datetime object with cron like format"
-groups = ["default"]
-dependencies = [
-    "python-dateutil",
-    "pytz>2021.1",
-]
-files = [
-    {file = "croniter-2.0.5-py2.py3-none-any.whl", hash = "sha256:fdbb44920944045cc323db54599b321325141d82d14fa7453bc0699826bbe9ed"},
-    {file = "croniter-2.0.5.tar.gz", hash = "sha256:f1f8ca0af64212fbe99b1bee125ee5a1b53a9c1b433968d8bca8817b79d237f3"},
-]
-
-[[package]]
-name = "dateparser"
-version = "1.2.0"
-requires_python = ">=3.7"
-summary = "Date parsing library designed to parse dates from HTML pages"
-groups = ["default"]
-dependencies = [
-    "python-dateutil",
-    "pytz",
-    "regex!=2019.02.19,!=2021.8.27",
-    "tzlocal",
-]
-files = [
-    {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"},
-    {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"},
-]
-
-[[package]]
-name = "decorator"
-version = "5.1.1"
-requires_python = ">=3.5"
-summary = "Decorators for Humans"
-groups = ["default"]
-files = [
-    {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
-    {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
-]
-
-[[package]]
-name = "django"
-version = "4.2.11"
-requires_python = ">=3.8"
-summary = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
-groups = ["default", "ldap"]
-dependencies = [
-    "asgiref<4,>=3.6.0",
-    "sqlparse>=0.3.1",
-    "tzdata; sys_platform == \"win32\"",
-]
-files = [
-    {file = "Django-4.2.11-py3-none-any.whl", hash = "sha256:ddc24a0a8280a0430baa37aff11f28574720af05888c62b7cfe71d219f4599d3"},
-    {file = "Django-4.2.11.tar.gz", hash = "sha256:6e6ff3db2d8dd0c986b4eec8554c8e4f919b5c1ff62a5b4390c17aff2ed6e5c4"},
-]
-
-[[package]]
-name = "django-auth-ldap"
-version = "4.8.0"
-requires_python = ">=3.8"
-summary = "Django LDAP authentication backend"
-groups = ["ldap"]
-dependencies = [
-    "Django>=3.2",
-    "python-ldap>=3.1",
-]
-files = [
-    {file = "django-auth-ldap-4.8.0.tar.gz", hash = "sha256:604250938ddc9fda619f247c7a59b0b2f06e53a7d3f46a156f28aa30dd71a738"},
-    {file = "django_auth_ldap-4.8.0-py3-none-any.whl", hash = "sha256:4b4b944f3c28bce362f33fb6e8db68429ed8fd8f12f0c0c4b1a4344a7ef225ce"},
-]
-
-[[package]]
-name = "django-extensions"
-version = "3.2.3"
-requires_python = ">=3.6"
-summary = "Extensions for Django"
-groups = ["default"]
-dependencies = [
-    "Django>=3.2",
-]
-files = [
-    {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"},
-    {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"},
-]
-
-[[package]]
-name = "django-ninja"
-version = "1.1.0"
-requires_python = ">=3.7"
-summary = "Django Ninja - Fast Django REST framework"
-groups = ["default"]
-dependencies = [
-    "Django>=3.1",
-    "pydantic<3.0.0,>=2.0",
-]
-files = [
-    {file = "django_ninja-1.1.0-py3-none-any.whl", hash = "sha256:6330c3497061d9fd1f43c1200f85c13aab7687110e2899f8304e5aa476c10b44"},
-    {file = "django_ninja-1.1.0.tar.gz", hash = "sha256:87bff046416a2653ed2fbef1408e101292bf8170684821bac82accfd73bef059"},
-]
-
-[[package]]
-name = "exceptiongroup"
-version = "1.2.1"
-requires_python = ">=3.7"
-summary = "Backport of PEP 654 (exception groups)"
-groups = ["default"]
-marker = "python_version < \"3.11\""
-files = [
-    {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"},
-    {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"},
-]
-
-[[package]]
-name = "executing"
-version = "2.0.1"
-requires_python = ">=3.5"
-summary = "Get the currently executing AST node of a frame, and other information"
-groups = ["default"]
-files = [
-    {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"},
-    {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"},
-]
-
-[[package]]
-name = "feedparser"
-version = "6.0.11"
-requires_python = ">=3.6"
-summary = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
-groups = ["default"]
-dependencies = [
-    "sgmllib3k",
-]
-files = [
-    {file = "feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45"},
-    {file = "feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5"},
-]
-
-[[package]]
-name = "greenlet"
-version = "3.0.3"
-requires_python = ">=3.7"
-summary = "Lightweight in-process concurrent programming"
-groups = ["default"]
-marker = "platform_machine != \"armv7l\""
-files = [
-    {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
-    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
-    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"},
-    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"},
-    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"},
-    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"},
-    {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"},
-    {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"},
-    {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"},
-    {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"},
-    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"},
-    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"},
-    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"},
-    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"},
-    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"},
-    {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"},
-    {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"},
-    {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"},
-    {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"},
-    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"},
-    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"},
-    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"},
-    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"},
-    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"},
-    {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"},
-    {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"},
-    {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"},
-    {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"},
-]
-
-[[package]]
-name = "idna"
-version = "3.7"
-requires_python = ">=3.5"
-summary = "Internationalized Domain Names in Applications (IDNA)"
-groups = ["default"]
-files = [
-    {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
-    {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
-]
-
-[[package]]
-name = "ipython"
-version = "8.23.0"
-requires_python = ">=3.10"
-summary = "IPython: Productive Interactive Computing"
-groups = ["default"]
-dependencies = [
-    "colorama; sys_platform == \"win32\"",
-    "decorator",
-    "exceptiongroup; python_version < \"3.11\"",
-    "jedi>=0.16",
-    "matplotlib-inline",
-    "pexpect>4.3; sys_platform != \"win32\" and sys_platform != \"emscripten\"",
-    "prompt-toolkit<3.1.0,>=3.0.41",
-    "pygments>=2.4.0",
-    "stack-data",
-    "traitlets>=5.13.0",
-    "typing-extensions; python_version < \"3.12\"",
-]
-files = [
-    {file = "ipython-8.23.0-py3-none-any.whl", hash = "sha256:07232af52a5ba146dc3372c7bf52a0f890a23edf38d77caef8d53f9cdc2584c1"},
-    {file = "ipython-8.23.0.tar.gz", hash = "sha256:7468edaf4f6de3e1b912e57f66c241e6fd3c7099f2ec2136e239e142e800274d"},
-]
-
-[[package]]
-name = "jedi"
-version = "0.19.1"
-requires_python = ">=3.6"
-summary = "An autocompletion tool for Python that can be used for text editors."
-groups = ["default"]
-dependencies = [
-    "parso<0.9.0,>=0.8.3",
-]
-files = [
-    {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
-    {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
-]
-
-[[package]]
-name = "matplotlib-inline"
-version = "0.1.7"
-requires_python = ">=3.8"
-summary = "Inline Matplotlib backend for Jupyter"
-groups = ["default"]
-dependencies = [
-    "traitlets",
-]
-files = [
-    {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"},
-    {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"},
-]
-
-[[package]]
-name = "mutagen"
-version = "1.47.0"
-requires_python = ">=3.7"
-summary = "read and write audio tags for many formats"
-groups = ["default"]
-files = [
-    {file = "mutagen-1.47.0-py3-none-any.whl", hash = "sha256:edd96f50c5907a9539d8e5bba7245f62c9f520aef333d13392a79a4f70aca719"},
-    {file = "mutagen-1.47.0.tar.gz", hash = "sha256:719fadef0a978c31b4cf3c956261b3c58b6948b32023078a2117b1de09f0fc99"},
-]
-
-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-requires_python = ">=3.5"
-summary = "Type system extensions for programs checked with the mypy type checker."
-groups = ["default"]
-files = [
-    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
-    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
-]
-
-[[package]]
-name = "parso"
-version = "0.8.4"
-requires_python = ">=3.6"
-summary = "A Python Parser"
-groups = ["default"]
-files = [
-    {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
-    {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
-]
-
-[[package]]
-name = "pexpect"
-version = "4.9.0"
-summary = "Pexpect allows easy control of interactive console applications."
-groups = ["default"]
-marker = "sys_platform != \"win32\" and sys_platform != \"emscripten\""
-dependencies = [
-    "ptyprocess>=0.5",
-]
-files = [
-    {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
-    {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
-]
-
-[[package]]
-name = "playwright"
-version = "1.43.0"
-requires_python = ">=3.8"
-summary = "A high-level API to automate web browsers"
-groups = ["default"]
-marker = "platform_machine != \"armv7l\""
-dependencies = [
-    "greenlet==3.0.3",
-    "pyee==11.1.0",
-]
-files = [
-    {file = "playwright-1.43.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b03b12bd4da9c2cfb78dff820deac8b52892fe3c2f89a4d95d6f08c59e41deb9"},
-    {file = "playwright-1.43.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e9ec21b141727392f630761c7f4dec46d80c98243614257cc501b64ff636d337"},
-    {file = "playwright-1.43.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:e05a8d8fb2040c630429cca07e843c8fa33059717837c8f50c01b7d1fc651ce1"},
-    {file = "playwright-1.43.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:50d9a5c07c76456945a2296d63f78fdf6eb11aed3e8d39bb5ccbda760a8d6d41"},
-    {file = "playwright-1.43.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87191272c40b4c282cf2c9449ca3acaf705f38ac6e2372270f1617ce16b661b8"},
-    {file = "playwright-1.43.0-py3-none-win32.whl", hash = "sha256:bd8b818904b17e2914be23e7bc2a340b203f57fe81678520b10f908485b056ea"},
-    {file = "playwright-1.43.0-py3-none-win_amd64.whl", hash = "sha256:9b7bd707eeeaebee47f656b2de90aa9bd85e9ca2c6af7a08efd73896299e4d50"},
-]
-
-[[package]]
-name = "prompt-toolkit"
-version = "3.0.43"
-requires_python = ">=3.7.0"
-summary = "Library for building powerful interactive command lines in Python"
-groups = ["default"]
-dependencies = [
-    "wcwidth",
-]
-files = [
-    {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"},
-    {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"},
-]
-
-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-summary = "Run a subprocess in a pseudo terminal"
-groups = ["default"]
-marker = "sys_platform != \"win32\" and sys_platform != \"emscripten\""
-files = [
-    {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
-    {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
-]
-
-[[package]]
-name = "pure-eval"
-version = "0.2.2"
-summary = "Safely evaluate AST nodes without side effects"
-groups = ["default"]
-files = [
-    {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"},
-    {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"},
-]
-
-[[package]]
-name = "pyasn1"
-version = "0.6.0"
-requires_python = ">=3.8"
-summary = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
-groups = ["ldap"]
-files = [
-    {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"},
-    {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"},
-]
-
-[[package]]
-name = "pyasn1-modules"
-version = "0.4.0"
-requires_python = ">=3.8"
-summary = "A collection of ASN.1-based protocols modules"
-groups = ["ldap"]
-dependencies = [
-    "pyasn1<0.7.0,>=0.4.6",
-]
-files = [
-    {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
-    {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
-]
-
-[[package]]
-name = "pycparser"
-version = "2.22"
-requires_python = ">=3.8"
-summary = "C parser in Python"
-groups = ["default"]
-marker = "implementation_name != \"cpython\""
-files = [
-    {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
-    {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
-]
-
-[[package]]
-name = "pycryptodomex"
-version = "3.20.0"
-requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-summary = "Cryptographic library for Python"
-groups = ["default"]
-files = [
-    {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:59af01efb011b0e8b686ba7758d59cf4a8263f9ad35911bfe3f416cee4f5c08c"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e186342cfcc3aafaad565cbd496060e5a614b441cacc3995ef0091115c1f6c5"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:25cd61e846aaab76d5791d006497134602a9e451e954833018161befc3b5b9ed"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:9c682436c359b5ada67e882fec34689726a09c461efd75b6ea77b2403d5665b7"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-win32.whl", hash = "sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e"},
-    {file = "pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc"},
-    {file = "pycryptodomex-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458"},
-    {file = "pycryptodomex-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c"},
-    {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b"},
-    {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea"},
-    {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781"},
-    {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88afd7a3af7ddddd42c2deda43d53d3dfc016c11327d0915f90ca34ebda91499"},
-    {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d3584623e68a5064a04748fb6d76117a21a7cb5eaba20608a41c7d0c61721794"},
-    {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1"},
-    {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dcac11031a71348faaed1f403a0debd56bf5404232284cf8c761ff918886ebc"},
-    {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69138068268127cd605e03438312d8f271135a33140e2742b417d027a0539427"},
-    {file = "pycryptodomex-3.20.0.tar.gz", hash = "sha256:7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e"},
-]
-
-[[package]]
-name = "pydantic"
-version = "2.7.1"
-requires_python = ">=3.8"
-summary = "Data validation using Python type hints"
-groups = ["default"]
-dependencies = [
-    "annotated-types>=0.4.0",
-    "pydantic-core==2.18.2",
-    "typing-extensions>=4.6.1",
-]
-files = [
-    {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"},
-    {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"},
-]
-
-[[package]]
-name = "pydantic-core"
-version = "2.18.2"
-requires_python = ">=3.8"
-summary = "Core functionality for Pydantic validation and serialization"
-groups = ["default"]
-dependencies = [
-    "typing-extensions!=4.7.0,>=4.6.0",
-]
-files = [
-    {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"},
-    {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"},
-    {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"},
-    {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"},
-    {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"},
-    {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"},
-    {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"},
-    {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"},
-    {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"},
-    {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"},
-    {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"},
-    {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"},
-    {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"},
-    {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"},
-    {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"},
-]
-
-[[package]]
-name = "pyee"
-version = "11.1.0"
-requires_python = ">=3.8"
-summary = "A rough port of Node.js's EventEmitter to Python with a few tricks of its own"
-groups = ["default"]
-marker = "platform_machine != \"armv7l\""
-dependencies = [
-    "typing-extensions",
-]
-files = [
-    {file = "pyee-11.1.0-py3-none-any.whl", hash = "sha256:5d346a7d0f861a4b2e6c47960295bd895f816725b27d656181947346be98d7c1"},
-    {file = "pyee-11.1.0.tar.gz", hash = "sha256:b53af98f6990c810edd9b56b87791021a8f54fd13db4edd1142438d44ba2263f"},
-]
-
-[[package]]
-name = "pygments"
-version = "2.17.2"
-requires_python = ">=3.7"
-summary = "Pygments is a syntax highlighting package written in Python."
-groups = ["default"]
-files = [
-    {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
-    {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
-]
-
-[[package]]
-name = "python-crontab"
-version = "3.0.0"
-summary = "Python Crontab API"
-groups = ["default"]
-dependencies = [
-    "python-dateutil",
-]
-files = [
-    {file = "python-crontab-3.0.0.tar.gz", hash = "sha256:79fb7465039ddfd4fb93d072d6ee0d45c1ac8bf1597f0686ea14fd4361dba379"},
-    {file = "python_crontab-3.0.0-py3-none-any.whl", hash = "sha256:6d5ba3c190ec76e4d252989a1644fcb233dbf53fbc8fceeb9febe1657b9fb1d4"},
-]
-
-[[package]]
-name = "python-dateutil"
-version = "2.9.0.post0"
-requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-summary = "Extensions to the standard Python datetime module"
-groups = ["default"]
-dependencies = [
-    "six>=1.5",
-]
-files = [
-    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
-    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
-]
-
-[[package]]
-name = "python-ldap"
-version = "3.4.4"
-requires_python = ">=3.6"
-summary = "Python modules for implementing LDAP clients"
-groups = ["ldap"]
-dependencies = [
-    "pyasn1-modules>=0.1.5",
-    "pyasn1>=0.3.7",
-]
-files = [
-    {file = "python-ldap-3.4.4.tar.gz", hash = "sha256:7edb0accec4e037797705f3a05cbf36a9fde50d08c8f67f2aef99a2628fab828"},
-]
-
-[[package]]
-name = "pytz"
-version = "2024.1"
-summary = "World timezone definitions, modern and historical"
-groups = ["default"]
-files = [
-    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
-    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
-]
-
-[[package]]
-name = "regex"
-version = "2024.4.16"
-requires_python = ">=3.7"
-summary = "Alternative regular expression module, to replace re."
-groups = ["default"]
-files = [
-    {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb83cc090eac63c006871fd24db5e30a1f282faa46328572661c0a24a2323a08"},
-    {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c91e1763696c0eb66340c4df98623c2d4e77d0746b8f8f2bee2c6883fd1fe18"},
-    {file = "regex-2024.4.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10188fe732dec829c7acca7422cdd1bf57d853c7199d5a9e96bb4d40db239c73"},
-    {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:956b58d692f235cfbf5b4f3abd6d99bf102f161ccfe20d2fd0904f51c72c4c66"},
-    {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a70b51f55fd954d1f194271695821dd62054d949efd6368d8be64edd37f55c86"},
-    {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c02fcd2bf45162280613d2e4a1ca3ac558ff921ae4e308ecb307650d3a6ee51"},
-    {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ed75ea6892a56896d78f11006161eea52c45a14994794bcfa1654430984b22"},
-    {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd727ad276bb91928879f3aa6396c9a1d34e5e180dce40578421a691eeb77f47"},
-    {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7cbc5d9e8a1781e7be17da67b92580d6ce4dcef5819c1b1b89f49d9678cc278c"},
-    {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:78fddb22b9ef810b63ef341c9fcf6455232d97cfe03938cbc29e2672c436670e"},
-    {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:445ca8d3c5a01309633a0c9db57150312a181146315693273e35d936472df912"},
-    {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:95399831a206211d6bc40224af1c635cb8790ddd5c7493e0bd03b85711076a53"},
-    {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7731728b6568fc286d86745f27f07266de49603a6fdc4d19c87e8c247be452af"},
-    {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4facc913e10bdba42ec0aee76d029aedda628161a7ce4116b16680a0413f658a"},
-    {file = "regex-2024.4.16-cp310-cp310-win32.whl", hash = "sha256:911742856ce98d879acbea33fcc03c1d8dc1106234c5e7d068932c945db209c0"},
-    {file = "regex-2024.4.16-cp310-cp310-win_amd64.whl", hash = "sha256:e0a2df336d1135a0b3a67f3bbf78a75f69562c1199ed9935372b82215cddd6e2"},
-    {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1210365faba7c2150451eb78ec5687871c796b0f1fa701bfd2a4a25420482d26"},
-    {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ab40412f8cd6f615bfedea40c8bf0407d41bf83b96f6fc9ff34976d6b7037fd"},
-    {file = "regex-2024.4.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fd80d1280d473500d8086d104962a82d77bfbf2b118053824b7be28cd5a79ea5"},
-    {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb966fdd9217e53abf824f437a5a2d643a38d4fd5fd0ca711b9da683d452969"},
-    {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20b7a68444f536365af42a75ccecb7ab41a896a04acf58432db9e206f4e525d6"},
-    {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b74586dd0b039c62416034f811d7ee62810174bb70dffcca6439f5236249eb09"},
-    {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8290b44d8b0af4e77048646c10c6e3aa583c1ca67f3b5ffb6e06cf0c6f0f89"},
-    {file = "regex-2024.4.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2d80a6749724b37853ece57988b39c4e79d2b5fe2869a86e8aeae3bbeef9eb0"},
-    {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3a1018e97aeb24e4f939afcd88211ace472ba566efc5bdf53fd8fd7f41fa7170"},
-    {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8d015604ee6204e76569d2f44e5a210728fa917115bef0d102f4107e622b08d5"},
-    {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:3d5ac5234fb5053850d79dd8eb1015cb0d7d9ed951fa37aa9e6249a19aa4f336"},
-    {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:0a38d151e2cdd66d16dab550c22f9521ba79761423b87c01dae0a6e9add79c0d"},
-    {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159dc4e59a159cb8e4e8f8961eb1fa5d58f93cb1acd1701d8aff38d45e1a84a6"},
-    {file = "regex-2024.4.16-cp311-cp311-win32.whl", hash = "sha256:ba2336d6548dee3117520545cfe44dc28a250aa091f8281d28804aa8d707d93d"},
-    {file = "regex-2024.4.16-cp311-cp311-win_amd64.whl", hash = "sha256:8f83b6fd3dc3ba94d2b22717f9c8b8512354fd95221ac661784df2769ea9bba9"},
-    {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80b696e8972b81edf0af2a259e1b2a4a661f818fae22e5fa4fa1a995fb4a40fd"},
-    {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d61ae114d2a2311f61d90c2ef1358518e8f05eafda76eaf9c772a077e0b465ec"},
-    {file = "regex-2024.4.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ba6745440b9a27336443b0c285d705ce73adb9ec90e2f2004c64d95ab5a7598"},
-    {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295004b2dd37b0835ea5c14a33e00e8cfa3c4add4d587b77287825f3418d310"},
-    {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aba818dcc7263852aabb172ec27b71d2abca02a593b95fa79351b2774eb1d2b"},
-    {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0800631e565c47520aaa04ae38b96abc5196fe8b4aa9bd864445bd2b5848a7a"},
-    {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08dea89f859c3df48a440dbdcd7b7155bc675f2fa2ec8c521d02dc69e877db70"},
-    {file = "regex-2024.4.16-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eeaa0b5328b785abc344acc6241cffde50dc394a0644a968add75fcefe15b9d4"},
-    {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4e819a806420bc010489f4e741b3036071aba209f2e0989d4750b08b12a9343f"},
-    {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c2d0e7cbb6341e830adcbfa2479fdeebbfbb328f11edd6b5675674e7a1e37730"},
-    {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:91797b98f5e34b6a49f54be33f72e2fb658018ae532be2f79f7c63b4ae225145"},
-    {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:d2da13568eff02b30fd54fccd1e042a70fe920d816616fda4bf54ec705668d81"},
-    {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:370c68dc5570b394cbaadff50e64d705f64debed30573e5c313c360689b6aadc"},
-    {file = "regex-2024.4.16-cp312-cp312-win32.whl", hash = "sha256:904c883cf10a975b02ab3478bce652f0f5346a2c28d0a8521d97bb23c323cc8b"},
-    {file = "regex-2024.4.16-cp312-cp312-win_amd64.whl", hash = "sha256:785c071c982dce54d44ea0b79cd6dfafddeccdd98cfa5f7b86ef69b381b457d9"},
-    {file = "regex-2024.4.16.tar.gz", hash = "sha256:fa454d26f2e87ad661c4f0c5a5fe4cf6aab1e307d1b94f16ffdfcb089ba685c0"},
-]
-
-[[package]]
-name = "requests"
-version = "2.31.0"
-requires_python = ">=3.7"
-summary = "Python HTTP for Humans."
-groups = ["default"]
-dependencies = [
-    "certifi>=2017.4.17",
-    "charset-normalizer<4,>=2",
-    "idna<4,>=2.5",
-    "urllib3<3,>=1.21.1",
-]
-files = [
-    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
-    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
-]
-
-[[package]]
-name = "setuptools"
-version = "69.5.1"
-requires_python = ">=3.8"
-summary = "Easily download, build, install, upgrade, and uninstall Python packages"
-groups = ["default"]
-files = [
-    {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"},
-    {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"},
-]
-
-[[package]]
-name = "sgmllib3k"
-version = "1.0.0"
-summary = "Py3k port of sgmllib."
-groups = ["default"]
-files = [
-    {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
-]
-
-[[package]]
-name = "six"
-version = "1.16.0"
-requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-summary = "Python 2 and 3 compatibility utilities"
-groups = ["default"]
-files = [
-    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
-    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-
-[[package]]
-name = "sonic-client"
-version = "1.0.0"
-summary = "python client for sonic search backend"
-groups = ["sonic"]
-files = [
-    {file = "sonic-client-1.0.0.tar.gz", hash = "sha256:fe324c7354670488ed84847f6a6727d3cb5fb3675cb9b61396dcf5720e5aca66"},
-    {file = "sonic_client-1.0.0-py3-none-any.whl", hash = "sha256:291bf292861e97a2dd765ff0c8754ea9631383680d31a63ec3da6f5aa5f4beda"},
-]
-
-[[package]]
-name = "sqlparse"
-version = "0.5.0"
-requires_python = ">=3.8"
-summary = "A non-validating SQL parser."
-groups = ["default", "ldap"]
-files = [
-    {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"},
-    {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"},
-]
-
-[[package]]
-name = "stack-data"
-version = "0.6.3"
-summary = "Extract data from python stack frames and tracebacks for informative displays"
-groups = ["default"]
-dependencies = [
-    "asttokens>=2.1.0",
-    "executing>=1.2.0",
-    "pure-eval",
-]
-files = [
-    {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
-    {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
-]
-
-[[package]]
-name = "traitlets"
-version = "5.14.3"
-requires_python = ">=3.8"
-summary = "Traitlets Python configuration system"
-groups = ["default"]
-files = [
-    {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"},
-    {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"},
-]
-
-[[package]]
-name = "typing-extensions"
-version = "4.11.0"
-requires_python = ">=3.8"
-summary = "Backported and Experimental Type Hints for Python 3.8+"
-groups = ["default", "ldap"]
-files = [
-    {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
-    {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
-]
-
-[[package]]
-name = "tzdata"
-version = "2024.1"
-requires_python = ">=2"
-summary = "Provider of IANA time zone data"
-groups = ["default", "ldap"]
-marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
-files = [
-    {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
-    {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
-]
-
-[[package]]
-name = "tzlocal"
-version = "5.2"
-requires_python = ">=3.8"
-summary = "tzinfo object for the local timezone"
-groups = ["default"]
-dependencies = [
-    "tzdata; platform_system == \"Windows\"",
-]
-files = [
-    {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"},
-    {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"},
-]
-
-[[package]]
-name = "urllib3"
-version = "2.2.1"
-requires_python = ">=3.8"
-summary = "HTTP library with thread-safe connection pooling, file post, and more."
-groups = ["default"]
-files = [
-    {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
-    {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
-]
-
-[[package]]
-name = "w3lib"
-version = "2.1.2"
-requires_python = ">=3.7"
-summary = "Library of web-related functions"
-groups = ["default"]
-files = [
-    {file = "w3lib-2.1.2-py3-none-any.whl", hash = "sha256:c4432926e739caa8e3f49f5de783f336df563d9490416aebd5d39fb896d264e7"},
-    {file = "w3lib-2.1.2.tar.gz", hash = "sha256:ed5b74e997eea2abe3c1321f916e344144ee8e9072a6f33463ee8e57f858a4b1"},
-]
-
-[[package]]
-name = "wcwidth"
-version = "0.2.13"
-summary = "Measures the displayed width of unicode strings in a terminal"
-groups = ["default"]
-files = [
-    {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
-    {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
-]
-
-[[package]]
-name = "websockets"
-version = "12.0"
-requires_python = ">=3.8"
-summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
-groups = ["default"]
-files = [
-    {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
-    {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
-    {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"},
-    {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"},
-    {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"},
-    {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"},
-    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"},
-    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"},
-    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"},
-    {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"},
-    {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"},
-    {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"},
-    {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"},
-    {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"},
-    {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"},
-    {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"},
-    {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"},
-    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"},
-    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"},
-    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"},
-    {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"},
-    {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"},
-    {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"},
-    {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"},
-    {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"},
-    {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"},
-    {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"},
-    {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"},
-    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"},
-    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"},
-    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"},
-    {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"},
-    {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"},
-    {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
-    {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
-]
-
-[[package]]
-name = "yt-dlp"
-version = "2024.4.9"
-requires_python = ">=3.8"
-summary = "A feature-rich command-line audio/video downloader"
-groups = ["default"]
-dependencies = [
-    "brotli; implementation_name == \"cpython\"",
-    "brotlicffi; implementation_name != \"cpython\"",
-    "certifi",
-    "mutagen",
-    "pycryptodomex",
-    "requests<3,>=2.31.0",
-    "urllib3<3,>=1.26.17",
-    "websockets>=12.0",
-]
-files = [
-    {file = "yt_dlp-2024.4.9-py3-none-any.whl", hash = "sha256:d6ff6798bd114cc48763564fcb2f296464ec1604f731e69b07a8814c89b170a2"},
-    {file = "yt_dlp-2024.4.9.tar.gz", hash = "sha256:7ee90572b4d313b582b99c89e4eccf779b57ff54edc331873c6b3fba77faa8b0"},
-]

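(Since every pdm.lock entry above is removed rather than updated, the lock state is presumably meant to be regenerated locally from pyproject.toml instead of being tracked here. A minimal sketch of that workflow, using the pdm commands referenced in the pyproject.toml comments below — the export flags are an assumption on my part, not something this commit specifies:

    pdm install                        # resolve pyproject.toml deps and write a fresh local pdm.lock
    pdm update --unconstrained         # re-lock the main dependencies to their latest allowed versions
    pdm update --dev --unconstrained   # same for the dev-dependency groups
    pdm export -f requirements -o requirements.txt --without-hashes   # re-pin requirements.txt from the lock
)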
+ 20 - 20
pyproject.toml

@@ -12,32 +12,31 @@ readme = "README.md"
 # pdm install
 # pdm update --unconstrained
 dependencies = [
+    # Last Bumped: 2024-04-25
     # Base Framework and Language Dependencies
     "setuptools>=69.5.1",
-    "django>=4.2.0,<5.0",
+    "django>=5.0.4,<6.0",
     "django-ninja>=1.1.0",
     "django-extensions>=3.2.3",
     "mypy-extensions>=1.0.0",
-
     # Python Helper Libraries
     "requests>=2.31.0",
     "dateparser>=1.0.0",
     "feedparser>=6.0.11",
-    "w3lib>=1.22.0",
-
+    "w3lib>=2.1.2",
     # Feature-Specific Dependencies
-    "python-crontab>=2.5.1",          # for: archivebox schedule
-    "croniter>=0.3.34",               # for: archivebox schedule
-    "ipython>5.0.0",                  # for: archivebox shell
-
+    "python-crontab>=3.0.0",          # for: archivebox schedule
+    "croniter>=2.0.5",                # for: archivebox schedule
+    "ipython>=8.23.0",                # for: archivebox shell
     # Extractor Dependencies
     "yt-dlp>=2024.4.9",               # for: media
-    "playwright>=1.43.0; platform_machine != 'armv7l'",  # WARNING: playwright doesn't have any sdist, causes trouble on build systems that refuse to install wheel-only packages
-
+    # "playwright>=1.43.0; platform_machine != 'armv7l'",  # WARNING: playwright doesn't have any sdist, causes trouble on build systems that refuse to install wheel-only packages
     # TODO: add more extractors
     #  - gallery-dl
     #  - scihubdl
     #  - See Github issues for more...
+    "django-signal-webhooks>=0.3.0",
+    "django-admin-data-views>=0.3.1",
 ]
 
 homepage = "https://github.com/ArchiveBox/ArchiveBox"
@@ -59,9 +58,6 @@ classifiers = [
     "Natural Language :: English",
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.7",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -100,10 +96,10 @@ ldap = [
 # pdm update --dev --unconstrained
 [tool.pdm.dev-dependencies]
 build = [
+    # "pdm",                           # usually installed by apt/brew, dont double-install with pip
     "setuptools>=69.5.1",
     "pip",
     "wheel",
-    "pdm",
     "homebrew-pypi-poet>=0.10.0",      # for: generating archivebox.rb brewfile list of python packages
 ]
 docs = [
@@ -115,10 +111,11 @@ debug = [
     "django-debug-toolbar",
     "djdt_flamegraph",
     "ipdb",
+    "requests-tracker>=0.3.3",
 ]
 test = [
-    "pdm[pytest]",
     "pytest",
+    "bottle",
 ]
 lint = [
     "flake8",
@@ -126,6 +123,12 @@ lint = [
     "django-stubs",
 ]
 
+[tool.pdm.scripts]
+lint = "./bin/lint.sh"
+test = "./bin/test.sh"
+# all = {composite = ["lint mypackage/", "test -v tests/"]}
+
+
 [build-system]
 requires = ["pdm-backend"]
 build-backend = "pdm.backend"
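
The `[tool.pdm.scripts]` table added above (and removed from its old location in the next hunk) wires the repo's shell scripts into PDM's task runner. Assuming a standard PDM install, the aliases should be runnable from the project root roughly as:

    pdm run lint    # executes ./bin/lint.sh
    pdm run test    # executes ./bin/test.sh
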
@@ -134,11 +137,6 @@ build-backend = "pdm.backend"
 archivebox = "archivebox.cli:main"
 
 
-[tool.pdm.scripts]
-lint = "./bin/lint.sh"
-test = "./bin/test.sh"
-# all = {composite = ["lint mypackage/", "test -v tests/"]}
-
 [tool.pytest.ini_options]
 testpaths = [ "tests" ]
 
@@ -154,6 +152,8 @@ explicit_package_bases = true
 # exclude = "pdm/(pep582/|models/in_process/.+\\.py)"
 plugins = ["mypy_django_plugin.main"]
 
+[tool.django-stubs]
+django_settings_module = "core.settings"
 
 
 [project.urls]
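
The new `[tool.django-stubs]` table supplies the settings module consumed by the `mypy_django_plugin.main` plugin enabled just above it, so type-checking can introspect the Django project. A sketch of how it gets picked up (assumes mypy and django-stubs are installed, e.g. from the lint dependency group):

    mypy archivebox/    # the plugin loads core.settings per [tool.django-stubs]
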

+ 14 - 9
requirements.txt

@@ -2,54 +2,59 @@
 # Please do not edit it manually.
 
 annotated-types==0.6.0
+anyio==4.3.0
 asgiref==3.8.1
 asttokens==2.4.1
 brotli==1.1.0; implementation_name == "cpython"
 brotlicffi==1.1.0.0; implementation_name != "cpython"
 certifi==2024.2.2
-cffi==1.16.0; implementation_name != "cpython"
+cffi==1.16.0; platform_python_implementation != "PyPy" or implementation_name != "cpython"
 charset-normalizer==3.3.2
 colorama==0.4.6; sys_platform == "win32"
 croniter==2.0.5
+cryptography==42.0.7
 dateparser==1.2.0
 decorator==5.1.1
-django==4.2.11
+django==5.0.4
 django-auth-ldap==4.8.0
 django-extensions==3.2.3
 django-ninja==1.1.0
+django-settings-holder==0.1.2
+django-signal-webhooks==0.3.0
 exceptiongroup==1.2.1; python_version < "3.11"
 executing==2.0.1
 feedparser==6.0.11
-greenlet==3.0.3; platform_machine != "armv7l"
+h11==0.14.0
+httpcore==1.0.5
+httpx==0.27.0
 idna==3.7
-ipython==8.23.0
+ipython==8.24.0
 jedi==0.19.1
 matplotlib-inline==0.1.7
 mutagen==1.47.0
 mypy-extensions==1.0.0
 parso==0.8.4
 pexpect==4.9.0; sys_platform != "win32" and sys_platform != "emscripten"
-playwright==1.43.0; platform_machine != "armv7l"
 prompt-toolkit==3.0.43
 ptyprocess==0.7.0; sys_platform != "win32" and sys_platform != "emscripten"
 pure-eval==0.2.2
 pyasn1==0.6.0
 pyasn1-modules==0.4.0
-pycparser==2.22; implementation_name != "cpython"
+pycparser==2.22; platform_python_implementation != "PyPy" or implementation_name != "cpython"
 pycryptodomex==3.20.0
 pydantic==2.7.1
 pydantic-core==2.18.2
-pyee==11.1.0; platform_machine != "armv7l"
-pygments==2.17.2
+pygments==2.18.0
 python-crontab==3.0.0
 python-dateutil==2.9.0.post0
 python-ldap==3.4.4
 pytz==2024.1
-regex==2024.4.16
+regex==2024.4.28
 requests==2.31.0
 setuptools==69.5.1
 sgmllib3k==1.0.0
 six==1.16.0
+sniffio==1.3.1
 sonic-client==1.0.0
 sqlparse==0.5.0
 stack-data==0.6.3
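
Note: the `; ...` suffixes in the pin list above are PEP 508 environment markers; pip skips a line whenever its marker evaluates false on the installing interpreter (e.g. `brotlicffi` is only installed off CPython). A marker can be checked locally with the `packaging` library (a sketch; assumes `packaging` is available):

    python -c 'from packaging.markers import Marker; print(Marker("implementation_name != \"cpython\"").evaluate())'
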