diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..27246bf --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1 @@ +2cf0e060ed126537dd993896b6aa793e2a6b9e80 diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..3891848 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: monthly + groups: + gh-actions: + patterns: + - "*" diff --git a/.github/workflows/build-and-release.yml b/.github/workflows/build-and-release.yml new file mode 100644 index 0000000..9cd962f --- /dev/null +++ b/.github/workflows/build-and-release.yml @@ -0,0 +1,79 @@ +name: build-and-release + +on: [push, pull_request] + +defaults: + run: + shell: bash + +jobs: + + build: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-13, macos-latest] + python: ["3.11"] + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v7 + with: + python-version: ${{ matrix.python }} + + - name: Set up project + run: uv sync + + - name: Run checks + run: | + ./scripts/check + ./scripts/format + + - name: Assert no changes + run: git diff --exit-code + + - name: Build + run: ./scripts/build + + - name: Rename binary + # Glob in source location because on windows pyinstaller creates a file + # named "pferd.exe" + run: mv dist/pferd* dist/pferd-${{ matrix.os }} + + - name: Upload binary + uses: actions/upload-artifact@v4 + with: + name: pferd-${{ matrix.os }} + path: dist/pferd-${{ matrix.os }} + + release: + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/v') + needs: build + steps: + + - name: Download binaries + uses: actions/download-artifact@v4 + with: + pattern: pferd-* + merge-multiple: true + + - name: Rename binaries + run: | + mv pferd-ubuntu-latest pferd-linux + mv pferd-windows-latest pferd-windows.exe + mv 
pferd-macos-13 pferd-mac-x86_64 + mv pferd-macos-latest pferd-mac + + - name: Create release + uses: softprops/action-gh-release@v2 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + files: | + pferd-linux + pferd-windows.exe + pferd-mac + pferd-mac-x86_64 diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml deleted file mode 100644 index c451789..0000000 --- a/.github/workflows/package.yml +++ /dev/null @@ -1,74 +0,0 @@ -name: Package Application with Pyinstaller - -on: - push: - branches: - - "*" - tags: - - "v*" - -jobs: - build: - - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] - - steps: - - uses: actions/checkout@v2 - - - uses: actions/setup-python@v2 - with: - python-version: '3.x' - - - name: "Install dependencies" - run: "pip install setuptools pyinstaller rich requests beautifulsoup4 -f --upgrade" - - - name: "Install sync_url.py" - run: "pyinstaller sync_url.py -F" - - - name: "Move artifact" - run: "mv dist/sync_url* dist/sync_url-${{ matrix.os }}" - - - uses: actions/upload-artifact@v2 - with: - name: "Pferd Sync URL" - path: "dist/sync_url*" - - release: - name: Release - - needs: [build] - runs-on: ubuntu-latest - if: startsWith(github.ref, 'refs/tags/') - - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - steps: - - name: "Checkout" - uses: actions/checkout@v2 - - - name: "Download artifacts" - uses: actions/download-artifact@v2 - with: - name: "Pferd Sync URL" - - - name: "look at folder structure" - run: "ls -lah" - - - name: "Rename releases" - run: "mv sync_url-macos-latest pferd_sync_url_mac && mv sync_url-ubuntu-latest pferd_sync_url_linux && mv sync_url-windows-latest pferd_sync_url.exe" - - - name: "Create release" - uses: softprops/action-gh-release@v1 - - - name: "Upload release artifacts" - uses: softprops/action-gh-release@v1 - with: - body: "Download the correct sync_url for your platform and run it in the terminal or CMD. 
You might need to make it executable on Linux/Mac with `chmod +x `." - files: | - pferd_sync_url_mac - pferd_sync_url_linux - pferd_sync_url.exe diff --git a/.gitignore b/.gitignore index fbb852b..36ab590 100644 --- a/.gitignore +++ b/.gitignore @@ -1,14 +1,11 @@ -__pycache__/ -.venv/ -venv/ -.idea/ -build/ .mypy_cache/ -.tmp/ -.env -.vscode -ilias_cookies.txt +/.venv/ +/PFERD.egg-info/ +__pycache__/ +/.vscode/ +/.idea/ -# PyInstaller -sync_url.spec -dist/ +# pyinstaller +/pferd.spec +/build/ +/dist/ diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..2a2848c --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,311 @@ +# Changelog + +All notable changes to this project will be documented in this file. The format +is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +This project has its own custom versioning scheme. Version numbers consist of +three parts (e. g. `3.1.5`). +- The first number is increased on major rewrites or changes. What classifies as + a major change is up to the maintainers. This is pretty rare and a PFERD + version 4 should hopefully not be necessary. +- The second number is increased on backwards-incompatible changes in behaviour. + This refers to any change that would make an existing setup behave differently + (e. g. renaming options or changing crawler behaviour). If this number is + increased, it may be necessary for you to adapt your own setup. +- The third number is increased on backwards-compatible changes (e. g. adding + new options or commands, changing documentation, fixing bugs). Updates that + only increase this number should be safe and not require manual intervention. + +We will try to correctly classify changes as backwards-compatible or +backwards-incompatible, but may occasionally make mistakes or stumble across +ambiguous situations. 
+ +## Unreleased + +### Added +- Store the description when using the `internet-shortcut` link format +- Support for basic auth with the kit-ipd crawler + +### Fixed +- Event loop errors on Windows with Python 3.14 +- Sanitize `/` in headings in kit-ipd crawler +- Crawl info tab again + +## 3.8.3 - 2025-07-01 + +### Added +- Support for link collections. + In "fancy" mode, a single HTML file with multiple links is generated. + In all other modes, PFERD creates a folder for the collection and a new file + for every link inside. + +### Fixed +- Crawling of exercises with instructions +- Don't download unavailable elements. + Elements that are unavailable (for example, because their availability is + time restricted) will not download the HTML for the info page anymore. +- `base_url` argument for `ilias-web` crawler causing crashes + +## 3.8.2 - 2025-04-29 + +### Changed +- Explicitly mention that wikis are not supported at the moment and ignore them + +### Fixed +- Ilias-native login +- Exercise crawling + +## 3.8.1 - 2025-04-17 + +### Fixed +- Description html files now specify a UTF-8 encoding +- Images in descriptions now always have a white background + +## 3.8.0 - 2025-04-16 + +### Added +- Support for ILIAS 9 + +### Changed +- Added prettier CSS to forum threads +- Downloaded forum threads now link to the forum instead of the ILIAS thread +- Increase minimum supported Python version to 3.11 +- Do not crawl nested courses (courses linked in other courses) + +### Fixed +- File links in report on Windows +- TOTP authentication in KIT Shibboleth +- Forum crawling only considering the first 20 entries + +## 3.7.0 - 2024-11-13 + +### Added +- Support for MOB videos in page descriptions +- Clickable links in the report to directly open new/modified/not-deleted files +- Support for non KIT shibboleth login + +### Changed +- Remove videos from description pages +- Perform ILIAS cycle detection after processing the transform to allow + ignoring duplicated elements +- Parse
headings (h1-h3) as folders in kit-ipd crawler + +### Fixed +- Personal desktop/dashboard/favorites crawling +- Crawling of nested courses +- Downloading of links with no target URL +- Handle row flex on description pages +- Add `` heading to forum threads to fix mime type detection +- Handle groups in cards + +## 3.6.0 - 2024-10-23 + +### Added +- Generic `ilias-web` crawler and `ilias-web` CLI command +- Support for the course overview page. Using this URL as a target might cause + duplication warnings, as subgroups are listed separately. +- Support for named capture groups in regex transforms +- Crawl custom item groups as folders + +### Fixed +- Normalization of meeting names in cards +- Sanitization of slashes in exercise container names + +## 3.5.2 - 2024-04-14 + +### Fixed +- Crawling of personal desktop with ILIAS 8 +- Crawling of empty personal desktops + +## 3.5.1 - 2024-04-09 + +### Added +- Support for ILIAS 8 + +### Fixed +- Video name deduplication + +## 3.5.0 - 2023-09-13 + +### Added +- `no-delete-prompt-override` conflict resolution strategy +- Support for ILIAS learning modules +- `show_not_deleted` option to stop printing the "Not Deleted" status or report + message. This combines nicely with the `no-delete-prompt-override` strategy, + causing PFERD to mostly ignore local-only files. 
+- Support for mediacast video listings +- Crawling of files in info tab + +### Changed +- Remove size suffix for files in content pages + +### Fixed +- Crawling of courses with the timeline view as the default tab +- Crawling of file and custom opencast cards +- Crawling of button cards without descriptions +- Abort crawling when encountering an unexpected ilias root page redirect +- Sanitize ascii control characters on Windows +- Crawling of paginated past meetings +- Ignore SCORM learning modules + +## 3.4.3 - 2022-11-29 + +### Added +- Missing documentation for `forums` option + +### Changed +- Clear up error message shown when multiple paths are found to an element + +### Fixed +- IPD crawler unnecessarily appending trailing slashes +- Crawling opencast when ILIAS is set to English + +## 3.4.2 - 2022-10-26 + +### Added +- Recognize and crawl content pages in cards +- Recognize and ignore surveys + +### Fixed +- Forum crawling crashing when a thread has no messages at all +- Forum crawling crashing when a forum has no threads at all +- Ilias login failing in some cases +- Crawling of paginated future meetings +- IPD crawler handling of URLs without trailing slash + +## 3.4.1 - 2022-08-17 + +### Added +- Download of page descriptions +- Forum download support +- `pass` authenticator + +### Changed +- Add `cpp` extension to default `link_regex` of IPD crawler +- Mention hrefs in IPD crawler's `--explain` output for users of `link_regex` option +- Simplify default IPD crawler `link_regex` + +### Fixed +- IPD crawler crashes on some sites +- Meeting name normalization for yesterday, today and tomorrow +- Crawling of meeting file previews +- Login with new login button html layout +- Descriptions for courses are now placed in the correct subfolder when + downloading the whole desktop + +## 3.4.0 - 2022-05-01 + +### Added +- Message when Shibboleth entitlements need to be manually reviewed +- Links to unofficial packages and repology in the readme + +### Changed +- 
Increase minimum supported Python version to 3.9 +- Support video listings with more columns +- Use UTF-8 when reading/writing the config file + +### Fixed +- Crash during authentication when the Shibboleth session is still valid + +## 3.3.1 - 2022-01-15 + +### Fixed +- ILIAS login +- Local video cache if `windows_paths` is enabled + +## 3.3.0 - 2022-01-09 + +### Added +- A KIT IPD crawler +- Support for ILIAS cards +- (Rudimentary) support for content pages +- Support for multi-stream videos +- Support for ILIAS 7 + +### Removed +- [Interpolation](https://docs.python.org/3/library/configparser.html#interpolation-of-values) in config file + +### Fixed +- Crawling of recursive courses +- Crawling files directly placed on the personal desktop +- Ignore timestamps at the unix epoch as they crash on windows + +## 3.2.0 - 2021-08-04 + +### Added +- `--skip` command line option +- Support for ILIAS booking objects + +### Changed +- Using multiple path segments on left side of `-name->` now results in an + error. This was already forbidden by the documentation but silently accepted + by PFERD. +- More consistent path printing in some `--explain` messages + +### Fixed +- Nondeterministic name deduplication due to ILIAS reordering elements +- More exceptions are handled properly + +## 3.1.0 - 2021-06-13 + +If your config file doesn't do weird things with transforms, it should continue +to work. If your `-re->` arrows behave weirdly, try replacing them with +`-exact-re->` arrows. If you're on Windows, you might need to switch from `\` +path separators to `/` in your regex rules. 
+ +### Added +- `skip` option for crawlers +- Rules with `>>` instead of `>` as arrow head +- `-exact-re->` arrow (behaves like `-re->` did previously) + +### Changed +- The `-re->` arrow can now rename directories (like `-->`) +- Use `/` instead of `\` as path separator for (regex) rules on Windows +- Use the label to the left for exercises instead of the button name to + determine the folder name + +### Fixed +- Video pagination handling in ILIAS crawler + +## 3.0.1 - 2021-06-01 + +### Added +- `credential-file` authenticator +- `--credential-file` option for `kit-ilias-web` command +- Warning if using concurrent tasks with `kit-ilias-web` + +### Changed +- Cookies are now stored in a text-based format + +### Fixed +- Date parsing now also works correctly in non-group exercises + +## 3.0.0 - 2021-05-31 + +### Added +- Proper config files +- Concurrent crawling +- Crawl external ILIAS links +- Crawl uploaded exercise solutions +- Explain what PFERD is doing and why (`--explain`) +- More control over output (`--status`, `--report`) +- Debug transform rules with `--debug-transforms` +- Print report after exiting via Ctrl+C +- Store crawler reports in `.report` JSON file +- Extensive config file documentation (`CONFIG.md`) +- Documentation for developers (`DEV.md`) +- This changelog + +### Changed +- Rewrote almost everything +- Better error messages +- Redesigned CLI +- Redesigned transform rules +- ILIAS crawling logic (paths may be different) +- Better support for weird paths on Windows +- Set user agent (`PFERD/`) + +### Removed +- Backwards compatibility with 2.x +- Python files as config files +- Some types of crawlers diff --git a/CONFIG.md b/CONFIG.md new file mode 100644 index 0000000..b87f75c --- /dev/null +++ b/CONFIG.md @@ -0,0 +1,540 @@ +# Config file format + +A config file consists of sections. A section begins with a `[section]` header, +which is followed by a list of `key = value` pairs. Comments must be on their +own line and start with `#`. 
Multiline values must be indented beyond their key. +Boolean values can be `yes` or `no`. For more details and some examples on the +format, see the [configparser documentation][cp-file] +([interpolation][cp-interp] is disabled). + +[cp-file]: https://docs.python.org/3/library/configparser.html#supported-ini-file-structure "Supported INI File Structure" +[cp-interp]: https://docs.python.org/3/library/configparser.html#interpolation-of-values "Interpolation of values" + +## The `DEFAULT` section + +This section contains global configuration values. It can also be used to set +default values for the other sections. + +- `working_dir`: The directory PFERD operates in. Set to an absolute path to + make PFERD operate the same regardless of where it is executed from. All other + paths in the config file are interpreted relative to this path. If this path + is relative, it is interpreted relative to the script's working dir. `~` is + expanded to the current user's home directory. (Default: `.`) +- `explain`: Whether PFERD should log and explain its actions and decisions in + detail. (Default: `no`) +- `status`: Whether PFERD should print status updates (like `Crawled ...`, + `Added ...`) while running a crawler. (Default: `yes`) +- `report`: Whether PFERD should print a report of added, changed and deleted + local files for all crawlers before exiting. (Default: `yes`) +- `show_not_deleted`: Whether PFERD should print messages in status and report + when a local-only file wasn't deleted. Combines nicely with the + `no-delete-prompt-overwrite` conflict resolution strategy. +- `share_cookies`: Whether crawlers should share cookies where applicable. For + example, some crawlers share cookies if they crawl the same website using the + same account. (Default: `yes`) + +## The `crawl:*` sections + +Sections whose names start with `crawl:` are used to configure crawlers. The +rest of the section name specifies the name of the crawler. + +A crawler synchronizes a remote resource to a local directory. There are +different types of crawlers for different kinds of resources, e.g. ILIAS +courses or lecture websites.
+ +Each crawl section represents an instance of a specific type of crawler. The +`type` option is used to specify the crawler type. The crawler's name is usually +used as the output directory. New crawlers can be created simply by adding a new +crawl section to the config file. + +Depending on a crawler's type, it may have different options. For more details, +see the type's [documentation](#crawler-types) below. The following options are +common to all crawlers: + +- `type`: The available types are specified in [this section](#crawler-types). +- `skip`: Whether the crawler should be skipped during normal execution. The + crawler can still be executed manually using the `--crawler` or `-C` flags. + (Default: `no`) +- `output_dir`: The directory the crawler synchronizes files to. A crawler will + never place any files outside this directory. (Default: the crawler's name) +- `redownload`: When to download a file that is already present locally. + (Default: `never-smart`) + - `never`: If a file is present locally, it is not downloaded again. + - `never-smart`: Like `never`, but PFERD tries to detect if an already + downloaded files has changed via some (unreliable) heuristics. + - `always`: All files are always downloaded, regardless of whether they are + already present locally. + - `always-smart`: Like `always`, but PFERD tries to avoid unnecessary + downloads via some (unreliable) heuristics. +- `on_conflict`: What to do when the local and remote versions of a file or + directory differ, including when a file is replaced by a directory or a + directory by a file. (Default: `prompt`) + - `prompt`: Always ask the user before overwriting or deleting local files + and directories. + - `local-first`: Always keep the local file or directory. Equivalent to + using `prompt` and always choosing "no". Implies that `redownload` is set + to `never`. + - `remote-first`: Always keep the remote file or directory. Equivalent to + using `prompt` and always choosing "yes". 
+ - `no-delete`: Never delete local files, but overwrite local files if the + remote file is different. + - `no-delete-prompt-overwrite`: Never delete local files, but prompt to + overwrite local files if the remote file is different. Combines nicely + with the `show_not_deleted` option. +- `transform`: Rules for renaming and excluding certain files and directories. + For more details, see [this section](#transformation-rules). (Default: empty) +- `tasks`: The maximum number of concurrent tasks (such as crawling or + downloading). (Default: `1`) +- `downloads`: How many of those tasks can be download tasks at the same time. + Must not be greater than `tasks`. (Default: Same as `tasks`) +- `task_delay`: Time (in seconds) that the crawler should wait between + subsequent tasks. Can be used as a sort of rate limit to avoid unnecessary + load for the crawl target. (Default: `0.0`) +- `windows_paths`: Whether PFERD should find alternative names for paths that + are invalid on Windows. (Default: `yes` on Windows, `no` otherwise) + +Some crawlers may also require credentials for authentication. To configure how +the crawler obtains its credentials, the `auth` option is used. It is set to the +full name of an auth section (including the `auth:` prefix). + +Here is a simple example: + +```ini +[auth:example] +type = simple +username = foo +password = bar + +[crawl:something] +type = some-complex-crawler +auth = auth:example +on_conflict = no-delete +tasks = 3 +``` + +## The `auth:*` sections + +Sections whose names start with `auth:` are used to configure authenticators. An +authenticator provides a username and a password to one or more crawlers. + +Authenticators work similar to crawlers: A section represents an authenticator +instance whose name is the rest of the section name. The type is specified by +the `type` option. + +Depending on an authenticator's type, it may have different options. For more +details, see the type's [documentation](#authenticator-types) below. 
The only +option common to all authenticators is `type`: + +- `type`: The types are specified in [this section](#authenticator-types). + +## Crawler types + +### The `local` crawler + +This crawler crawls a local directory. It is really simple and mostly useful for +testing different setups. The various delay options are meant to make the +crawler simulate a slower, network-based crawler. + +- `target`: Path to the local directory to crawl. (Required) +- `crawl_delay`: Artificial delay (in seconds) to simulate for crawl requests. + (Default: `0.0`) +- `download_delay`: Artificial delay (in seconds) to simulate for download + requests. (Default: `0.0`) +- `download_speed`: Download speed (in bytes per second) to simulate. (Optional) + +### The `kit-ipd` crawler + +This crawler crawls a KIT-IPD page by url. The root page can be crawled from +outside the KIT network so you will be informed about any new/deleted files, +but downloading files requires you to be within. Adding a short delay between +requests is likely a good idea. + +- `target`: URL to a KIT-IPD page +- `link_regex`: A regex that is matched against the `href` part of links. If it + matches, the given link is downloaded as a file. This is used to extract + files from KIT-IPD pages. (Default: `^.*?[^/]+\.(pdf|zip|c|cpp|java)$`) +- `auth`: Name of auth section to use for basic authentication. (Optional) + +### The `ilias-web` crawler + +This crawler crawls a generic ILIAS instance. 
+ +Inspired by [this ILIAS downloader][ilias-dl], the following configurations should work +out of the box for the corresponding universities: + +[ilias-dl]: https://github.com/V3lop5/ilias-downloader/blob/main/configs "ilias-downloader configs" + +| University | `base_url` | `login_type` | `client_id` | +|-----------------|-----------------------------------------|--------------|---------------| +| FH Aachen | https://www.ili.fh-aachen.de | local | elearning | +| HHU Düsseldorf | https://ilias.hhu.de | local | UniRZ | +| Uni Köln | https://www.ilias.uni-koeln.de/ilias | local | uk | +| Uni Konstanz | https://ilias.uni-konstanz.de | local | ILIASKONSTANZ | +| Uni Stuttgart | https://ilias3.uni-stuttgart.de | local | Uni_Stuttgart | +| Uni Tübingen | https://ovidius.uni-tuebingen.de/ilias3 | shibboleth | | +| KIT ILIAS Pilot | https://pilot.ilias.studium.kit.edu | shibboleth | pilot | + +If your university isn't listed, try navigating to your instance's login page. +Assuming no custom login service is used, the URL will look something like this: + +```jinja +{{ base_url }}/login.php?client_id={{ client_id }}&cmd=force_login&lang= +``` + +If the values work, feel free to submit a PR and add them to the table above. + +- `base_url`: The URL where the ILIAS instance is located. (Required) +- `login_type`: How you authenticate. (Required) + - `local`: Use `client_id` for authentication. + - `shibboleth`: Use shibboleth for authentication. +- `client_id`: An ID used for authentication if `login_type` is `local`. Is + ignored if `login_type` is `shibboleth`. +- `target`: The ILIAS element to crawl. (Required) + - `desktop`: Crawl your personal desktop / dashboard + - ``: Crawl the course with the given id + - ``: Crawl a given element by URL (preferably the permanent URL linked + at the bottom of its ILIAS page). + This also supports the "My Courses" overview page to download *all* + courses. 
Note that this might produce confusing local directory layouts + and duplication warnings if you are a member of an ILIAS group. The + `desktop` target is generally preferable. +- `auth`: Name of auth section to use for login. (Required) +- `tfa_auth`: Name of auth section to use for two-factor authentication. Only + uses the auth section's password. (Default: Anonymous `tfa` authenticator) +- `links`: How to represent external links. (Default: `fancy`) + - `ignore`: Don't download links. + - `plaintext`: A text file containing only the URL. + - `fancy`: A HTML file looking like the ILIAS link element. + - `internet-shortcut`: An internet shortcut file (`.url` file). +- `link_redirect_delay`: Time (in seconds) until `fancy` link files will + redirect to the actual URL. Set to a negative value to disable the automatic + redirect. (Default: `-1`) +- `videos`: Whether to download videos. (Default: `no`) +- `forums`: Whether to download forum threads. (Default: `no`) +- `http_timeout`: The timeout (in seconds) for all HTTP requests. (Default: + `20.0`) + +### The `kit-ilias-web` crawler + +This crawler crawls the KIT ILIAS instance. + +ILIAS is not great at handling too many concurrent requests. To avoid +unnecessary load, please limit `tasks` to `1`. + +There is a spike in ILIAS usage at the beginning of lectures, so please don't +run PFERD during those times. + +If you're automatically running PFERD periodically (e. g. via cron or a systemd +timer), please randomize the start time or at least don't use the full hour. For +systemd timers, this can be accomplished using the `RandomizedDelaySec` option. +Also, please schedule the script to run in periods of low activity. Running the +script once per day should be fine. + +- `target`: The ILIAS element to crawl. 
(Required) + - `desktop`: Crawl your personal desktop + - ``: Crawl the course with the given id + - ``: Crawl a given element by URL (preferably the permanent URL linked + at the bottom of its ILIAS page) +- `auth`: Name of auth section to use for login. (Required) +- `tfa_auth`: Name of auth section to use for two-factor authentication. Only + uses the auth section's password. (Default: Anonymous `tfa` authenticator) +- `links`: How to represent external links. (Default: `fancy`) + - `ignore`: Don't download links. + - `plaintext`: A text file containing only the URL. + - `fancy`: A HTML file looking like the ILIAS link element. + - `internet-shortcut`: An internet shortcut file (`.url` file). +- `link_redirect_delay`: Time (in seconds) until `fancy` link files will + redirect to the actual URL. Set to a negative value to disable the automatic + redirect. (Default: `-1`) +- `videos`: Whether to download videos. (Default: `no`) +- `forums`: Whether to download forum threads. (Default: `no`) +- `http_timeout`: The timeout (in seconds) for all HTTP requests. (Default: + `20.0`) + +## Authenticator types + +### The `simple` authenticator + +With this authenticator, the username and password can be set directly in the +config file. If the username or password are not specified, the user is prompted +via the terminal. + +- `username`: The username. (Optional) +- `password`: The password. (Optional) + +### The `credential-file` authenticator + +This authenticator reads a username and a password from a credential file. + +- `path`: Path to the credential file. (Required) + +The credential file has exactly two lines (trailing newline optional). The first +line starts with `username=` and contains the username, the second line starts +with `password=` and contains the password. The username and password may +contain any characters except a line break. 
+ +``` +username=AzureDiamond +password=hunter2 +``` + +### The `keyring` authenticator + +This authenticator uses the system keyring to store passwords. The username can +be set directly in the config file. If the username is not specified, the user +is prompted via the terminal. If the keyring contains no entry or the entry is +incorrect, the user is prompted for a password via the terminal and the password +is stored in the keyring. + +- `username`: The username. (Optional) +- `keyring_name`: The service name PFERD uses for storing credentials. (Default: + `PFERD`) + +### The `pass` authenticator + +This authenticator queries the [`pass` password manager][pass] for a username +and password. It tries to be mostly compatible with [browserpass][browserpass] +and [passff][passff], so see those links for an overview of the format. If PFERD +fails to load your password, you can use the `--explain` flag to see why. + +- `passname`: The name of the password to use (Required) +- `username_prefixes`: A comma-separated list of username line prefixes + (Default: `login,username,user`) +- `password_prefixes`: A comma-separated list of password line prefixes + (Default: `password,pass,secret`) + +[pass]: https://www.passwordstore.org/ "Pass: The Standard Unix Password Manager" +[browserpass]: https://github.com/browserpass/browserpass-extension#organizing-password-store "Organizing password store" +[passff]: https://github.com/passff/passff#multi-line-format "Multi-line format" + +### The `tfa` authenticator + +This authenticator prompts the user on the console for a two-factor +authentication token. The token is provided as password and it is not cached. +This authenticator does not support usernames. + +## Transformation rules + +Transformation rules are rules for renaming and excluding files and directories. +They are specified line-by-line in a crawler's `transform` option. When a +crawler needs to apply a rule to a path, it goes through this list top-to-bottom +and applies the first matching rule. + +To see this process in action, you can use the `--debug-transforms` flag or +the `--explain` flag.
+ +Each rule has the format `SOURCE ARROW TARGET` (e. g. `foo/bar --> foo/baz`). +The arrow specifies how the source and target are interpreted. The different +kinds of arrows are documented below. + +`SOURCE` and `TARGET` are either a bunch of characters without spaces (e. g. +`foo/bar`) or string literals (e. g, `"foo/b a r"`). The former syntax has no +concept of escaping characters, so the backslash is just another character. The +string literals however support Python's escape syntax (e. g. +`"foo\\bar\tbaz"`). This also means that in string literals, backslashes must be +escaped. + +`TARGET` can additionally be a single exclamation mark `!` (*not* `"!"`). When a +rule with a `!` as target matches a path, the corresponding file or directory is +ignored by the crawler instead of renamed. + +`TARGET` can also be omitted entirely. When a rule without target matches a +path, the path is returned unmodified. This is useful to prevent rules further +down from matching instead. + +Each arrow's behaviour can be modified slightly by changing the arrow's head +from `>` to `>>`. When a rule with a `>>` arrow head matches a path, it doesn't +return immediately like a normal arrow. Instead, it replaces the current path +with its output and continues on to the next rule. In effect, this means that +multiple rules can be applied sequentially. + +### The `-->` arrow + +The `-->` arrow is a basic renaming operation for files and directories. If a +path matches `SOURCE`, it is renamed to `TARGET`. + +Example: `foo/bar --> baz` +- Doesn't match `foo`, `a/foo/bar` or `foo/baz` +- Converts `foo/bar` into `baz` +- Converts `foo/bar/wargl` into `baz/wargl` + +Example: `foo/bar --> !` +- Doesn't match `foo`, `a/foo/bar` or `foo/baz` +- Ignores `foo/bar` and any of its children + +### The `-name->` arrow + +The `-name->` arrow lets you rename files and directories by their name, +regardless of where they appear in the file tree. 
Because of this, its `SOURCE` +must not contain multiple path segments, only a single name. This restriction +does not apply to its `TARGET`. + +Example: `foo -name-> bar/baz` +- Doesn't match `a/foobar/b` or `x/Foo/y/z` +- Converts `hello/foo` into `hello/bar/baz` +- Converts `foo/world` into `bar/baz/world` +- Converts `a/foo/b/c/foo` into `a/bar/baz/b/c/bar/baz` + +Example: `foo -name-> !` +- Doesn't match `a/foobar/b` or `x/Foo/y/z` +- Ignores any path containing a segment `foo` + +### The `-exact->` arrow + +The `-exact->` arrow requires the path to match `SOURCE` exactly. The examples +below show why this is useful. + +Example: `foo/bar -exact-> baz` +- Doesn't match `foo`, `a/foo/bar` or `foo/baz` +- Converts `foo/bar` into `baz` +- Doesn't match `foo/bar/wargl` + +Example: `foo/bar -exact-> !` +- Doesn't match `foo`, `a/foo/bar` or `foo/baz` +- Ignores only `foo/bar`, not its children + +### The `-re->` arrow + +The `-re->` arrow is like the `-->` arrow but with regular expressions. `SOURCE` +is a regular expression and `TARGET` an f-string based template. If a path +matches `SOURCE`, the output path is created using `TARGET` as template. +`SOURCE` is automatically anchored. + +`TARGET` uses Python's [format string syntax][6]. The *n*-th capturing group can +be referred to as `{g}` (e.g. `{g3}`). `{g0}` refers to the original path. +If capturing group *n*'s contents are a valid integer, the integer value is +available as `{i}` (e.g. `{i3}`). If capturing group *n*'s contents are a +valid float, the float value is available as `{f}` (e.g. `{f3}`). Named capture +groups (e.g. `(?P)`) are available by their name (e.g. `{name}`). If a +capturing group is not present (e.g. when matching the string `cd` with the +regex `(ab)?cd`), the corresponding variables are not defined. + +Python's format string syntax has rich options for formatting its arguments. For +example, to left-pad the capturing group 3 with the digit `0` to width 5, you +can use `{i3:05}`. 
+ +PFERD even allows you to write entire expressions inside the curly braces, for +example `{g2.lower()}` or `{g3.replace(' ', '_')}`. + +Example: `f(oo+)/be?ar -re-> B{g1.upper()}H/fear` +- Doesn't match `a/foo/bar`, `foo/abc/bar`, `afoo/bar` or `foo/bars` +- Converts `foo/bar` into `BOOH/fear` +- Converts `fooooo/bear` into `BOOOOOH/fear` +- Converts `foo/bar/baz` into `BOOH/fear/baz` + +[6]: "Format String Syntax" + +### The `-name-re->` arrow + +The `-name-re>` arrow is like a combination of the `-name->` and `-re->` arrows. + +Example: `(.*)\.jpeg -name-re-> {g1}.jpg` +- Doesn't match `foo/bar.png`, `baz.JPEG` or `hello,jpeg` +- Converts `foo/bar.jpeg` into `foo/bar.jpg` +- Converts `foo.jpeg/bar/baz.jpeg` into `foo.jpg/bar/baz.jpg` + +Example: `\..+ -name-re-> !` +- Doesn't match `.`, `test`, `a.b` +- Ignores all files and directories starting with `.`. + +### The `-exact-re->` arrow + +The `-exact-re>` arrow is like a combination of the `-exact->` and `-re->` +arrows. + +Example: `f(oo+)/be?ar -exactre-> B{g1.upper()}H/fear` +- Doesn't match `a/foo/bar`, `foo/abc/bar`, `afoo/bar` or `foo/bars` +- Converts `foo/bar` into `BOOH/fear` +- Converts `fooooo/bear` into `BOOOOOH/fear` +- Doesn't match `foo/bar/baz` + +### Example: Tutorials + +You have an ILIAS course with lots of tutorials, but are only interested in a +single one. + +``` +tutorials/ + |- tut_01/ + |- tut_02/ + |- tut_03/ + ... +``` + +You can use a mix of normal and exact arrows to get rid of the other ones and +move the `tutorials/tut_02/` folder to `my_tut/`: + +``` +tutorials/tut_02 --> my_tut +tutorials -exact-> +tutorials --> ! +``` + +The second rule is required for many crawlers since they use the rules to decide +which directories to crawl. If it was missing when the crawler looks at +`tutorials/`, the third rule would match. This means the crawler would not crawl +the `tutorials/` directory and thus not discover that `tutorials/tut02/` exists. 
+ +Since the second rule is only relevant for crawling, the `TARGET` is left out. + +### Example: Lecture slides + +You have a course with slides like `Lecture 3: Linear functions.PDF` and you +would like to rename them to `03_linear_functions.pdf`. + +``` +Lectures/ + |- Lecture 1: Introduction.PDF + |- Lecture 2: Vectors and matrices.PDF + |- Lecture 3: Linear functions.PDF + ... +``` + +To do this, you can use the most powerful of arrows: The regex arrow. + +``` +"Lectures/Lecture (\\d+): (.*)\\.PDF" -re-> "Lectures/{i1:02}_{g2.lower().replace(' ', '_')}.pdf" +``` + +Note the escaped backslashes on the `SOURCE` side. + +### Example: Crawl a Python project + +You are crawling a Python project and want to ignore all hidden files (files +whose name starts with a `.`), all `__pycache__` directories and all markdown +files (for some weird reason). + +``` +.gitignore +.mypy_cache/ +.venv/ +CONFIG.md +PFERD/ + |- __init__.py + |- __main__.py + |- __pycache__/ + |- authenticator.py + |- config.py + ... +README.md +... +``` + +For this task, the name arrows can be used. + +``` +\..* -name-re-> ! +__pycache__ -name-> ! +.*\.md -name-re-> ! +``` + +### Example: Clean up names + +You want to convert all paths into lowercase and replace spaces with underscores +before applying any rules. This can be achieved using the `>>` arrow heads. + +``` +(.*) -re->> "{g1.lower().replace(' ', '_')}" + + +``` diff --git a/DEV.md b/DEV.md new file mode 100644 index 0000000..8cc42c2 --- /dev/null +++ b/DEV.md @@ -0,0 +1,84 @@ +# PFERD Development Guide + +PFERD is packaged following the [Python Packaging User Guide][ppug] (in +particular [this][ppug-1] and [this][ppug-2] guide). + +[ppug]: "Python Packaging User Guide" +[ppug-1]: "Packaging Python Projects" +[ppug-2]: "Packaging and distributing projects" + +## Setting up a dev environment + +The use of [venv][venv] and [uv][uv] is recommended. 
To initially set up a
+development environment, run these commands in the same directory as this file:
+
+```
+$ uv sync
+$ . .venv/bin/activate
+```
+
+This installs all required dependencies and tools. It also installs PFERD as
+*editable*, which means that you can just run `pferd` as if it was installed
+normally. Since PFERD was installed with `--editable`, there is no need to
+re-run `uv sync` when the source code is changed.
+
+For more details, see [this part of the Python Tutorial][venv-tut] and
+[this section on "development mode"][ppug-dev].
+
+[venv]: https://docs.python.org/3/library/venv.html "venv - Creation of virtual environments"
+[venv-tut]: https://docs.python.org/3/tutorial/venv.html "12. Virtual Environments and Packages"
+[uv]: https://docs.astral.sh/uv/ "uv - An extremely fast Python package and project manager"
+
+## Checking and formatting the code
+
+To run a set of checks against the code, run `./scripts/check` in the repo's
+root directory. This script will run a few tools installed by `./scripts/setup`
+against the entire project.
+
+To format the code, run `./scripts/format` in the repo's root directory.
+
+Before committing changes, please make sure the checks return no warnings and
+the code is formatted.
+
+## Contributing
+
+When submitting a PR that adds, changes or modifies a feature, please ensure
+that the corresponding documentation is updated as well. Also, please ensure
+that `./scripts/check` returns no warnings and the code has been run through
+`./scripts/format`.
+
+In your first PR, please add your name to the `LICENSE` file.
+
+## Releasing a new version
+
+This section describes the steps required to release a new version of PFERD.
+Usually, they don't need to be performed manually and `scripts/bump-version` can be
+used instead.
+
+1. Update the version number in `PFERD/version.py`
+2. Update `CHANGELOG.md`
+3. Commit changes to `master` with message `Bump version to <version>` (e. g. `Bump version to 3.2.5`)
+4. Create annotated tag named `v<version>` (e. g. 
`v3.2.5`) + - Copy changes from changelog + - Remove `#` symbols (which git would interpret as comments) + - As the first line, add `Version - ` (e. g. `Version 3.2.5 - 2021-05-24`) + - Leave the second line empty +5. Fast-forward `latest` to `master` +6. Push `master`, `latest` and the new tag + +Example tag annotation: +``` +Version 3.2.5 - 2021-05-24 + +Added +- Support for concurrent downloads +- Support for proper config files +- This changelog + +Changed +- Rewrote almost everything +- Redesigned CLI + +Removed +- Backwards compatibility with 2.x +``` diff --git a/LICENSE b/LICENSE index 2e3fa8c..ccccbe3 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,6 @@ -Copyright 2019-2020 Garmelon, I-Al-Istannen, danstooamerican, pavelzw +Copyright 2019-2024 Garmelon, I-Al-Istannen, danstooamerican, pavelzw, + TheChristophe, Scriptim, thelukasprobst, Toorero, + Mr-Pine, p-fruck, PinieP Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in @@ -15,4 +17,4 @@ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/PFERD/__init__.py b/PFERD/__init__.py index 059f585..e69de29 100644 --- a/PFERD/__init__.py +++ b/PFERD/__init__.py @@ -1,8 +0,0 @@ -# pylint: disable=invalid-name - -""" -This module exports only what you need for a basic configuration. If you want a -more complex configuration, you need to import the other submodules manually. 
-""" - -from .pferd import Pferd diff --git a/PFERD/__main__.py b/PFERD/__main__.py new file mode 100644 index 0000000..2de9dbc --- /dev/null +++ b/PFERD/__main__.py @@ -0,0 +1,170 @@ +import argparse +import asyncio +import configparser +import os +import sys +from pathlib import Path + +from .auth import AuthLoadError +from .cli import PARSER, ParserLoadError, load_default_section +from .config import Config, ConfigDumpError, ConfigLoadError, ConfigOptionError +from .logging import log +from .pferd import Pferd, PferdLoadError +from .transformer import RuleParseError + + +def load_config_parser(args: argparse.Namespace) -> configparser.ConfigParser: + log.explain_topic("Loading config") + parser = configparser.ConfigParser(interpolation=None) + + if args.command is None: + log.explain("No CLI command specified, loading config from file") + Config.load_parser(parser, path=args.config) + else: + log.explain("CLI command specified, loading config from its arguments") + if args.command: + args.command(args, parser) + + load_default_section(args, parser) + + return parser + + +def load_config(args: argparse.Namespace) -> Config: + try: + return Config(load_config_parser(args)) + except ConfigLoadError as e: + log.error(str(e)) + log.error_contd(e.reason) + sys.exit(1) + except ParserLoadError as e: + log.error(str(e)) + sys.exit(1) + + +def configure_logging_from_args(args: argparse.Namespace) -> None: + if args.explain is not None: + log.output_explain = args.explain + if args.status is not None: + log.output_status = args.status + if args.show_not_deleted is not None: + log.output_not_deleted = args.show_not_deleted + if args.report is not None: + log.output_report = args.report + + # We want to prevent any unnecessary output if we're printing the config to + # stdout, otherwise it would not be a valid config file. 
+ if args.dump_config_to == "-": + log.output_explain = False + log.output_status = False + log.output_report = False + + +def configure_logging_from_config(args: argparse.Namespace, config: Config) -> None: + # In configure_logging_from_args(), all normal logging is already disabled + # whenever we dump the config. We don't want to override that decision with + # values from the config file. + if args.dump_config_to == "-": + return + + try: + if args.explain is None: + log.output_explain = config.default_section.explain() + if args.status is None: + log.output_status = config.default_section.status() + if args.report is None: + log.output_report = config.default_section.report() + if args.show_not_deleted is None: + log.output_not_deleted = config.default_section.show_not_deleted() + except ConfigOptionError as e: + log.error(str(e)) + sys.exit(1) + + +def dump_config(args: argparse.Namespace, config: Config) -> None: + log.explain_topic("Dumping config") + + if args.dump_config and args.dump_config_to is not None: + log.error("--dump-config and --dump-config-to can't be specified at the same time") + sys.exit(1) + + try: + if args.dump_config: + config.dump() + elif args.dump_config_to == "-": + config.dump_to_stdout() + else: + config.dump(Path(args.dump_config_to)) + except ConfigDumpError as e: + log.error(str(e)) + log.error_contd(e.reason) + sys.exit(1) + + +def main() -> None: + args = PARSER.parse_args() + + # Configuring logging happens in two stages because CLI args have + # precedence over config file options and loading the config already + # produces some kinds of log messages (usually only explain()-s). + configure_logging_from_args(args) + + config = load_config(args) + + # Now, after loading the config file, we can apply its logging settings in + # all places that were not already covered by CLI args. 
+ configure_logging_from_config(args, config) + + if args.dump_config or args.dump_config_to is not None: + dump_config(args, config) + sys.exit() + + try: + pferd = Pferd(config, args.crawler, args.skip) + except PferdLoadError as e: + log.unlock() + log.error(str(e)) + sys.exit(1) + + try: + if os.name == "nt": + # A "workaround" for the windows event loop somehow crashing after + # asyncio.run() completes. See: + # https://bugs.python.org/issue39232 + # https://github.com/encode/httpx/issues/914#issuecomment-780023632 + # TODO Fix this properly + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + loop.run_until_complete(pferd.run(args.debug_transforms)) + loop.run_until_complete(asyncio.sleep(1)) + loop.close() + else: + asyncio.run(pferd.run(args.debug_transforms)) + except (ConfigOptionError, AuthLoadError) as e: + log.unlock() + log.error(str(e)) + sys.exit(1) + except RuleParseError as e: + log.unlock() + e.pretty_print() + sys.exit(1) + except KeyboardInterrupt: + log.unlock() + log.explain_topic("Interrupted, exiting immediately") + log.explain("Open files and connections are left for the OS to clean up") + pferd.print_report() + # TODO Clean up tmp files + # And when those files *do* actually get cleaned up properly, + # reconsider if this should really exit with 1 + sys.exit(1) + except Exception: + log.unlock() + log.unexpected_exception() + pferd.print_report() + sys.exit(1) + else: + pferd.print_report() + + +if __name__ == "__main__": + main() diff --git a/PFERD/auth/__init__.py b/PFERD/auth/__init__.py new file mode 100644 index 0000000..7295c7a --- /dev/null +++ b/PFERD/auth/__init__.py @@ -0,0 +1,27 @@ +from collections.abc import Callable +from configparser import SectionProxy + +from ..config import Config +from .authenticator import Authenticator, AuthError, AuthLoadError, AuthSection # noqa: F401 +from .credential_file import CredentialFileAuthenticator, CredentialFileAuthSection +from .keyring import KeyringAuthenticator, 
KeyringAuthSection +from .pass_ import PassAuthenticator, PassAuthSection +from .simple import SimpleAuthenticator, SimpleAuthSection +from .tfa import TfaAuthenticator + +AuthConstructor = Callable[ + [ + str, # Name (without the "auth:" prefix) + SectionProxy, # Authenticator's section of global config + Config, # Global config + ], + Authenticator, +] + +AUTHENTICATORS: dict[str, AuthConstructor] = { + "credential-file": lambda n, s, c: CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c), + "keyring": lambda n, s, c: KeyringAuthenticator(n, KeyringAuthSection(s)), + "pass": lambda n, s, c: PassAuthenticator(n, PassAuthSection(s)), + "simple": lambda n, s, c: SimpleAuthenticator(n, SimpleAuthSection(s)), + "tfa": lambda n, s, c: TfaAuthenticator(n), +} diff --git a/PFERD/auth/authenticator.py b/PFERD/auth/authenticator.py new file mode 100644 index 0000000..417b7ba --- /dev/null +++ b/PFERD/auth/authenticator.py @@ -0,0 +1,79 @@ +from abc import ABC, abstractmethod + +from ..config import Section + + +class AuthLoadError(Exception): + pass + + +class AuthError(Exception): + pass + + +class AuthSection(Section): + def type(self) -> str: + value = self.s.get("type") + if value is None: + self.missing_value("type") + return value + + +class Authenticator(ABC): + def __init__(self, name: str) -> None: + """ + Initialize an authenticator from its name and its section in the config + file. + + If you are writing your own constructor for your own authenticator, + make sure to call this constructor first (via super().__init__). + + May throw an AuthLoadError. 
+ """ + + self.name = name + + @abstractmethod + async def credentials(self) -> tuple[str, str]: + pass + + async def username(self) -> str: + username, _ = await self.credentials() + return username + + async def password(self) -> str: + _, password = await self.credentials() + return password + + def invalidate_credentials(self) -> None: + """ + Tell the authenticator that some or all of its credentials are invalid. + + Authenticators should overwrite this function if they have a way to + deal with this issue that is likely to result in valid credentials + (e. g. prompting the user). + """ + + raise AuthError("Invalid credentials") + + def invalidate_username(self) -> None: + """ + Tell the authenticator that specifically its username is invalid. + + Authenticators should overwrite this function if they have a way to + deal with this issue that is likely to result in valid credentials + (e. g. prompting the user). + """ + + raise AuthError("Invalid username") + + def invalidate_password(self) -> None: + """ + Tell the authenticator that specifically its password is invalid. + + Authenticators should overwrite this function if they have a way to + deal with this issue that is likely to result in valid credentials + (e. g. prompting the user). 
+ """ + + raise AuthError("Invalid password") diff --git a/PFERD/auth/credential_file.py b/PFERD/auth/credential_file.py new file mode 100644 index 0000000..cb7834c --- /dev/null +++ b/PFERD/auth/credential_file.py @@ -0,0 +1,47 @@ +from pathlib import Path + +from ..config import Config +from ..utils import fmt_real_path +from .authenticator import Authenticator, AuthLoadError, AuthSection + + +class CredentialFileAuthSection(AuthSection): + def path(self) -> Path: + value = self.s.get("path") + if value is None: + self.missing_value("path") + return Path(value) + + +class CredentialFileAuthenticator(Authenticator): + def __init__(self, name: str, section: CredentialFileAuthSection, config: Config) -> None: + super().__init__(name) + + path = config.default_section.working_dir() / section.path() + try: + with open(path, encoding="utf-8") as f: + lines = list(f) + except UnicodeDecodeError: + raise AuthLoadError( + f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8" + ) from None + except OSError as e: + raise AuthLoadError(f"No credential file at {fmt_real_path(path)}") from e + + if len(lines) != 2: + raise AuthLoadError("Credential file must be two lines long") + [uline, pline] = lines + uline = uline[:-1] # Remove trailing newline + if pline.endswith("\n"): + pline = pline[:-1] + + if not uline.startswith("username="): + raise AuthLoadError("First line must start with 'username='") + if not pline.startswith("password="): + raise AuthLoadError("Second line must start with 'password='") + + self._username = uline[9:] + self._password = pline[9:] + + async def credentials(self) -> tuple[str, str]: + return self._username, self._password diff --git a/PFERD/auth/keyring.py b/PFERD/auth/keyring.py new file mode 100644 index 0000000..414640a --- /dev/null +++ b/PFERD/auth/keyring.py @@ -0,0 +1,64 @@ +from typing import Optional + +import keyring + +from ..logging import log +from ..utils import agetpass, ainput +from ..version import NAME +from 
.authenticator import Authenticator, AuthError, AuthSection + + +class KeyringAuthSection(AuthSection): + def username(self) -> Optional[str]: + return self.s.get("username") + + def keyring_name(self) -> str: + return self.s.get("keyring_name", fallback=NAME) + + +class KeyringAuthenticator(Authenticator): + def __init__(self, name: str, section: KeyringAuthSection) -> None: + super().__init__(name) + + self._username = section.username() + self._password: Optional[str] = None + self._keyring_name = section.keyring_name() + + self._password_invalidated = False + self._username_fixed = section.username() is not None + + async def credentials(self) -> tuple[str, str]: + # Request the username + if self._username is None: + async with log.exclusive_output(): + self._username = await ainput("Username: ") + + # First try looking it up in the keyring. + # Do not look it up if it was invalidated - we want to re-prompt in this case + if self._password is None and not self._password_invalidated: + self._password = keyring.get_password(self._keyring_name, self._username) + + # If that fails it wasn't saved in the keyring - we need to + # read it from the user and store it + if self._password is None: + async with log.exclusive_output(): + self._password = await agetpass("Password: ") + keyring.set_password(self._keyring_name, self._username, self._password) + + self._password_invalidated = False + return self._username, self._password + + def invalidate_credentials(self) -> None: + if not self._username_fixed: + self.invalidate_username() + self.invalidate_password() + + def invalidate_username(self) -> None: + if self._username_fixed: + raise AuthError("Configured username is invalid") + else: + self._username = None + + def invalidate_password(self) -> None: + self._password = None + self._password_invalidated = True diff --git a/PFERD/auth/pass_.py b/PFERD/auth/pass_.py new file mode 100644 index 0000000..c5d9b24 --- /dev/null +++ b/PFERD/auth/pass_.py @@ -0,0 +1,97 @@ 
+import re +import subprocess + +from ..logging import log +from .authenticator import Authenticator, AuthError, AuthSection + + +class PassAuthSection(AuthSection): + def passname(self) -> str: + if (value := self.s.get("passname")) is None: + self.missing_value("passname") + return value + + def username_prefixes(self) -> list[str]: + value = self.s.get("username_prefixes", "login,username,user") + return [prefix.lower() for prefix in value.split(",")] + + def password_prefixes(self) -> list[str]: + value = self.s.get("password_prefixes", "password,pass,secret") + return [prefix.lower() for prefix in value.split(",")] + + +class PassAuthenticator(Authenticator): + PREFIXED_LINE_RE = r"([a-zA-Z]+):\s?(.*)" # to be used with fullmatch + + def __init__(self, name: str, section: PassAuthSection) -> None: + super().__init__(name) + + self._passname = section.passname() + self._username_prefixes = section.username_prefixes() + self._password_prefixes = section.password_prefixes() + + async def credentials(self) -> tuple[str, str]: + log.explain_topic("Obtaining credentials from pass") + + try: + log.explain(f"Calling 'pass show {self._passname}'") + result = subprocess.check_output(["pass", "show", self._passname], text=True) + except subprocess.CalledProcessError as e: + raise AuthError(f"Failed to get password info from {self._passname}: {e}") from e + + prefixed = {} + unprefixed = [] + for line in result.strip().splitlines(): + if match := re.fullmatch(self.PREFIXED_LINE_RE, line): + prefix = match.group(1).lower() + value = match.group(2) + log.explain(f"Found prefixed line {line!r} with prefix {prefix!r}, value {value!r}") + if prefix in prefixed: + raise AuthError(f"Prefix {prefix} specified multiple times") + prefixed[prefix] = value + else: + log.explain(f"Found unprefixed line {line!r}") + unprefixed.append(line) + + username = None + for prefix in self._username_prefixes: + log.explain(f"Looking for username at prefix {prefix!r}") + if prefix in prefixed: + 
username = prefixed[prefix] + log.explain(f"Found username {username!r}") + break + + password = None + for prefix in self._password_prefixes: + log.explain(f"Looking for password at prefix {prefix!r}") + if prefix in prefixed: + password = prefixed[prefix] + log.explain(f"Found password {password!r}") + break + + if password is None and username is None: + log.explain("No username and password found so far") + log.explain("Using first unprefixed line as password") + log.explain("Using second unprefixed line as username") + elif password is None: + log.explain("No password found so far") + log.explain("Using first unprefixed line as password") + elif username is None: + log.explain("No username found so far") + log.explain("Using first unprefixed line as username") + + if password is None: + if not unprefixed: + log.explain("Not enough unprefixed lines left") + raise AuthError("Password could not be determined") + password = unprefixed.pop(0) + log.explain(f"Found password {password!r}") + + if username is None: + if not unprefixed: + log.explain("Not enough unprefixed lines left") + raise AuthError("Username could not be determined") + username = unprefixed.pop(0) + log.explain(f"Found username {username!r}") + + return username, password diff --git a/PFERD/auth/simple.py b/PFERD/auth/simple.py new file mode 100644 index 0000000..dea4b67 --- /dev/null +++ b/PFERD/auth/simple.py @@ -0,0 +1,62 @@ +from typing import Optional + +from ..logging import log +from ..utils import agetpass, ainput +from .authenticator import Authenticator, AuthError, AuthSection + + +class SimpleAuthSection(AuthSection): + def username(self) -> Optional[str]: + return self.s.get("username") + + def password(self) -> Optional[str]: + return self.s.get("password") + + +class SimpleAuthenticator(Authenticator): + def __init__(self, name: str, section: SimpleAuthSection) -> None: + super().__init__(name) + + self._username = section.username() + self._password = section.password() + + 
self._username_fixed = self._username is not None  # was `self.username` — a bound method, always truthy
+        self._password_fixed = self._password is not None  # was `self.password` — same bug
+
+    async def credentials(self) -> tuple[str, str]:
+        if self._username is not None and self._password is not None:
+            return self._username, self._password
+
+        async with log.exclusive_output():
+            if self._username is None:
+                self._username = await ainput("Username: ")
+            else:
+                print(f"Username: {self._username}")
+
+            if self._password is None:
+                self._password = await agetpass("Password: ")
+
+            # Intentionally returned inside the context manager so we know
+            # they're both not None
+            return self._username, self._password
+
+    def invalidate_credentials(self) -> None:
+        if self._username_fixed and self._password_fixed:
+            raise AuthError("Configured credentials are invalid")
+
+        if not self._username_fixed:
+            self._username = None
+        if not self._password_fixed:
+            self._password = None
+
+    def invalidate_username(self) -> None:
+        if self._username_fixed:
+            raise AuthError("Configured username is invalid")
+        else:
+            self._username = None
+
+    def invalidate_password(self) -> None:
+        if self._password_fixed:
+            raise AuthError("Configured password is invalid")
+        else:
+            self._password = None
diff --git a/PFERD/auth/tfa.py b/PFERD/auth/tfa.py
new file mode 100644
index 0000000..6ae48fe
--- /dev/null
+++ b/PFERD/auth/tfa.py
@@ -0,0 +1,28 @@
+from ..logging import log
+from ..utils import ainput
+from .authenticator import Authenticator, AuthError
+
+
+class TfaAuthenticator(Authenticator):
+    def __init__(self, name: str) -> None:
+        super().__init__(name)
+
+    async def username(self) -> str:
+        raise AuthError("TFA authenticator does not support usernames")
+
+    async def password(self) -> str:
+        async with log.exclusive_output():
+            code = await ainput("TFA code: ")
+        return code
+
+    async def credentials(self) -> tuple[str, str]:
+        raise AuthError("TFA authenticator does not support usernames")
+
+    def invalidate_username(self) -> None:
+        raise AuthError("TFA authenticator 
does not support usernames") + + def invalidate_password(self) -> None: + pass + + def invalidate_credentials(self) -> None: + pass diff --git a/PFERD/authenticators.py b/PFERD/authenticators.py deleted file mode 100644 index b8cfe28..0000000 --- a/PFERD/authenticators.py +++ /dev/null @@ -1,125 +0,0 @@ -""" -General authenticators useful in many situations -""" - -import getpass -from typing import Optional, Tuple - - -class TfaAuthenticator: - # pylint: disable=too-few-public-methods - """ - An authenticator for a TFA token. Always prompts the user, as the token can not be cached. - """ - - def __init__(self, reason: str): - """ - Create a new tfa authenticator. - - Arguments: - reason {str} -- the reason for obtaining the credentials - """ - self._reason = reason - - def get_token(self) -> str: - # pylint: disable=no-self-use - """ - Prompts the user for the token and returns it. - """ - print(f"Enter credentials ({self._reason})") - return getpass.getpass("TFA Token: ") - - -class UserPassAuthenticator: - """ - An authenticator for username-password combinations that prompts the user - for missing information. - """ - - def __init__( - self, - reason: str, - username: Optional[str] = None, - password: Optional[str] = None, - ) -> None: - """ - reason - what the credentials are used for - username - the username (if already known) - password - the password (if already known) - """ - - self._reason = reason - - self._given_username = username - self._given_password = password - - self._username = username - self._password = password - - def get_credentials(self) -> Tuple[str, str]: - """ - Returns a tuple (username, password). Prompts user for username or - password when necessary. 
- """ - - if self._username is None and self._given_username is not None: - self._username = self._given_username - - if self._password is None and self._given_password is not None: - self._password = self._given_password - - if self._username is None or self._password is None: - print(f"Enter credentials ({self._reason})") - - username: str - if self._username is None: - username = input("Username: ") - self._username = username - else: - username = self._username - - password: str - if self._password is None: - password = getpass.getpass(prompt="Password: ") - self._password = password - else: - password = self._password - - return (username, password) - - @property - def username(self) -> str: - """ - The username. Accessing this property may cause the authenticator to - prompt the user. - """ - - (username, _) = self.get_credentials() - return username - - @property - def password(self) -> str: - """ - The password. Accessing this property may cause the authenticator to - prompt the user. - """ - - (_, password) = self.get_credentials() - return password - - def invalidate_credentials(self) -> None: - """ - Marks the credentials as invalid. If only a username was supplied in - the constructor, assumes that the username is valid and only the - password is invalid. If only a password was supplied in the - constructor, assumes that the password is valid and only the username - is invalid. Otherwise, assumes that username and password are both - invalid. 
- """ - - self._username = None - self._password = None - - if self._given_username is not None and self._given_password is not None: - self._given_username = None - self._given_password = None diff --git a/PFERD/cli/__init__.py b/PFERD/cli/__init__.py new file mode 100644 index 0000000..c89f6f4 --- /dev/null +++ b/PFERD/cli/__init__.py @@ -0,0 +1,14 @@ +# isort: skip_file + +# The order of imports matters because each command module registers itself +# with the parser from ".parser" and the import order affects the order in +# which they appear in the help. Because of this, isort is disabled for this +# file. Also, since we're reexporting or just using the side effect of +# importing itself, we get a few linting warnings, which we're disabling as +# well. + +from . import command_local # noqa: F401 imported but unused +from . import command_ilias_web # noqa: F401 imported but unused +from . import command_kit_ilias_web # noqa: F401 imported but unused +from . import command_kit_ipd # noqa: F401 imported but unused +from .parser import PARSER, ParserLoadError, load_default_section # noqa: F401 imported but unused diff --git a/PFERD/cli/command_ilias_web.py b/PFERD/cli/command_ilias_web.py new file mode 100644 index 0000000..b68e48f --- /dev/null +++ b/PFERD/cli/command_ilias_web.py @@ -0,0 +1,53 @@ +import argparse +import configparser + +from ..logging import log +from .common_ilias_args import configure_common_group_args, load_common +from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler + +COMMAND_NAME = "ilias-web" + +SUBPARSER = SUBPARSERS.add_parser( + COMMAND_NAME, + parents=[CRAWLER_PARSER], +) + +GROUP = SUBPARSER.add_argument_group( + title=f"{COMMAND_NAME} crawler arguments", + description=f"arguments for the '{COMMAND_NAME}' crawler", +) + +GROUP.add_argument( + "--base-url", + type=str, + metavar="BASE_URL", + help="The base url of the ilias instance", +) + +GROUP.add_argument( + "--client-id", + type=str, + metavar="CLIENT_ID", + help="The 
client id of the ilias instance", +) + +configure_common_group_args(GROUP) + + +def load(args: argparse.Namespace, parser: configparser.ConfigParser) -> None: + log.explain(f"Creating config for command '{COMMAND_NAME}'") + + parser["crawl:ilias"] = {} + section = parser["crawl:ilias"] + load_crawler(args, section) + + section["type"] = COMMAND_NAME + if args.base_url is not None: + section["base_url"] = args.base_url + if args.client_id is not None: + section["client_id"] = args.client_id + + load_common(section, args, parser) + + +SUBPARSER.set_defaults(command=load) diff --git a/PFERD/cli/command_kit_ilias_web.py b/PFERD/cli/command_kit_ilias_web.py new file mode 100644 index 0000000..b3b45c5 --- /dev/null +++ b/PFERD/cli/command_kit_ilias_web.py @@ -0,0 +1,37 @@ +import argparse +import configparser + +from ..logging import log +from .common_ilias_args import configure_common_group_args, load_common +from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler + +COMMAND_NAME = "kit-ilias-web" + +SUBPARSER = SUBPARSERS.add_parser( + COMMAND_NAME, + parents=[CRAWLER_PARSER], +) + +GROUP = SUBPARSER.add_argument_group( + title=f"{COMMAND_NAME} crawler arguments", + description=f"arguments for the '{COMMAND_NAME}' crawler", +) + +configure_common_group_args(GROUP) + + +def load( + args: argparse.Namespace, + parser: configparser.ConfigParser, +) -> None: + log.explain(f"Creating config for command '{COMMAND_NAME}'") + + parser["crawl:ilias"] = {} + section = parser["crawl:ilias"] + load_crawler(args, section) + + section["type"] = COMMAND_NAME + load_common(section, args, parser) + + +SUBPARSER.set_defaults(command=load) diff --git a/PFERD/cli/command_kit_ipd.py b/PFERD/cli/command_kit_ipd.py new file mode 100644 index 0000000..a80af03 --- /dev/null +++ b/PFERD/cli/command_kit_ipd.py @@ -0,0 +1,65 @@ +import argparse +import configparser +from pathlib import Path + +from ..logging import log +from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler + 
+SUBPARSER = SUBPARSERS.add_parser( + "kit-ipd", + parents=[CRAWLER_PARSER], +) + +GROUP = SUBPARSER.add_argument_group( + title="kit ipd crawler arguments", + description="arguments for the 'kit-ipd' crawler", +) +GROUP.add_argument( + "--link-regex", + type=str, + metavar="REGEX", + help="href-matching regex to identify downloadable files", +) +GROUP.add_argument( + "--basic-auth", + action="store_true", + help="enable basic authentication", +) +GROUP.add_argument( + "target", + type=str, + metavar="TARGET", + help="url to crawl", +) +GROUP.add_argument( + "output", + type=Path, + metavar="OUTPUT", + help="output directory", +) + + +def load( + args: argparse.Namespace, + parser: configparser.ConfigParser, +) -> None: + log.explain("Creating config for command 'kit-ipd'") + + parser["crawl:kit-ipd"] = {} + section = parser["crawl:kit-ipd"] + load_crawler(args, section) + + section["type"] = "kit-ipd" + section["target"] = str(args.target) + section["output_dir"] = str(args.output) + if args.link_regex: + section["link_regex"] = str(args.link_regex) + + if args.basic_auth: + section["auth"] = "auth:kit-ipd" + parser["auth:kit-ipd"] = {} + auth_section = parser["auth:kit-ipd"] + auth_section["type"] = "simple" + + +SUBPARSER.set_defaults(command=load) diff --git a/PFERD/cli/command_local.py b/PFERD/cli/command_local.py new file mode 100644 index 0000000..6016afa --- /dev/null +++ b/PFERD/cli/command_local.py @@ -0,0 +1,70 @@ +import argparse +import configparser +from pathlib import Path + +from ..logging import log +from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler + +SUBPARSER = SUBPARSERS.add_parser( + "local", + parents=[CRAWLER_PARSER], +) + +GROUP = SUBPARSER.add_argument_group( + title="local crawler arguments", + description="arguments for the 'local' crawler", +) +GROUP.add_argument( + "target", + type=Path, + metavar="TARGET", + help="directory to crawl", +) +GROUP.add_argument( + "output", + type=Path, + metavar="OUTPUT", + help="output 
directory", +) +GROUP.add_argument( + "--crawl-delay", + type=float, + metavar="SECONDS", + help="artificial delay to simulate for crawl requests", +) +GROUP.add_argument( + "--download-delay", + type=float, + metavar="SECONDS", + help="artificial delay to simulate for download requests", +) +GROUP.add_argument( + "--download-speed", + type=int, + metavar="BYTES_PER_SECOND", + help="download speed to simulate", +) + + +def load( + args: argparse.Namespace, + parser: configparser.ConfigParser, +) -> None: + log.explain("Creating config for command 'local'") + + parser["crawl:local"] = {} + section = parser["crawl:local"] + load_crawler(args, section) + + section["type"] = "local" + section["target"] = str(args.target) + section["output_dir"] = str(args.output) + if args.crawl_delay is not None: + section["crawl_delay"] = str(args.crawl_delay) + if args.download_delay is not None: + section["download_delay"] = str(args.download_delay) + if args.download_speed is not None: + section["download_speed"] = str(args.download_speed) + + +SUBPARSER.set_defaults(command=load) diff --git a/PFERD/cli/common_ilias_args.py b/PFERD/cli/common_ilias_args.py new file mode 100644 index 0000000..edad6da --- /dev/null +++ b/PFERD/cli/common_ilias_args.py @@ -0,0 +1,106 @@ +import argparse +import configparser +from pathlib import Path + +from ..crawl.ilias.file_templates import Links +from .parser import BooleanOptionalAction, ParserLoadError, show_value_error + + +def configure_common_group_args(group: argparse._ArgumentGroup) -> None: + """These arguments are shared between the KIT and generic Ilias web command.""" + group.add_argument( + "target", + type=str, + metavar="TARGET", + help="course id, 'desktop', or ILIAS URL to crawl", + ) + group.add_argument( + "output", + type=Path, + metavar="OUTPUT", + help="output directory", + ) + group.add_argument( + "--username", + "-u", + type=str, + metavar="USERNAME", + help="user name for authentication", + ) + group.add_argument( + 
"--keyring", + action=BooleanOptionalAction, + help="use the system keyring to store and retrieve passwords", + ) + group.add_argument( + "--credential-file", + type=Path, + metavar="PATH", + help="read username and password from a credential file", + ) + group.add_argument( + "--links", + type=show_value_error(Links.from_string), + metavar="OPTION", + help="how to represent external links", + ) + group.add_argument( + "--link-redirect-delay", + type=int, + metavar="SECONDS", + help="time before 'fancy' links redirect to to their target (-1 to disable)", + ) + group.add_argument( + "--videos", + action=BooleanOptionalAction, + help="crawl and download videos", + ) + group.add_argument( + "--forums", + action=BooleanOptionalAction, + help="crawl and download forum posts", + ) + group.add_argument( + "--http-timeout", + "-t", + type=float, + metavar="SECONDS", + help="timeout for all HTTP requests", + ) + + +def load_common( + section: configparser.SectionProxy, + args: argparse.Namespace, + parser: configparser.ConfigParser, +) -> None: + """Load common config between generic and KIT ilias web command""" + section["target"] = str(args.target) + section["output_dir"] = str(args.output) + section["auth"] = "auth:ilias" + if args.links is not None: + section["links"] = str(args.links.value) + if args.link_redirect_delay is not None: + section["link_redirect_delay"] = str(args.link_redirect_delay) + if args.videos is not None: + section["videos"] = "yes" if args.videos else "no" + if args.forums is not None: + section["forums"] = "yes" if args.forums else "no" + if args.http_timeout is not None: + section["http_timeout"] = str(args.http_timeout) + + parser["auth:ilias"] = {} + auth_section = parser["auth:ilias"] + if args.credential_file is not None: + if args.username is not None: + raise ParserLoadError("--credential-file and --username can't be used together") + if args.keyring: + raise ParserLoadError("--credential-file and --keyring can't be used together") + 
auth_section["type"] = "credential-file" + auth_section["path"] = str(args.credential_file) + elif args.keyring: + auth_section["type"] = "keyring" + else: + auth_section["type"] = "simple" + if args.username is not None: + auth_section["username"] = args.username diff --git a/PFERD/cli/parser.py b/PFERD/cli/parser.py new file mode 100644 index 0000000..c9bec13 --- /dev/null +++ b/PFERD/cli/parser.py @@ -0,0 +1,253 @@ +import argparse +import configparser +from argparse import ArgumentTypeError +from collections.abc import Callable, Sequence +from pathlib import Path +from typing import Any, Optional + +from ..output_dir import OnConflict, Redownload +from ..version import NAME, VERSION + + +class ParserLoadError(Exception): + pass + + +# TODO Replace with argparse version when updating to 3.9? +class BooleanOptionalAction(argparse.Action): + def __init__( + self, + option_strings: list[str], + dest: Any, + default: Any = None, + type: Any = None, + choices: Any = None, + required: Any = False, + help: Any = None, + metavar: Any = None, + ): + if len(option_strings) != 1: + raise ValueError("There must be exactly one option string") + [self.name] = option_strings + if not self.name.startswith("--"): + raise ValueError(f"{self.name!r} doesn't start with '--'") + if self.name.startswith("--no-"): + raise ValueError(f"{self.name!r} starts with '--no-'") + + options = [self.name, "--no-" + self.name[2:]] + + super().__init__( + options, + dest, + nargs=0, + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[Any] | None, + option_string: Optional[str] = None, + ) -> None: + if option_string and option_string in self.option_strings: + value = not option_string.startswith("--no-") + setattr(namespace, self.dest, value) + + def format_usage(self) -> str: + return "--[no-]" + self.name[2:] + + +def 
show_value_error(inner: Callable[[str], Any]) -> Callable[[str], Any]: + """ + Some validation functions (like the from_string in our enums) raise a ValueError. + Argparse only pretty-prints ArgumentTypeErrors though, so we need to wrap our ValueErrors. + """ + + def wrapper(input: str) -> Any: + try: + return inner(input) + except ValueError as e: + raise ArgumentTypeError(e) from e + + return wrapper + + +CRAWLER_PARSER = argparse.ArgumentParser(add_help=False) +CRAWLER_PARSER_GROUP = CRAWLER_PARSER.add_argument_group( + title="general crawler arguments", + description="arguments common to all crawlers", +) +CRAWLER_PARSER_GROUP.add_argument( + "--redownload", + "-r", + type=show_value_error(Redownload.from_string), + metavar="OPTION", + help="when to download a file that's already present locally", +) +CRAWLER_PARSER_GROUP.add_argument( + "--on-conflict", + type=show_value_error(OnConflict.from_string), + metavar="OPTION", + help="what to do when local and remote files or directories differ", +) +CRAWLER_PARSER_GROUP.add_argument( + "--transform", + "-T", + action="append", + type=str, + metavar="RULE", + help="add a single transformation rule. 
Can be specified multiple times", +) +CRAWLER_PARSER_GROUP.add_argument( + "--tasks", + "-n", + type=int, + metavar="N", + help="maximum number of concurrent tasks (crawling, downloading)", +) +CRAWLER_PARSER_GROUP.add_argument( + "--downloads", + "-N", + type=int, + metavar="N", + help="maximum number of tasks that may download data at the same time", +) +CRAWLER_PARSER_GROUP.add_argument( + "--task-delay", + "-d", + type=float, + metavar="SECONDS", + help="time the crawler should wait between subsequent tasks", +) +CRAWLER_PARSER_GROUP.add_argument( + "--windows-paths", + action=BooleanOptionalAction, + help="whether to repair invalid paths on windows", +) + + +def load_crawler( + args: argparse.Namespace, + section: configparser.SectionProxy, +) -> None: + if args.redownload is not None: + section["redownload"] = args.redownload.value + if args.on_conflict is not None: + section["on_conflict"] = args.on_conflict.value + if args.transform is not None: + section["transform"] = "\n" + "\n".join(args.transform) + if args.tasks is not None: + section["tasks"] = str(args.tasks) + if args.downloads is not None: + section["downloads"] = str(args.downloads) + if args.task_delay is not None: + section["task_delay"] = str(args.task_delay) + if args.windows_paths is not None: + section["windows_paths"] = "yes" if args.windows_paths else "no" + + +PARSER = argparse.ArgumentParser() +PARSER.set_defaults(command=None) +PARSER.add_argument( + "--version", + action="version", + version=f"{NAME} {VERSION} (https://github.com/Garmelon/PFERD)", +) +PARSER.add_argument( + "--config", + "-c", + type=Path, + metavar="PATH", + help="custom config file", +) +PARSER.add_argument( + "--dump-config", + action="store_true", + help="dump current configuration to the default config path and exit", +) +PARSER.add_argument( + "--dump-config-to", + metavar="PATH", + help="dump current configuration to a file and exit. 
Use '-' as path to print to stdout instead", +) +PARSER.add_argument( + "--debug-transforms", + action="store_true", + help="apply transform rules to files of previous run", +) +PARSER.add_argument( + "--crawler", + "-C", + action="append", + type=str, + metavar="NAME", + help="only execute a single crawler. Can be specified multiple times to execute multiple crawlers", +) +PARSER.add_argument( + "--skip", + "-S", + action="append", + type=str, + metavar="NAME", + help="don't execute this particular crawler. Can be specified multiple times to skip multiple crawlers", +) +PARSER.add_argument( + "--working-dir", + type=Path, + metavar="PATH", + help="custom working directory", +) +PARSER.add_argument( + "--explain", + action=BooleanOptionalAction, + help="log and explain in detail what PFERD is doing", +) +PARSER.add_argument( + "--status", + action=BooleanOptionalAction, + help="print status updates while PFERD is crawling", +) +PARSER.add_argument( + "--report", + action=BooleanOptionalAction, + help="print a report of all local changes before exiting", +) +PARSER.add_argument( + "--share-cookies", + action=BooleanOptionalAction, + help="whether crawlers should share cookies where applicable", +) +PARSER.add_argument( + "--show-not-deleted", + action=BooleanOptionalAction, + help="print messages in status and report when PFERD did not delete a local only file", +) + + +def load_default_section( + args: argparse.Namespace, + parser: configparser.ConfigParser, +) -> None: + section = parser[parser.default_section] + + if args.working_dir is not None: + section["working_dir"] = str(args.working_dir) + if args.explain is not None: + section["explain"] = "yes" if args.explain else "no" + if args.status is not None: + section["status"] = "yes" if args.status else "no" + if args.report is not None: + section["report"] = "yes" if args.report else "no" + if args.share_cookies is not None: + section["share_cookies"] = "yes" if args.share_cookies else "no" + if 
args.show_not_deleted is not None: + section["show_not_deleted"] = "yes" if args.show_not_deleted else "no" + + +SUBPARSERS = PARSER.add_subparsers(title="crawlers") diff --git a/PFERD/config.py b/PFERD/config.py new file mode 100644 index 0000000..7da2889 --- /dev/null +++ b/PFERD/config.py @@ -0,0 +1,193 @@ +import asyncio +import os +import sys +from configparser import ConfigParser, SectionProxy +from pathlib import Path +from typing import Any, NoReturn, Optional + +from rich.markup import escape + +from .logging import log +from .utils import fmt_real_path, prompt_yes_no + + +class ConfigLoadError(Exception): + """ + Something went wrong while loading the config from a file. + """ + + def __init__(self, path: Path, reason: str): + super().__init__(f"Failed to load config from {fmt_real_path(path)}") + self.path = path + self.reason = reason + + +class ConfigOptionError(Exception): + """ + An option in the config file has an invalid or missing value. + """ + + def __init__(self, section: str, key: str, desc: str): + super().__init__(f"Section {section!r}, key {key!r}: {desc}") + self.section = section + self.key = key + self.desc = desc + + +class ConfigDumpError(Exception): + def __init__(self, path: Path, reason: str): + super().__init__(f"Failed to dump config to {fmt_real_path(path)}") + self.path = path + self.reason = reason + + +class Section: + """ + Base class for the crawler and auth section classes. 
+ """ + + def __init__(self, section: SectionProxy): + self.s = section + + def error(self, key: str, desc: str) -> NoReturn: + raise ConfigOptionError(self.s.name, key, desc) + + def invalid_value( + self, + key: str, + value: Any, + reason: Optional[str], + ) -> NoReturn: + if reason is None: + self.error(key, f"Invalid value {value!r}") + else: + self.error(key, f"Invalid value {value!r}: {reason}") + + def missing_value(self, key: str) -> NoReturn: + self.error(key, "Missing value") + + +class DefaultSection(Section): + def working_dir(self) -> Path: + # TODO Change to working dir instead of manually prepending it to paths + pathstr = self.s.get("working_dir", ".") + return Path(pathstr).expanduser() + + def explain(self) -> bool: + return self.s.getboolean("explain", fallback=False) + + def status(self) -> bool: + return self.s.getboolean("status", fallback=True) + + def report(self) -> bool: + return self.s.getboolean("report", fallback=True) + + def show_not_deleted(self) -> bool: + return self.s.getboolean("show_not_deleted", fallback=True) + + def share_cookies(self) -> bool: + return self.s.getboolean("share_cookies", fallback=True) + + +class Config: + @staticmethod + def _default_path() -> Path: + if os.name == "posix": + return Path("~/.config/PFERD/pferd.cfg").expanduser() + elif os.name == "nt": + return Path("~/AppData/Roaming/PFERD/pferd.cfg").expanduser() + else: + return Path("~/.pferd.cfg").expanduser() + + def __init__(self, parser: ConfigParser): + self._parser = parser + self._default_section = DefaultSection(parser[parser.default_section]) + + @property + def default_section(self) -> DefaultSection: + return self._default_section + + @staticmethod + def load_parser(parser: ConfigParser, path: Optional[Path] = None) -> None: + """ + May throw a ConfigLoadError. 
+ """ + + if path: + log.explain("Path specified on CLI") + else: + log.explain("Using default path") + path = Config._default_path() + log.explain(f"Loading {fmt_real_path(path)}") + + # Using config.read_file instead of config.read because config.read + # would just ignore a missing file and carry on. + try: + with open(path, encoding="utf-8") as f: + parser.read_file(f, source=str(path)) + except FileNotFoundError: + raise ConfigLoadError(path, "File does not exist") from None + except IsADirectoryError: + raise ConfigLoadError(path, "That's a directory, not a file") from None + except PermissionError: + raise ConfigLoadError(path, "Insufficient permissions") from None + except UnicodeDecodeError: + raise ConfigLoadError(path, "File is not encoded using UTF-8") from None + + def dump(self, path: Optional[Path] = None) -> None: + """ + May throw a ConfigDumpError. + """ + + if path: + log.explain("Using custom path") + else: + log.explain("Using default path") + path = self._default_path() + + log.explain(f"Dumping to {fmt_real_path(path)}") + log.print(f"[bold bright_cyan]Dumping[/] to {escape(fmt_real_path(path))}") + + try: + path.parent.mkdir(parents=True, exist_ok=True) + except PermissionError as e: + raise ConfigDumpError(path, "Could not create parent directory") from e + + try: + # Ensuring we don't accidentally overwrite any existing files by + # always asking before overwriting a file. 
+ try: + # x = open for exclusive creation, failing if the file already + # exists + with open(path, "x", encoding="utf-8") as f: + self._parser.write(f) + except FileExistsError: + print("That file already exists.") + if asyncio.run(prompt_yes_no("Overwrite it?", default=False)): + with open(path, "w", encoding="utf-8") as f: + self._parser.write(f) + else: + raise ConfigDumpError(path, "File already exists") from None + except IsADirectoryError: + raise ConfigDumpError(path, "That's a directory, not a file") from None + except PermissionError as e: + raise ConfigDumpError(path, "Insufficient permissions") from e + + def dump_to_stdout(self) -> None: + self._parser.write(sys.stdout) + + def crawl_sections(self) -> list[tuple[str, SectionProxy]]: + result = [] + for name, proxy in self._parser.items(): + if name.startswith("crawl:"): + result.append((name, proxy)) + + return result + + def auth_sections(self) -> list[tuple[str, SectionProxy]]: + result = [] + for name, proxy in self._parser.items(): + if name.startswith("auth:"): + result.append((name, proxy)) + + return result diff --git a/PFERD/cookie_jar.py b/PFERD/cookie_jar.py deleted file mode 100644 index e5b568f..0000000 --- a/PFERD/cookie_jar.py +++ /dev/null @@ -1,69 +0,0 @@ -"""A helper for requests cookies.""" - -import logging -from http.cookiejar import LoadError, LWPCookieJar -from pathlib import Path -from typing import Optional - -import requests - -LOGGER = logging.getLogger(__name__) - - -class CookieJar: - """A cookie jar that can be persisted.""" - - def __init__(self, cookie_file: Optional[Path] = None) -> None: - """Create a new cookie jar at the given path. - - If the path is None, the cookies will not be persisted. 
- """ - self._cookies: LWPCookieJar - if cookie_file is None: - self._cookies = LWPCookieJar() - else: - self._cookies = LWPCookieJar(str(cookie_file.resolve())) - - @property - def cookies(self) -> LWPCookieJar: - """Return the requests cookie jar.""" - return self._cookies - - def load_cookies(self) -> None: - """Load all cookies from the file given in the constructor.""" - if self._cookies.filename is None: - return - - try: - LOGGER.info("Loading old cookies from %s", self._cookies.filename) - self._cookies.load(ignore_discard=True) - except (FileNotFoundError, LoadError): - LOGGER.warning( - "No valid cookie file found at %s, continuing with no cookies", - self._cookies.filename - ) - - def save_cookies(self, reason: Optional[str] = None) -> None: - """Save the cookies in the file given in the constructor.""" - if self._cookies.filename is None: - return - - if reason is None: - LOGGER.info("Saving cookies") - else: - LOGGER.info("Saving cookies (%s)", reason) - - # TODO figure out why ignore_discard is set - # TODO possibly catch a few more exceptions - self._cookies.save(ignore_discard=True) - - def create_session(self) -> requests.Session: - """Create a new session using the cookie jar.""" - sess = requests.Session() - - # From the request docs: "All requests code should work out of the box - # with externally provided instances of CookieJar, e.g. LWPCookieJar - # and FileCookieJar." 
- sess.cookies = self.cookies # type: ignore - - return sess diff --git a/PFERD/crawl/__init__.py b/PFERD/crawl/__init__.py new file mode 100644 index 0000000..9ba6a37 --- /dev/null +++ b/PFERD/crawl/__init__.py @@ -0,0 +1,26 @@ +from collections.abc import Callable +from configparser import SectionProxy + +from ..auth import Authenticator +from ..config import Config +from .crawler import Crawler, CrawlError, CrawlerSection # noqa: F401 +from .ilias import IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler, KitIliasWebCrawlerSection +from .kit_ipd_crawler import KitIpdCrawler, KitIpdCrawlerSection +from .local_crawler import LocalCrawler, LocalCrawlerSection + +CrawlerConstructor = Callable[ + [ + str, # Name (without the "crawl:" prefix) + SectionProxy, # Crawler's section of global config + Config, # Global config + dict[str, Authenticator], # Loaded authenticators by name + ], + Crawler, +] + +CRAWLERS: dict[str, CrawlerConstructor] = { + "local": lambda n, s, c, a: LocalCrawler(n, LocalCrawlerSection(s), c), + "ilias-web": lambda n, s, c, a: IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a), + "kit-ilias-web": lambda n, s, c, a: KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a), + "kit-ipd": lambda n, s, c, a: KitIpdCrawler(n, KitIpdCrawlerSection(s), c, a), +} diff --git a/PFERD/crawl/crawler.py b/PFERD/crawl/crawler.py new file mode 100644 index 0000000..e2cdf30 --- /dev/null +++ b/PFERD/crawl/crawler.py @@ -0,0 +1,406 @@ +import asyncio +import os +from abc import ABC, abstractmethod +from collections.abc import Awaitable, Callable, Coroutine, Sequence +from datetime import datetime +from pathlib import Path, PurePath +from typing import Any, Optional, TypeVar + +from ..auth import Authenticator +from ..config import Config, Section +from ..deduplicator import Deduplicator +from ..limiter import Limiter +from ..logging import ProgressBar, log +from ..output_dir import FileSink, FileSinkToken, OnConflict, OutputDirectory, OutputDirError, 
Redownload +from ..report import MarkConflictError, MarkDuplicateError, Report +from ..transformer import Transformer +from ..utils import ReusableAsyncContextManager, fmt_path + + +class CrawlWarning(Exception): + pass + + +class CrawlError(Exception): + pass + + +Wrapped = TypeVar("Wrapped", bound=Callable[..., None]) + + +def noncritical(f: Wrapped) -> Wrapped: + """ + Catches and logs a few noncritical exceptions occurring during the function + call, mainly CrawlWarning. + + If any exception occurs during the function call, the crawler's error_free + variable is set to False. This includes noncritical exceptions. + + Warning: Must only be applied to member functions of the Crawler class! + """ + + def wrapper(*args: Any, **kwargs: Any) -> None: + if not (args and isinstance(args[0], Crawler)): + raise RuntimeError("@noncritical must only applied to Crawler methods") + + crawler = args[0] + + try: + f(*args, **kwargs) + except (CrawlWarning, OutputDirError, MarkDuplicateError, MarkConflictError) as e: + crawler.report.add_warning(str(e)) + log.warn(str(e)) + crawler.error_free = False + except Exception as e: + crawler.error_free = False + crawler.report.add_error(str(e)) + raise + + return wrapper # type: ignore + + +AWrapped = TypeVar("AWrapped", bound=Callable[..., Coroutine[Any, Any, Optional[Any]]]) + + +def anoncritical(f: AWrapped) -> AWrapped: + """ + An async version of @noncritical. + + Catches and logs a few noncritical exceptions occurring during the function + call, mainly CrawlWarning. + + If any exception occurs during the function call, the crawler's error_free + variable is set to False. This includes noncritical exceptions. + + Warning: Must only be applied to member functions of the Crawler class! 
+ """ + + async def wrapper(*args: Any, **kwargs: Any) -> Optional[Any]: + if not (args and isinstance(args[0], Crawler)): + raise RuntimeError("@anoncritical must only applied to Crawler methods") + + crawler = args[0] + + try: + return await f(*args, **kwargs) + except (CrawlWarning, OutputDirError, MarkDuplicateError, MarkConflictError) as e: + log.warn(str(e)) + crawler.error_free = False + crawler.report.add_warning(str(e)) + except Exception as e: + crawler.error_free = False + crawler.report.add_error(str(e)) + raise + + return None + + return wrapper # type: ignore + + +class CrawlToken(ReusableAsyncContextManager[ProgressBar]): + def __init__(self, limiter: Limiter, path: PurePath): + super().__init__() + + self._limiter = limiter + self._path = path + + @property + def path(self) -> PurePath: + return self._path + + async def _on_aenter(self) -> ProgressBar: + self._stack.callback(lambda: log.status("[bold cyan]", "Crawled", fmt_path(self._path))) + await self._stack.enter_async_context(self._limiter.limit_crawl()) + bar = self._stack.enter_context(log.crawl_bar("[bold bright_cyan]", "Crawling", fmt_path(self._path))) + + return bar + + +class DownloadToken(ReusableAsyncContextManager[tuple[ProgressBar, FileSink]]): + def __init__(self, limiter: Limiter, fs_token: FileSinkToken, path: PurePath): + super().__init__() + + self._limiter = limiter + self._fs_token = fs_token + self._path = path + + @property + def path(self) -> PurePath: + return self._path + + async def _on_aenter(self) -> tuple[ProgressBar, FileSink]: + await self._stack.enter_async_context(self._limiter.limit_download()) + sink = await self._stack.enter_async_context(self._fs_token) + # The "Downloaded ..." 
message is printed in the output dir, not here + bar = self._stack.enter_context( + log.download_bar("[bold bright_cyan]", "Downloading", fmt_path(self._path)) + ) + + return bar, sink + + +class CrawlerSection(Section): + def type(self) -> str: + value = self.s.get("type") + if value is None: + self.missing_value("type") + return value + + def skip(self) -> bool: + return self.s.getboolean("skip", fallback=False) + + def output_dir(self, name: str) -> Path: + name = name.removeprefix("crawl:") + return Path(self.s.get("output_dir", name)).expanduser() + + def redownload(self) -> Redownload: + value = self.s.get("redownload", "never-smart") + try: + return Redownload.from_string(value) + except ValueError as e: + self.invalid_value( + "redownload", + value, + str(e).capitalize(), + ) + + def on_conflict(self) -> OnConflict: + value = self.s.get("on_conflict", "prompt") + try: + return OnConflict.from_string(value) + except ValueError as e: + self.invalid_value( + "on_conflict", + value, + str(e).capitalize(), + ) + + def transform(self) -> str: + return self.s.get("transform", "") + + def tasks(self) -> int: + value = self.s.getint("tasks", fallback=1) + if value <= 0: + self.invalid_value("tasks", value, "Must be greater than 0") + return value + + def downloads(self) -> int: + tasks = self.tasks() + value = self.s.getint("downloads", fallback=None) + if value is None: + return tasks + if value <= 0: + self.invalid_value("downloads", value, "Must be greater than 0") + if value > tasks: + self.invalid_value("downloads", value, "Must not be greater than tasks") + return value + + def task_delay(self) -> float: + value = self.s.getfloat("task_delay", fallback=0.0) + if value < 0: + self.invalid_value("task_delay", value, "Must not be negative") + return value + + def windows_paths(self) -> bool: + on_windows = os.name == "nt" + return self.s.getboolean("windows_paths", fallback=on_windows) + + def auth(self, authenticators: dict[str, Authenticator]) -> Authenticator: 
        # Resolve the configured auth section name to an Authenticator instance.
        value = self.s.get("auth")
        if value is None:
            self.missing_value("auth")
        auth = authenticators.get(value)
        if auth is None:
            self.invalid_value("auth", value, "No such auth section exists")
        return auth


class Crawler(ABC):
    def __init__(
        self,
        name: str,
        section: CrawlerSection,
        config: Config,
    ) -> None:
        """
        Initialize a crawler from its name and its section in the config file.

        If you are writing your own constructor for your own crawler, make sure
        to call this constructor first (via super().__init__).

        May throw a CrawlerLoadException.
        """

        self.name = name
        # Flipped to False elsewhere when warnings/errors occur; gates _cleanup().
        self.error_free = True

        # Rate limiting for concurrent tasks/downloads, from the config section.
        self._limiter = Limiter(
            task_limit=section.tasks(),
            download_limit=section.downloads(),
            task_delay=section.task_delay(),
        )

        self._deduplicator = Deduplicator(section.windows_paths())
        self._transformer = Transformer(section.transform())

        # All crawler output is rooted at working_dir / output_dir(name).
        self._output_dir = OutputDirectory(
            config.default_section.working_dir() / section.output_dir(name),
            section.redownload(),
            section.on_conflict(),
        )

    @property
    def report(self) -> Report:
        # Report of the current run, owned by the output directory.
        return self._output_dir.report

    @property
    def prev_report(self) -> Optional[Report]:
        # Report of the previous run, if one was loaded; None otherwise.
        return self._output_dir.prev_report

    @property
    def output_dir(self) -> OutputDirectory:
        return self._output_dir

    @staticmethod
    async def gather(awaitables: Sequence[Awaitable[Any]]) -> list[Any]:
        """
        Similar to asyncio.gather. However, in the case of an exception, all
        still running tasks are cancelled and the exception is rethrown.

        This should always be preferred over asyncio.gather in crawler code so
        that an exception like CrawlError may actually stop the crawler.
        """

        tasks = [asyncio.ensure_future(aw) for aw in awaitables]
        result = asyncio.gather(*tasks)
        try:
            return await result
        # Deliberately bare (noqa E722): any exception, including BaseException
        # subclasses like CancelledError, must cancel the remaining tasks.
        except:  # noqa: E722
            for task in tasks:
                task.cancel()
            raise

    async def crawl(self, path: PurePath) -> Optional[CrawlToken]:
        """
        Decide whether [path] should be crawled. Returns a CrawlToken if yes,
        None if the path is ignored by the transformer rules.
        """
        log.explain_topic(f"Decision: Crawl {fmt_path(path)}")
        path = self._deduplicator.mark(path)
        self._output_dir.report.found(path)

        # A transform result of None means the path is ignored.
        if self._transformer.transform(path) is None:
            log.explain("Answer: No")
            log.status("[bold bright_black]", "Ignored", fmt_path(path))
            return None

        log.explain("Answer: Yes")
        return CrawlToken(self._limiter, path)

    def should_try_download(
        self,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
    ) -> bool:
        """
        Check (without side effects on the report) whether a download of [path]
        would be attempted, based on transformer rules and output dir policy.
        """
        log.explain_topic(f"Decision: Should Download {fmt_path(path)}")

        if self._transformer.transform(path) is None:
            log.explain("Answer: No (ignored)")
            return False

        should_download = self._output_dir.should_try_download(
            path, etag_differs=etag_differs, mtime=mtime, redownload=redownload, on_conflict=on_conflict
        )
        if should_download:
            log.explain("Answer: Yes")
            return True
        else:
            log.explain("Answer: No")
            return False

    async def download(
        self,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
    ) -> Optional[DownloadToken]:
        """
        Start a download of [path]. Returns a DownloadToken if the file should
        be downloaded, None if it is ignored or the output dir declines it.
        """
        log.explain_topic(f"Decision: Download {fmt_path(path)}")
        path = self._deduplicator.mark(path)
        self._output_dir.report.found(path)

        transformed_path = self._transformer.transform(path)
        if transformed_path is None:
            log.explain("Answer: No")
            log.status("[bold bright_black]", "Ignored", fmt_path(path))
            return None

        fs_token = await self._output_dir.download(
            path,
            transformed_path,
            etag_differs=etag_differs,
            mtime=mtime,
            redownload=redownload,
            on_conflict=on_conflict,
        )
        if fs_token is None:
            log.explain("Answer: No")
            return None

        log.explain("Answer: Yes")
        return DownloadToken(self._limiter, fs_token, path)

    async def _cleanup(self) -> None:
        # Only clean up (delete stale files) after a completely clean run, so a
        # partially failed crawl never deletes files it merely failed to see.
        log.explain_topic("Decision: Clean up files")
        if self.error_free:
            log.explain("No warnings or errors occurred during this run")
            log.explain("Answer: Yes")
            await self._output_dir.cleanup()
        else:
            log.explain("Warnings or errors occurred during this run")
            log.explain("Answer: No")

    @anoncritical
    async def run(self) -> None:
        """
        Start the crawling process. Call this function if you want to use a
        crawler.
        """

        with log.show_progress():
            self._output_dir.prepare()
            self._output_dir.load_prev_report()
            await self._run()
            await self._cleanup()
            self._output_dir.store_report()

    @abstractmethod
    async def _run(self) -> None:
        """
        Overwrite this function if you are writing a crawler.

        This function must not return before all crawling is complete. To crawl
        multiple things concurrently, asyncio.gather can be used.
+ """ + + pass + + def debug_transforms(self) -> None: + self._output_dir.load_prev_report() + + if not self.prev_report: + log.warn("Couldn't find or load old report") + return + + seen: set[PurePath] = set() + for known in sorted(self.prev_report.found_paths): + looking_at = list(reversed(known.parents)) + [known] + for path in looking_at: + if path in seen: + continue + + log.explain_topic(f"Transforming {fmt_path(path)}") + self._transformer.transform(path) + seen.add(path) diff --git a/PFERD/crawl/http_crawler.py b/PFERD/crawl/http_crawler.py new file mode 100644 index 0000000..49d6013 --- /dev/null +++ b/PFERD/crawl/http_crawler.py @@ -0,0 +1,281 @@ +import asyncio +import http.cookies +import ssl +from datetime import datetime +from pathlib import Path, PurePath +from typing import Any, Optional + +import aiohttp +import certifi +from aiohttp.client import ClientTimeout +from bs4 import Tag + +from ..auth import Authenticator +from ..config import Config +from ..logging import log +from ..utils import fmt_real_path, sanitize_path_name +from ..version import NAME, VERSION +from .crawler import Crawler, CrawlerSection + +ETAGS_CUSTOM_REPORT_VALUE_KEY = "etags" + + +class HttpCrawlerSection(CrawlerSection): + def http_timeout(self) -> float: + return self.s.getfloat("http_timeout", fallback=30) + + +class HttpCrawler(Crawler): + COOKIE_FILE = PurePath(".cookies") + + def __init__( + self, + name: str, + section: HttpCrawlerSection, + config: Config, + shared_auth: Optional[Authenticator] = None, + ) -> None: + super().__init__(name, section, config) + + self._authentication_id = 0 + self._authentication_lock = asyncio.Lock() + self._request_count = 0 + self._http_timeout = section.http_timeout() + + self._cookie_jar_path = self._output_dir.resolve(self.COOKIE_FILE) + self._shared_cookie_jar_paths: Optional[list[Path]] = None + self._shared_auth = shared_auth + + self._output_dir.register_reserved(self.COOKIE_FILE) + + async def _current_auth_id(self) -> int: + 
""" + Returns the id for the current authentication, i.e. an identifier for the last + successful call to [authenticate]. + + This method must be called before any request that might authenticate is made, so the + HttpCrawler can properly track when [authenticate] can return early and when actual + authentication is necessary. + """ + # We acquire the lock here to ensure we wait for any concurrent authenticate to finish. + # This should reduce the amount of requests we make: If an authentication is in progress + # all future requests wait for authentication to complete. + async with self._authentication_lock: + self._request_count += 1 + return self._authentication_id + + async def authenticate(self, caller_auth_id: int) -> None: + """ + Starts the authentication process. The main work is offloaded to _authenticate, which + you should overwrite in a subclass if needed. This method should *NOT* be overwritten. + + The [caller_auth_id] should be the result of a [_current_auth_id] call made *before* + the request was made. This ensures that authentication is not performed needlessly. + """ + async with self._authentication_lock: + log.explain_topic("Authenticating") + # Another thread successfully called authenticate in-between + # We do not want to perform auth again, so we return here. We can + # assume the other thread suceeded as authenticate will throw an error + # if it failed and aborts the crawl process. + if caller_auth_id != self._authentication_id: + log.explain( + "Authentication skipped due to auth id mismatch." + "A previous authentication beat us to the race." + ) + return + log.explain("Calling crawler-specific authenticate") + await self._authenticate() + self._authentication_id += 1 + # Saving the cookies after the first auth ensures we won't need to re-authenticate + # on the next run, should this one be aborted or crash + self._save_cookies() + + async def _authenticate(self) -> None: + """ + Performs authentication. 
This method must only return normally if authentication suceeded. + In all other cases it must either retry internally or throw a terminal exception. + """ + raise RuntimeError("_authenticate() was called but crawler doesn't provide an implementation") + + def share_cookies(self, shared: dict[Authenticator, list[Path]]) -> None: + if not self._shared_auth: + return + + if self._shared_auth in shared: + self._shared_cookie_jar_paths = shared[self._shared_auth] + else: + self._shared_cookie_jar_paths = [] + shared[self._shared_auth] = self._shared_cookie_jar_paths + + self._shared_cookie_jar_paths.append(self._cookie_jar_path) + + def _load_cookies_from_file(self, path: Path) -> None: + jar: Any = http.cookies.SimpleCookie() + with open(path, encoding="utf-8") as f: + for i, line in enumerate(f): + # Names of headers are case insensitive + if line[:11].lower() == "set-cookie:": + jar.load(line[11:]) + else: + log.explain(f"Line {i} doesn't start with 'Set-Cookie:', ignoring it") + self._cookie_jar.update_cookies(jar) + + def _save_cookies_to_file(self, path: Path) -> None: + jar: Any = http.cookies.SimpleCookie() + for morsel in self._cookie_jar: + jar[morsel.key] = morsel + with open(path, "w", encoding="utf-8") as f: + f.write(jar.output(sep="\n")) + f.write("\n") # A trailing newline is just common courtesy + + def _load_cookies(self) -> None: + log.explain_topic("Loading cookies") + + cookie_jar_path: Optional[Path] = None + + if self._shared_cookie_jar_paths is None: + log.explain("Not sharing any cookies") + cookie_jar_path = self._cookie_jar_path + else: + log.explain("Sharing cookies") + max_mtime: Optional[float] = None + for path in self._shared_cookie_jar_paths: + if not path.is_file(): + log.explain(f"{fmt_real_path(path)} is not a file") + continue + mtime = path.stat().st_mtime + if max_mtime is None or mtime > max_mtime: + log.explain(f"{fmt_real_path(path)} has newest mtime so far") + max_mtime = mtime + cookie_jar_path = path + else: + 
log.explain(f"{fmt_real_path(path)} has older mtime") + + if cookie_jar_path is None: + log.explain("Couldn't find a suitable cookie file") + return + + log.explain(f"Loading cookies from {fmt_real_path(cookie_jar_path)}") + try: + self._load_cookies_from_file(cookie_jar_path) + except Exception as e: + log.explain("Failed to load cookies") + log.explain(str(e)) + + def _save_cookies(self) -> None: + log.explain_topic("Saving cookies") + + try: + log.explain(f"Saving cookies to {fmt_real_path(self._cookie_jar_path)}") + self._save_cookies_to_file(self._cookie_jar_path) + except Exception as e: + log.warn(f"Failed to save cookies to {fmt_real_path(self._cookie_jar_path)}") + log.warn(str(e)) + + @staticmethod + def get_folder_structure_from_heading_hierarchy(file_link: Tag, drop_h1: bool = False) -> PurePath: + """ + Retrieves the hierarchy of headings associated with the give file link and constructs a folder + structure from them. + +

level headings usually only appear once and serve as the page title, so they would introduce + redundant nesting. To avoid this,

headings are ignored via the drop_h1 parameter. + """ + + def find_associated_headings(tag: Tag, level: int) -> PurePath: + if level == 0 or (level == 1 and drop_h1): + return PurePath() + + level_heading = tag.find_previous(name=f"h{level}") + + if level_heading is None: + return find_associated_headings(tag, level - 1) + + folder_name = sanitize_path_name(level_heading.get_text().strip()) + return find_associated_headings(level_heading, level - 1) / folder_name + + # start at level

because paragraph-level headings are usually too granular for folder names + return find_associated_headings(file_link, 3) + + def _get_previous_etag_from_report(self, path: PurePath) -> Optional[str]: + """ + If available, retrieves the entity tag for a given path which was stored in the previous report. + """ + if not self._output_dir.prev_report: + return None + + etags = self._output_dir.prev_report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {} + return etags.get(str(path)) + + def _add_etag_to_report(self, path: PurePath, etag: Optional[str]) -> None: + """ + Adds an entity tag for a given path to the report's custom values. + """ + if not etag: + return + + etags = self._output_dir.report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {} + etags[str(path)] = etag + self._output_dir.report.add_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY, etags) + + async def _request_resource_version(self, resource_url: str) -> tuple[Optional[str], Optional[datetime]]: + """ + Requests the ETag and Last-Modified headers of a resource via a HEAD request. + If no entity tag / modification date can be obtained, the according value will be None. 
+ """ + try: + async with self.session.head(resource_url) as resp: + if resp.status != 200: + return None, None + + etag_header = resp.headers.get("ETag") + last_modified_header = resp.headers.get("Last-Modified") + last_modified = None + + if last_modified_header: + try: + # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified#directives + datetime_format = "%a, %d %b %Y %H:%M:%S GMT" + last_modified = datetime.strptime(last_modified_header, datetime_format) + except ValueError: + # last_modified remains None + pass + + return etag_header, last_modified + except aiohttp.ClientError: + return None, None + + async def run(self) -> None: + self._request_count = 0 + self._cookie_jar = aiohttp.CookieJar() + self._load_cookies() + + async with aiohttp.ClientSession( + headers={"User-Agent": f"{NAME}/{VERSION}"}, + cookie_jar=self._cookie_jar, + connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())), + timeout=ClientTimeout( + # 30 minutes. No download in the history of downloads was longer than 30 minutes. + # This is enough to transfer a 600 MB file over a 3 Mib/s connection. + # Allowing an arbitrary value could be annoying for overnight batch jobs + total=15 * 60, + connect=self._http_timeout, + sock_connect=self._http_timeout, + sock_read=self._http_timeout, + ), + # See https://github.com/aio-libs/aiohttp/issues/6626 + # Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the + # passed signature. Shibboleth will not accept the broken signature and authentication will + # fail. 
+ requote_redirect_url=False, + ) as session: + self.session = session + try: + await super().run() + finally: + del self.session + log.explain_topic(f"Total amount of HTTP requests: {self._request_count}") + + # They are saved in authenticate, but a final save won't hurt + self._save_cookies() diff --git a/PFERD/crawl/ilias/__init__.py b/PFERD/crawl/ilias/__init__.py new file mode 100644 index 0000000..fa1aaed --- /dev/null +++ b/PFERD/crawl/ilias/__init__.py @@ -0,0 +1,13 @@ +from .kit_ilias_web_crawler import ( + IliasWebCrawler, + IliasWebCrawlerSection, + KitIliasWebCrawler, + KitIliasWebCrawlerSection, +) + +__all__ = [ + "IliasWebCrawler", + "IliasWebCrawlerSection", + "KitIliasWebCrawler", + "KitIliasWebCrawlerSection", +] diff --git a/PFERD/crawl/ilias/async_helper.py b/PFERD/crawl/ilias/async_helper.py new file mode 100644 index 0000000..2e6b301 --- /dev/null +++ b/PFERD/crawl/ilias/async_helper.py @@ -0,0 +1,41 @@ +import asyncio +from collections.abc import Callable +from typing import Any, Optional + +import aiohttp + +from ...logging import log +from ..crawler import AWrapped, CrawlError, CrawlWarning + + +def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Callable[[AWrapped], AWrapped]: + def decorator(f: AWrapped) -> AWrapped: + async def wrapper(*args: Any, **kwargs: Any) -> Optional[Any]: + last_exception: Optional[BaseException] = None + for round in range(attempts): + try: + return await f(*args, **kwargs) + except aiohttp.ContentTypeError: # invalid content type + raise CrawlWarning("ILIAS returned an invalid content type") from None + except aiohttp.TooManyRedirects: + raise CrawlWarning("Got stuck in a redirect loop") from None + except aiohttp.ClientPayloadError as e: # encoding or not enough bytes + last_exception = e + except aiohttp.ClientConnectionError as e: # e.g. timeout, disconnect, resolve failed, etc. 
+ last_exception = e + except asyncio.exceptions.TimeoutError as e: # explicit http timeouts in HttpCrawler + last_exception = e + log.explain_topic(f"Retrying operation {name}. Retries left: {attempts - 1 - round}") + log.explain(f"Last exception: {last_exception!r}") + + if last_exception: + message = f"Error in I/O Operation: {last_exception!r}" + if failure_is_error: + raise CrawlError(message) from last_exception + else: + raise CrawlWarning(message) from last_exception + raise CrawlError("Impossible return in ilias _iorepeat") + + return wrapper # type: ignore + + return decorator diff --git a/PFERD/crawl/ilias/file_templates.py b/PFERD/crawl/ilias/file_templates.py new file mode 100644 index 0000000..c832977 --- /dev/null +++ b/PFERD/crawl/ilias/file_templates.py @@ -0,0 +1,356 @@ +import dataclasses +import re +from enum import Enum +from typing import Optional, cast + +import bs4 + +from PFERD.utils import soupify + +_link_template_plain = "{{link}}" +_link_template_fancy = """ + + + + + ILIAS - Link: {{name}} + + + + + + + +
+ +
+ +
+
+ {{name}} +
+
{{description}}
+
+ +
+ +
+ + +""".strip() # noqa: E501 line too long + +_link_template_internet_shortcut = """ +[InternetShortcut] +URL={{link}} +Desc={{description}} +""".strip() + +_learning_module_template = """ + + + + + {{name}} + + + + +{{body}} + + +""" + +_forum_thread_template = """ + + + + + ILIAS - Forum: {{name}} + + + + {{heading}} + {{content}} + + +""".strip() # noqa: E501 line too long + + +def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next: Optional[str]) -> str: + # Seems to be comments, ignore those. + for elem in body.select(".il-copg-mob-fullscreen-modal"): + elem.decompose() + + nav_template = """ + + """ + if prev and body.select_one(".ilc_page_lnav_LeftNavigation"): + text = cast(bs4.Tag, body.select_one(".ilc_page_lnav_LeftNavigation")).get_text().strip() + left = f'{text}' + else: + left = "" + + if next and body.select_one(".ilc_page_rnav_RightNavigation"): + text = cast(bs4.Tag, body.select_one(".ilc_page_rnav_RightNavigation")).get_text().strip() + right = f'{text}' + else: + right = "" + + if top_nav := body.select_one(".ilc_page_tnav_TopNavigation"): + top_nav.replace_with( + soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode()) + ) + + if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"): + bot_nav.replace_with( + soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode()) + ) + + body_str = body.prettify() + return _learning_module_template.replace("{{body}}", body_str).replace("{{name}}", name) + + +def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str: + if title := heading.find(name="b"): + title.wrap(bs4.Tag(name="a", attrs={"href": url})) + return ( + _forum_thread_template.replace("{{name}}", name) + .replace("{{heading}}", heading.prettify()) + .replace("{{content}}", content.prettify()) + ) + + +@dataclasses.dataclass +class LinkData: + name: str + url: str + description: str + + +class Links(Enum): + IGNORE = 
"ignore" + PLAINTEXT = "plaintext" + FANCY = "fancy" + INTERNET_SHORTCUT = "internet-shortcut" + + def template(self) -> Optional[str]: + if self == Links.FANCY: + return _link_template_fancy + elif self == Links.PLAINTEXT: + return _link_template_plain + elif self == Links.INTERNET_SHORTCUT: + return _link_template_internet_shortcut + elif self == Links.IGNORE: + return None + raise ValueError("Missing switch case") + + def collection_as_one(self) -> bool: + return self == Links.FANCY + + def extension(self) -> Optional[str]: + if self == Links.FANCY: + return ".html" + elif self == Links.PLAINTEXT: + return ".txt" + elif self == Links.INTERNET_SHORTCUT: + return ".url" + elif self == Links.IGNORE: + return None + raise ValueError("Missing switch case") + + def interpolate(self, redirect_delay: int, collection_name: str, links: list[LinkData]) -> str: + template = self.template() + if template is None: + raise ValueError("Cannot interpolate ignored links") + + if len(links) == 1: + link = links[0] + content = template + content = content.replace("{{link}}", link.url) + content = content.replace("{{name}}", link.name) + content = content.replace("{{description}}", link.description) + content = content.replace("{{redirect_delay}}", str(redirect_delay)) + return content + if self == Links.PLAINTEXT or self == Links.INTERNET_SHORTCUT: + return "\n".join(f"{link.url}" for link in links) + + # All others get coerced to fancy + content = cast(str, Links.FANCY.template()) + repeated_content = cast( + re.Match[str], re.search(r"([\s\S]+)", content) + ).group(1) + + parts = [] + for link in links: + instance = repeated_content + instance = instance.replace("{{link}}", link.url) + instance = instance.replace("{{name}}", link.name) + instance = instance.replace("{{description}}", link.description) + instance = instance.replace("{{redirect_delay}}", str(redirect_delay)) + parts.append(instance) + + content = content.replace(repeated_content, "\n".join(parts)) + content = 
content.replace("{{name}}", collection_name) + content = re.sub(r"[\s\S]+", "", content) + + return content + + @staticmethod + def from_string(string: str) -> "Links": + try: + return Links(string) + except ValueError: + options = [f"'{option.value}'" for option in Links] + raise ValueError(f"must be one of {', '.join(options)}") from None diff --git a/PFERD/crawl/ilias/ilias_html_cleaner.py b/PFERD/crawl/ilias/ilias_html_cleaner.py new file mode 100644 index 0000000..35a7ea0 --- /dev/null +++ b/PFERD/crawl/ilias/ilias_html_cleaner.py @@ -0,0 +1,108 @@ +from typing import cast + +from bs4 import BeautifulSoup, Comment, Tag + +_STYLE_TAG_CONTENT = """ + .ilc_text_block_Information { + background-color: #f5f7fa; + } + div.ilc_text_block_Standard { + margin-bottom: 10px; + margin-top: 10px; + } + span.ilc_text_inline_Strong { + font-weight: bold; + } + + .row-flex { + display: flex; + } + .row-flex-wrap { + flex-wrap: wrap; + } + + .accordion-head { + background-color: #f5f7fa; + padding: 0.5rem 0; + } + + h3 { + margin-top: 0.5rem; + margin-bottom: 1rem; + } + + br.visible-break { + margin-bottom: 1rem; + } + + article { + margin: 0.5rem 0; + } + + img { + background-color: white; + } + + body { + padding: 1em; + grid-template-columns: 1fr min(60rem, 90%) 1fr; + line-height: 1.2; + } +""" + +_ARTICLE_WORTHY_CLASSES = [ + "ilc_text_block_Information", + "ilc_section_Attention", + "ilc_section_Link", +] + + +def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup: + head = soup.new_tag("head") + soup.insert(0, head) + # Force UTF-8 encoding + head.append(soup.new_tag("meta", charset="utf-8")) + + # + head.append(soup.new_tag("link", rel="stylesheet", href="https://cdn.simplecss.org/simple.css")) + + # Basic style tags for compat + style: Tag = soup.new_tag("style") + style.append(_STYLE_TAG_CONTENT) + head.append(style) + + return soup + + +def clean(soup: BeautifulSoup) -> BeautifulSoup: + for block in cast(list[Tag], soup.find_all(class_=lambda x: x in 
_ARTICLE_WORTHY_CLASSES)): + block.name = "article" + + for block in cast(list[Tag], soup.find_all("h3")): + block.name = "div" + + for block in cast(list[Tag], soup.find_all("h1")): + block.name = "h3" + + for block in cast(list[Tag], soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap")): + block.name = "h3" + block["class"] += ["accordion-head"] # type: ignore + + for dummy in soup.select(".ilc_text_block_Standard.ilc_Paragraph"): + children = list(dummy.children) + if not children: + dummy.decompose() + if len(children) > 1: + continue + if isinstance(type(children[0]), Comment): + dummy.decompose() + + # Delete video figures, as they can not be internalized anyway + for video in soup.select(".ilc_media_cont_MediaContainerHighlighted .ilPageVideo"): + if figure := video.find_parent("figure"): + figure.decompose() + + for hrule_imposter in cast(list[Tag], soup.find_all(class_="ilc_section_Separator")): + hrule_imposter.insert(0, soup.new_tag("hr")) + + return soup diff --git a/PFERD/crawl/ilias/ilias_web_crawler.py b/PFERD/crawl/ilias/ilias_web_crawler.py new file mode 100644 index 0000000..b5041b3 --- /dev/null +++ b/PFERD/crawl/ilias/ilias_web_crawler.py @@ -0,0 +1,1074 @@ +import asyncio +import base64 +import os +import re +from collections.abc import Awaitable, Coroutine +from pathlib import PurePath +from typing import Any, Literal, Optional, cast +from urllib.parse import urljoin + +import aiohttp +from aiohttp import hdrs +from bs4 import BeautifulSoup, Tag + +from ...auth import Authenticator +from ...config import Config +from ...logging import ProgressBar, log +from ...output_dir import FileSink, Redownload +from ...utils import fmt_path, sanitize_path_name, soupify, url_set_query_param +from ..crawler import CrawlError, CrawlToken, CrawlWarning, DownloadToken, anoncritical +from ..http_crawler import HttpCrawler, HttpCrawlerSection +from .async_helper import _iorepeat +from .file_templates import LinkData, Links, forum_thread_template, 
learning_module_template +from .ilias_html_cleaner import clean, insert_base_markup +from .kit_ilias_html import ( + IliasElementType, + IliasForumThread, + IliasLearningModulePage, + IliasPage, + IliasPageElement, + IliasSoup, + parse_ilias_forum_export, +) +from .shibboleth_login import ShibbolethLogin + +TargetType = str | int + + +class LoginTypeLocal: + def __init__(self, client_id: str): + self.client_id = client_id + + +class IliasWebCrawlerSection(HttpCrawlerSection): + def base_url(self) -> str: + base_url = self.s.get("base_url") + if not base_url: + self.missing_value("base_url") + + return base_url + + def login(self) -> Literal["shibboleth"] | LoginTypeLocal: + login_type = self.s.get("login_type") + if not login_type: + self.missing_value("login_type") + if login_type == "shibboleth": + return "shibboleth" + if login_type == "local": + client_id = self.s.get("client_id") + if not client_id: + self.missing_value("client_id") + return LoginTypeLocal(client_id) + + self.invalid_value("login_type", login_type, "Should be ") + + def tfa_auth(self, authenticators: dict[str, Authenticator]) -> Optional[Authenticator]: + value: Optional[str] = self.s.get("tfa_auth") + if value is None: + return None + auth = authenticators.get(value) + if auth is None: + self.invalid_value("tfa_auth", value, "No such auth section exists") + return auth + + def target(self) -> TargetType: + target = self.s.get("target") + if not target: + self.missing_value("target") + + if re.fullmatch(r"\d+", target): + # Course id + return int(target) + if target == "desktop": + # Full personal desktop + return target + if target.startswith(self.base_url()): + # URL + return target + + self.invalid_value("target", target, "Should be ") + + def links(self) -> Links: + type_str: Optional[str] = self.s.get("links") + + if type_str is None: + return Links.FANCY + + try: + return Links.from_string(type_str) + except ValueError as e: + self.invalid_value("links", type_str, str(e).capitalize()) + 
+ def link_redirect_delay(self) -> int: + return self.s.getint("link_redirect_delay", fallback=-1) + + def videos(self) -> bool: + return self.s.getboolean("videos", fallback=False) + + def forums(self) -> bool: + return self.s.getboolean("forums", fallback=False) + + +_DIRECTORY_PAGES: set[IliasElementType] = { + IliasElementType.EXERCISE, + IliasElementType.EXERCISE_FILES, + IliasElementType.EXERCISE_OVERVIEW, + IliasElementType.FOLDER, + IliasElementType.INFO_TAB, + IliasElementType.MEDIACAST_VIDEO_FOLDER, + IliasElementType.MEETING, + IliasElementType.OPENCAST_VIDEO_FOLDER, + IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED, +} + +_VIDEO_ELEMENTS: set[IliasElementType] = { + IliasElementType.MEDIACAST_VIDEO, + IliasElementType.MEDIACAST_VIDEO_FOLDER, + IliasElementType.OPENCAST_VIDEO, + IliasElementType.OPENCAST_VIDEO_FOLDER, + IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED, + IliasElementType.OPENCAST_VIDEO_PLAYER, +} + + +def _get_video_cache_key(element: IliasPageElement) -> str: + return f"ilias-video-cache-{element.id()}" + + +# Crawler control flow: +# +# crawl_desktop -+ +# | +# crawl_course --+ +# | +# @_io_repeat | # retries internally (before the bar) +# +- crawl_url <-+ +# | +# | +# | @_wrap_io_exception # does not need to retry as children acquire bars +# +> crawl_ilias_element -+ +# ^ | +# | @_io_repeat | # retries internally (before the bar) +# +- crawl_ilias_page <---+ +# | | +# +> get_page | # Handles and retries authentication +# | +# @_io_repeat | # retries internally (before the bar) +# +- download_link <---+ +# | | +# +> resolve_target | # Handles and retries authentication +# | +# @_io_repeat | # retries internally (before the bar) +# +- download_video <---+ +# | | +# | @_io_repeat | # retries internally (before the bar) +# +- download_file <---+ +# | +# +> stream_from_url # Handles and retries authentication +class IliasWebCrawler(HttpCrawler): + def __init__( + self, + name: str, + section: IliasWebCrawlerSection, + config: 
Config, + authenticators: dict[str, Authenticator], + ): + # Setting a main authenticator for cookie sharing + auth = section.auth(authenticators) + super().__init__(name, section, config, shared_auth=auth) + + if section.tasks() > 1: + log.warn( + """ +Please avoid using too many parallel requests as these are the KIT ILIAS +instance's greatest bottleneck. + """.strip() + ) + + self._auth = auth + self._base_url = section.base_url() + self._tfa_auth = section.tfa_auth(authenticators) + + self._login_type = section.login() + if isinstance(self._login_type, LoginTypeLocal): + self._client_id = self._login_type.client_id + else: + self._shibboleth_login = ShibbolethLogin(self._base_url, self._auth, self._tfa_auth) + + self._target = section.target() + self._link_file_redirect_delay = section.link_redirect_delay() + self._links = section.links() + self._videos = section.videos() + self._forums = section.forums() + self._visited_urls: dict[str, PurePath] = dict() + + async def _run(self) -> None: + if isinstance(self._target, int): + log.explain_topic(f"Inferred crawl target: Course with id {self._target}") + await self._crawl_course(self._target) + elif self._target == "desktop": + log.explain_topic("Inferred crawl target: Personal desktop") + await self._crawl_desktop() + else: + log.explain_topic(f"Inferred crawl target: URL {self._target}") + await self._crawl_url(self._target) + + async def _crawl_course(self, course_id: int) -> None: + # Start crawling at the given course + root_url = url_set_query_param( + urljoin(self._base_url + "/", "goto.php"), + "target", + f"crs_{course_id}", + ) + + await self._crawl_url(root_url, expected_id=course_id) + + async def _crawl_desktop(self) -> None: + await self._crawl_url( + urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show"), crawl_nested_courses=True + ) + + async def _crawl_url( + self, url: str, expected_id: Optional[int] = None, crawl_nested_courses: bool = False + ) -> None: + if awaitable := await 
self._handle_ilias_page( + url, None, PurePath("."), expected_id, crawl_nested_courses + ): + await awaitable + + async def _handle_ilias_page( + self, + url: str, + current_element: Optional[IliasPageElement], + path: PurePath, + expected_course_id: Optional[int] = None, + crawl_nested_courses: bool = False, + ) -> Optional[Coroutine[Any, Any, None]]: + maybe_cl = await self.crawl(path) + if not maybe_cl: + return None + if current_element: + self._ensure_not_seen(current_element, path) + + return self._crawl_ilias_page( + url, current_element, maybe_cl, expected_course_id, crawl_nested_courses + ) + + @anoncritical + async def _crawl_ilias_page( + self, + url: str, + current_element: Optional[IliasPageElement], + cl: CrawlToken, + expected_course_id: Optional[int] = None, + crawl_nested_courses: bool = False, + ) -> None: + elements: list[IliasPageElement] = [] + # A list as variable redefinitions are not propagated to outer scopes + description: list[BeautifulSoup] = [] + + @_iorepeat(3, "crawling folder") + async def gather_elements() -> None: + elements.clear() + async with cl: + next_stage_url: Optional[str] = url + current_parent = current_element + page = None + + while next_stage_url: + soup = await self._get_page(next_stage_url) + log.explain_topic(f"Parsing HTML page for {fmt_path(cl.path)}") + log.explain(f"URL: {next_stage_url}") + + # If we expect to find a root course, enforce it + if current_parent is None and expected_course_id is not None: + perma_link = IliasPage.get_soup_permalink(soup) + if not perma_link or "crs/" not in perma_link: + raise CrawlError("Invalid course id? 
Didn't find anything looking like a course") + if str(expected_course_id) not in perma_link: + raise CrawlError(f"Expected course id {expected_course_id} but got {perma_link}") + + page = IliasPage(soup, current_parent) + if next_element := page.get_next_stage_element(): + current_parent = next_element + next_stage_url = next_element.url + else: + next_stage_url = None + + page = cast(IliasPage, page) + elements.extend(page.get_child_elements()) + if current_element is None and (info_tab := page.get_info_tab()): + elements.append(info_tab) + if description_string := page.get_description(): + description.append(description_string) + + # Fill up our task list with the found elements + await gather_elements() + + if description: + await self._download_description(cl.path, description[0]) + + elements.sort(key=lambda e: e.id()) + + tasks: list[Awaitable[None]] = [] + for element in elements: + if handle := await self._handle_ilias_element(cl.path, element, crawl_nested_courses): + tasks.append(asyncio.create_task(handle)) + + # And execute them + await self.gather(tasks) + + # These decorators only apply *to this method* and *NOT* to the returned + # awaitables! + # This method does not await the handlers but returns them instead. + # This ensures one level is handled at a time and name deduplication + # works correctly. + @anoncritical + async def _handle_ilias_element( + self, parent_path: PurePath, element: IliasPageElement, crawl_nested_courses: bool = False + ) -> Optional[Coroutine[Any, Any, None]]: + # element.name might contain `/` if the crawler created nested elements, + # so we can not sanitize it here. We trust in the output dir to thwart worst-case + # directory escape attacks. + element_path = PurePath(parent_path, element.name) + + # This is symptomatic of no access to the element, for example, because + # of time availability restrictions. 
+ if "cmdClass=ilInfoScreenGUI" in element.url and "cmd=showSummary" in element.url: + log.explain( + "Skipping element as url points to info screen, " + "this should only happen with not-yet-released elements" + ) + return None + + if element.type in _VIDEO_ELEMENTS and not self._videos: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](enable with option 'videos')", + ) + return None + + if element.type == IliasElementType.FILE: + return await self._handle_file(element, element_path) + elif element.type == IliasElementType.FORUM: + if not self._forums: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](enable with option 'forums')", + ) + return None + return await self._handle_forum(element, element_path) + elif element.type == IliasElementType.TEST: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](tests contain no relevant data)", + ) + return None + elif element.type == IliasElementType.SURVEY: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](surveys contain no relevant data)", + ) + return None + elif element.type == IliasElementType.SCORM_LEARNING_MODULE: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](scorm learning modules are not supported)", + ) + return None + elif element.type == IliasElementType.LITERATURE_LIST: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](literature lists are not currently supported)", + ) + return None + elif element.type == IliasElementType.LEARNING_MODULE_HTML: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](HTML learning modules are not supported)", + ) + return None + elif element.type == IliasElementType.BLOG: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + 
"[bright_black](blogs are not currently supported)", + ) + return None + elif element.type == IliasElementType.DCL_RECORD_LIST: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](dcl record lists are not currently supported)", + ) + return None + elif element.type == IliasElementType.MEDIA_POOL: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](media pools are not currently supported)", + ) + return None + elif element.type == IliasElementType.COURSE: + if crawl_nested_courses: + return await self._handle_ilias_page(element.url, element, element_path) + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](not descending into linked course)", + ) + return None + elif element.type == IliasElementType.WIKI: + log.status( + "[bold bright_black]", + "Ignored", + fmt_path(element_path), + "[bright_black](wikis are not currently supported)", + ) + return None + elif element.type == IliasElementType.LEARNING_MODULE: + return await self._handle_learning_module(element, element_path) + elif element.type == IliasElementType.LINK: + return await self._handle_link(element, element_path) + elif element.type == IliasElementType.LINK_COLLECTION: + return await self._handle_link(element, element_path) + elif element.type == IliasElementType.BOOKING: + return await self._handle_booking(element, element_path) + elif element.type == IliasElementType.OPENCAST_VIDEO: + return await self._handle_file(element, element_path) + elif element.type == IliasElementType.OPENCAST_VIDEO_PLAYER: + return await self._handle_opencast_video(element, element_path) + elif element.type == IliasElementType.MEDIACAST_VIDEO: + return await self._handle_file(element, element_path) + elif element.type == IliasElementType.MOB_VIDEO: + return await self._handle_file(element, element_path, is_video=True) + elif element.type in _DIRECTORY_PAGES: + return await 
self._handle_ilias_page(element.url, element, element_path) + else: + # This will retry it a few times, failing everytime. It doesn't make any network + # requests, so that's fine. + raise CrawlWarning(f"Unknown element type: {element.type!r}") + + async def _handle_link( + self, + element: IliasPageElement, + element_path: PurePath, + ) -> Optional[Coroutine[Any, Any, None]]: + log.explain_topic(f"Decision: Crawl Link {fmt_path(element_path)}") + log.explain(f"Links type is {self._links}") + + export_url = url_set_query_param(element.url, "cmd", "exportHTML") + resolved = await self._resolve_link_target(export_url) + if resolved == "none": + links = [LinkData(element.name, "", element.description or "")] + else: + links = self._parse_link_content(element, cast(BeautifulSoup, resolved)) + + maybe_extension = self._links.extension() + + if not maybe_extension: + log.explain("Answer: No") + return None + else: + log.explain("Answer: Yes") + + if len(links) <= 1 or self._links.collection_as_one(): + element_path = element_path.with_name(element_path.name + maybe_extension) + maybe_dl = await self.download(element_path, mtime=element.mtime) + if not maybe_dl: + return None + return self._download_link(self._links, element.name, links, maybe_dl) + + maybe_cl = await self.crawl(element_path) + if not maybe_cl: + return None + # Required for download_all closure + cl = maybe_cl + extension = maybe_extension + + async def download_all() -> None: + for link in links: + path = cl.path / (sanitize_path_name(link.name) + extension) + if dl := await self.download(path, mtime=element.mtime): + await self._download_link(self._links, element.name, [link], dl) + + return download_all() + + @anoncritical + @_iorepeat(3, "resolving link") + async def _download_link( + self, link_renderer: Links, collection_name: str, links: list[LinkData], dl: DownloadToken + ) -> None: + async with dl as (bar, sink): + rendered = link_renderer.interpolate(self._link_file_redirect_delay, 
collection_name, links) + sink.file.write(rendered.encode("utf-8")) + sink.done() + + async def _resolve_link_target(self, export_url: str) -> BeautifulSoup | Literal["none"]: + async def impl() -> Optional[BeautifulSoup | Literal["none"]]: + async with self.session.get(export_url, allow_redirects=False) as resp: + # No redirect means we were authenticated + if hdrs.LOCATION not in resp.headers: + return soupify(await resp.read()) # .select_one("a").get("href").strip() # type: ignore + # We are either unauthenticated or the link is not active + new_url = resp.headers[hdrs.LOCATION].lower() + if "baseclass=illinkresourcehandlergui" in new_url and "cmd=infoscreen" in new_url: + return "none" + return None + + auth_id = await self._current_auth_id() + target = await impl() + if target is not None: + return target + + await self.authenticate(auth_id) + + target = await impl() + if target is not None: + return target + + raise CrawlError("resolve_link_target failed even after authenticating") + + @staticmethod + def _parse_link_content(element: IliasPageElement, content: BeautifulSoup) -> list[LinkData]: + links = list(content.select("a")) + if len(links) == 1: + url = str(links[0].get("href")).strip() + return [LinkData(name=element.name, description=element.description or "", url=url)] + + results = [] + for link in links: + url = str(link.get("href")).strip() + name = link.get_text(strip=True) + description = cast(Tag, link.find_next_sibling("dd")).get_text(strip=True) + results.append(LinkData(name=name, description=description, url=url.strip())) + + return results + + async def _handle_booking( + self, + element: IliasPageElement, + element_path: PurePath, + ) -> Optional[Coroutine[Any, Any, None]]: + log.explain_topic(f"Decision: Crawl Booking Link {fmt_path(element_path)}") + log.explain(f"Links type is {self._links}") + + link_template_maybe = self._links.template() + link_extension = self._links.extension() + if not link_template_maybe or not link_extension: + 
log.explain("Answer: No") + return None + else: + log.explain("Answer: Yes") + element_path = element_path.with_name(element_path.name + link_extension) + + maybe_dl = await self.download(element_path, mtime=element.mtime) + if not maybe_dl: + return None + + self._ensure_not_seen(element, element_path) + + return self._download_booking(element, maybe_dl) + + @anoncritical + @_iorepeat(1, "downloading description") + async def _download_description(self, parent_path: PurePath, description: BeautifulSoup) -> None: + path = parent_path / "Description.html" + dl = await self.download(path, redownload=Redownload.ALWAYS) + if not dl: + return + + async with dl as (_bar, sink): + description = clean(insert_base_markup(description)) + description_tag = await self.internalize_images(description) + sink.file.write(description_tag.prettify().encode("utf-8")) + sink.done() + + @anoncritical + @_iorepeat(3, "resolving booking") + async def _download_booking( + self, + element: IliasPageElement, + dl: DownloadToken, + ) -> None: + async with dl as (bar, sink): + links = [LinkData(name=element.name, description=element.description or "", url=element.url)] + rendered = self._links.interpolate(self._link_file_redirect_delay, element.name, links) + sink.file.write(rendered.encode("utf-8")) + sink.done() + + async def _handle_opencast_video( + self, + element: IliasPageElement, + element_path: PurePath, + ) -> Optional[Coroutine[Any, Any, None]]: + # Copy old mapping as it is likely still relevant + if self.prev_report: + self.report.add_custom_value( + _get_video_cache_key(element), + self.prev_report.get_custom_value(_get_video_cache_key(element)), + ) + + # A video might contain other videos, so let's "crawl" the video first + # to ensure rate limits apply. This must be a download as *this token* + # is re-used if the video consists of a single stream. 
In that case the + # file name is used and *not* the stream name the ilias html parser reported + # to ensure backwards compatibility. + maybe_dl = await self.download(element_path, mtime=element.mtime, redownload=Redownload.ALWAYS) + + # If we do not want to crawl it (user filter), we can move on + if not maybe_dl: + return None + + self._ensure_not_seen(element, element_path) + + # If we have every file from the cached mapping already, we can ignore this and bail + if self._all_opencast_videos_locally_present(element, maybe_dl.path): + # Mark all existing videos as known to ensure they do not get deleted during cleanup. + # We "downloaded" them, just without actually making a network request as we assumed + # they did not change. + contained = self._previous_contained_opencast_videos(element, maybe_dl.path) + if len(contained) > 1: + # Only do this if we threw away the original dl token, + # to not download single-stream videos twice + for video in contained: + await self.download(video) + + return None + + return self._download_opencast_video(element, maybe_dl) + + def _previous_contained_opencast_videos( + self, element: IliasPageElement, element_path: PurePath + ) -> list[PurePath]: + if not self.prev_report: + return [] + custom_value = self.prev_report.get_custom_value(_get_video_cache_key(element)) + if not custom_value: + return [] + cached_value = cast(dict[str, Any], custom_value) + if "known_paths" not in cached_value or "own_path" not in cached_value: + log.explain(f"'known_paths' or 'own_path' missing from cached value: {cached_value}") + return [] + transformed_own_path = self._transformer.transform(element_path) + if cached_value["own_path"] != str(transformed_own_path): + log.explain( + f"own_path '{transformed_own_path}' does not match cached value: '{cached_value['own_path']}" + ) + return [] + return [PurePath(name) for name in cached_value["known_paths"]] + + def _all_opencast_videos_locally_present(self, element: IliasPageElement, 
element_path: PurePath) -> bool: + log.explain_topic(f"Checking local cache for video {fmt_path(element_path)}") + if contained_videos := self._previous_contained_opencast_videos(element, element_path): + log.explain( + f"The following contained videos are known: {','.join(map(fmt_path, contained_videos))}" + ) + if all(self._output_dir.resolve(path).exists() for path in contained_videos): + log.explain("Found all known videos locally, skipping enumeration request") + return True + log.explain("Missing at least one video, continuing with requests!") + else: + log.explain("No local cache present") + return False + + @anoncritical + @_iorepeat(3, "downloading video") + async def _download_opencast_video(self, element: IliasPageElement, dl: DownloadToken) -> None: + def add_to_report(paths: list[str]) -> None: + self.report.add_custom_value( + _get_video_cache_key(element), + {"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))}, + ) + + async with dl as (bar, sink): + page = IliasPage(await self._get_page(element.url), element) + stream_elements = page.get_child_elements() + + if len(stream_elements) > 1: + log.explain(f"Found multiple video streams for {element.name}") + else: + log.explain(f"Using single video mode for {element.name}") + stream_element = stream_elements[0] + + # We do not have a local cache yet + await self._stream_from_url(stream_element, sink, bar, is_video=True) + add_to_report([str(self._transformer.transform(dl.path))]) + return + + contained_video_paths: list[str] = [] + + for stream_element in stream_elements: + video_path = dl.path.parent / stream_element.name + + maybe_dl = await self.download(video_path, mtime=element.mtime, redownload=Redownload.NEVER) + if not maybe_dl: + continue + async with maybe_dl as (bar, sink): + log.explain(f"Streaming video from real url {stream_element.url}") + contained_video_paths.append(str(self._transformer.transform(maybe_dl.path))) + await self._stream_from_url(stream_element, 
sink, bar, is_video=True) + + add_to_report(contained_video_paths) + + async def _handle_file( + self, + element: IliasPageElement, + element_path: PurePath, + is_video: bool = False, + ) -> Optional[Coroutine[Any, Any, None]]: + maybe_dl = await self.download(element_path, mtime=element.mtime) + if not maybe_dl: + return None + self._ensure_not_seen(element, element_path) + + return self._download_file(element, maybe_dl, is_video) + + @_iorepeat(3, "downloading file") + @anoncritical + async def _download_file(self, element: IliasPageElement, dl: DownloadToken, is_video: bool) -> None: + assert dl # The function is only reached when dl is not None + async with dl as (bar, sink): + await self._stream_from_url(element, sink, bar, is_video) + + async def _stream_from_url( + self, element: IliasPageElement, sink: FileSink, bar: ProgressBar, is_video: bool + ) -> None: + url = element.url + + async def try_stream() -> bool: + next_url = url + # Normal files redirect to the magazine if we are not authenticated. As files could be HTML, + # we can not match on the content type here. Instead, we disallow redirects and inspect the + # new location. If we are redirected anywhere but the ILIAS 8 "sendfile" command, we assume + # our authentication expired. + if not is_video: + async with self.session.get(url, allow_redirects=False) as resp: + # Redirect to anything except a "sendfile" means we weren't authenticated + if hdrs.LOCATION in resp.headers: + if "&cmd=sendfile" not in resp.headers[hdrs.LOCATION]: + return False + # Directly follow the redirect to not make a second, unnecessary request + next_url = resp.headers[hdrs.LOCATION] + + # Let's try this again and follow redirects + return await fetch_follow_redirects(next_url) + + async def fetch_follow_redirects(file_url: str) -> bool: + async with self.session.get(file_url) as resp: + # We wanted a video but got HTML => Forbidden, auth expired. 
Logging in won't really + # solve that depending on the setup, but it is better than nothing. + if is_video and "html" in resp.content_type: + return False + + # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Range + if content_range := resp.headers.get(hdrs.CONTENT_RANGE, default=None): + parts = content_range.split("/") + if len(parts) == 2 and parts[1].isdigit(): + bar.set_total(int(parts[1])) + + # Prefer the content length header + if resp.content_length: + bar.set_total(resp.content_length) + + async for data in resp.content.iter_chunked(1024): + sink.file.write(data) + bar.advance(len(data)) + + sink.done() + return True + + auth_id = await self._current_auth_id() + if await try_stream(): + return + + await self.authenticate(auth_id) + + if not await try_stream(): + raise CrawlError(f"File streaming failed after authenticate() {element!r}") + + async def _handle_forum( + self, + element: IliasPageElement, + element_path: PurePath, + ) -> Optional[Coroutine[Any, Any, None]]: + maybe_cl = await self.crawl(element_path) + if not maybe_cl: + return None + return self._crawl_forum(element, maybe_cl) + + @_iorepeat(3, "crawling forum") + @anoncritical + async def _crawl_forum(self, element: IliasPageElement, cl: CrawlToken) -> None: + async with cl: + inner = IliasPage(await self._get_page(element.url), element) + export_url = inner.get_forum_export_url() + if not export_url: + log.warn("Could not extract forum export url") + return + + export = await self._post( + export_url, + {"format": "html", "cmd[createExportFile]": ""}, + ) + + elements = parse_ilias_forum_export(soupify(export)) + + tasks: list[Awaitable[None]] = [] + for thread in elements: + tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, thread, element.url))) + + # And execute them + await self.gather(tasks) + + @anoncritical + @_iorepeat(3, "saving forum thread") + async def _download_forum_thread( + self, parent_path: PurePath, thread: IliasForumThread | 
IliasPageElement, forum_url: str + ) -> None: + path = parent_path / (sanitize_path_name(thread.name) + ".html") + maybe_dl = await self.download(path, mtime=thread.mtime) + if not maybe_dl or not isinstance(thread, IliasForumThread): + return + + async with maybe_dl as (bar, sink): + rendered = forum_thread_template( + thread.name, forum_url, thread.name_tag, await self.internalize_images(thread.content_tag) + ) + sink.file.write(rendered.encode("utf-8")) + sink.done() + + async def _handle_learning_module( + self, + element: IliasPageElement, + element_path: PurePath, + ) -> Optional[Coroutine[Any, Any, None]]: + maybe_cl = await self.crawl(element_path) + if not maybe_cl: + return None + self._ensure_not_seen(element, element_path) + + return self._crawl_learning_module(element, maybe_cl) + + @_iorepeat(3, "crawling learning module") + @anoncritical + async def _crawl_learning_module(self, element: IliasPageElement, cl: CrawlToken) -> None: + elements: list[IliasLearningModulePage] = [] + + async with cl: + log.explain_topic(f"Parsing initial HTML page for {fmt_path(cl.path)}") + log.explain(f"URL: {element.url}") + soup = await self._get_page(element.url) + page = IliasPage(soup, element) + if next := page.get_learning_module_data(): + elements.extend( + await self._crawl_learning_module_direction(cl.path, next.previous_url, "left", element) + ) + elements.append(next) + elements.extend( + await self._crawl_learning_module_direction(cl.path, next.next_url, "right", element) + ) + + # Reflect their natural ordering in the file names + for index, lm_element in enumerate(elements): + lm_element.title = f"{index:02}_{lm_element.title}" + + tasks: list[Awaitable[None]] = [] + for index, elem in enumerate(elements): + prev_url = elements[index - 1].title if index > 0 else None + next_url = elements[index + 1].title if index < len(elements) - 1 else None + tasks.append( + asyncio.create_task(self._download_learning_module_page(cl.path, elem, prev_url, next_url)) + ) + 
+ # And execute them + await self.gather(tasks) + + async def _crawl_learning_module_direction( + self, + path: PurePath, + start_url: Optional[str], + dir: Literal["left"] | Literal["right"], + parent_element: IliasPageElement, + ) -> list[IliasLearningModulePage]: + elements: list[IliasLearningModulePage] = [] + + if not start_url: + return elements + + next_element_url: Optional[str] = start_url + counter = 0 + while next_element_url: + log.explain_topic(f"Parsing HTML page for {fmt_path(path)} ({dir}-{counter})") + log.explain(f"URL: {next_element_url}") + soup = await self._get_page(next_element_url) + page = IliasPage(soup, parent_element) + if next := page.get_learning_module_data(): + elements.append(next) + next_element_url = next.previous_url if dir == "left" else next.next_url + counter += 1 + + return elements + + @anoncritical + @_iorepeat(3, "saving learning module page") + async def _download_learning_module_page( + self, + parent_path: PurePath, + element: IliasLearningModulePage, + prev: Optional[str], + next: Optional[str], + ) -> None: + path = parent_path / (sanitize_path_name(element.title) + ".html") + maybe_dl = await self.download(path) + if not maybe_dl: + return + my_path = self._transformer.transform(maybe_dl.path) + if not my_path: + return + + if prev: + prev_p = self._transformer.transform(parent_path / (sanitize_path_name(prev) + ".html")) + prev = os.path.relpath(prev_p, my_path.parent) if prev_p else None + if next: + next_p = self._transformer.transform(parent_path / (sanitize_path_name(next) + ".html")) + next = os.path.relpath(next_p, my_path.parent) if next_p else None + + async with maybe_dl as (bar, sink): + content = element.content + content = await self.internalize_images(content) + sink.file.write(learning_module_template(content, maybe_dl.path.name, prev, next).encode("utf-8")) + sink.done() + + async def internalize_images(self, tag: Tag) -> Tag: + """ + Tries to fetch ILIAS images and embed them as base64 data. 
+ """ + log.explain_topic("Internalizing images") + for elem in tag.find_all(recursive=True): + if elem.name == "img" and (src := elem.attrs.get("src", None)): + url = urljoin(self._base_url, cast(str, src)) + if not url.startswith(self._base_url): + continue + log.explain(f"Internalizing {url!r}") + img = await self._get_authenticated(url) + elem.attrs["src"] = "data:;base64," + base64.b64encode(img).decode() + if elem.name == "iframe" and cast(str, elem.attrs.get("src", "")).startswith("//"): + # For unknown reasons the protocol seems to be stripped. + elem.attrs["src"] = "https:" + cast(str, elem.attrs["src"]) + return tag + + def _ensure_not_seen(self, element: IliasPageElement, parent_path: PurePath) -> None: + if element.url in self._visited_urls: + raise CrawlWarning( + f"Found second path to element {element.name!r} at {element.url!r}. " + + f"First path: {fmt_path(self._visited_urls[element.url])}. " + + f"Second path: {fmt_path(parent_path)}." + ) + self._visited_urls[element.url] = parent_path + + async def _get_page(self, url: str, root_page_allowed: bool = False) -> IliasSoup: + auth_id = await self._current_auth_id() + async with self.session.get(url) as request: + soup = IliasSoup(soupify(await request.read()), str(request.url)) + if IliasPage.is_logged_in(soup): + return self._verify_page(soup, url, root_page_allowed) + + # We weren't authenticated, so try to do that + await self.authenticate(auth_id) + + # Retry once after authenticating. If this fails, we will die. 
+ async with self.session.get(url) as request: + soup = IliasSoup(soupify(await request.read()), str(request.url)) + if IliasPage.is_logged_in(soup): + return self._verify_page(soup, url, root_page_allowed) + raise CrawlError(f"get_page failed even after authenticating on {url!r}") + + @staticmethod + def _verify_page(soup: IliasSoup, url: str, root_page_allowed: bool) -> IliasSoup: + if IliasPage.is_root_page(soup) and not root_page_allowed: + raise CrawlError( + "Unexpectedly encountered ILIAS root page. " + "This usually happens because the ILIAS instance is broken. " + "If so, wait a day or two and try again. " + "It could also happen because a crawled element links to the ILIAS root page. " + "If so, use a transform with a ! as target to ignore the particular element. " + f"The redirect came from {url}" + ) + return soup + + async def _post(self, url: str, data: dict[str, str | list[str]]) -> bytes: + form_data = aiohttp.FormData() + for key, val in data.items(): + form_data.add_field(key, val) + + async with self.session.post(url, data=form_data()) as request: + if request.status == 200: + return await request.read() + raise CrawlError(f"post failed with status {request.status}") + + async def _get_authenticated(self, url: str) -> bytes: + auth_id = await self._current_auth_id() + + async with self.session.get(url, allow_redirects=False) as request: + if request.status == 200: + return await request.read() + + # We weren't authenticated, so try to do that + await self.authenticate(auth_id) + + # Retry once after authenticating. If this fails, we will die. 
+ async with self.session.get(url, allow_redirects=False) as request: + if request.status == 200: + return await request.read() + raise CrawlError("get_authenticated failed even after authenticating") + + async def _authenticate(self) -> None: + # fill the session with the correct cookies + if self._login_type == "shibboleth": + await self._shibboleth_login.login(self.session) + else: + params = { + "client_id": self._client_id, + "cmd": "force_login", + } + async with self.session.get(urljoin(self._base_url, "/login.php"), params=params) as request: + login_page = soupify(await request.read()) + + login_form = login_page.find("form", attrs={"name": "login_form"}) + if login_form is None: + raise CrawlError("Could not find the login form! Specified client id might be invalid.") + + login_url = cast(Optional[str], login_form.attrs.get("action")) + if login_url is None: + raise CrawlError("Could not find the action URL in the login form!") + + username, password = await self._auth.credentials() + + login_form_data = aiohttp.FormData() + login_form_data.add_field("login_form/input_3/input_4", username) + login_form_data.add_field("login_form/input_3/input_5", password) + + # do the actual login + async with self.session.post(urljoin(self._base_url, login_url), data=login_form_data) as request: + soup = IliasSoup(soupify(await request.read()), str(request.url)) + if not IliasPage.is_logged_in(soup): + self._auth.invalidate_credentials() diff --git a/PFERD/crawl/ilias/kit_ilias_html.py b/PFERD/crawl/ilias/kit_ilias_html.py new file mode 100644 index 0000000..5966141 --- /dev/null +++ b/PFERD/crawl/ilias/kit_ilias_html.py @@ -0,0 +1,1578 @@ +import json +import re +from collections.abc import Callable +from dataclasses import dataclass +from datetime import date, datetime, timedelta +from enum import Enum +from typing import Optional, cast +from urllib.parse import urljoin, urlparse + +from bs4 import BeautifulSoup, Tag + +from PFERD.crawl import CrawlError +from 
PFERD.crawl.crawler import CrawlWarning +from PFERD.logging import log +from PFERD.utils import sanitize_path_name, url_set_query_params + +TargetType = str | int + + +class TypeMatcher: + class UrlPath: + path: str + + def __init__(self, path: str): + self.path = path + + class UrlParameter: + query: str + + def __init__(self, query: str): + self.query = query + + class ImgSrc: + src: str + + def __init__(self, src: str): + self.src = src + + class ImgAlt: + alt: str + + def __init__(self, alt: str): + self.alt = alt + + class All: + matchers: list["IliasElementMatcher"] + + def __init__(self, matchers: list["IliasElementMatcher"]): + self.matchers = matchers + + class Any: + matchers: list["IliasElementMatcher"] + + def __init__(self, matchers: list["IliasElementMatcher"]): + self.matchers = matchers + + @staticmethod + def path(path: str) -> UrlPath: + return TypeMatcher.UrlPath(path) + + @staticmethod + def query(query: str) -> UrlParameter: + return TypeMatcher.UrlParameter(query) + + @staticmethod + def img_src(src: str) -> ImgSrc: + return TypeMatcher.ImgSrc(src) + + @staticmethod + def img_alt(alt: str) -> ImgAlt: + return TypeMatcher.ImgAlt(alt) + + @staticmethod + def all(*matchers: "IliasElementMatcher") -> All: + return TypeMatcher.All(list(matchers)) + + @staticmethod + def any(*matchers: "IliasElementMatcher") -> Any: + return TypeMatcher.Any(list(matchers)) + + @staticmethod + def never() -> Any: + return TypeMatcher.Any([]) + + +IliasElementMatcher = ( + TypeMatcher.UrlPath + | TypeMatcher.UrlParameter + | TypeMatcher.ImgSrc + | TypeMatcher.ImgAlt + | TypeMatcher.All + | TypeMatcher.Any +) + + +class IliasElementType(Enum): + BLOG = "blog" + BOOKING = "booking" + COURSE = "course" + DCL_RECORD_LIST = "dcl_record_list" + EXERCISE_OVERVIEW = "exercise_overview" + EXERCISE = "exercise" # own submitted files + EXERCISE_FILES = "exercise_files" # own submitted files + FILE = "file" + FOLDER = "folder" + FORUM = "forum" + FORUM_THREAD = "forum_thread" + 
INFO_TAB = "info_tab" + LEARNING_MODULE = "learning_module" + LEARNING_MODULE_HTML = "learning_module_html" + LITERATURE_LIST = "literature_list" + LINK = "link" + LINK_COLLECTION = "link_collection" + MEDIA_POOL = "media_pool" + MEDIACAST_VIDEO = "mediacast_video" + MEDIACAST_VIDEO_FOLDER = "mediacast_video_folder" + MEETING = "meeting" + MOB_VIDEO = "mob_video" + OPENCAST_VIDEO = "opencast_video" + OPENCAST_VIDEO_FOLDER = "opencast_video_folder" + OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED = "opencast_video_folder_maybe_paginated" + OPENCAST_VIDEO_PLAYER = "opencast_video_player" + SCORM_LEARNING_MODULE = "scorm_learning_module" + SURVEY = "survey" + TEST = "test" # an online test. Will be ignored currently. + WIKI = "wiki" + + def matcher(self) -> IliasElementMatcher: + match self: + case IliasElementType.BLOG: + return TypeMatcher.any(TypeMatcher.img_src("_blog.svg")) + case IliasElementType.BOOKING: + return TypeMatcher.any(TypeMatcher.path("/book/"), TypeMatcher.img_src("_book.svg")) + case IliasElementType.COURSE: + return TypeMatcher.any(TypeMatcher.path("/crs/"), TypeMatcher.img_src("_crsr.svg")) + case IliasElementType.DCL_RECORD_LIST: + return TypeMatcher.any( + TypeMatcher.img_src("_dcl.svg"), TypeMatcher.query("cmdclass=ildclrecordlistgui") + ) + case IliasElementType.EXERCISE: + return TypeMatcher.never() + case IliasElementType.EXERCISE_FILES: + return TypeMatcher.never() + case IliasElementType.EXERCISE_OVERVIEW: + return TypeMatcher.any( + TypeMatcher.path("/exc/"), + TypeMatcher.path("_exc_"), + TypeMatcher.img_src("_exc.svg"), + ) + case IliasElementType.FILE: + return TypeMatcher.any( + TypeMatcher.query("cmd=sendfile"), + TypeMatcher.path("_file_"), + TypeMatcher.img_src("/filedelivery/"), + ) + case IliasElementType.FOLDER: + return TypeMatcher.any( + TypeMatcher.path("/fold/"), + TypeMatcher.img_src("_fold.svg"), + TypeMatcher.path("/grp/"), + TypeMatcher.img_src("_grp.svg"), + TypeMatcher.path("/copa/"), + TypeMatcher.path("_copa_"), + 
TypeMatcher.img_src("_copa.svg"), + # Not supported right now but warn users + # TypeMatcher.query("baseclass=ilmediapoolpresentationgui"), + # TypeMatcher.img_alt("medienpool"), + # TypeMatcher.img_src("_mep.svg"), + ) + case IliasElementType.FORUM: + return TypeMatcher.any( + TypeMatcher.path("/frm/"), + TypeMatcher.path("_frm_"), + TypeMatcher.img_src("_frm.svg"), + ) + case IliasElementType.FORUM_THREAD: + return TypeMatcher.never() + case IliasElementType.INFO_TAB: + return TypeMatcher.never() + case IliasElementType.LITERATURE_LIST: + return TypeMatcher.img_src("_bibl.svg") + case IliasElementType.LEARNING_MODULE: + return TypeMatcher.any(TypeMatcher.path("/lm/"), TypeMatcher.img_src("_lm.svg")) + case IliasElementType.LEARNING_MODULE_HTML: + return TypeMatcher.any( + TypeMatcher.query("baseclass=ilhtlmpresentationgui"), TypeMatcher.img_src("_htlm.svg") + ) + case IliasElementType.LINK: + return TypeMatcher.any( + TypeMatcher.all( + TypeMatcher.query("baseclass=illinkresourcehandlergui"), + TypeMatcher.query("calldirectlink"), + ), + TypeMatcher.img_src("_webr.svg"), # duplicated :( + ) + case IliasElementType.LINK_COLLECTION: + return TypeMatcher.any( + TypeMatcher.query("baseclass=illinkresourcehandlergui"), + TypeMatcher.img_src("_webr.svg"), # duplicated :( + ) + case IliasElementType.MEDIA_POOL: + return TypeMatcher.any( + TypeMatcher.query("baseclass=ilmediapoolpresentationgui"), TypeMatcher.img_src("_mep.svg") + ) + case IliasElementType.MEDIACAST_VIDEO: + return TypeMatcher.never() + case IliasElementType.MEDIACAST_VIDEO_FOLDER: + return TypeMatcher.any( + TypeMatcher.path("/mcst/"), + TypeMatcher.query("baseclass=ilmediacasthandlergui"), + TypeMatcher.img_src("_mcst.svg"), + ) + case IliasElementType.MEETING: + return TypeMatcher.any(TypeMatcher.img_src("_sess.svg")) + case IliasElementType.MOB_VIDEO: + return TypeMatcher.never() + case IliasElementType.OPENCAST_VIDEO: + return TypeMatcher.never() + case IliasElementType.OPENCAST_VIDEO_FOLDER: + 
return TypeMatcher.never() + case IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED: + return TypeMatcher.img_alt("opencast") + case IliasElementType.OPENCAST_VIDEO_PLAYER: + return TypeMatcher.never() + case IliasElementType.SCORM_LEARNING_MODULE: + return TypeMatcher.any( + TypeMatcher.query("baseclass=ilsahspresentationgui"), TypeMatcher.img_src("_sahs.svg") + ) + case IliasElementType.SURVEY: + return TypeMatcher.any(TypeMatcher.path("/svy/"), TypeMatcher.img_src("svy.svg")) + case IliasElementType.TEST: + return TypeMatcher.any( + TypeMatcher.query("cmdclass=ilobjtestgui"), + TypeMatcher.query("cmdclass=iltestscreengui"), + TypeMatcher.img_src("_tst.svg"), + ) + case IliasElementType.WIKI: + return TypeMatcher.any( + TypeMatcher.query("baseClass=ilwikihandlergui"), TypeMatcher.img_src("wiki.svg") + ) + + raise CrawlWarning(f"Unknown matcher {self}") + + +@dataclass +class IliasPageElement: + type: IliasElementType + url: str + name: str + mtime: Optional[datetime] = None + description: Optional[str] = None + + def id(self) -> str: + regexes = [ + r"eid=(?P[0-9a-z\-]+)", + r"book/(?P\d+)", # booking + r"cat/(?P\d+)", + r"copa/(?P\d+)", # content page + r"crs/(?P\d+)", # course + r"exc/(?P\d+)", # exercise + r"file/(?P\d+)", # file + r"fold/(?P\d+)", # folder + r"frm/(?P\d+)", # forum + r"grp/(?P\d+)", # group + r"lm/(?P\d+)", # learning module + r"mcst/(?P\d+)", # mediacast + r"pg/(?P(\d|_)+)", # page? + r"svy/(?P\d+)", # survey + r"sess/(?P\d+)", # session + r"webr/(?P\d+)", # web referene (link) + r"thr_pk=(?P\d+)", # forums + r"ref_id=(?P\d+)", + r"target=[a-z]+_(?P\d+)", + r"mm_(?P\d+)", + ] + + for regex in regexes: + if match := re.search(regex, self.url): + return match.groupdict()["id"] + + # Fall back to URL + log.warn(f"Didn't find identity for {self.name} - {self.url}. 
Please report this.") + return self.url + + @staticmethod + def create_new( + typ: IliasElementType, + url: str, + name: str, + mtime: Optional[datetime] = None, + description: Optional[str] = None, + skip_sanitize: bool = False, + ) -> "IliasPageElement": + if typ == IliasElementType.MEETING: + normalized = IliasPageElement._normalize_meeting_name(name) + log.explain(f"Normalized meeting name from {name!r} to {normalized!r}") + name = normalized + + if not skip_sanitize: + name = sanitize_path_name(name) + + return IliasPageElement(typ, url, name, mtime, description) + + @staticmethod + def _normalize_meeting_name(meeting_name: str) -> str: + """ + Normalizes meeting names, which have a relative time as their first part, + to their date in ISO format. + """ + + # This checks whether we can reach a `:` without passing a `-` + if re.search(r"^[^-]+: ", meeting_name): # noqa: SIM108 + # Meeting name only contains date: "05. Jan 2000:" + split_delimiter = ":" + else: + # Meeting name contains date and start/end times: "05. 
Jan 2000, 16:00 - 17:30:" + split_delimiter = ", " + + # We have a meeting day without time + date_portion_str = meeting_name.split(split_delimiter)[0] + date_portion = demangle_date(date_portion_str) + + # We failed to parse the date, bail out + if not date_portion: + return meeting_name + + # Replace the first section with the absolute date + rest_of_name = split_delimiter.join(meeting_name.split(split_delimiter)[1:]) + return datetime.strftime(date_portion, "%Y-%m-%d") + split_delimiter + rest_of_name + + +@dataclass +class IliasDownloadForumData: + url: str + form_data: dict[str, str | list[str]] + empty: bool + + +@dataclass +class IliasForumThread: + name: str + name_tag: Tag + content_tag: Tag + mtime: Optional[datetime] + + +@dataclass +class IliasLearningModulePage: + title: str + content: Tag + next_url: Optional[str] + previous_url: Optional[str] + + +class IliasSoup: + soup: BeautifulSoup + page_url: str + + def __init__(self, soup: BeautifulSoup, page_url: str): + self.soup = soup + self.page_url = page_url + + +class IliasPage: + def __init__(self, ilias_soup: IliasSoup, source_element: Optional[IliasPageElement]): + self._ilias_soup = ilias_soup + self._soup = ilias_soup.soup + self._page_url = ilias_soup.page_url + self._page_type = source_element.type if source_element else None + self._source_name = source_element.name if source_element else "" + + @staticmethod + def is_root_page(soup: IliasSoup) -> bool: + if permalink := IliasPage.get_soup_permalink(soup): + return "goto.php/root/" in permalink + return False + + def get_child_elements(self) -> list[IliasPageElement]: + """ + Return all child page elements you can find here. 
+ """ + if self._is_video_player(): + log.explain("Page is a video player, extracting URL") + return self._player_to_video() + if self._is_opencast_video_listing(): + log.explain("Page is an opencast video listing, searching for elements") + return self._find_opencast_video_entries() + if self._is_exercise_file(): + log.explain("Page is an exercise, searching for elements") + return self._find_exercise_entries() + if self._is_personal_desktop(): + log.explain("Page is the personal desktop, searching for elements") + return self._find_personal_desktop_entries() + if self._is_content_page(): + log.explain("Page is a content page, searching for elements") + return self._find_copa_entries() + if self._is_info_tab(): + log.explain("Page is info tab, searching for elements") + return self._find_info_tab_entries() + log.explain("Page is a normal folder, searching for elements") + return self._find_normal_entries() + + def get_info_tab(self) -> Optional[IliasPageElement]: + tab: Optional[Tag] = self._soup.find( + name="a", attrs={"href": lambda x: x is not None and "cmdClass=ilinfoscreengui" in x} + ) + if tab is not None: + return IliasPageElement.create_new( + IliasElementType.INFO_TAB, self._abs_url_from_link(tab), "infos" + ) + return None + + def get_description(self) -> Optional[BeautifulSoup]: + def is_interesting_class(name: str | None) -> bool: + return name in [ + "ilCOPageSection", + "ilc_Paragraph", + "ilc_va_ihcap_VAccordIHeadCap", + "ilc_va_ihcap_AccordIHeadCap", + "ilc_media_cont_MediaContainer", + ] + + paragraphs: list[Tag] = cast(list[Tag], self._soup.find_all(class_=is_interesting_class)) + if not paragraphs: + return None + + # Extract bits and pieces into a string and parse it again. + # This ensures we don't miss anything and weird structures are resolved + # somewhat gracefully. 
+ raw_html = "" + for p in paragraphs: + if p.find_parent(class_=is_interesting_class): + continue + if "ilc_media_cont_MediaContainer" in p["class"] and (video := p.select_one("video")): + # We have an embedded video which should be downloaded by _find_mob_videos + url, title = self._find_mob_video_url_title(video, p) + raw_html += '
External Video: {title}' + else: + raw_html += f"Video elided. Filename: '{title}'." + raw_html += "
\n" + continue + + # Ignore special listings (like folder groupings) + if "ilc_section_Special" in p["class"]: + continue + + raw_html += str(p) + "\n" + raw_html = f"\n{raw_html}\n" + + return BeautifulSoup(raw_html, "html.parser") + + def get_learning_module_data(self) -> Optional[IliasLearningModulePage]: + if not self._is_learning_module_page(): + return None + content = cast(Tag, self._soup.select_one("#ilLMPageContent")) + title = cast(Tag, self._soup.select_one(".ilc_page_title_PageTitle")).get_text().strip() + return IliasLearningModulePage( + title=title, + content=content, + next_url=self._find_learning_module_next(), + previous_url=self._find_learning_module_prev(), + ) + + def _find_learning_module_next(self) -> Optional[str]: + for link in self._soup.select("a.ilc_page_rnavlink_RightNavigationLink"): + url = self._abs_url_from_link(link) + if "baseClass=ilLMPresentationGUI" not in url: + continue + return url + return None + + def _find_learning_module_prev(self) -> Optional[str]: + for link in self._soup.select("a.ilc_page_lnavlink_LeftNavigationLink"): + url = self._abs_url_from_link(link) + if "baseClass=ilLMPresentationGUI" not in url: + continue + return url + return None + + def get_forum_export_url(self) -> Optional[str]: + forum_link = self._soup.select_one("#tab_forums_threads > a") + if not forum_link: + log.explain("Found no forum link") + return None + + base_url = self._abs_url_from_link(forum_link) + base_url = re.sub(r"cmd=\w+", "cmd=post", base_url) + base_url = re.sub(r"cmdClass=\w+", "cmdClass=ilExportGUI", base_url) + + rtoken_form = self._soup.find("form", attrs={"action": lambda x: x is not None and "rtoken=" in x}) + if not rtoken_form: + log.explain("Found no rtoken anywhere") + return None + match = cast(re.Match[str], re.search(r"rtoken=(\w+)", str(rtoken_form.attrs["action"]))) + rtoken = match.group(1) + + base_url = base_url + "&rtoken=" + rtoken + + return base_url + + def get_next_stage_element(self) -> 
Optional[IliasPageElement]: + if self._is_ilias_opencast_embedding(): + log.explain("Unwrapping opencast embedding") + return self.get_child_elements()[0] + if self._page_type == IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED: + log.explain("Unwrapping video pagination") + return self._find_opencast_video_entries_paginated()[0] + if self._contains_collapsed_future_meetings(): + log.explain("Requesting *all* future meetings") + return self._uncollapse_future_meetings_url() + if self._is_exercise_not_all_shown(): + return self._show_all_exercises() + if not self._is_content_tab_selected(): + if self._page_type != IliasElementType.INFO_TAB: + log.explain("Selecting content tab") + return self._select_content_page_url() + else: + log.explain("Crawling info tab, skipping content select") + return None + + def _is_video_player(self) -> bool: + return "paella_config_file" in str(self._soup) + + def _is_opencast_video_listing(self) -> bool: + if self._is_ilias_opencast_embedding(): + return True + + # Raw listing without ILIAS fluff + video_element_table = self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+")) + return video_element_table is not None + + def _is_ilias_opencast_embedding(self) -> bool: + # ILIAS fluff around the real opencast html + if self._soup.find(id="headerimage"): + element: Tag = cast(Tag, self._soup.find(id="headerimage")) + if "opencast" in cast(str, element.attrs["src"]).lower(): + return True + return False + + def _is_exercise_file(self) -> bool: + # we know it from before + if self._page_type == IliasElementType.EXERCISE_OVERVIEW: + return True + + # We have no suitable parent - let's guesss + if self._soup.find(id="headerimage"): + element: Tag = cast(Tag, self._soup.find(id="headerimage")) + if "exc" in cast(str, element.attrs["src"]).lower(): + return True + + return False + + def _is_personal_desktop(self) -> bool: + return "baseclass=ildashboardgui" in self._page_url.lower() and "&cmd=show" in self._page_url.lower() + + def 
_is_content_page(self) -> bool: + if link := self.get_permalink(): + return "/copa/" in link + return False + + def _is_learning_module_page(self) -> bool: + if link := self.get_permalink(): + return "target=pg_" in link + return False + + def _contains_collapsed_future_meetings(self) -> bool: + return self._uncollapse_future_meetings_url() is not None + + def _uncollapse_future_meetings_url(self) -> Optional[IliasPageElement]: + element = self._soup.find( + "a", + attrs={"href": lambda x: x is not None and ("crs_next_sess=1" in x or "crs_prev_sess=1" in x)}, + ) + if not element: + return None + link = self._abs_url_from_link(element) + return IliasPageElement.create_new(IliasElementType.FOLDER, link, "show all meetings") + + def _is_exercise_not_all_shown(self) -> bool: + return ( + self._page_type == IliasElementType.EXERCISE_OVERVIEW and "mode=all" not in self._page_url.lower() + ) + + def _show_all_exercises(self) -> Optional[IliasPageElement]: + return IliasPageElement.create_new( + IliasElementType.EXERCISE_OVERVIEW, self._page_url + "&mode=all", "show all exercises" + ) + + def _is_content_tab_selected(self) -> bool: + return self._select_content_page_url() is None + + def _is_info_tab(self) -> bool: + might_be_info = self._soup.find("form", attrs={"name": lambda x: x == "formInfoScreen"}) is not None + return self._page_type == IliasElementType.INFO_TAB and might_be_info + + def _is_course_overview_page(self) -> bool: + return "baseClass=ilmembershipoverviewgui" in self._page_url + + def _select_content_page_url(self) -> Optional[IliasPageElement]: + tab = self._soup.find( + id="tab_view_content", attrs={"class": lambda x: x is not None and "active" not in x} + ) + # Already selected (or not found) + if not tab: + return None + link = tab.find("a") + if link: + link_str = self._abs_url_from_link(link) + return IliasPageElement.create_new(IliasElementType.FOLDER, link_str, "select content page") + + _unexpected_html_warning() + log.warn_contd(f"Could not 
find content tab URL on {self._page_url!r}.") + log.warn_contd("PFERD might not find content on the course's main page.") + return None + + def _player_to_video(self) -> list[IliasPageElement]: + # Fetch the actual video page. This is a small wrapper page initializing a javscript + # player. Sadly we can not execute that JS. The actual video stream url is nowhere + # on the page, but defined in a JS object inside a script tag, passed to the player + # library. + # We do the impossible and RegEx the stream JSON object out of the page's HTML source + regex = re.compile(r"({\"streams\"[\s\S]+?),\s*{\"paella_config_file", re.IGNORECASE) + json_match = regex.search(str(self._soup)) + + if json_match is None: + log.warn("Could not find JSON stream info in video player. Ignoring video.") + return [] + json_str = json_match.group(1) + + # parse it + json_object = json.loads(json_str) + streams = [stream for stream in json_object["streams"]] + + # and just fetch the lone video url! + if len(streams) == 1: + video_url = streams[0]["sources"]["mp4"][0]["src"] + return [ + IliasPageElement.create_new(IliasElementType.OPENCAST_VIDEO, video_url, self._source_name) + ] + + log.explain(f"Found multiple videos for stream at {self._source_name}") + items = [] + for stream in sorted(streams, key=lambda stream: stream["content"]): + full_name = f"{self._source_name.replace('.mp4', '')} ({stream['content']}).mp4" + video_url = stream["sources"]["mp4"][0]["src"] + items.append(IliasPageElement.create_new(IliasElementType.OPENCAST_VIDEO, video_url, full_name)) + + return items + + def _get_show_max_forum_entries_per_page_url( + self, wanted_max: Optional[int] = None + ) -> Optional[IliasPageElement]: + correct_link = self._soup.find( + "a", attrs={"href": lambda x: x is not None and "trows=800" in x and "cmd=showThreads" in x} + ) + + if not correct_link: + return None + + link = self._abs_url_from_link(correct_link) + if wanted_max is not None: + link = link.replace("trows=800", 
f"trows={wanted_max}") + + return IliasPageElement.create_new(IliasElementType.FORUM, link, "show all forum threads") + + def _get_forum_thread_count(self) -> Optional[int]: + log.explain_topic("Trying to find forum thread count") + + candidates = cast(list[Tag], self._soup.select(".ilTableFootLight")) + extract_regex = re.compile(r"\s(?P\d+)\s*\)") + + for candidate in candidates: + log.explain(f"Found thread count candidate: {candidate}") + if match := extract_regex.search(candidate.get_text()): + return int(match.group("max")) + else: + log.explain("Found no candidates to extract thread count from") + + return None + + def _find_personal_desktop_entries(self) -> list[IliasPageElement]: + items: list[IliasPageElement] = [] + + titles: list[Tag] = self._soup.select("#block_pditems_0 .il-item-title") + for title in titles: + link = title.find("a") + + if not link: + log.explain(f"Skipping offline item: {title.get_text().strip()!r}") + continue + + name = sanitize_path_name(link.text.strip()) + url = self._abs_url_from_link(link) + + if "cmd=manage" in url and "cmdClass=ilPDSelectedItemsBlockGUI" in url: + # Configure button/link does not have anything interesting + continue + + typ = IliasPage._find_type_for_element( + name, url, lambda: IliasPage._find_icon_for_folder_entry(cast(Tag, link)) + ) + if not typ: + _unexpected_html_warning() + log.warn_contd(f"Could not extract type for {link}") + continue + + log.explain(f"Found {name!r} of type {typ}") + + items.append(IliasPageElement.create_new(typ, url, name)) + + return items + + def _find_copa_entries(self) -> list[IliasPageElement]: + items: list[IliasPageElement] = [] + links: list[Tag] = cast(list[Tag], self._soup.find_all(class_="ilc_flist_a_FileListItemLink")) + + for link in links: + url = self._abs_url_from_link(link) + name = re.sub(r"\([\d,.]+ [MK]B\)", "", link.get_text()).strip().replace("\t", "") + name = sanitize_path_name(name) + + if "file_id" not in url: + _unexpected_html_warning() + 
log.warn_contd(f"Found unknown content page item {name!r} with url {url!r}") + continue + + items.append(IliasPageElement.create_new(IliasElementType.FILE, url, name)) + + return items + + def _find_info_tab_entries(self) -> list[IliasPageElement]: + items = [] + links: list[Tag] = self._soup.select("a.il_ContainerItemCommand") + + for link in links: + log.explain(f"Found info tab link: {self._abs_url_from_link(link)}") + if "cmdclass=ilobjcoursegui" not in cast(str, link["href"]).lower(): + continue + if "cmd=sendfile" not in cast(str, link["href"]).lower(): + continue + items.append( + IliasPageElement.create_new( + IliasElementType.FILE, self._abs_url_from_link(link), sanitize_path_name(link.get_text()) + ) + ) + + log.explain(f"Found {len(items)} info tab entries {items}") + return items + + def _find_opencast_video_entries(self) -> list[IliasPageElement]: + # ILIAS has three stages for video pages + # 1. The initial dummy page without any videos. This page contains the link to the listing + # 2. The video listing which might be paginated + # 3. An unpaginated video listing (or at least one that includes 800 videos) + # + # We need to figure out where we are. 
+ + video_element_table = self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+")) + + if video_element_table is None: + # We are in stage 1 + # The page is actually emtpy but contains the link to stage 2 + content_link: Tag = cast(Tag, self._soup.select_one("#tab_series a")) + url: str = self._abs_url_from_link(content_link) + query_params = {"limit": "800", "cmd": "asyncGetTableGUI", "cmdMode": "asynch"} + url = url_set_query_params(url, query_params) + log.explain("Found ILIAS video frame page, fetching actual content next") + return [ + IliasPageElement.create_new(IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED, url, "") + ] + + is_paginated = self._soup.find(id=re.compile(r"tab_page_sel.+")) is not None + + if is_paginated and self._page_type != IliasElementType.OPENCAST_VIDEO_FOLDER: + # We are in stage 2 - try to break pagination + return self._find_opencast_video_entries_paginated() + + return self._find_opencast_video_entries_no_paging() + + def _find_opencast_video_entries_paginated(self) -> list[IliasPageElement]: + table_element = self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+")) + + if table_element is None: + log.warn("Couldn't increase elements per page (table not found). I might miss elements.") + return self._find_opencast_video_entries_no_paging() + + id_match = re.match(r"tbl_xoct_(.+)", cast(str, table_element.attrs["id"])) + if id_match is None: + log.warn("Couldn't increase elements per page (table id not found). 
I might miss elements.") + return self._find_opencast_video_entries_no_paging() + + table_id = id_match.group(1) + + query_params = {f"tbl_xoct_{table_id}_trows": "800", "cmd": "asyncGetTableGUI", "cmdMode": "asynch"} + url = url_set_query_params(self._page_url, query_params) + + log.explain("Disabled pagination, retrying folder as a new entry") + return [IliasPageElement.create_new(IliasElementType.OPENCAST_VIDEO_FOLDER, url, "")] + + def _find_opencast_video_entries_no_paging(self) -> list[IliasPageElement]: + """ + Crawls the "second stage" video page. This page contains the actual video urls. + """ + # Video start links are marked with an "Abspielen" link + video_links = cast( + list[Tag], self._soup.find_all(name="a", text=re.compile(r"\s*(Abspielen|Play)\s*")) + ) + + results: list[IliasPageElement] = [] + + for link in video_links: + results.append(self._listed_opencast_video_to_element(link)) + + return results + + def _listed_opencast_video_to_element(self, link: Tag) -> IliasPageElement: + # The link is part of a table with multiple columns, describing metadata. + # 6th or 7th child (1 indexed) is the modification time string. 
Try to find it + # by parsing backwards from the end and finding something that looks like a date + modification_time = None + row: Tag = link.parent.parent.parent # type: ignore + column_count = len(row.select("td.std")) + for index in range(column_count, 0, -1): + modification_string = cast(Tag, row.select_one(f"td.std:nth-child({index})")).get_text().strip() + if match := re.search(r"\d+\.\d+.\d+ \d+:\d+", modification_string): + modification_time = datetime.strptime(match.group(0), "%d.%m.%Y %H:%M") + break + + if modification_time is None: + log.warn(f"Could not determine upload time for {link}") + modification_time = datetime.now() + + title = cast(Tag, row.select_one("td.std:nth-child(3)")).get_text().strip() + title += ".mp4" + + video_name: str = sanitize_path_name(title) + + video_url = self._abs_url_from_link(link) + + log.explain(f"Found video {video_name!r} at {video_url}") + return IliasPageElement.create_new( + IliasElementType.OPENCAST_VIDEO_PLAYER, video_url, video_name, modification_time + ) + + def _find_exercise_entries(self) -> list[IliasPageElement]: + if self._soup.find(id="tab_submission"): + log.explain("Found submission tab. This is an exercise detail or files page") + if self._soup.select_one("#tab_submission.active") is None: + log.explain(" This is a details page") + return self._find_exercise_entries_detail_page() + else: + log.explain(" This is a files page") + return self._find_exercise_entries_files_page() + + log.explain("Found no submission tab. 
This is an exercise root page") + return self._find_exercise_entries_root_page() + + def _find_exercise_entries_detail_page(self) -> list[IliasPageElement]: + results: list[IliasPageElement] = [] + + if link := self._soup.select_one("#tab_submission > a"): + results.append( + IliasPageElement.create_new( + IliasElementType.EXERCISE_FILES, self._abs_url_from_link(link), "Submission" + ) + ) + else: + log.explain("Found no submission link for exercise, maybe it has not started yet?") + + # Find all download links in the container (this will contain all the *feedback* files) + download_links = cast( + list[Tag], + self._soup.find_all( + name="a", + # download links contain the given command class + attrs={"href": lambda x: x is not None and "cmd=download" in x}, + text="Download", + ), + ) + + for link in download_links: + parent_row: Tag = cast( + Tag, link.find_parent(attrs={"class": lambda x: x is not None and "row" in x}) + ) + name_tag = parent_row.find(name="div") + + if not name_tag: + log.warn("Could not find name tag for exercise entry") + _unexpected_html_warning() + continue + + name = sanitize_path_name(name_tag.get_text().strip()) + log.explain(f"Found exercise detail entry {name!r}") + + results.append( + IliasPageElement.create_new(IliasElementType.FILE, self._abs_url_from_link(link), name) + ) + + return results + + def _find_exercise_entries_files_page(self) -> list[IliasPageElement]: + results: list[IliasPageElement] = [] + + # Find all download links in the container + download_links = cast( + list[Tag], + self._soup.find_all( + name="a", + # download links contain the given command class + attrs={"href": lambda x: x is not None and "cmd=download" in x}, + text="Download", + ), + ) + + for link in download_links: + parent_row: Tag = cast(Tag, link.find_parent("tr")) + children = cast(list[Tag], parent_row.find_all("td")) + + name = sanitize_path_name(children[1].get_text().strip()) + log.explain(f"Found exercise file entry {name!r}") + + date = None 
+ for child in reversed(children): + date = demangle_date(child.get_text().strip(), fail_silently=True) + if date is not None: + break + if date is None: + log.warn(f"Date parsing failed for exercise file entry {name!r}") + + results.append( + IliasPageElement.create_new(IliasElementType.FILE, self._abs_url_from_link(link), name, date) + ) + + return results + + def _find_exercise_entries_root_page(self) -> list[IliasPageElement]: + results: list[IliasPageElement] = [] + + content_tab = self._soup.find(id="ilContentContainer") + if not content_tab: + log.warn("Could not find content tab in exercise overview page") + _unexpected_html_warning() + return [] + + exercise_links = content_tab.select(".il-item-title a") + + for exercise in cast(list[Tag], exercise_links): + if "href" not in exercise.attrs: + continue + href = exercise.attrs["href"] + if type(href) is not str: + continue + if "ass_id=" not in href or "cmdclass=ilassignmentpresentationgui" not in href.lower(): + continue + + name = sanitize_path_name(exercise.get_text().strip()) + results.append( + IliasPageElement.create_new( + IliasElementType.EXERCISE, self._abs_url_from_link(exercise), name + ) + ) + + for result in results: + log.explain(f"Found exercise {result.name!r}") + + return results + + def _find_normal_entries(self) -> list[IliasPageElement]: + result: list[IliasPageElement] = [] + + links: list[Tag] = [] + # Fetch all links and throw them to the general interpreter + if self._is_course_overview_page(): + log.explain("Page is a course overview page, adjusting link selector") + links.extend(self._soup.select(".il-item-title > a")) + else: + links.extend(self._soup.select("a.il_ContainerItemTitle")) + + for link in links: + abs_url = self._abs_url_from_link(link) + # Make sure parents are sanitized. 
We do not want accidental parents + parents = [sanitize_path_name(x) for x in IliasPage._find_upwards_folder_hierarchy(link)] + + if parents: + element_name = "/".join(parents) + "/" + sanitize_path_name(link.get_text()) + else: + element_name = sanitize_path_name(link.get_text()) + + element_type = IliasPage._find_type_for_element( + element_name, abs_url, lambda: IliasPage._find_icon_for_folder_entry(link) + ) + description = IliasPage._find_link_description(link) + + # The last meeting on every page is expanded by default. + # Its content is then shown inline *and* in the meeting page itself. + # We should skip the inline content. + if element_type != IliasElementType.MEETING and self._is_in_expanded_meeting(link): + continue + + if not element_type: + continue + elif element_type == IliasElementType.FILE: + result.append(IliasPage._file_to_element(element_name, abs_url, link)) + continue + + log.explain(f"Found {element_name!r} of type {element_type}") + result.append( + IliasPageElement.create_new( + element_type, abs_url, element_name, description=description, skip_sanitize=True + ) + ) + + result += self._find_cards() + result += self._find_mediacast_videos() + result += self._find_mob_videos() + + return result + + def _find_mediacast_videos(self) -> list[IliasPageElement]: + videos: list[IliasPageElement] = [] + + regex = re.compile(r"il\.VideoPlaylist\.init.+?\[(.+?)], ") + for script in cast(list[Tag], self._soup.find_all("script")): + for match in regex.finditer(script.text): + try: + playlist = json.loads("[" + match.group(1) + "]") + except json.JSONDecodeError: + log.warn("Could not decode playlist json") + log.warn_contd(f"Playlist json: [{match.group(1)}]") + continue + for elem in playlist: + title = elem.get("title", None) + description = elem.get("description", None) + url = elem.get("resource", None) + if title is None or description is None or url is None: + log.explain(f"Mediacast json: {match.group(1)}") + log.warn("Mediacast video json was 
not complete") + if title is None: + log.warn_contd("Missing title") + if description is None: + log.warn_contd("Missing description") + if url is None: + log.warn_contd("Missing URL") + + if not title.endswith(".mp4") and not title.endswith(".webm"): + # just to make sure it has some kinda-alrightish ending + title = title + ".mp4" + videos.append( + IliasPageElement.create_new( + typ=IliasElementType.MEDIACAST_VIDEO, + url=self._abs_url_from_relative(cast(str, url)), + name=sanitize_path_name(title), + ) + ) + + return videos + + def _find_mob_videos(self) -> list[IliasPageElement]: + videos: list[IliasPageElement] = [] + + selector = "figure.ilc_media_cont_MediaContainerHighlighted,figure.ilc_media_cont_MediaContainer" + for figure in self._soup.select(selector): + video_element = figure.select_one("video") + if not video_element: + continue + + url, title = self._find_mob_video_url_title(video_element, figure) + + if url is None: + _unexpected_html_warning() + log.warn_contd(f"No element found for mob video '{title}'") + continue + + if urlparse(url).hostname != urlparse(self._page_url).hostname: + log.explain(f"Found external video at {url}, ignoring") + continue + + videos.append( + IliasPageElement.create_new( + typ=IliasElementType.MOB_VIDEO, url=url, name=sanitize_path_name(title), mtime=None + ) + ) + + return videos + + def _find_mob_video_url_title(self, video_element: Tag, figure: Tag) -> tuple[Optional[str], str]: + url = None + for source in video_element.select("source"): + if source.get("type", "") == "video/mp4": + url = cast(Optional[str], source.get("src")) + break + + if url is None and video_element.get("src"): + url = cast(Optional[str], video_element.get("src")) + + fig_caption = figure.select_one("figcaption") + if fig_caption: + title = cast(Tag, figure.select_one("figcaption")).get_text().strip() + ".mp4" + elif url is not None: + path = urlparse(self._abs_url_from_relative(url)).path + title = path.rsplit("/", 1)[-1] + else: + title = 
f"unknown video {figure}" + + if url: + url = self._abs_url_from_relative(url) + + return url, title + + def _is_in_expanded_meeting(self, tag: Tag) -> bool: + """ + Returns whether a file is part of an expanded meeting. + Has false positives for meetings themselves as their title is also "in the expanded meeting content". + It is in the same general div and this whole thing is guesswork. + Therefore, you should check for meetings before passing them in this function. + """ + parents: list[Tag] = list(tag.parents) + for parent in parents: + if not parent.get("class"): + continue + + # We should not crawl files under meetings + if "ilContainerListItemContentCB" in cast(str, parent.get("class")): + link: Tag = cast(Tag, cast(Tag, parent.parent).find("a")) + typ = IliasPage._find_type_for_element( + "meeting", + self._abs_url_from_link(link), + lambda: IliasPage._find_icon_for_folder_entry(link), + ) + return typ == IliasElementType.MEETING + + return False + + @staticmethod + def _find_upwards_folder_hierarchy(tag: Tag) -> list[str]: + """ + Interprets accordions and expandable blocks as virtual folders and returns them + in order. This allows us to find a file named "Test" in an accordion "Acc" as "Acc/Test" + """ + found_titles = [] + + if None == "hey": + pass + + outer_accordion_content: Optional[Tag] = None + + parents: list[Tag] = list(tag.parents) + for parent in parents: + if not parent.get("class"): + continue + + # ILIAS has proper accordions and weird blocks that look like normal headings, + # but some JS later transforms them into an accordion. 
+ + # This is for these weird JS-y blocks and custom item groups + if "ilContainerItemsContainer" in cast(str, parent.get("class")): + data_store_url = cast(str, cast(Tag, parent.parent).get("data-store-url", "")).lower() + is_custom_item_group = ( + "baseclass=ilcontainerblockpropertiesstoragegui" in data_store_url + and "cont_block_id=" in data_store_url + ) + # I am currently under the impression that *only* those JS blocks have an + # ilNoDisplay class. + if not is_custom_item_group and "ilNoDisplay" not in cast(str, parent.get("class")): + continue + prev = cast(Tag, parent.find_previous_sibling("div")) + if "ilContainerBlockHeader" in cast(str, prev.get("class")): + if prev.find("h3"): + found_titles.append(cast(Tag, prev.find("h3")).get_text().strip()) + else: + found_titles.append(cast(Tag, prev.find("h2")).get_text().strip()) + + # And this for real accordions + if "il_VAccordionContentDef" in cast(str, parent.get("class")): + outer_accordion_content = parent + break + + if outer_accordion_content: + accordion_tag = cast(Tag, outer_accordion_content.parent) + head_tag = cast( + Tag, + accordion_tag.find( + attrs={ + "class": lambda x: x is not None + and ("ilc_va_ihead_VAccordIHead" in x or "ilc_va_ihead_AccordIHead" in x) + } + ), + ) + found_titles.append(head_tag.get_text().strip()) + + return [sanitize_path_name(x) for x in reversed(found_titles)] + + @staticmethod + def _find_link_description(link: Tag) -> Optional[str]: + tile = cast( + Tag, link.find_parent("div", {"class": lambda x: x is not None and "il_ContainerListItem" in x}) + ) + if not tile: + return None + description_element = cast( + Tag, tile.find("div", {"class": lambda x: x is not None and "il_Description" in x}) + ) + if not description_element: + return None + return description_element.get_text().strip() + + @staticmethod + def _file_to_element(name: str, url: str, link_element: Tag) -> IliasPageElement: + # Files have a list of properties (type, modification date, size, etc.) 
+ # In a series of divs. + # Find the parent containing all those divs, so we can filter our what we need + properties_parent = cast( + Tag, + cast( + Tag, + link_element.find_parent( + "div", {"class": lambda x: x is not None and "il_ContainerListItem" in x} + ), + ).select_one(".il_ItemProperties"), + ) + # The first one is always the filetype + file_type = cast(Tag, properties_parent.select_one("span.il_ItemProperty")).get_text().strip() + + # The rest does not have a stable order. Grab the whole text and reg-ex the date + # out of it + all_properties_text = properties_parent.get_text().strip() + modification_date = IliasPage._find_date_in_text(all_properties_text) + if modification_date is None: + log.explain(f"Element {name} at {url} has no date.") + + # Grab the name from the link text + full_path = name + "." + file_type + + log.explain(f"Found file {full_path!r}") + return IliasPageElement.create_new( + IliasElementType.FILE, url, full_path, modification_date, skip_sanitize=True + ) + + def _find_cards(self) -> list[IliasPageElement]: + result: list[IliasPageElement] = [] + + card_titles: list[Tag] = self._soup.select(".card-title a") + + for title in card_titles: + url = self._abs_url_from_link(title) + name = sanitize_path_name(title.get_text().strip()) + typ = IliasPage._find_type_for_element(name, url, lambda: IliasPage._find_icon_from_card(title)) + + if not typ: + _unexpected_html_warning() + log.warn_contd(f"Could not extract type for {title}") + continue + + result.append(IliasPageElement.create_new(typ, url, name)) + + card_button_tiles: list[Tag] = self._soup.select(".card-title button") + + for button in card_button_tiles: + signal_regex = re.compile("#" + str(button["id"]) + r"[\s\S]*?\.trigger\('(.+?)'") + signal_match = signal_regex.search(str(self._soup)) + if not signal_match: + _unexpected_html_warning() + log.warn_contd(f"Could not find click handler signal for {button}") + continue + signal = signal_match.group(1) + open_regex = 
re.compile(r"\.on\('" + signal + r"[\s\S]*?window.open\(['\"](.+?)['\"]") + open_match = open_regex.search(str(self._soup)) + if not open_match: + _unexpected_html_warning() + log.warn_contd(f"Could not find click handler target for signal {signal} for {button}") + continue + url = self._abs_url_from_relative(open_match.group(1)) + name = sanitize_path_name(button.get_text().strip()) + typ = IliasPage._find_type_for_element(name, url, lambda: IliasPage._find_icon_from_card(button)) + caption_parent = cast( + Tag, + button.find_parent( + "div", + attrs={"class": lambda x: x is not None and "caption" in x}, + ), + ) + caption_container = caption_parent.find_next_sibling("div") + description = caption_container.get_text().strip() if caption_container else None + + if not typ: + _unexpected_html_warning() + log.warn_contd(f"Could not extract type for {button}") + continue + + result.append(IliasPageElement.create_new(typ, url, name, description=description)) + + return result + + @staticmethod + def _find_type_for_element( + element_name: str, + url: str, + icon_for_element: Callable[[], Optional[Tag]], + ) -> Optional[IliasElementType]: + """ + Decides which sub crawler to use for a given top level element. 
+ """ + parsed_url = urlparse(url) + icon = icon_for_element() + + def try_matcher(matcher: IliasElementMatcher) -> bool: + match matcher: + case TypeMatcher.All(matchers=ms): + return all(try_matcher(m) for m in ms) + case TypeMatcher.Any(matchers=ms): + return any(try_matcher(m) for m in ms) + case TypeMatcher.ImgAlt(alt=alt): + return icon is not None and alt in str(icon["alt"]).lower() + case TypeMatcher.ImgSrc(src=src): + return icon is not None and src in str(icon["src"]).lower() + case TypeMatcher.UrlPath(path=path): + return path in parsed_url.path.lower() + case TypeMatcher.UrlParameter(query=query): + return query in parsed_url.query.lower() + + raise CrawlError(f"Unknown matcher {matcher}") + + for typ in IliasElementType: + if try_matcher(typ.matcher()): + return typ + + _unexpected_html_warning() + log.warn_contd(f"Tried to figure out element type, but failed for {element_name!r} / {url!r})") + + if "ref_id=" in parsed_url.query.lower() or "goto.php" in parsed_url.path.lower(): + log.warn_contd("Defaulting to FOLDER as it contains a ref_id/goto") + return IliasElementType.FOLDER + + return None + + @staticmethod + def _find_icon_for_folder_entry(link_element: Tag) -> Optional[Tag]: + found_parent: Optional[Tag] = None + + # We look for the outer div of our inner link, to find information around it + # (mostly the icon) + for parent in link_element.parents: + if "ilContainerListItemOuter" in parent["class"] or "il-std-item" in parent["class"]: + found_parent = parent + break + + if found_parent is None: + _unexpected_html_warning() + log.warn_contd(f"Tried to figure out element type, but did not find an icon for {link_element!r}") + return None + + # Find the small descriptive icon to figure out the type + img_tag: Optional[Tag] = found_parent.select_one("img.ilListItemIcon") + + if img_tag is None: + img_tag = found_parent.select_one("img.icon") + + is_session_expansion_button = found_parent.find( + "a", attrs={"href": lambda x: x is not None and 
("crs_next_sess=" in x or "crs_prev_sess=" in x)} + ) + if img_tag is None and is_session_expansion_button: + log.explain("Found session expansion button, skipping it as it has no content") + return None + + if img_tag is not None: + return img_tag + + log.explain(f"Tried to figure out element type, but did not find an image for {link_element!r}") + return None + + @staticmethod + def _find_icon_from_card(card_title: Tag) -> Optional[Tag]: + def is_card_root(element: Tag) -> bool: + return "il-card" in element["class"] and "thumbnail" in element["class"] + + card_root: Optional[Tag] = None + + # We look for the card root + for parent in card_title.parents: + if is_card_root(parent): + card_root = parent + break + + if card_root is None: + _unexpected_html_warning() + log.warn_contd(f"Tried to figure out element type, but did not find an icon for {card_title}") + return None + + return cast(Tag, card_root.select_one(".il-card-repository-head .icon")) + + @staticmethod + def is_logged_in(ilias_soup: IliasSoup) -> bool: + soup = ilias_soup.soup + # Normal ILIAS pages + mainbar = soup.find(class_="il-maincontrols-metabar") + if mainbar is not None: + login_button = mainbar.find(attrs={"href": lambda x: x is not None and "login.php" in x}) + shib_login = soup.find(id="button_shib_login") + return not login_button and not shib_login + + # Personal Desktop + if soup.find("a", attrs={"href": lambda x: x is not None and "block_type=pditems" in x}): + return True + + # Empty personal desktop has zero (0) markers. Match on the text... + if alert := soup.select_one(".alert-info"): + text = alert.get_text().lower() + if "you have not yet selected any favourites" in text: + return True + if "sie haben aktuell noch keine favoriten ausgewählt" in text: + return True + + # Video listing embeds do not have complete ILIAS html. 
Try to match them by + # their video listing table + video_table = soup.find( + recursive=True, name="table", attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")} + ) + if video_table is not None: + return True + # The individual video player wrapper page has nothing of the above. + # Match it by its playerContainer. + return soup.select_one("#playerContainer") is not None + + @staticmethod + def _find_date_in_text(text: str) -> Optional[datetime]: + modification_date_match = re.search( + r"(((\d+\. \w+ \d+)|(Gestern|Yesterday)|(Heute|Today)|(Morgen|Tomorrow)), \d+:\d+)", text + ) + if modification_date_match is not None: + modification_date_str = modification_date_match.group(1) + return demangle_date(modification_date_str) + return None + + def get_permalink(self) -> Optional[str]: + return IliasPage.get_soup_permalink(self._ilias_soup) + + def _abs_url_from_link(self, link_tag: Tag) -> str: + """ + Create an absolute url from an tag. + """ + return self._abs_url_from_relative(cast(str, link_tag.get("href"))) + + def _abs_url_from_relative(self, relative_url: str) -> str: + """ + Create an absolute url from a relative URL. 
+ """ + return urljoin(self._page_url, relative_url) + + @staticmethod + def get_soup_permalink(ilias_soup: IliasSoup) -> Optional[str]: + scripts = cast(list[Tag], ilias_soup.soup.find_all("script")) + pattern = re.compile(r"il\.Footer\.permalink\.copyText\(\"(.+?)\"\)") + for script in scripts: + if match := pattern.search(script.text): + url = match.group(1) + url = url.replace(r"\/", "/") + return url + return None + + +def _unexpected_html_warning() -> None: + log.warn("Encountered unexpected HTML structure, ignoring element.") + + +german_months = ["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"] +english_months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] + + +def demangle_date(date_str: str, fail_silently: bool = False) -> Optional[datetime]: + """ + Demangle a given date in one of the following formats (hour/minute part is optional): + "Gestern, HH:MM" + "Heute, HH:MM" + "Morgen, HH:MM" + "dd. mon yyyy, HH:MM + """ + try: + # Normalize whitespace because users + date_str = re.sub(r"\s+", " ", date_str) + + date_str = re.sub("Gestern|Yesterday", _format_date_english(_yesterday()), date_str, flags=re.I) + date_str = re.sub("Heute|Today", _format_date_english(date.today()), date_str, flags=re.I) + date_str = re.sub("Morgen|Tomorrow", _format_date_english(_tomorrow()), date_str, flags=re.I) + date_str = date_str.strip() + for german, english in zip(german_months, english_months, strict=True): + date_str = date_str.replace(german, english) + # Remove trailing dots for abbreviations, e.g. "20. Apr. 2020" -> "20. Apr 2020" + date_str = date_str.replace(english + ".", english) + + # We now have a nice english String in the format: "dd. mmm yyyy, hh:mm" or "dd. 
mmm yyyy" + + # Check if we have a time as well + if ", " in date_str: + day_part, time_part = date_str.split(",") + else: + day_part = date_str.split(",")[0] + time_part = None + + day_str, month_str, year_str = day_part.split(" ") + + day = int(day_str.strip().replace(".", "")) + month = english_months.index(month_str.strip()) + 1 + year = int(year_str.strip()) + + if time_part: + hour_str, minute_str = time_part.split(":") + hour = int(hour_str) + minute = int(minute_str) + return datetime(year, month, day, hour, minute) + + return datetime(year, month, day) + except Exception: + if not fail_silently: + log.warn(f"Date parsing failed for {date_str!r}") + return None + + +def _format_date_english(date_to_format: date) -> str: + month = english_months[date_to_format.month - 1] + return f"{date_to_format.day:02d}. {month} {date_to_format.year:04d}" + + +def _yesterday() -> date: + return date.today() - timedelta(days=1) + + +def _tomorrow() -> date: + return date.today() + timedelta(days=1) + + +def parse_ilias_forum_export(forum_export: BeautifulSoup) -> list[IliasForumThread]: + elements = [] + for p in forum_export.select("body > p"): + title_tag = p + content_tag = p.find_next_sibling("ul") + + title = cast(Tag, p.find("b")).text + if ":" in title: + title = title[title.find(":") + 1 :] + title = title.strip() + + if not content_tag or content_tag.find_previous_sibling("p") != title_tag: + # ILIAS allows users to delete the initial post while keeping the thread open + # This produces empty threads without *any* content. + # I am not sure why you would want this, but ILIAS makes it easy to do. 
+ elements.append(IliasForumThread(title, title_tag, forum_export.new_tag("ul"), None)) + continue + + mtime = _guess_timestamp_from_forum_post_content(content_tag) + elements.append(IliasForumThread(title, title_tag, content_tag, mtime)) + + return elements + + +def _guess_timestamp_from_forum_post_content(content: Tag) -> Optional[datetime]: + posts = cast(Optional[Tag], content.select(".ilFrmPostHeader > span.small")) + if not posts: + return None + + newest_date: Optional[datetime] = None + + for post in posts: + text = post.text.strip() + text = text[text.rfind("|") + 1 :] + date = demangle_date(text, fail_silently=True) + if not date: + continue + + if not newest_date or newest_date < date: + newest_date = date + + return newest_date diff --git a/PFERD/crawl/ilias/kit_ilias_web_crawler.py b/PFERD/crawl/ilias/kit_ilias_web_crawler.py new file mode 100644 index 0000000..5088e01 --- /dev/null +++ b/PFERD/crawl/ilias/kit_ilias_web_crawler.py @@ -0,0 +1,37 @@ +from typing import Literal + +from ...auth import Authenticator +from ...config import Config +from .ilias_web_crawler import IliasWebCrawler, IliasWebCrawlerSection +from .shibboleth_login import ShibbolethLogin + +_ILIAS_URL = "https://ilias.studium.kit.edu" + + +class KitShibbolethBackgroundLoginSuccessful: + pass + + +class KitIliasWebCrawlerSection(IliasWebCrawlerSection): + def base_url(self) -> str: + return _ILIAS_URL + + def login(self) -> Literal["shibboleth"]: + return "shibboleth" + + +class KitIliasWebCrawler(IliasWebCrawler): + def __init__( + self, + name: str, + section: KitIliasWebCrawlerSection, + config: Config, + authenticators: dict[str, Authenticator], + ): + super().__init__(name, section, config, authenticators) + + self._shibboleth_login = ShibbolethLogin( + _ILIAS_URL, + self._auth, + section.tfa_auth(authenticators), + ) diff --git a/PFERD/crawl/ilias/shibboleth_login.py b/PFERD/crawl/ilias/shibboleth_login.py new file mode 100644 index 0000000..bffb183 --- /dev/null +++ 
b/PFERD/crawl/ilias/shibboleth_login.py @@ -0,0 +1,127 @@ +from typing import Any, Optional, cast + +import aiohttp +import yarl +from bs4 import BeautifulSoup, Tag + +from ...auth import Authenticator, TfaAuthenticator +from ...logging import log +from ...utils import soupify +from ..crawler import CrawlError + + +class ShibbolethLogin: + """ + Login via shibboleth system. + """ + + def __init__( + self, ilias_url: str, authenticator: Authenticator, tfa_authenticator: Optional[Authenticator] + ) -> None: + self._ilias_url = ilias_url + self._auth = authenticator + self._tfa_auth = tfa_authenticator + + async def login(self, sess: aiohttp.ClientSession) -> None: + """ + Performs the ILIAS Shibboleth authentication dance and saves the login + cookies it receieves. + + This function should only be called whenever it is detected that you're + not logged in. The cookies obtained should be good for a few minutes, + maybe even an hour or two. + """ + + # Equivalent: Click on "Mit KIT-Account anmelden" button in + # https://ilias.studium.kit.edu/login.php + url = f"{self._ilias_url}/shib_login.php" + async with sess.get(url) as response: + shib_url = response.url + if str(shib_url).startswith(self._ilias_url): + log.explain("ILIAS recognized our shib token and logged us in in the background, returning") + return + soup: BeautifulSoup = soupify(await response.read()) + + # Attempt to login using credentials, if necessary + while not self._login_successful(soup): + # Searching the form here so that this fails before asking for + # credentials rather than after asking. 
+ form = cast(Tag, soup.find("form", {"method": "post"})) + action = cast(str, form["action"]) + + # Equivalent: Enter credentials in + # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO + url = str(shib_url.origin()) + action + username, password = await self._auth.credentials() + data = { + "_eventId_proceed": "", + "j_username": username, + "j_password": password, + "fudis_web_authn_assertion_input": "", + } + if csrf_token_input := form.find("input", {"name": "csrf_token"}): + data["csrf_token"] = csrf_token_input["value"] # type: ignore + soup = await _post(sess, url, data) + + if soup.find(id="attributeRelease"): + raise CrawlError( + "ILIAS Shibboleth entitlements changed! " + "Please log in once in your browser and review them" + ) + + if self._tfa_required(soup): + soup = await self._authenticate_tfa(sess, soup, shib_url) + + if not self._login_successful(soup): + self._auth.invalidate_credentials() + + # Equivalent: Being redirected via JS automatically + # (or clicking "Continue" if you have JS disabled) + relay_state = cast(Tag, soup.find("input", {"name": "RelayState"})) + saml_response = cast(Tag, soup.find("input", {"name": "SAMLResponse"})) + url = cast(str, cast(Tag, soup.find("form", {"method": "post"}))["action"]) + data = { # using the info obtained in the while loop above + "RelayState": cast(str, relay_state["value"]), + "SAMLResponse": cast(str, saml_response["value"]), + } + await sess.post(cast(str, url), data=data) + + async def _authenticate_tfa( + self, session: aiohttp.ClientSession, soup: BeautifulSoup, shib_url: yarl.URL + ) -> BeautifulSoup: + if not self._tfa_auth: + self._tfa_auth = TfaAuthenticator("ilias-anon-tfa") + + tfa_token = await self._tfa_auth.password() + + # Searching the form here so that this fails before asking for + # credentials rather than after asking. 
+ form = cast(Tag, soup.find("form", {"method": "post"})) + action = cast(str, form["action"]) + + # Equivalent: Enter token in + # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO + url = str(shib_url.origin()) + action + username, password = await self._auth.credentials() + data = { + "_eventId_proceed": "", + "fudis_otp_input": tfa_token, + } + if csrf_token_input := form.find("input", {"name": "csrf_token"}): + data["csrf_token"] = csrf_token_input["value"] # type: ignore + return await _post(session, url, data) + + @staticmethod + def _login_successful(soup: BeautifulSoup) -> bool: + relay_state = soup.find("input", {"name": "RelayState"}) + saml_response = soup.find("input", {"name": "SAMLResponse"}) + return relay_state is not None and saml_response is not None + + @staticmethod + def _tfa_required(soup: BeautifulSoup) -> bool: + return soup.find(id="fudiscr-form") is not None + + +async def _post(session: aiohttp.ClientSession, url: str, data: Any) -> BeautifulSoup: + async with session.post(url, data=data) as response: + return soupify(await response.read()) diff --git a/PFERD/crawl/kit_ipd_crawler.py b/PFERD/crawl/kit_ipd_crawler.py new file mode 100644 index 0000000..7094b9c --- /dev/null +++ b/PFERD/crawl/kit_ipd_crawler.py @@ -0,0 +1,208 @@ +import os +import re +from collections.abc import Awaitable, Generator, Iterable +from dataclasses import dataclass +from datetime import datetime +from pathlib import PurePath +from re import Pattern +from typing import Any, Optional, Union, cast +from urllib.parse import urljoin + +import aiohttp +from bs4 import BeautifulSoup, Tag + +from ..auth import Authenticator +from ..config import Config +from ..logging import ProgressBar, log +from ..output_dir import FileSink +from ..utils import sanitize_path_name, soupify +from .crawler import CrawlError +from .http_crawler import HttpCrawler, HttpCrawlerSection + + +class KitIpdCrawlerSection(HttpCrawlerSection): + def target(self) -> str: + target = 
self.s.get("target") + if not target: + self.missing_value("target") + + if not target.startswith("https://"): + self.invalid_value("target", target, "Should be a URL") + + return target + + def link_regex(self) -> Pattern[str]: + regex = self.s.get("link_regex", r"^.*?[^/]+\.(pdf|zip|c|cpp|java)$") + return re.compile(regex) + + def basic_auth(self, authenticators: dict[str, Authenticator]) -> Optional[Authenticator]: + value: Optional[str] = self.s.get("auth") + if value is None: + return None + auth = authenticators.get(value) + if auth is None: + self.invalid_value("auth", value, "No such auth section exists") + return auth + + +@dataclass +class KitIpdFile: + name: str + url: str + + def explain(self) -> None: + log.explain(f"File {self.name!r} (href={self.url!r})") + + +@dataclass +class KitIpdFolder: + name: str + entries: list[Union[KitIpdFile, "KitIpdFolder"]] + + def explain(self) -> None: + log.explain_topic(f"Folder {self.name!r}") + for entry in self.entries: + entry.explain() + + +class KitIpdCrawler(HttpCrawler): + def __init__( + self, + name: str, + section: KitIpdCrawlerSection, + config: Config, + authenticators: dict[str, Authenticator], + ): + super().__init__(name, section, config) + self._url = section.target() + self._file_regex = section.link_regex() + self._authenticator = section.basic_auth(authenticators) + self._basic_auth: Optional[aiohttp.BasicAuth] = None + + async def _run(self) -> None: + if self._authenticator: + username, password = await self._authenticator.credentials() + self._basic_auth = aiohttp.BasicAuth(username, password) + + maybe_cl = await self.crawl(PurePath(".")) + if not maybe_cl: + return + + tasks: list[Awaitable[None]] = [] + + async with maybe_cl: + for item in await self._fetch_items(): + item.explain() + if isinstance(item, KitIpdFolder): + tasks.append(self._crawl_folder(PurePath("."), item)) + else: + log.explain_topic(f"Orphan file {item.name!r} (href={item.url!r})") + log.explain("Attributing it to root 
folder") + # do this here to at least be sequential and not parallel (rate limiting is hard, as the + # crawl abstraction does not hold for these requests) + etag, mtime = await self._request_resource_version(item.url) + tasks.append(self._download_file(PurePath("."), item, etag, mtime)) + + await self.gather(tasks) + + async def _crawl_folder(self, parent: PurePath, folder: KitIpdFolder) -> None: + path = parent / sanitize_path_name(folder.name) + if not await self.crawl(path): + return + + tasks = [] + for entry in folder.entries: + if isinstance(entry, KitIpdFolder): + tasks.append(self._crawl_folder(path, entry)) + else: + # do this here to at least be sequential and not parallel (rate limiting is hard, as the crawl + # abstraction does not hold for these requests) + etag, mtime = await self._request_resource_version(entry.url) + tasks.append(self._download_file(path, entry, etag, mtime)) + + await self.gather(tasks) + + async def _download_file( + self, parent: PurePath, file: KitIpdFile, etag: Optional[str], mtime: Optional[datetime] + ) -> None: + element_path = parent / sanitize_path_name(file.name) + + prev_etag = self._get_previous_etag_from_report(element_path) + etag_differs = None if prev_etag is None else prev_etag != etag + + maybe_dl = await self.download(element_path, etag_differs=etag_differs, mtime=mtime) + if not maybe_dl: + # keep storing the known file's etag + if prev_etag: + self._add_etag_to_report(element_path, prev_etag) + return + + async with maybe_dl as (bar, sink): + await self._stream_from_url(file.url, element_path, sink, bar) + + async def _fetch_items(self) -> Iterable[KitIpdFile | KitIpdFolder]: + page, url = await self.get_page() + elements: list[Tag] = self._find_file_links(page) + + # do not add unnecessary nesting for a single

heading + drop_h1: bool = len(page.find_all(name="h1")) <= 1 + + folder_tree: KitIpdFolder = KitIpdFolder(".", []) + for element in elements: + parent = HttpCrawler.get_folder_structure_from_heading_hierarchy(element, drop_h1) + file = self._extract_file(element, url) + + current_folder: KitIpdFolder = folder_tree + for folder_name in parent.parts: + # helps the type checker to verify that current_folder is indeed a folder + def subfolders() -> Generator[KitIpdFolder, Any, None]: + return (entry for entry in current_folder.entries if isinstance(entry, KitIpdFolder)) + + if not any(entry.name == folder_name for entry in subfolders()): + current_folder.entries.append(KitIpdFolder(folder_name, [])) + current_folder = next(entry for entry in subfolders() if entry.name == folder_name) + + current_folder.entries.append(file) + + return folder_tree.entries + + def _extract_file(self, link: Tag, url: str) -> KitIpdFile: + url = self._abs_url_from_link(url, link) + name = os.path.basename(url) + return KitIpdFile(name, url) + + def _find_file_links(self, tag: Tag | BeautifulSoup) -> list[Tag]: + return cast(list[Tag], tag.find_all(name="a", attrs={"href": self._file_regex})) + + def _abs_url_from_link(self, url: str, link_tag: Tag) -> str: + return urljoin(url, cast(str, link_tag.get("href"))) + + async def _stream_from_url(self, url: str, path: PurePath, sink: FileSink, bar: ProgressBar) -> None: + async with self.session.get(url, allow_redirects=False, auth=self._basic_auth) as resp: + if resp.status == 403: + raise CrawlError("Received a 403. Are you within the KIT network/VPN?") + if resp.status == 401: + raise CrawlError("Received a 401. 
Do you maybe need credentials?") + if resp.status >= 400: + raise CrawlError(f"Received HTTP {resp.status} when trying to download {url!r}") + + if resp.content_length: + bar.set_total(resp.content_length) + + async for data in resp.content.iter_chunked(1024): + sink.file.write(data) + bar.advance(len(data)) + + sink.done() + + self._add_etag_to_report(path, resp.headers.get("ETag")) + + async def get_page(self) -> tuple[BeautifulSoup, str]: + async with self.session.get(self._url, auth=self._basic_auth) as request: + # The web page for Algorithmen für Routenplanung contains some + # weird comments that beautifulsoup doesn't parse correctly. This + # hack enables those pages to be crawled, and should hopefully not + # cause issues on other pages. + content = (await request.read()).decode("utf-8") + content = re.sub(r"", "", content) + return soupify(content.encode("utf-8")), str(request.url) diff --git a/PFERD/crawl/local_crawler.py b/PFERD/crawl/local_crawler.py new file mode 100644 index 0000000..dfc6f65 --- /dev/null +++ b/PFERD/crawl/local_crawler.py @@ -0,0 +1,118 @@ +import asyncio +import datetime +import random +from pathlib import Path, PurePath +from typing import Optional + +from ..config import Config +from .crawler import Crawler, CrawlerSection, anoncritical + + +class LocalCrawlerSection(CrawlerSection): + def target(self) -> Path: + value = self.s.get("target") + if value is None: + self.missing_value("target") + return Path(value).expanduser() + + def crawl_delay(self) -> float: + value = self.s.getfloat("crawl_delay", fallback=0.0) + if value < 0: + self.invalid_value("crawl_delay", value, "Must not be negative") + return value + + def download_delay(self) -> float: + value = self.s.getfloat("download_delay", fallback=0.0) + if value < 0: + self.invalid_value("download_delay", value, "Must not be negative") + return value + + def download_speed(self) -> Optional[int]: + value = self.s.getint("download_speed") + if value is not None and value <= 0: 
+ self.invalid_value("download_speed", value, "Must be greater than 0") + return value + + +class LocalCrawler(Crawler): + def __init__( + self, + name: str, + section: LocalCrawlerSection, + config: Config, + ): + super().__init__(name, section, config) + + self._target = config.default_section.working_dir() / section.target() + self._crawl_delay = section.crawl_delay() + self._download_delay = section.download_delay() + self._download_speed = section.download_speed() + + if self._download_speed: + self._block_size = self._download_speed // 10 + else: + self._block_size = 1024**2 # 1 MiB + + async def _run(self) -> None: + await self._crawl_path(self._target, PurePath()) + + @anoncritical + async def _crawl_path(self, path: Path, pure: PurePath) -> None: + if path.is_dir(): + await self._crawl_dir(path, pure) + elif path.is_file(): + await self._crawl_file(path, pure) + + async def _crawl_dir(self, path: Path, pure: PurePath) -> None: + cl = await self.crawl(pure) + if not cl: + return + + tasks = [] + + async with cl: + await asyncio.sleep( + random.uniform( + 0.5 * self._crawl_delay, + self._crawl_delay, + ) + ) + + for child in path.iterdir(): + pure_child = cl.path / child.name + tasks.append(self._crawl_path(child, pure_child)) + + await self.gather(tasks) + + async def _crawl_file(self, path: Path, pure: PurePath) -> None: + stat = path.stat() + mtime = datetime.datetime.fromtimestamp(stat.st_mtime) + dl = await self.download(pure, mtime=mtime) + if not dl: + return + + async with dl as (bar, sink): + await asyncio.sleep( + random.uniform( + 0.5 * self._download_delay, + self._download_delay, + ) + ) + + bar.set_total(stat.st_size) + + with open(path, "rb") as f: + while True: + data = f.read(self._block_size) + if len(data) == 0: + break + + sink.file.write(data) + bar.advance(len(data)) + + if self._download_speed: + delay = self._block_size / self._download_speed + delay = random.uniform(0.8 * delay, 1.2 * delay) + await asyncio.sleep(delay) + + 
sink.done() diff --git a/PFERD/deduplicator.py b/PFERD/deduplicator.py new file mode 100644 index 0000000..18940c5 --- /dev/null +++ b/PFERD/deduplicator.py @@ -0,0 +1,104 @@ +from collections.abc import Iterator +from pathlib import PurePath + +from .logging import log +from .utils import fmt_path + + +def name_variants(path: PurePath) -> Iterator[PurePath]: + separator = " " if " " in path.stem else "_" + i = 1 + while True: + yield path.parent / f"{path.stem}{separator}{i}{path.suffix}" + i += 1 + + +class Deduplicator: + FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)]) + FORBIDDEN_NAMES = { + "CON", + "PRN", + "AUX", + "NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", + } + + def __init__(self, windows_paths: bool) -> None: + self._windows_paths = windows_paths + + self._known: set[PurePath] = set() + + def _add(self, path: PurePath) -> None: + self._known.add(path) + + # The last parent is just "." + for parent in list(path.parents)[:-1]: + self._known.add(parent) + + def _fixup_element(self, name: str) -> str: + # For historical reasons, windows paths have some odd restrictions that + # we're trying to avoid. See: + # https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + + for char in self.FORBIDDEN_CHARS: + name = name.replace(char, "_") + + path = PurePath(name) + if path.stem in self.FORBIDDEN_NAMES: + name = f"{path.stem}_{path.suffix}" + + if name.endswith(" ") or name.endswith("."): + name += "_" + + return name + + def _fixup_for_windows(self, path: PurePath) -> PurePath: + new_path = PurePath(*[self._fixup_element(elem) for elem in path.parts]) + if new_path != path: + log.explain(f"Changed path to {fmt_path(new_path)} for windows compatibility") + return new_path + + def fixup_path(self, path: PurePath) -> PurePath: + """Fixes up the path for windows, if enabled. 
Returns the path unchanged otherwise.""" + if self._windows_paths: + return self._fixup_for_windows(path) + return path + + def mark(self, path: PurePath) -> PurePath: + if self._windows_paths: + path = self._fixup_for_windows(path) + + if path not in self._known: + self._add(path) + return path + + log.explain(f"Path {fmt_path(path)} is already taken, finding a new name") + + for variant in name_variants(path): + if variant in self._known: + log.explain(f"Path {fmt_path(variant)} is taken as well") + continue + + log.explain(f"Found unused path {fmt_path(variant)}") + self._add(variant) + return variant + + # The "name_variants" iterator returns infinitely many paths + raise RuntimeError("Unreachable") diff --git a/PFERD/diva.py b/PFERD/diva.py deleted file mode 100644 index 148fa56..0000000 --- a/PFERD/diva.py +++ /dev/null @@ -1,169 +0,0 @@ -""" -Utility functions and a scraper/downloader for the KIT DIVA portal. -""" -import logging -import re -from dataclasses import dataclass -from pathlib import Path -from typing import Any, Callable, List, Optional - -import requests - -from .errors import FatalException -from .logging import PrettyLogger -from .organizer import Organizer -from .tmp_dir import TmpDir -from .transform import Transformable -from .utils import stream_to_path - -LOGGER = logging.getLogger(__name__) -PRETTY = PrettyLogger(LOGGER) - - -@dataclass -class DivaDownloadInfo(Transformable): - """ - Information about a DIVA video - """ - url: str - - -DivaDownloadStrategy = Callable[[Organizer, DivaDownloadInfo], bool] - - -def diva_download_new(organizer: Organizer, info: DivaDownloadInfo) -> bool: - """ - Accepts only new files. - """ - resolved_file = organizer.resolve(info.path) - if not resolved_file.exists(): - return True - PRETTY.ignored_file(info.path, "local file exists") - return False - - -class DivaPlaylistCrawler: - # pylint: disable=too-few-public-methods - """ - A crawler for DIVA playlists. 
- """ - - _PLAYLIST_BASE_URL = "https://mediaservice.bibliothek.kit.edu/asset/detail/" - _COLLECTION_BASE_URL = "https://mediaservice.bibliothek.kit.edu/asset/collection.json" - - def __init__(self, playlist_id: str): - self._id = playlist_id - - @classmethod - def fetch_id(cls, playlist_link: str) -> str: - """ - Fetches the ID for a playerlist, given the base link - (e.g. https://mediaservice.bibliothek.kit.edu/#/details/DIVA-2019-271). - - Raises a FatalException, if the id can not be resolved - """ - match = re.match(r".+#/details/(.+)", playlist_link) - if match is None: - raise FatalException( - "DIVA: Invalid playlist link format, could not extract details." - ) - base_name = match.group(1) - - response = requests.get(cls._PLAYLIST_BASE_URL + base_name + ".json") - - if response.status_code != 200: - raise FatalException( - f"DIVA: Got non-200 status code ({response.status_code}))" - f"when requesting {response.url!r}!" - ) - - body = response.json() - - if body["error"]: - raise FatalException(f"DIVA: Server returned error {body['error']!r}.") - - return body["result"]["collection"]["id"] - - def crawl(self) -> List[DivaDownloadInfo]: - """ - Crawls the playlist given in the constructor. 
- """ - response = requests.get(self._COLLECTION_BASE_URL, params={"collection": self._id}) - if response.status_code != 200: - raise FatalException(f"Server returned status {response.status_code}.") - - body = response.json() - - if body["error"]: - raise FatalException(f"Server returned error {body['error']!r}.") - - result = body["result"] - - if result["resultCount"] > result["pageSize"]: - PRETTY.warning("Did not receive all results, some will be missing") - - download_infos: List[DivaDownloadInfo] = [] - - for video in result["resultList"]: - title = video["title"] - collection_title = self._follow_path(["collection", "title"], video) - url = self._follow_path( - ["resourceList", "derivateList", "mp4", "url"], - video - ) - - if url and collection_title and title: - path = Path(collection_title, title + ".mp4") - download_infos.append(DivaDownloadInfo(path, url)) - else: - PRETTY.warning(f"Incomplete video found: {title!r} {collection_title!r} {url!r}") - - return download_infos - - @staticmethod - def _follow_path(path: List[str], obj: Any) -> Optional[Any]: - """ - Follows a property path through an object, bailing at the first None. - """ - current = obj - for path_step in path: - if path_step in current: - current = current[path_step] - else: - return None - return current - - -class DivaDownloader: - """ - A downloader for DIVA videos. - """ - - def __init__(self, tmp_dir: TmpDir, organizer: Organizer, strategy: DivaDownloadStrategy): - self._tmp_dir = tmp_dir - self._organizer = organizer - self._strategy = strategy - self._session = requests.session() - - def download_all(self, infos: List[DivaDownloadInfo]) -> None: - """ - Download multiple files one after the other. - """ - for info in infos: - self.download(info) - - def download(self, info: DivaDownloadInfo) -> None: - """ - Download a single file. 
- """ - if not self._strategy(self._organizer, info): - self._organizer.mark(info.path) - return - - with self._session.get(info.url, stream=True) as response: - if response.status_code == 200: - tmp_file = self._tmp_dir.new_path() - stream_to_path(response, tmp_file, info.path.name) - self._organizer.accept_file(tmp_file, info.path) - else: - PRETTY.warning(f"Could not download file, got response {response.status_code}") diff --git a/PFERD/download_summary.py b/PFERD/download_summary.py deleted file mode 100644 index 28d51b5..0000000 --- a/PFERD/download_summary.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Provides a summary that keeps track of new modified or deleted files. -""" -from pathlib import Path -from typing import List - - -class DownloadSummary: - """ - Keeps track of all new, modified or deleted files and provides a summary. - """ - - def __init__(self) -> None: - self._new_files: List[Path] = [] - self._modified_files: List[Path] = [] - self._deleted_files: List[Path] = [] - - @property - def new_files(self) -> List[Path]: - """ - Returns all new files. - """ - return self._new_files.copy() - - @property - def modified_files(self) -> List[Path]: - """ - Returns all modified files. - """ - return self._modified_files.copy() - - @property - def deleted_files(self) -> List[Path]: - """ - Returns all deleted files. - """ - return self._deleted_files.copy() - - def merge(self, summary: 'DownloadSummary') -> None: - """ - Merges ourselves with the passed summary. Modifies this object, but not the passed one. - """ - self._new_files += summary.new_files - self._modified_files += summary.modified_files - self._deleted_files += summary.deleted_files - - def add_deleted_file(self, path: Path) -> None: - """ - Registers a file as deleted. - """ - self._deleted_files.append(path) - - def add_modified_file(self, path: Path) -> None: - """ - Registers a file as changed. 
- """ - self._modified_files.append(path) - - def add_new_file(self, path: Path) -> None: - """ - Registers a file as new. - """ - self._new_files.append(path) - - def has_updates(self) -> bool: - """ - Returns whether this summary has any updates. - """ - return bool(self._new_files or self._modified_files or self._deleted_files) diff --git a/PFERD/downloaders.py b/PFERD/downloaders.py deleted file mode 100644 index 94b8b9f..0000000 --- a/PFERD/downloaders.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -General downloaders useful in many situations -""" - -from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional - -import requests -import requests.auth - -from .organizer import Organizer -from .tmp_dir import TmpDir -from .transform import Transformable -from .utils import stream_to_path - - -@dataclass -class HttpDownloadInfo(Transformable): - """ - This class describes a single file to be downloaded. - """ - - url: str - parameters: Dict[str, Any] = field(default_factory=dict) - - -class HttpDownloader: - """A HTTP downloader that can handle HTTP basic auth.""" - - def __init__( - self, - tmp_dir: TmpDir, - organizer: Organizer, - username: Optional[str], - password: Optional[str], - ): - """Create a new http downloader.""" - self._organizer = organizer - self._tmp_dir = tmp_dir - self._username = username - self._password = password - self._session = self._build_session() - - def _build_session(self) -> requests.Session: - session = requests.Session() - if self._username and self._password: - session.auth = requests.auth.HTTPBasicAuth( - self._username, self._password - ) - return session - - def download_all(self, infos: List[HttpDownloadInfo]) -> None: - """ - Download multiple files one after the other. - """ - - for info in infos: - self.download(info) - - def download(self, info: HttpDownloadInfo) -> None: - """ - Download a single file. 
- """ - - with self._session.get(info.url, params=info.parameters, stream=True) as response: - if response.status_code == 200: - tmp_file = self._tmp_dir.new_path() - stream_to_path(response, tmp_file, info.path.name) - self._organizer.accept_file(tmp_file, info.path) - else: - # TODO use proper exception - raise Exception(f"Could not download file, got response {response.status_code}") diff --git a/PFERD/errors.py b/PFERD/errors.py deleted file mode 100644 index d003314..0000000 --- a/PFERD/errors.py +++ /dev/null @@ -1,39 +0,0 @@ -""" -An error logging decorator. -""" - -import logging -from typing import Any, Callable, TypeVar, cast - -from rich.console import Console - -from .logging import PrettyLogger - -LOGGER = logging.getLogger(__name__) -PRETTY = PrettyLogger(LOGGER) - - -class FatalException(Exception): - """ - A fatal exception occurred. Recovery is not possible. - """ - - -TFun = TypeVar('TFun', bound=Callable[..., Any]) - - -def swallow_and_print_errors(function: TFun) -> TFun: - """ - Decorates a function, swallows all errors, logs them and returns none if one occurred. - """ - def inner(*args: Any, **kwargs: Any) -> Any: - # pylint: disable=broad-except - try: - return function(*args, **kwargs) - except FatalException as error: - PRETTY.error(str(error)) - return None - except Exception as error: - Console().print_exception() - return None - return cast(TFun, inner) diff --git a/PFERD/ilias/__init__.py b/PFERD/ilias/__init__.py deleted file mode 100644 index 0a5f08b..0000000 --- a/PFERD/ilias/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -Synchronizing files from ILIAS instances (https://www.ilias.de/). 
-""" - -from .authenticators import IliasAuthenticator, KitShibbolethAuthenticator -from .crawler import (IliasCrawler, IliasCrawlerEntry, IliasDirectoryFilter, - IliasElementType) -from .downloader import (IliasDownloader, IliasDownloadInfo, - IliasDownloadStrategy, download_everything, - download_modified_or_new) diff --git a/PFERD/ilias/authenticators.py b/PFERD/ilias/authenticators.py deleted file mode 100644 index 763ed38..0000000 --- a/PFERD/ilias/authenticators.py +++ /dev/null @@ -1,131 +0,0 @@ -""" -Authenticators that can obtain proper ILIAS session cookies. -""" - -import abc -import logging -from typing import Optional - -import bs4 -import requests - -from ..authenticators import TfaAuthenticator, UserPassAuthenticator -from ..utils import soupify - -LOGGER = logging.getLogger(__name__) - - -class IliasAuthenticator(abc.ABC): - # pylint: disable=too-few-public-methods - - """ - An authenticator that logs an existing requests session into an ILIAS - account. - """ - - @abc.abstractmethod - def authenticate(self, sess: requests.Session) -> None: - """ - Log a requests session into this authenticator's ILIAS account. - """ - - -class KitShibbolethAuthenticator(IliasAuthenticator): - # pylint: disable=too-few-public-methods - - """ - Authenticate via KIT's shibboleth system. - """ - - def __init__(self, username: Optional[str] = None, password: Optional[str] = None) -> None: - self._auth = UserPassAuthenticator("KIT ILIAS Shibboleth", username, password) - self._tfa_auth = TfaAuthenticator("KIT ILIAS Shibboleth") - - def authenticate(self, sess: requests.Session) -> None: - """ - Performs the ILIAS Shibboleth authentication dance and saves the login - cookies it receieves. - - This function should only be called whenever it is detected that you're - not logged in. The cookies obtained should be good for a few minutes, - maybe even an hour or two. 
- """ - - # Equivalent: Click on "Mit KIT-Account anmelden" button in - # https://ilias.studium.kit.edu/login.php - LOGGER.debug("Begin authentication process with ILIAS") - url = "https://ilias.studium.kit.edu/Shibboleth.sso/Login" - data = { - "sendLogin": "1", - "idp_selection": "https://idp.scc.kit.edu/idp/shibboleth", - "target": "/shib_login.php", - "home_organization_selection": "Mit KIT-Account anmelden", - } - soup = soupify(sess.post(url, data=data)) - - # Attempt to login using credentials, if necessary - while not self._login_successful(soup): - # Searching the form here so that this fails before asking for - # credentials rather than after asking. - form = soup.find("form", {"class": "full content", "method": "post"}) - action = form["action"] - - # Equivalent: Enter credentials in - # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO - LOGGER.debug("Attempt to log in to Shibboleth using credentials") - url = "https://idp.scc.kit.edu" + action - data = { - "_eventId_proceed": "", - "j_username": self._auth.username, - "j_password": self._auth.password, - } - soup = soupify(sess.post(url, data=data)) - - if self._tfa_required(soup): - soup = self._authenticate_tfa(sess, soup) - - if not self._login_successful(soup): - print("Incorrect credentials.") - self._auth.invalidate_credentials() - - # Equivalent: Being redirected via JS automatically - # (or clicking "Continue" if you have JS disabled) - LOGGER.debug("Redirect back to ILIAS with login information") - relay_state = soup.find("input", {"name": "RelayState"}) - saml_response = soup.find("input", {"name": "SAMLResponse"}) - url = "https://ilias.studium.kit.edu/Shibboleth.sso/SAML2/POST" - data = { # using the info obtained in the while loop above - "RelayState": relay_state["value"], - "SAMLResponse": saml_response["value"], - } - sess.post(url, data=data) - - def _authenticate_tfa( - self, - session: requests.Session, - soup: bs4.BeautifulSoup - ) -> bs4.BeautifulSoup: - # Searching the form 
here so that this fails before asking for - # credentials rather than after asking. - form = soup.find("form", {"method": "post"}) - action = form["action"] - - # Equivalent: Enter token in - # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO - LOGGER.debug("Attempt to log in to Shibboleth with TFA token") - url = "https://idp.scc.kit.edu" + action - data = { - "_eventId_proceed": "", - "j_tokenNumber": self._tfa_auth.get_token() - } - return soupify(session.post(url, data=data)) - - @staticmethod - def _login_successful(soup: bs4.BeautifulSoup) -> bool: - relay_state = soup.find("input", {"name": "RelayState"}) - saml_response = soup.find("input", {"name": "SAMLResponse"}) - return relay_state is not None and saml_response is not None - - @staticmethod - def _tfa_required(soup: bs4.BeautifulSoup) -> bool: - return soup.find(id="j_tokenNumber") is not None diff --git a/PFERD/ilias/crawler.py b/PFERD/ilias/crawler.py deleted file mode 100644 index 7ce460e..0000000 --- a/PFERD/ilias/crawler.py +++ /dev/null @@ -1,644 +0,0 @@ -""" -Contains an ILIAS crawler alongside helper functions. -""" - -import datetime -import json -import logging -import re -from enum import Enum -from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Union -from urllib.parse import (parse_qs, urlencode, urljoin, urlparse, urlsplit, - urlunsplit) - -import bs4 -import requests - -from ..errors import FatalException -from ..logging import PrettyLogger -from ..utils import soupify -from .authenticators import IliasAuthenticator -from .date_demangler import demangle_date -from .downloader import IliasDownloadInfo - -LOGGER = logging.getLogger(__name__) -PRETTY = PrettyLogger(LOGGER) - - -class IliasElementType(Enum): - """ - The type of an ilias element. 
- """ - REGULAR_FOLDER = "REGULAR_FOLDER" - VIDEO_FOLDER = "VIDEO_FOLDER" - EXERCISE_FOLDER = "EXERCISE_FOLDER" - REGULAR_FILE = "REGULAR_FILE" - VIDEO_FILE = "VIDEO_FILE" - FORUM = "FORUM" - EXTERNAL_LINK = "EXTERNAL_LINK" - - def is_folder(self) -> bool: - """ - Returns whether this type is some kind of folder. - """ - return "FOLDER" in str(self.name) - - -IliasDirectoryFilter = Callable[[Path, IliasElementType], bool] - - -class IliasCrawlerEntry: - # pylint: disable=too-few-public-methods - """ - An ILIAS crawler entry used internally to find, catalogue and recursively crawl elements. - """ - - def __init__( - self, - path: Path, - url: Union[str, Callable[[], Optional[str]]], - entry_type: IliasElementType, - modification_date: Optional[datetime.datetime] - ): - self.path = path - if isinstance(url, str): - str_url = url - self.url: Callable[[], Optional[str]] = lambda: str_url - else: - self.url = url - self.entry_type = entry_type - self.modification_date = modification_date - - def to_download_info(self) -> Optional[IliasDownloadInfo]: - """ - Converts this crawler entry to an IliasDownloadInfo, if possible. - This method will only succeed for *File* types. - """ - if self.entry_type in [IliasElementType.REGULAR_FILE, IliasElementType.VIDEO_FILE]: - return IliasDownloadInfo(self.path, self.url, self.modification_date) - return None - - -class IliasCrawler: - # pylint: disable=too-few-public-methods - - """ - A crawler for ILIAS. - """ - - # pylint: disable=too-many-arguments - def __init__( - self, - base_url: str, - session: requests.Session, - authenticator: IliasAuthenticator, - dir_filter: IliasDirectoryFilter - ): - """ - Create a new ILIAS crawler. - """ - - self._base_url = base_url - self._session = session - self._authenticator = authenticator - self.dir_filter = dir_filter - - @staticmethod - def _url_set_query_param(url: str, param: str, value: str) -> str: - """ - Set a query parameter in an url, overwriting existing ones with the same name. 
- """ - scheme, netloc, path, query, fragment = urlsplit(url) - query_parameters = parse_qs(query) - query_parameters[param] = [value] - new_query_string = urlencode(query_parameters, doseq=True) - - return urlunsplit((scheme, netloc, path, new_query_string, fragment)) - - def recursive_crawl_url(self, url: str) -> List[IliasDownloadInfo]: - """ - Crawls a given url *and all reachable elements in it*. - - Args: - url {str} -- the *full* url to crawl - """ - start_entries: List[IliasCrawlerEntry] = self._crawl_folder(Path(""), url) - return self._iterate_entries_to_download_infos(start_entries) - - def crawl_course(self, course_id: str) -> List[IliasDownloadInfo]: - """ - Starts the crawl process for a course, yielding a list of elements to (potentially) - download. - - Arguments: - course_id {str} -- the course id - - Raises: - FatalException: if an unrecoverable error occurs or the course id is not valid - """ - # Start crawling at the given course - root_url = self._url_set_query_param( - self._base_url + "/goto.php", "target", f"crs_{course_id}" - ) - - if not self._is_course_id_valid(root_url, course_id): - raise FatalException( - "Invalid course id? I didn't find anything looking like a course!" - ) - - # And treat it as a folder - entries: List[IliasCrawlerEntry] = self._crawl_folder(Path(""), root_url) - return self._iterate_entries_to_download_infos(entries) - - def _is_course_id_valid(self, root_url: str, course_id: str) -> bool: - response: requests.Response = self._session.get(root_url) - # We were redirected ==> Non-existant ID - if course_id not in response.url: - return False - - link_element: bs4.Tag = self._get_page(root_url, {}).find(id="current_perma_link") - if not link_element: - return False - # It wasn't a course but a category list, forum, etc. - return "crs_" in link_element.get("value") - - def find_course_name(self, course_id: str) -> Optional[str]: - """ - Returns the name of a given course. 
None if it is not a valid course - or it could not be found. - """ - course_url = self._url_set_query_param( - self._base_url + "/goto.php", "target", f"crs_{course_id}" - ) - return self.find_element_name(course_url) - - def find_element_name(self, url: str) -> Optional[str]: - """ - Returns the name of the element at the given URL, if it can find one. - """ - focus_element: bs4.Tag = self._get_page(url, {}).find(id="il_mhead_t_focus") - if not focus_element: - return None - return focus_element.text - - def crawl_personal_desktop(self) -> List[IliasDownloadInfo]: - """ - Crawls the ILIAS personal desktop (and every subelements that can be reached from there). - - Raises: - FatalException: if an unrecoverable error occurs - """ - entries: List[IliasCrawlerEntry] = self._crawl_folder( - Path(""), self._base_url + "?baseClass=ilPersonalDesktopGUI" - ) - return self._iterate_entries_to_download_infos(entries) - - def _iterate_entries_to_download_infos( - self, - entries: List[IliasCrawlerEntry] - ) -> List[IliasDownloadInfo]: - result: List[IliasDownloadInfo] = [] - entries_to_process: List[IliasCrawlerEntry] = entries.copy() - while len(entries_to_process) > 0: - entry = entries_to_process.pop() - - if entry.entry_type == IliasElementType.EXTERNAL_LINK: - PRETTY.not_searching(entry.path, "external link") - continue - if entry.entry_type == IliasElementType.FORUM: - PRETTY.not_searching(entry.path, "forum") - continue - - if entry.entry_type.is_folder() and not self.dir_filter(entry.path, entry.entry_type): - PRETTY.not_searching(entry.path, "user filter") - continue - - download_info = entry.to_download_info() - if download_info is not None: - result.append(download_info) - continue - - url = entry.url() - - if url is None: - PRETTY.warning(f"Could not find url for {str(entry.path)!r}, skipping it") - continue - - PRETTY.searching(entry.path) - - if entry.entry_type == IliasElementType.EXERCISE_FOLDER: - entries_to_process += self._crawl_exercises(entry.path, url) - 
continue - if entry.entry_type == IliasElementType.REGULAR_FOLDER: - entries_to_process += self._crawl_folder(entry.path, url) - continue - if entry.entry_type == IliasElementType.VIDEO_FOLDER: - entries_to_process += self._crawl_video_directory(entry.path, url) - continue - - return result - - def _crawl_folder(self, folder_path: Path, url: str) -> List[IliasCrawlerEntry]: - """ - Crawl all files in a folder-like element. - """ - soup = self._get_page(url, {}) - - if soup.find(id="headerimage"): - element: bs4.Tag = soup.find(id="headerimage") - if "opencast" in element.attrs["src"].lower(): - PRETTY.warning(f"Switched to crawling a video at {folder_path}") - if not self.dir_filter(folder_path, IliasElementType.VIDEO_FOLDER): - PRETTY.not_searching(folder_path, "user filter") - return [] - return self._crawl_video_directory(folder_path, url) - - result: List[IliasCrawlerEntry] = [] - - # Fetch all links and throw them to the general interpreter - links: List[bs4.Tag] = soup.select("a.il_ContainerItemTitle") - for link in links: - abs_url = self._abs_url_from_link(link) - element_path = Path(folder_path, link.getText().strip()) - element_type = self._find_type_from_link(element_path, link, abs_url) - - if element_type == IliasElementType.REGULAR_FILE: - result += self._crawl_file(folder_path, link, abs_url) - elif element_type is not None: - result += [IliasCrawlerEntry(element_path, abs_url, element_type, None)] - else: - PRETTY.warning(f"Found element without a type at {str(element_path)!r}") - - return result - - def _abs_url_from_link(self, link_tag: bs4.Tag) -> str: - """ - Create an absolute url from an tag. - """ - return urljoin(self._base_url, link_tag.get("href")) - - @staticmethod - def _find_type_from_link( - path: Path, - link_element: bs4.Tag, - url: str - ) -> Optional[IliasElementType]: - """ - Decides which sub crawler to use for a given top level element. 
- """ - parsed_url = urlparse(url) - LOGGER.debug("Parsed url: %r", parsed_url) - - # file URLs contain "target=file" - if "target=file_" in parsed_url.query: - return IliasElementType.REGULAR_FILE - - # Skip forums - if "cmd=showThreads" in parsed_url.query: - return IliasElementType.FORUM - - # Everything with a ref_id can *probably* be opened to reveal nested things - # video groups, directories, exercises, etc - if "ref_id=" in parsed_url.query: - return IliasCrawler._find_type_from_folder_like(link_element, url) - - PRETTY.warning( - "Got unknown element type in switch. I am not sure what horror I found on the" - f" ILIAS page. The element was at {str(path)!r} and it is {link_element!r})" - ) - return None - - @staticmethod - def _find_type_from_folder_like(link_element: bs4.Tag, url: str) -> Optional[IliasElementType]: - """ - Try crawling something that looks like a folder. - """ - # pylint: disable=too-many-return-statements - - # We look for the outer div of our inner link, to find information around it - # (mostly the icon) - for parent in link_element.parents: - if "ilContainerListItemOuter" in parent["class"]: - found_parent = parent - break - - if found_parent is None: - PRETTY.warning(f"Could not find element icon for {url!r}") - return None - - # Find the small descriptive icon to figure out the type - img_tag: Optional[bs4.Tag] = found_parent.select_one("img.ilListItemIcon") - - if img_tag is None: - PRETTY.warning(f"Could not find image tag for {url!r}") - return None - - if "opencast" in str(img_tag["alt"]).lower(): - return IliasElementType.VIDEO_FOLDER - - if str(img_tag["src"]).endswith("icon_exc.svg"): - return IliasElementType.EXERCISE_FOLDER - - if str(img_tag["src"]).endswith("icon_webr.svg"): - return IliasElementType.EXTERNAL_LINK - - if str(img_tag["src"]).endswith("frm.svg"): - return IliasElementType.FORUM - - return IliasElementType.REGULAR_FOLDER - - @staticmethod - def _crawl_file(path: Path, link_element: bs4.Tag, url: str) -> 
List[IliasCrawlerEntry]: - """ - Crawls a file. - """ - # Files have a list of properties (type, modification date, size, etc.) - # In a series of divs. - # Find the parent containing all those divs, so we can filter our what we need - properties_parent: bs4.Tag = link_element.findParent( - "div", {"class": lambda x: "il_ContainerListItem" in x} - ).select_one(".il_ItemProperties") - # The first one is always the filetype - file_type = properties_parent.select_one("span.il_ItemProperty").getText().strip() - - # The rest does not have a stable order. Grab the whole text and reg-ex the date - # out of it - all_properties_text = properties_parent.getText().strip() - modification_date_match = re.search( - r"(((\d+\. \w+ \d+)|(Gestern|Yesterday)|(Heute|Today)|(Morgen|Tomorrow)), \d+:\d+)", - all_properties_text - ) - if modification_date_match is None: - modification_date = None - PRETTY.warning(f"Could not extract start date from {all_properties_text!r}") - else: - modification_date_str = modification_date_match.group(1) - modification_date = demangle_date(modification_date_str) - - # Grab the name from the link text - name = link_element.getText() - full_path = Path(path, name + "." + file_type) - - return [ - IliasCrawlerEntry(full_path, url, IliasElementType.REGULAR_FILE, modification_date) - ] - - def _crawl_video_directory(self, video_dir_path: Path, url: str) -> List[IliasCrawlerEntry]: - """ - Crawl the video overview site. - """ - initial_soup = self._get_page(url, {}) - - # The page is actually emtpy but contains a much needed token in the link below. - # That token can be used to fetch the *actual* video listing - content_link: bs4.Tag = initial_soup.select_one("#tab_series a") - # Fetch the actual video listing. 
The given parameters return all videos (max 800) - # in a standalone html page - video_list_soup = self._get_page( - self._abs_url_from_link(content_link), - {"limit": 800, "cmd": "asyncGetTableGUI", "cmdMode": "asynch"} - ) - - # If we find a page selected, we probably need to respect pagination - if self._is_paginated_video_page(video_list_soup): - second_stage_url = self._abs_url_from_link(content_link) - - return self._crawl_paginated_video_directory( - video_dir_path, video_list_soup, second_stage_url - ) - - return self._crawl_video_directory_second_stage(video_dir_path, video_list_soup) - - @staticmethod - def _is_paginated_video_page(soup: bs4.BeautifulSoup) -> bool: - return soup.find(id=re.compile(r"tab_page_sel.+")) is not None - - def _crawl_paginated_video_directory( - self, - video_dir_path: Path, - paged_video_list_soup: bs4.BeautifulSoup, - second_stage_url: str - ) -> List[IliasCrawlerEntry]: - LOGGER.info("Found paginated video page, trying 800 elements") - - # Try to find the table id. This can be used to build the query parameter indicating - # you want 800 elements - - table_element: bs4.Tag = paged_video_list_soup.find( - name="table", id=re.compile(r"tbl_xoct_.+") - ) - if table_element is None: - PRETTY.warning( - "Could not increase elements per page (table not found)." - " Some might not be crawled!" - ) - return self._crawl_video_directory_second_stage(video_dir_path, paged_video_list_soup) - - match = re.match(r"tbl_xoct_(.+)", table_element.attrs["id"]) - if match is None: - PRETTY.warning( - "Could not increase elements per page (table id not found)." - " Some might not be crawled!" 
- ) - return self._crawl_video_directory_second_stage(video_dir_path, paged_video_list_soup) - table_id = match.group(1) - - extended_video_page = self._get_page( - second_stage_url, - {f"tbl_xoct_{table_id}_trows": 800, "cmd": "asyncGetTableGUI", "cmdMode": "asynch"} - ) - - if self._is_paginated_video_page(extended_video_page): - PRETTY.warning( - "800 elements do not seem to be enough (or I failed to fetch that many)." - " I will miss elements." - ) - - return self._crawl_video_directory_second_stage(video_dir_path, extended_video_page) - - def _crawl_video_directory_second_stage( - self, - video_dir_path: Path, - video_list_soup: bs4.BeautifulSoup - ) -> List[IliasCrawlerEntry]: - """ - Crawls the "second stage" video page. This page contains the actual video urls. - """ - direct_download_links: List[bs4.Tag] = video_list_soup.findAll( - name="a", text=re.compile(r"\s*Download\s*") - ) - - # Video start links are marked with an "Abspielen" link - video_links: List[bs4.Tag] = video_list_soup.findAll( - name="a", text=re.compile(r"\s*Abspielen\s*") - ) - - results: List[IliasCrawlerEntry] = [] - - # We can download everything directly! - # FIXME: Sadly the download button is currently broken, so never do that - if False and len(direct_download_links) == len(video_links): - for link in direct_download_links: - results += self._crawl_single_video(video_dir_path, link, True) - else: - for link in video_links: - results += self._crawl_single_video(video_dir_path, link, False) - - return results - - def _crawl_single_video( - self, - parent_path: Path, - link: bs4.Tag, - direct_download: bool - ) -> List[IliasCrawlerEntry]: - """ - Crawl a single video based on its "Abspielen" link from the video listing. - """ - # The link is part of a table with multiple columns, describing metadata. 
- # 6th child (1 indexed) is the modification time string - modification_string = link.parent.parent.parent.select_one( - "td.std:nth-child(6)" - ).getText().strip() - modification_time = datetime.datetime.strptime(modification_string, "%d.%m.%Y - %H:%M") - - title = link.parent.parent.parent.select_one( - "td.std:nth-child(3)" - ).getText().strip() - title += ".mp4" - - video_path: Path = Path(parent_path, title) - - video_url = self._abs_url_from_link(link) - - # The video had a direct download button we can use instead - if direct_download: - LOGGER.debug("Using direct download for video %r", str(video_path)) - return [IliasCrawlerEntry( - video_path, video_url, IliasElementType.VIDEO_FILE, modification_time - )] - - return [IliasCrawlerEntry( - video_path, - self._crawl_video_url_from_play_link(video_url), - IliasElementType.VIDEO_FILE, - modification_time - )] - - def _crawl_video_url_from_play_link(self, play_url: str) -> Callable[[], Optional[str]]: - def inner() -> Optional[str]: - # Fetch the actual video page. This is a small wrapper page initializing a javscript - # player. Sadly we can not execute that JS. The actual video stream url is nowhere - # on the page, but defined in a JS object inside a script tag, passed to the player - # library. - # We do the impossible and RegEx the stream JSON object out of the page's HTML source - video_page_soup = soupify(self._session.get(play_url)) - regex: re.Pattern = re.compile( - r"({\"streams\"[\s\S]+?),\s*{\"paella_config_file", re.IGNORECASE - ) - json_match = regex.search(str(video_page_soup)) - - if json_match is None: - PRETTY.warning(f"Could not find json stream info for {play_url!r}") - return None - json_str = json_match.group(1) - - # parse it - json_object = json.loads(json_str) - # and fetch the video url! 
- video_url = json_object["streams"][0]["sources"]["mp4"][0]["src"] - return video_url - return inner - - def _crawl_exercises(self, element_path: Path, url: str) -> List[IliasCrawlerEntry]: - """ - Crawl files offered for download in exercises. - """ - soup = self._get_page(url, {}) - - results: List[IliasCrawlerEntry] = [] - - # Each assignment is in an accordion container - assignment_containers: List[bs4.Tag] = soup.select(".il_VAccordionInnerContainer") - - for container in assignment_containers: - # Fetch the container name out of the header to use it in the path - container_name = container.select_one(".ilAssignmentHeader").getText().strip() - # Find all download links in the container (this will contain all the files) - files: List[bs4.Tag] = container.findAll( - name="a", - # download links contain the given command class - attrs={"href": lambda x: x and "cmdClass=ilexsubmissiongui" in x}, - text="Download" - ) - - LOGGER.debug("Found exercise container %r", container_name) - - # Grab each file as you now have the link - for file_link in files: - # Two divs, side by side. Left is the name, right is the link ==> get left - # sibling - file_name = file_link.parent.findPrevious(name="div").getText().strip() - url = self._abs_url_from_link(file_link) - - LOGGER.debug("Found file %r at %r", file_name, url) - - results.append(IliasCrawlerEntry( - Path(element_path, container_name, file_name), - url, - IliasElementType.REGULAR_FILE, - None # We do not have any timestamp - )) - - return results - - def _get_page(self, url: str, params: Dict[str, Any]) -> bs4.BeautifulSoup: - """ - Fetches a page from ILIAS, authenticating when needed. 
- """ - LOGGER.debug("Fetching %r", url) - - response = self._session.get(url, params=params) - content_type = response.headers["content-type"] - - if not content_type.startswith("text/html"): - raise FatalException( - f"Invalid content type {content_type} when crawling ilias page" - " {url!r} with {params!r}" - ) - - soup = soupify(response) - - if self._is_logged_in(soup): - return soup - - LOGGER.info("Not authenticated, changing that...") - - self._authenticator.authenticate(self._session) - - return self._get_page(url, params) - - @staticmethod - def _is_logged_in(soup: bs4.BeautifulSoup) -> bool: - # Normal ILIAS pages - userlog = soup.find("li", {"id": "userlog"}) - if userlog is not None: - LOGGER.debug("Auth: Found #userlog") - return True - # Video listing embeds do not have complete ILIAS html. Try to match them by - # their video listing table - video_table = soup.find( - recursive=True, - name="table", - attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")} - ) - if video_table is not None: - LOGGER.debug("Auth: Found #tbl_xoct.+") - return True - # The individual video player wrapper page has nothing of the above. - # Match it by its playerContainer. - if soup.select_one("#playerContainer") is not None: - LOGGER.debug("Auth: Found #playerContainer") - return True - return False diff --git a/PFERD/ilias/date_demangler.py b/PFERD/ilias/date_demangler.py deleted file mode 100644 index 9c1fc8d..0000000 --- a/PFERD/ilias/date_demangler.py +++ /dev/null @@ -1,51 +0,0 @@ -""" -Helper methods to demangle an ILIAS date. -""" - -import datetime -import locale -import logging -import re -from typing import Optional - -from ..logging import PrettyLogger - -LOGGER = logging.getLogger(__name__) -PRETTY = PrettyLogger(LOGGER) - - -def demangle_date(date: str) -> Optional[datetime.datetime]: - """ - Demangle a given date in one of the following formats: - "Gestern, HH:MM" - "Heute, HH:MM" - "Morgen, HH:MM" - "dd. 
mon.yyyy, HH:MM - """ - saved = locale.setlocale(locale.LC_ALL) - try: - try: - locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8') - except locale.Error: - PRETTY.warning( - "Could not set language to german. Assuming you use english everywhere." - ) - - date = re.sub(r"\s+", " ", date) - date = re.sub("Gestern|Yesterday", _yesterday().strftime("%d. %b %Y"), date, re.I) - date = re.sub("Heute|Today", datetime.date.today().strftime("%d. %b %Y"), date, re.I) - date = re.sub("Morgen|Tomorrow", _tomorrow().strftime("%d. %b %Y"), date, re.I) - return datetime.datetime.strptime(date, "%d. %b %Y, %H:%M") - except ValueError: - PRETTY.warning(f"Could not parse date {date!r}") - return None - finally: - locale.setlocale(locale.LC_ALL, saved) - - -def _yesterday() -> datetime.date: - return datetime.date.today() - datetime.timedelta(days=1) - - -def _tomorrow() -> datetime.date: - return datetime.date.today() + datetime.timedelta(days=1) diff --git a/PFERD/ilias/downloader.py b/PFERD/ilias/downloader.py deleted file mode 100644 index 82527a0..0000000 --- a/PFERD/ilias/downloader.py +++ /dev/null @@ -1,162 +0,0 @@ -"""Contains a downloader for ILIAS.""" - -import datetime -import logging -import math -import os -from pathlib import Path, PurePath -from typing import Callable, List, Optional, Union - -import bs4 -import requests - -from ..logging import PrettyLogger -from ..organizer import Organizer -from ..tmp_dir import TmpDir -from ..transform import Transformable -from ..utils import soupify, stream_to_path -from .authenticators import IliasAuthenticator - -LOGGER = logging.getLogger(__name__) -PRETTY = PrettyLogger(LOGGER) - - -class ContentTypeException(Exception): - """Thrown when the content type of the ilias element can not be handled.""" - - -class IliasDownloadInfo(Transformable): - """ - This class describes a single file to be downloaded. 
- """ - - def __init__( - self, - path: PurePath, - url: Union[str, Callable[[], Optional[str]]], - modifcation_date: Optional[datetime.datetime] - ): - super().__init__(path) - if isinstance(url, str): - string_url = url - self.url: Callable[[], Optional[str]] = lambda: string_url - else: - self.url = url - self.modification_date = modifcation_date - - -IliasDownloadStrategy = Callable[[Organizer, IliasDownloadInfo], bool] - - -def download_everything(organizer: Organizer, info: IliasDownloadInfo) -> bool: - # pylint: disable=unused-argument - """ - Accepts everything. - """ - return True - - -def download_modified_or_new(organizer: Organizer, info: IliasDownloadInfo) -> bool: - """ - Accepts new files or files with a more recent modification date. - """ - resolved_file = organizer.resolve(info.path) - if not resolved_file.exists() or info.modification_date is None: - return True - resolved_mod_time_seconds = resolved_file.stat().st_mtime - - # Download if the info is newer - if info.modification_date.timestamp() > resolved_mod_time_seconds: - return True - - PRETTY.ignored_file(info.path, "local file has newer or equal modification time") - return False - - -class IliasDownloader: - # pylint: disable=too-many-arguments - """A downloader for ILIAS.""" - - def __init__( - self, - tmp_dir: TmpDir, - organizer: Organizer, - session: requests.Session, - authenticator: IliasAuthenticator, - strategy: IliasDownloadStrategy, - timeout: int = 5 - ): - """ - Create a new IliasDownloader. - - The timeout applies to the download request only, as bwcloud uses IPv6 - and requests has a problem with that: https://github.com/psf/requests/issues/5522 - """ - - self._tmp_dir = tmp_dir - self._organizer = organizer - self._session = session - self._authenticator = authenticator - self._strategy = strategy - self._timeout = timeout - - def download_all(self, infos: List[IliasDownloadInfo]) -> None: - """ - Download multiple files one after the other. 
- """ - - for info in infos: - self.download(info) - - def download(self, info: IliasDownloadInfo) -> None: - """ - Download a file from ILIAS. - - Retries authentication until eternity if it could not fetch the file. - """ - - LOGGER.debug("Downloading %r", info) - if not self._strategy(self._organizer, info): - self._organizer.mark(info.path) - return - - tmp_file = self._tmp_dir.new_path() - - while not self._try_download(info, tmp_file): - LOGGER.info("Retrying download: %r", info) - self._authenticator.authenticate(self._session) - - dst_path = self._organizer.accept_file(tmp_file, info.path) - if dst_path and info.modification_date: - os.utime( - dst_path, - times=( - math.ceil(info.modification_date.timestamp()), - math.ceil(info.modification_date.timestamp()) - ) - ) - - def _try_download(self, info: IliasDownloadInfo, target: Path) -> bool: - url = info.url() - if url is None: - PRETTY.warning(f"Could not download {str(info.path)!r} as I got no URL :/") - return True - - with self._session.get(url, stream=True, timeout=self._timeout) as response: - content_type = response.headers["content-type"] - has_content_disposition = "content-disposition" in response.headers - - if content_type.startswith("text/html") and not has_content_disposition: - if self._is_logged_in(soupify(response)): - raise ContentTypeException("Attempting to download a web page, not a file") - - return False - - # Yay, we got the file :) - stream_to_path(response, target, info.path.name) - return True - - @staticmethod - def _is_logged_in(soup: bs4.BeautifulSoup) -> bool: - userlog = soup.find("li", {"id": "userlog"}) - return userlog is not None diff --git a/PFERD/limiter.py b/PFERD/limiter.py new file mode 100644 index 0000000..01b4914 --- /dev/null +++ b/PFERD/limiter.py @@ -0,0 +1,93 @@ +import asyncio +import time +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager +from dataclasses import dataclass +from typing import Optional + + +@dataclass 
+class Slot: + active: bool = False + last_left: Optional[float] = None + + +class Limiter: + def __init__(self, task_limit: int, download_limit: int, task_delay: float): + if task_limit <= 0: + raise ValueError("task limit must be at least 1") + if download_limit <= 0: + raise ValueError("download limit must be at least 1") + if download_limit > task_limit: + raise ValueError("download limit can't be greater than task limit") + if task_delay < 0: + raise ValueError("Task delay must not be negative") + + self._slots = [Slot() for _ in range(task_limit)] + self._downloads = download_limit + self._delay = task_delay + + self._condition = asyncio.Condition() + + def _acquire_slot(self) -> Optional[Slot]: + for slot in self._slots: + if not slot.active: + slot.active = True + return slot + + return None + + async def _wait_for_slot_delay(self, slot: Slot) -> None: + if slot.last_left is not None: + delay = slot.last_left + self._delay - time.time() + if delay > 0: + await asyncio.sleep(delay) + + def _release_slot(self, slot: Slot) -> None: + slot.last_left = time.time() + slot.active = False + + @asynccontextmanager + async def limit_crawl(self) -> AsyncIterator[None]: + slot: Slot + async with self._condition: + while True: + if found_slot := self._acquire_slot(): + slot = found_slot + break + await self._condition.wait() + + await self._wait_for_slot_delay(slot) + + try: + yield + finally: + async with self._condition: + self._release_slot(slot) + self._condition.notify_all() + + @asynccontextmanager + async def limit_download(self) -> AsyncIterator[None]: + slot: Slot + async with self._condition: + while True: + if self._downloads <= 0: + await self._condition.wait() + continue + + if found_slot := self._acquire_slot(): + slot = found_slot + self._downloads -= 1 + break + + await self._condition.wait() + + await self._wait_for_slot_delay(slot) + + try: + yield + finally: + async with self._condition: + self._release_slot(slot) + self._downloads += 1 + 
self._condition.notify_all() diff --git a/PFERD/location.py b/PFERD/location.py deleted file mode 100644 index 7f4c8ca..0000000 --- a/PFERD/location.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Contains a Location class for objects with an inherent path. -""" - -from pathlib import Path, PurePath - - -class ResolveException(Exception): - """An exception while resolving a file.""" - # TODO take care of this when doing exception handling - - -class Location: - """ - An object that has an inherent path. - """ - - def __init__(self, path: Path): - self._path = path.resolve() - - @property - def path(self) -> Path: - """ - This object's location. - """ - - return self._path - - def resolve(self, target: PurePath) -> Path: - """ - Resolve a file relative to the path of this location. - - Raises a [ResolveException] if the file is outside the given directory. - """ - absolute_path = self.path.joinpath(target).resolve() - - # TODO Make this less inefficient - if self.path not in absolute_path.parents: - raise ResolveException(f"Path {target} is not inside directory {self.path}") - - return absolute_path diff --git a/PFERD/logging.py b/PFERD/logging.py index 76741f7..ac633ec 100644 --- a/PFERD/logging.py +++ b/PFERD/logging.py @@ -1,187 +1,302 @@ -""" -Contains a few logger utility functions and implementations. 
-""" +import asyncio +import sys +import traceback +from collections.abc import AsyncIterator, Iterator +from contextlib import AbstractContextManager, asynccontextmanager, contextmanager +from typing import Any, Optional -import logging -from pathlib import Path -from typing import List, Optional - -from rich import print as rich_print -from rich._log_render import LogRender -from rich.console import Console +from rich.console import Console, Group +from rich.live import Live +from rich.markup import escape from rich.panel import Panel -from rich.style import Style -from rich.text import Text -from rich.theme import Theme - -from .download_summary import DownloadSummary -from .utils import PathLike, to_path - -STYLE = "{" -FORMAT = "[{levelname:<7}] {message}" -DATE_FORMAT = "%F %T" +from rich.progress import ( + BarColumn, + DownloadColumn, + Progress, + TaskID, + TextColumn, + TimeRemainingColumn, + TransferSpeedColumn, +) +from rich.table import Column -def enable_logging(name: str = "PFERD", level: int = logging.INFO) -> None: - """ - Enable and configure logging via the logging module. - """ +class ProgressBar: + def __init__(self, progress: Progress, taskid: TaskID): + self._progress = progress + self._taskid = taskid - logger = logging.getLogger(name) - logger.setLevel(level) - logger.addHandler(RichLoggingHandler(level=level)) + def advance(self, amount: float = 1) -> None: + self._progress.advance(self._taskid, advance=amount) - # This should be logged by our own handler, and not the root logger's - # default handler, so we don't pass it on to the root logger. 
- logger.propagate = False + def set_total(self, total: float) -> None: + self._progress.update(self._taskid, total=total) + self._progress.start_task(self._taskid) -class RichLoggingHandler(logging.Handler): - """ - A logging handler that uses rich for highlighting - """ +class Log: + STATUS_WIDTH = 11 - def __init__(self, level: int) -> None: - super().__init__(level=level) - self.console = Console(theme=Theme({ - "logging.level.warning": Style(color="yellow") - })) - self._log_render = LogRender(show_level=True, show_time=False, show_path=False) + def __init__(self) -> None: + self.console = Console(highlight=False) - def emit(self, record: logging.LogRecord) -> None: + self._crawl_progress = Progress( + TextColumn("{task.description}", table_column=Column(ratio=1)), + BarColumn(), + TimeRemainingColumn(), + expand=True, + ) + self._download_progress = Progress( + TextColumn("{task.description}", table_column=Column(ratio=1)), + TransferSpeedColumn(), + DownloadColumn(), + BarColumn(), + TimeRemainingColumn(), + expand=True, + ) + + self._live = Live(console=self.console, transient=True) + self._update_live() + + self._showing_progress = False + self._progress_suspended = False + self._lock = asyncio.Lock() + self._lines: list[str] = [] + + # Whether different parts of the output are enabled or disabled + self.output_explain = False + self.output_status = True + self.output_not_deleted = True + self.output_report = True + + def _update_live(self) -> None: + elements = [] + if self._crawl_progress.task_ids: + elements.append(self._crawl_progress) + if self._download_progress.task_ids: + elements.append(self._download_progress) + + group = Group(*elements) + self._live.update(group) + + @contextmanager + def show_progress(self) -> Iterator[None]: + if self._showing_progress: + raise RuntimeError("Calling 'show_progress' while already showing progress") + + self._showing_progress = True + try: + with self._live: + yield + finally: + self._showing_progress = False + 
+ @asynccontextmanager + async def exclusive_output(self) -> AsyncIterator[None]: + if not self._showing_progress: + raise RuntimeError("Calling 'exclusive_output' while not showing progress") + + async with self._lock: + self._progress_suspended = True + self._live.stop() + try: + yield + finally: + self._live.start() + self._progress_suspended = False + for line in self._lines: + self.print(line) + self._lines = [] + + def unlock(self) -> None: """ - Invoked by logging. + Get rid of an exclusive output state. + + This function is meant to let PFERD print log messages after the event + loop was forcibly stopped and if it will not be started up again. After + this is called, it is not safe to use any functions except the logging + functions (print, warn, ...). """ - log_style = f"logging.level.{record.levelname.lower()}" - message = self.format(record) - level = Text() - level.append(record.levelname, log_style) - message_text = Text.from_markup(message) + self._progress_suspended = False + for line in self._lines: + self.print(line) - self.console.print( - self._log_render( - self.console, - [message_text], - level=level, + def print(self, text: Any) -> None: + """ + Print a normal message. Allows markup. + """ + + if self._progress_suspended: + self._lines.append(text) + else: + self.console.print(text) + + # TODO Print errors (and warnings?) to stderr + + def warn(self, text: str) -> None: + """ + Print a warning message. Allows no markup. + """ + + self.print(f"[bold bright_red]Warning[/] {escape(text)}") + + def warn_contd(self, text: str) -> None: + """ + Print further lines of a warning message. Allows no markup. + """ + + self.print(f"{escape(text)}") + + def error(self, text: str) -> None: + """ + Print an error message. Allows no markup. + """ + + self.print(f"[bold bright_red]Error[/] [red]{escape(text)}") + + def error_contd(self, text: str) -> None: + """ + Print further lines of an error message. Allows no markup. 
+ """ + + self.print(f"[red]{escape(text)}") + + def unexpected_exception(self) -> None: + """ + Call this in an "except" clause to log an unexpected exception. + """ + + t, v, tb = sys.exc_info() + if t is None or v is None or tb is None: + # We're not currently handling an exception, so somebody probably + # called this function where they shouldn't. + self.error("Something unexpected happened") + self.error_contd("") + for line in traceback.format_stack(): + self.error_contd(line[:-1]) # Without the newline + self.error_contd("") + else: + self.error("An unexpected exception occurred") + self.error_contd("") + self.error_contd(traceback.format_exc()) + + # Our print function doesn't take types other than strings, but the + # underlying rich.print function does. This call is a special case + # anyways, and we're calling it internally, so this should be fine. + self.print( + Panel.fit( + """ +Please copy your program output and send it to the PFERD maintainers, either +directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new + """.strip() ) ) - -class PrettyLogger: - """ - A logger that prints some specially formatted log messages in color. - """ - - def __init__(self, logger: logging.Logger) -> None: - self.logger = logger - - @staticmethod - def _format_path(path: PathLike) -> str: - return repr(str(to_path(path))) - - def error(self, message: str) -> None: + def explain_topic(self, text: str) -> None: """ - Print an error message indicating some operation fatally failed. - """ - self.logger.error( - f"[bold red]{message}[/bold red]" - ) - - def warning(self, message: str) -> None: - """ - Print a warning message indicating some operation failed, but the error can be recovered - or ignored. - """ - self.logger.warning( - f"[bold yellow]{message}[/bold yellow]" - ) - - def modified_file(self, path: PathLike) -> None: - """ - An existing file has changed. + Print a top-level explain text. Allows no markup. 
""" - self.logger.info( - f"[bold magenta]Modified {self._format_path(path)}.[/bold magenta]" - ) + if self.output_explain: + self.print(f"[yellow]{escape(text)}") - def new_file(self, path: PathLike) -> None: + def explain(self, text: str) -> None: """ - A new file has been downloaded. + Print an indented explain text. Allows no markup. """ - self.logger.info( - f"[bold green]Created {self._format_path(path)}.[/bold green]" - ) + if self.output_explain: + self.print(f" {escape(text)}") - def deleted_file(self, path: PathLike) -> None: + def status(self, style: str, action: str, text: str, suffix: str = "") -> None: """ - A file has been deleted. + Print a status update while crawling. Allows markup in the "style" + argument which will be applied to the "action" string. """ - self.logger.info( - f"[bold red]Deleted {self._format_path(path)}.[/bold red]" - ) + if self.output_status: + action = escape(f"{action:<{self.STATUS_WIDTH}}") + self.print(f"{style}{action}[/] {escape(text)} {suffix}") - def ignored_file(self, path: PathLike, reason: str) -> None: + def not_deleted(self, style: str, action: str, text: str, suffix: str = "") -> None: """ - File was not downloaded or modified. + Print a message for a local only file that wasn't + deleted while crawling. Allows markup in the "style" + argument which will be applied to the "action" string. """ - self.logger.info( - f"[dim]Ignored {self._format_path(path)} " - f"([/dim]{reason}[dim]).[/dim]" - ) + if self.output_status and self.output_not_deleted: + action = escape(f"{action:<{self.STATUS_WIDTH}}") + self.print(f"{style}{action}[/] {escape(text)} {suffix}") - def searching(self, path: PathLike) -> None: + def report(self, text: str) -> None: """ - A crawler searches a particular object. + Print a report after crawling. Allows markup. 
""" - self.logger.info(f"Searching {self._format_path(path)}") + if self.output_report: + self.print(text) - def not_searching(self, path: PathLike, reason: str) -> None: + def report_not_deleted(self, text: str) -> None: """ - A crawler does not search a particular object. + Print a report for a local only file that wasn't deleted after crawling. Allows markup. """ - self.logger.info( - f"[dim]Not searching {self._format_path(path)} " - f"([/dim]{reason}[dim]).[/dim]" - ) + if self.output_report and self.output_not_deleted: + self.print(text) - def summary(self, download_summary: DownloadSummary) -> None: - """ - Prints a download summary. - """ - self.logger.info("") - self.logger.info("[bold cyan]Download Summary[/bold cyan]") - if not download_summary.has_updates(): - self.logger.info("[bold dim]Nothing changed![/bold dim]") - return + @contextmanager + def _bar( + self, + progress: Progress, + description: str, + total: Optional[float], + ) -> Iterator[ProgressBar]: + if total is None: + # Indeterminate progress bar + taskid = progress.add_task(description, start=False) + else: + taskid = progress.add_task(description, total=total) + self._update_live() - for new_file in download_summary.new_files: - self.new_file(new_file) - for modified_file in download_summary.modified_files: - self.modified_file(modified_file) - for deleted_files in download_summary.deleted_files: - self.deleted_file(deleted_files) + try: + yield ProgressBar(progress, taskid) + finally: + progress.remove_task(taskid) + self._update_live() - def starting_synchronizer( - self, - target_directory: PathLike, - synchronizer_name: str, - subject: Optional[str] = None, - ) -> None: + def crawl_bar( + self, + style: str, + action: str, + text: str, + total: Optional[float] = None, + ) -> AbstractContextManager[ProgressBar]: """ - A special message marking that a synchronizer has been started. + Allows markup in the "style" argument which will be applied to the + "action" string. 
""" - subject_str = f"{subject} " if subject else "" - self.logger.info("") - self.logger.info(( - f"[bold cyan]Synchronizing " - f"{subject_str}to {self._format_path(target_directory)} " - f"using the {synchronizer_name} synchronizer.[/bold cyan]" - )) + action = escape(f"{action:<{self.STATUS_WIDTH}}") + description = f"{style}{action}[/] {text}" + return self._bar(self._crawl_progress, description, total) + + def download_bar( + self, + style: str, + action: str, + text: str, + total: Optional[float] = None, + ) -> AbstractContextManager[ProgressBar]: + """ + Allows markup in the "style" argument which will be applied to the + "action" string. + """ + + action = escape(f"{action:<{self.STATUS_WIDTH}}") + description = f"{style}{action}[/] {text}" + return self._bar(self._download_progress, description, total) + + +log = Log() diff --git a/PFERD/organizer.py b/PFERD/organizer.py deleted file mode 100644 index 1665f23..0000000 --- a/PFERD/organizer.py +++ /dev/null @@ -1,147 +0,0 @@ -"""A simple helper for managing downloaded files. - -A organizer is bound to a single directory. 
-""" - -import filecmp -import logging -import os -import shutil -from pathlib import Path, PurePath -from typing import List, Optional, Set - -from .download_summary import DownloadSummary -from .location import Location -from .logging import PrettyLogger -from .utils import prompt_yes_no - -LOGGER = logging.getLogger(__name__) -PRETTY = PrettyLogger(LOGGER) - - -class FileAcceptException(Exception): - """An exception while accepting a file.""" - - -class Organizer(Location): - """A helper for managing downloaded files.""" - - def __init__(self, path: Path): - """Create a new organizer for a given path.""" - super().__init__(path) - self._known_files: Set[Path] = set() - - # Keep the root dir - self._known_files.add(path.resolve()) - - self.download_summary = DownloadSummary() - - def accept_file(self, src: Path, dst: PurePath) -> Optional[Path]: - """ - Move a file to this organizer and mark it. - - Returns the path the file was moved to, to allow the caller to adjust the metadata. - As you might still need to adjust the metadata when the file was identical - (e.g. update the timestamp), the path is also returned in this case. - In all other cases (ignored, not overwritten, etc.) this method returns None. - """ - # Windows limits the path length to 260 for *some* historical reason - # If you want longer paths, you will have to add the "\\?\" prefix in front of - # your path... 
- # See: - # https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation - if os.name == 'nt': - src_absolute = Path("\\\\?\\" + str(src.resolve())) - dst_absolute = Path("\\\\?\\" + str(self.resolve(dst))) - else: - src_absolute = src.resolve() - dst_absolute = self.resolve(dst) - - if not src_absolute.exists(): - raise FileAcceptException("Source file does not exist") - - if not src_absolute.is_file(): - raise FileAcceptException("Source is a directory") - - LOGGER.debug("Copying %s to %s", src_absolute, dst_absolute) - - if self._is_marked(dst): - PRETTY.warning(f"File {str(dst_absolute)!r} was already written!") - if not prompt_yes_no(f"Overwrite file?", default=False): - PRETTY.ignored_file(dst_absolute, "file was written previously") - return None - - # Destination file is directory - if dst_absolute.exists() and dst_absolute.is_dir(): - if prompt_yes_no(f"Overwrite folder {dst_absolute} with file?", default=False): - shutil.rmtree(dst_absolute) - else: - PRETTY.warning(f"Could not add file {str(dst_absolute)!r}") - return None - - # Destination file exists - if dst_absolute.exists() and dst_absolute.is_file(): - if filecmp.cmp(str(src_absolute), str(dst_absolute), shallow=False): - # Bail out, nothing more to do - PRETTY.ignored_file(dst_absolute, "same file contents") - self.mark(dst) - return dst_absolute - - self.download_summary.add_modified_file(dst_absolute) - PRETTY.modified_file(dst_absolute) - else: - self.download_summary.add_new_file(dst_absolute) - PRETTY.new_file(dst_absolute) - - # Create parent dir if needed - dst_parent_dir: Path = dst_absolute.parent - dst_parent_dir.mkdir(exist_ok=True, parents=True) - - # Move file - shutil.move(str(src_absolute), str(dst_absolute)) - - self.mark(dst) - - return dst_absolute - - def mark(self, path: PurePath) -> None: - """Mark a file as used so it will not get cleaned up.""" - absolute_path = self.resolve(path) - self._known_files.add(absolute_path) - 
LOGGER.debug("Tracked %s", absolute_path) - - def _is_marked(self, path: PurePath) -> bool: - """ - Checks whether a file is marked. - """ - absolute_path = self.resolve(path) - return absolute_path in self._known_files - - def cleanup(self) -> None: - """Remove all untracked files in the organizer's dir.""" - LOGGER.debug("Deleting all untracked files...") - - self._cleanup(self.path) - - def _cleanup(self, start_dir: Path) -> None: - paths: List[Path] = list(start_dir.iterdir()) - - # Recursively clean paths - for path in paths: - if path.is_dir(): - self._cleanup(path) - else: - if path.resolve() not in self._known_files: - self._delete_file_if_confirmed(path) - - # Delete dir if it was empty and untracked - dir_empty = len(list(start_dir.iterdir())) == 0 - if start_dir.resolve() not in self._known_files and dir_empty: - start_dir.rmdir() - - def _delete_file_if_confirmed(self, path: Path) -> None: - prompt = f"Do you want to delete {path}" - - if prompt_yes_no(prompt, False): - self.download_summary.add_deleted_file(path) - path.unlink() diff --git a/PFERD/output_dir.py b/PFERD/output_dir.py new file mode 100644 index 0000000..159e1db --- /dev/null +++ b/PFERD/output_dir.py @@ -0,0 +1,547 @@ +import filecmp +import json +import os +import random +import shutil +import string +from collections.abc import Iterator +from contextlib import contextmanager, suppress +from dataclasses import dataclass +from datetime import datetime +from enum import Enum +from pathlib import Path, PurePath +from typing import BinaryIO, Optional + +from .logging import log +from .report import Report, ReportLoadError +from .utils import ReusableAsyncContextManager, fmt_path, fmt_real_path, prompt_yes_no + +SUFFIX_CHARS = string.ascii_lowercase + string.digits +SUFFIX_LENGTH = 6 +TRIES = 5 + + +class OutputDirError(Exception): + pass + + +class Redownload(Enum): + NEVER = "never" + NEVER_SMART = "never-smart" + ALWAYS = "always" + ALWAYS_SMART = "always-smart" + + @staticmethod + def 
from_string(string: str) -> "Redownload": + try: + return Redownload(string) + except ValueError: + raise ValueError("must be one of 'never', 'never-smart', 'always', 'always-smart'") from None + + +class OnConflict(Enum): + PROMPT = "prompt" + LOCAL_FIRST = "local-first" + REMOTE_FIRST = "remote-first" + NO_DELETE = "no-delete" + NO_DELETE_PROMPT_OVERWRITE = "no-delete-prompt-overwrite" + + @staticmethod + def from_string(string: str) -> "OnConflict": + try: + return OnConflict(string) + except ValueError: + raise ValueError( + "must be one of 'prompt', 'local-first'," + " 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'" + ) from None + + +@dataclass +class Heuristics: + etag_differs: Optional[bool] + mtime: Optional[datetime] + + +class FileSink: + def __init__(self, file: BinaryIO): + self._file = file + self._done = False + + @property + def file(self) -> BinaryIO: + return self._file + + def done(self) -> None: + self._done = True + + def is_done(self) -> bool: + return self._done + + +@dataclass +class DownloadInfo: + remote_path: PurePath + path: PurePath + local_path: Path + tmp_path: Path + heuristics: Heuristics + on_conflict: OnConflict + success: bool = False + + +class FileSinkToken(ReusableAsyncContextManager[FileSink]): + # Whenever this class is entered, it creates a new temporary file and + # returns a corresponding FileSink. + # + # When it is exited again, the file is closed and information about the + # download handed back to the OutputDirectory. 
+ + def __init__( + self, + output_dir: "OutputDirectory", + remote_path: PurePath, + path: PurePath, + local_path: Path, + heuristics: Heuristics, + on_conflict: OnConflict, + ): + super().__init__() + + self._output_dir = output_dir + self._remote_path = remote_path + self._path = path + self._local_path = local_path + self._heuristics = heuristics + self._on_conflict = on_conflict + + async def _on_aenter(self) -> FileSink: + tmp_path, file = await self._output_dir._create_tmp_file(self._local_path) + sink = FileSink(file) + + async def after_download() -> None: + await self._output_dir._after_download( + DownloadInfo( + self._remote_path, + self._path, + self._local_path, + tmp_path, + self._heuristics, + self._on_conflict, + sink.is_done(), + ) + ) + + self._stack.push_async_callback(after_download) + self._stack.enter_context(file) + + return sink + + +class OutputDirectory: + REPORT_FILE = PurePath(".report") + + def __init__( + self, + root: Path, + redownload: Redownload, + on_conflict: OnConflict, + ): + if os.name == "nt": + # Windows limits the path length to 260 for some historical reason. + # If you want longer paths, you will have to add the "\\?\" prefix + # in front of your path. 
See: + # https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation + self._root = Path("\\\\?\\" + str(root.absolute())) + else: + self._root = root + + self._redownload = redownload + self._on_conflict = on_conflict + + self._report_path = self.resolve(self.REPORT_FILE) + self._report = Report() + self._prev_report: Optional[Report] = None + + self.register_reserved(self.REPORT_FILE) + + @property + def report(self) -> Report: + return self._report + + @property + def prev_report(self) -> Optional[Report]: + return self._prev_report + + def prepare(self) -> None: + log.explain_topic(f"Creating base directory at {fmt_real_path(self._root)}") + + try: + self._root.mkdir(parents=True, exist_ok=True) + except OSError as e: + raise OutputDirError("Failed to create base directory") from e + + def register_reserved(self, path: PurePath) -> None: + self._report.mark_reserved(path) + + def resolve(self, path: PurePath) -> Path: + """ + May throw an OutputDirError. + """ + + if ".." in path.parts: + raise OutputDirError(f"Forbidden segment '..' in path {fmt_path(path)}") + if "." in path.parts: + raise OutputDirError(f"Forbidden segment '.' in path {fmt_path(path)}") + + return self._root / path + + def _should_download( + self, + local_path: Path, + heuristics: Heuristics, + redownload: Redownload, + on_conflict: OnConflict, + ) -> bool: + if not local_path.exists(): + log.explain("No corresponding file present locally") + return True + + if on_conflict == OnConflict.LOCAL_FIRST: + # Whatever is here, it will never be overwritten, so we don't need + # to download the file. + log.explain("Conflict resolution is 'local-first' and path exists") + return False + + if not local_path.is_file(): + # We know that there is *something* here that's not a file. + log.explain("Non-file (probably a directory) present locally") + + # If on_conflict is LOCAL_FIRST or NO_DELETE, we know that it would + # never be overwritten. 
It also doesn't have any relevant stats to + # update. This means that we don't have to download the file + # because we'd just always throw it away again. + if on_conflict in {OnConflict.LOCAL_FIRST, OnConflict.NO_DELETE}: + log.explain(f"Conflict resolution is {on_conflict.value!r}") + return False + + return True + + log.explain(f"Redownload policy is {redownload.value}") + + if redownload == Redownload.NEVER: + return False + elif redownload == Redownload.ALWAYS: + return True + + stat = local_path.stat() + + remote_newer = None + + # ETag should be a more reliable indicator than mtime, so we check it first + if heuristics.etag_differs is not None: + remote_newer = heuristics.etag_differs + if remote_newer: + log.explain("Remote file's entity tag differs") + else: + log.explain("Remote file's entity tag is the same") + + # Python on Windows crashes when faced with timestamps around the unix epoch + if remote_newer is None and heuristics.mtime and (os.name != "nt" or heuristics.mtime.year > 1970): + mtime = heuristics.mtime + remote_newer = mtime.timestamp() > stat.st_mtime + if remote_newer: + log.explain("Remote file seems to be newer") + else: + log.explain("Remote file doesn't seem to be newer") + + if redownload == Redownload.NEVER_SMART: + if remote_newer is None: + return False + else: + return remote_newer + elif redownload == Redownload.ALWAYS_SMART: + if remote_newer is None: + return True + else: + return remote_newer + + # This should never be reached + raise ValueError(f"{redownload!r} is not a valid redownload policy") + + # The following conflict resolution functions all return False if the local + # file(s) should be kept and True if they should be replaced by the remote + # files. + + async def _conflict_lfrf( + self, + on_conflict: OnConflict, + path: PurePath, + ) -> bool: + if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}: + async with log.exclusive_output(): + prompt = f"Replace {fmt_path(path)} with remote file?" 
+ return await prompt_yes_no(prompt, default=False) + elif on_conflict == OnConflict.LOCAL_FIRST: + return False + elif on_conflict == OnConflict.REMOTE_FIRST: + return True + elif on_conflict == OnConflict.NO_DELETE: + return True + + # This should never be reached + raise ValueError(f"{on_conflict!r} is not a valid conflict policy") + + async def _conflict_ldrf( + self, + on_conflict: OnConflict, + path: PurePath, + ) -> bool: + if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}: + async with log.exclusive_output(): + prompt = f"Recursively delete {fmt_path(path)} and replace with remote file?" + return await prompt_yes_no(prompt, default=False) + elif on_conflict == OnConflict.LOCAL_FIRST: + return False + elif on_conflict == OnConflict.REMOTE_FIRST: + return True + elif on_conflict == OnConflict.NO_DELETE: + return False + + # This should never be reached + raise ValueError(f"{on_conflict!r} is not a valid conflict policy") + + async def _conflict_lfrd( + self, + on_conflict: OnConflict, + path: PurePath, + parent: PurePath, + ) -> bool: + if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}: + async with log.exclusive_output(): + prompt = f"Delete {fmt_path(parent)} so remote file {fmt_path(path)} can be downloaded?" + return await prompt_yes_no(prompt, default=False) + elif on_conflict == OnConflict.LOCAL_FIRST: + return False + elif on_conflict == OnConflict.REMOTE_FIRST: + return True + elif on_conflict == OnConflict.NO_DELETE: + return False + + # This should never be reached + raise ValueError(f"{on_conflict!r} is not a valid conflict policy") + + async def _conflict_delete_lf( + self, + on_conflict: OnConflict, + path: PurePath, + ) -> bool: + if on_conflict == OnConflict.PROMPT: + async with log.exclusive_output(): + prompt = f"Delete {fmt_path(path)}?" 
+ return await prompt_yes_no(prompt, default=False) + elif on_conflict == OnConflict.LOCAL_FIRST: + return False + elif on_conflict == OnConflict.REMOTE_FIRST: + return True + elif on_conflict in {OnConflict.NO_DELETE, OnConflict.NO_DELETE_PROMPT_OVERWRITE}: + return False + + # This should never be reached + raise ValueError(f"{on_conflict!r} is not a valid conflict policy") + + def _tmp_path(self, base: Path, suffix_length: int) -> Path: + prefix = "" if base.name.startswith(".") else "." + suffix = "".join(random.choices(SUFFIX_CHARS, k=suffix_length)) + name = f"{prefix}{base.name}.tmp.{suffix}" + return base.parent / name + + async def _create_tmp_file( + self, + local_path: Path, + ) -> tuple[Path, BinaryIO]: + """ + May raise an OutputDirError. + """ + + # Create tmp file + for attempt in range(TRIES): + suffix_length = SUFFIX_LENGTH + 2 * attempt + tmp_path = self._tmp_path(local_path, suffix_length) + try: + return tmp_path, open(tmp_path, "xb") + except FileExistsError: + pass # Try again + + raise OutputDirError("Failed to create temporary file") + + def should_try_download( + self, + path: PurePath, + *, + etag_differs: Optional[bool] = None, + mtime: Optional[datetime] = None, + redownload: Optional[Redownload] = None, + on_conflict: Optional[OnConflict] = None, + ) -> bool: + heuristics = Heuristics(etag_differs, mtime) + redownload = self._redownload if redownload is None else redownload + on_conflict = self._on_conflict if on_conflict is None else on_conflict + local_path = self.resolve(path) + + return self._should_download(local_path, heuristics, redownload, on_conflict) + + async def download( + self, + remote_path: PurePath, + path: PurePath, + *, + etag_differs: Optional[bool] = None, + mtime: Optional[datetime] = None, + redownload: Optional[Redownload] = None, + on_conflict: Optional[OnConflict] = None, + ) -> Optional[FileSinkToken]: + """ + May throw an OutputDirError, a MarkDuplicateError or a + MarkConflictError. 
+ """ + + heuristics = Heuristics(etag_differs, mtime) + redownload = self._redownload if redownload is None else redownload + on_conflict = self._on_conflict if on_conflict is None else on_conflict + local_path = self.resolve(path) + + self._report.mark(path) + + if not self._should_download(local_path, heuristics, redownload, on_conflict): + return None + + # Detect and solve local-dir-remote-file conflict + if local_path.is_dir(): + log.explain("Conflict: There's a directory in place of the local file") + if await self._conflict_ldrf(on_conflict, path): + log.explain("Result: Delete the obstructing directory") + shutil.rmtree(local_path) + else: + log.explain("Result: Keep the obstructing directory") + return None + + # Detect and solve local-file-remote-dir conflict + for parent in path.parents: + local_parent = self.resolve(parent) + if local_parent.exists() and not local_parent.is_dir(): + log.explain("Conflict: One of the local file's parents is a file") + if await self._conflict_lfrd(on_conflict, path, parent): + log.explain("Result: Delete the obstructing file") + local_parent.unlink() + break + else: + log.explain("Result: Keep the obstructing file") + return None + + # Ensure parent directory exists + local_path.parent.mkdir(parents=True, exist_ok=True) + + return FileSinkToken(self, remote_path, path, local_path, heuristics, on_conflict) + + def _update_metadata(self, info: DownloadInfo) -> None: + if mtime := info.heuristics.mtime: + mtimestamp = mtime.timestamp() + os.utime(info.local_path, times=(mtimestamp, mtimestamp)) + + @contextmanager + def _ensure_deleted(self, path: Path) -> Iterator[None]: + try: + yield + finally: + path.unlink(missing_ok=True) + + async def _after_download(self, info: DownloadInfo) -> None: + with self._ensure_deleted(info.tmp_path): + log.status("[bold cyan]", "Downloaded", fmt_path(info.remote_path)) + log.explain_topic(f"Processing downloaded file for {fmt_path(info.path)}") + + changed = False + + if not info.success: 
+ log.explain("Download unsuccessful, aborting") + return + + # Solve conflicts arising from existing local file + if info.local_path.exists(): + changed = True + + if filecmp.cmp(info.local_path, info.tmp_path): + log.explain("Contents identical with existing file") + log.explain("Updating metadata of existing file") + self._update_metadata(info) + return + + log.explain("Conflict: The local and remote versions differ") + if await self._conflict_lfrf(info.on_conflict, info.path): + log.explain("Result: Replacing local with remote version") + else: + log.explain("Result: Keeping local version") + return + + info.tmp_path.replace(info.local_path) + log.explain("Updating file metadata") + self._update_metadata(info) + + if changed: + log.status("[bold bright_yellow]", "Changed", fmt_path(info.path)) + self._report.change_file(info.path) + else: + log.status("[bold bright_green]", "Added", fmt_path(info.path)) + self._report.add_file(info.path) + + async def cleanup(self) -> None: + await self._cleanup_dir(self._root, PurePath(), delete_self=False) + + async def _cleanup(self, path: Path, pure: PurePath) -> None: + if path.is_dir(): + await self._cleanup_dir(path, pure) + elif path.is_file(): + await self._cleanup_file(path, pure) + + async def _cleanup_dir(self, path: Path, pure: PurePath, delete_self: bool = True) -> None: + for child in sorted(path.iterdir()): + pure_child = pure / child.name + await self._cleanup(child, pure_child) + + if delete_self: + with suppress(OSError): + path.rmdir() + + async def _cleanup_file(self, path: Path, pure: PurePath) -> None: + if self._report.is_marked(pure): + return + + if await self._conflict_delete_lf(self._on_conflict, pure): + try: + path.unlink() + log.status("[bold bright_magenta]", "Deleted", fmt_path(pure)) + self._report.delete_file(pure) + except OSError: + pass + else: + log.not_deleted("[bold bright_magenta]", "Not deleted", fmt_path(pure)) + self._report.not_delete_file(pure) + + def load_prev_report(self) -> 
None: + log.explain_topic(f"Loading previous report from {fmt_real_path(self._report_path)}") + try: + self._prev_report = Report.load(self._report_path) + log.explain("Loaded report successfully") + except (OSError, UnicodeDecodeError, json.JSONDecodeError, ReportLoadError) as e: + log.explain("Failed to load report") + log.explain(str(e)) + + def store_report(self) -> None: + log.explain_topic(f"Storing report to {fmt_real_path(self._report_path)}") + try: + self._report.store(self._report_path) + log.explain("Stored report successfully") + except OSError as e: + log.warn(f"Failed to save report to {fmt_real_path(self._report_path)}") + log.warn_contd(str(e)) diff --git a/PFERD/pferd.py b/PFERD/pferd.py index 042dd93..9a6035f 100644 --- a/PFERD/pferd.py +++ b/PFERD/pferd.py @@ -1,355 +1,199 @@ -""" -Convenience functions for using PFERD. -""" +from pathlib import Path, PurePath +from typing import Optional -import logging -from pathlib import Path -from typing import Callable, List, Optional, Union +from rich.markup import escape -from .cookie_jar import CookieJar -from .diva import (DivaDownloader, DivaDownloadStrategy, DivaPlaylistCrawler, - diva_download_new) -from .download_summary import DownloadSummary -from .errors import FatalException, swallow_and_print_errors -from .ilias import (IliasAuthenticator, IliasCrawler, IliasDirectoryFilter, - IliasDownloader, IliasDownloadInfo, IliasDownloadStrategy, - KitShibbolethAuthenticator, download_modified_or_new) -from .location import Location -from .logging import PrettyLogger, enable_logging -from .organizer import Organizer -from .tmp_dir import TmpDir -from .transform import TF, Transform, apply_transform -from .utils import PathLike, to_path - -# TODO save known-good cookies as soon as possible +from .auth import AUTHENTICATORS, Authenticator, AuthError, AuthSection +from .config import Config, ConfigOptionError +from .crawl import CRAWLERS, Crawler, CrawlError, CrawlerSection, KitIliasWebCrawler +from .logging 
import log +from .utils import fmt_path -LOGGER = logging.getLogger(__name__) -PRETTY = PrettyLogger(LOGGER) +class PferdLoadError(Exception): + pass -class Pferd(Location): - # pylint: disable=too-many-arguments - """ - The main entrypoint in your Pferd usage: This class combines a number of - useful shortcuts for running synchronizers in a single interface. - """ - - def __init__( - self, - base_dir: Path, - tmp_dir: Path = Path(".tmp"), - test_run: bool = False - ): - super().__init__(Path(base_dir)) - - self._download_summary = DownloadSummary() - self._tmp_dir = TmpDir(self.resolve(tmp_dir)) - self._test_run = test_run - - @staticmethod - def enable_logging() -> None: +class Pferd: + def __init__(self, config: Config, cli_crawlers: Optional[list[str]], cli_skips: Optional[list[str]]): """ - Enable and configure logging via the logging module. + May throw PferdLoadError. """ - enable_logging() + self._config = config + self._crawlers_to_run = self._find_crawlers_to_run(config, cli_crawlers, cli_skips) - @staticmethod - def _print_transformables(transformables: List[TF]) -> None: - LOGGER.info("") - LOGGER.info("Results of the test run:") - for transformable in transformables: - LOGGER.info(transformable.path) + self._authenticators: dict[str, Authenticator] = {} + self._crawlers: dict[str, Crawler] = {} - def _ilias( - self, - target: PathLike, - base_url: str, - crawl_function: Callable[[IliasCrawler], List[IliasDownloadInfo]], - authenticator: IliasAuthenticator, - cookies: Optional[PathLike], - dir_filter: IliasDirectoryFilter, - transform: Transform, - download_strategy: IliasDownloadStrategy, - timeout: int, - clean: bool = True, - ) -> Organizer: - # pylint: disable=too-many-locals - cookie_jar = CookieJar(to_path(cookies) if cookies else None) - session = cookie_jar.create_session() - tmp_dir = self._tmp_dir.new_subdir() - organizer = Organizer(self.resolve(to_path(target))) + def _find_config_crawlers(self, config: Config) -> list[str]: + crawl_sections 
= [] - crawler = IliasCrawler(base_url, session, authenticator, dir_filter) - downloader = IliasDownloader(tmp_dir, organizer, session, - authenticator, download_strategy, timeout) + for name, section in config.crawl_sections(): + if CrawlerSection(section).skip(): + log.explain(f"Skipping {name!r}") + else: + crawl_sections.append(name) - cookie_jar.load_cookies() - info = crawl_function(crawler) - cookie_jar.save_cookies() + return crawl_sections - transformed = apply_transform(transform, info) - if self._test_run: - self._print_transformables(transformed) - return organizer + def _find_cli_crawlers(self, config: Config, cli_crawlers: list[str]) -> list[str]: + if len(cli_crawlers) != len(set(cli_crawlers)): + raise PferdLoadError("Some crawlers were selected multiple times") - downloader.download_all(transformed) - cookie_jar.save_cookies() + crawl_sections = [name for name, _ in config.crawl_sections()] - if clean: - organizer.cleanup() + crawlers_to_run = [] # With crawl: prefix + unknown_names = [] # Without crawl: prefix - return organizer + for name in cli_crawlers: + section_name = f"crawl:{name}" + if section_name in crawl_sections: + log.explain(f"Crawler section named {section_name!r} exists") + crawlers_to_run.append(section_name) + else: + log.explain(f"There's no crawler section named {section_name!r}") + unknown_names.append(name) - @swallow_and_print_errors - def ilias_kit( - self, - target: PathLike, - course_id: str, - dir_filter: IliasDirectoryFilter = lambda x, y: True, - transform: Transform = lambda x: x, - cookies: Optional[PathLike] = None, - username: Optional[str] = None, - password: Optional[str] = None, - download_strategy: IliasDownloadStrategy = download_modified_or_new, - clean: bool = True, - timeout: int = 5, - ) -> Organizer: - """ - Synchronizes a folder with the ILIAS instance of the KIT. 
+ if unknown_names: + if len(unknown_names) == 1: + [name] = unknown_names + raise PferdLoadError(f"There is no crawler named {name!r}") + else: + names_str = ", ".join(repr(name) for name in unknown_names) + raise PferdLoadError(f"There are no crawlers named {names_str}") - Arguments: - target {Path} -- the target path to write the data to - course_id {str} -- the id of the main course page (found in the URL after ref_id - when opening the course homepage) + return crawlers_to_run - Keyword Arguments: - dir_filter {IliasDirectoryFilter} -- A filter for directories. Will be applied on the - crawler level, these directories and all of their content is skipped. - (default: {lambdax:True}) - transform {Transform} -- A transformation function for the output paths. Return None - to ignore a file. (default: {lambdax:x}) - cookies {Optional[Path]} -- The path to store and load cookies from. - (default: {None}) - username {Optional[str]} -- The SCC username. If none is given, it will prompt - the user. (default: {None}) - password {Optional[str]} -- The SCC password. If none is given, it will prompt - the user. (default: {None}) - download_strategy {DownloadStrategy} -- A function to determine which files need to - be downloaded. Can save bandwidth and reduce the number of requests. - (default: {download_modified_or_new}) - clean {bool} -- Whether to clean up when the method finishes. - timeout {int} -- The download timeout for opencast videos. Sadly needed due to a - requests bug. - """ - # This authenticator only works with the KIT ilias instance. 
- authenticator = KitShibbolethAuthenticator(username=username, password=password) - PRETTY.starting_synchronizer(target, "ILIAS", course_id) + def _find_crawlers_to_run( + self, + config: Config, + cli_crawlers: Optional[list[str]], + cli_skips: Optional[list[str]], + ) -> list[str]: + log.explain_topic("Deciding which crawlers to run") - organizer = self._ilias( - target=target, - base_url="https://ilias.studium.kit.edu/", - crawl_function=lambda crawler: crawler.crawl_course(course_id), - authenticator=authenticator, - cookies=cookies, - dir_filter=dir_filter, - transform=transform, - download_strategy=download_strategy, - clean=clean, - timeout=timeout - ) - - self._download_summary.merge(organizer.download_summary) - - return organizer - - def print_summary(self) -> None: - """ - Prints the accumulated download summary. - """ - PRETTY.summary(self._download_summary) - - @swallow_and_print_errors - def ilias_kit_personal_desktop( - self, - target: PathLike, - dir_filter: IliasDirectoryFilter = lambda x, y: True, - transform: Transform = lambda x: x, - cookies: Optional[PathLike] = None, - username: Optional[str] = None, - password: Optional[str] = None, - download_strategy: IliasDownloadStrategy = download_modified_or_new, - clean: bool = True, - timeout: int = 5, - ) -> Organizer: - """ - Synchronizes a folder with the ILIAS instance of the KIT. This method will crawl the ILIAS - "personal desktop" instead of a single course. - - Arguments: - target {Path} -- the target path to write the data to - - Keyword Arguments: - dir_filter {IliasDirectoryFilter} -- A filter for directories. Will be applied on the - crawler level, these directories and all of their content is skipped. - (default: {lambdax:True}) - transform {Transform} -- A transformation function for the output paths. Return None - to ignore a file. (default: {lambdax:x}) - cookies {Optional[Path]} -- The path to store and load cookies from. 
- (default: {None}) - username {Optional[str]} -- The SCC username. If none is given, it will prompt - the user. (default: {None}) - password {Optional[str]} -- The SCC password. If none is given, it will prompt - the user. (default: {None}) - download_strategy {DownloadStrategy} -- A function to determine which files need to - be downloaded. Can save bandwidth and reduce the number of requests. - (default: {download_modified_or_new}) - clean {bool} -- Whether to clean up when the method finishes. - timeout {int} -- The download timeout for opencast videos. Sadly needed due to a - requests bug. - """ - # This authenticator only works with the KIT ilias instance. - authenticator = KitShibbolethAuthenticator(username=username, password=password) - PRETTY.starting_synchronizer(target, "ILIAS", "Personal Desktop") - - organizer = self._ilias( - target=target, - base_url="https://ilias.studium.kit.edu/", - crawl_function=lambda crawler: crawler.crawl_personal_desktop(), - authenticator=authenticator, - cookies=cookies, - dir_filter=dir_filter, - transform=transform, - download_strategy=download_strategy, - clean=clean, - timeout=timeout - ) - - self._download_summary.merge(organizer.download_summary) - - return organizer - - @swallow_and_print_errors - def ilias_kit_folder( - self, - target: PathLike, - full_url: str, - dir_filter: IliasDirectoryFilter = lambda x, y: True, - transform: Transform = lambda x: x, - cookies: Optional[PathLike] = None, - username: Optional[str] = None, - password: Optional[str] = None, - download_strategy: IliasDownloadStrategy = download_modified_or_new, - clean: bool = True, - timeout: int = 5, - ) -> Organizer: - """ - Synchronizes a folder with a given folder on the ILIAS instance of the KIT. - - Arguments: - target {Path} -- the target path to write the data to - full_url {str} -- the full url of the folder/videos/course to crawl - - Keyword Arguments: - dir_filter {IliasDirectoryFilter} -- A filter for directories. 
Will be applied on the - crawler level, these directories and all of their content is skipped. - (default: {lambdax:True}) - transform {Transform} -- A transformation function for the output paths. Return None - to ignore a file. (default: {lambdax:x}) - cookies {Optional[Path]} -- The path to store and load cookies from. - (default: {None}) - username {Optional[str]} -- The SCC username. If none is given, it will prompt - the user. (default: {None}) - password {Optional[str]} -- The SCC password. If none is given, it will prompt - the user. (default: {None}) - download_strategy {DownloadStrategy} -- A function to determine which files need to - be downloaded. Can save bandwidth and reduce the number of requests. - (default: {download_modified_or_new}) - clean {bool} -- Whether to clean up when the method finishes. - timeout {int} -- The download timeout for opencast videos. Sadly needed due to a - requests bug. - """ - # This authenticator only works with the KIT ilias instance. - authenticator = KitShibbolethAuthenticator(username=username, password=password) - PRETTY.starting_synchronizer(target, "ILIAS", "An ILIAS element by url") - - if not full_url.startswith("https://ilias.studium.kit.edu"): - raise FatalException("Not a valid KIT ILIAS URL") - - organizer = self._ilias( - target=target, - base_url="https://ilias.studium.kit.edu/", - crawl_function=lambda crawler: crawler.recursive_crawl_url(full_url), - authenticator=authenticator, - cookies=cookies, - dir_filter=dir_filter, - transform=transform, - download_strategy=download_strategy, - clean=clean, - timeout=timeout - ) - - self._download_summary.merge(organizer.download_summary) - - return organizer - - @swallow_and_print_errors - def diva_kit( - self, - target: Union[PathLike, Organizer], - playlist_location: str, - transform: Transform = lambda x: x, - download_strategy: DivaDownloadStrategy = diva_download_new, - clean: bool = True - ) -> Organizer: - """ - Synchronizes a folder with a DIVA playlist. 
- - Arguments: - organizer {Organizer} -- The organizer to use. - playlist_location {str} -- the playlist id or the playlist URL - in the format 'https://mediaservice.bibliothek.kit.edu/#/details/DIVA-2019-271' - - Keyword Arguments: - transform {Transform} -- A transformation function for the output paths. Return None - to ignore a file. (default: {lambdax:x}) - download_strategy {DivaDownloadStrategy} -- A function to determine which files need to - be downloaded. Can save bandwidth and reduce the number of requests. - (default: {diva_download_new}) - clean {bool} -- Whether to clean up when the method finishes. - """ - tmp_dir = self._tmp_dir.new_subdir() - - if playlist_location.startswith("http"): - playlist_id = DivaPlaylistCrawler.fetch_id(playlist_link=playlist_location) + crawlers: list[str] + if cli_crawlers is None: + log.explain("No crawlers specified on CLI") + log.explain("Running crawlers specified in config") + crawlers = self._find_config_crawlers(config) else: - playlist_id = playlist_location + log.explain("Crawlers specified on CLI") + crawlers = self._find_cli_crawlers(config, cli_crawlers) - if target is None: - PRETTY.starting_synchronizer("None", "DIVA", playlist_id) - raise FatalException("Got 'None' as target directory, aborting") + skips = {f"crawl:{name}" for name in cli_skips} if cli_skips else set() + for crawler in crawlers: + if crawler in skips: + log.explain(f"Skipping crawler {crawler!r}") + crawlers = [crawler for crawler in crawlers if crawler not in skips] - if isinstance(target, Organizer): - organizer = target - else: - organizer = Organizer(self.resolve(to_path(target))) + return crawlers - PRETTY.starting_synchronizer(organizer.path, "DIVA", playlist_id) + def _load_authenticators(self) -> None: + for name, section in self._config.auth_sections(): + log.print(f"[bold bright_cyan]Loading[/] {escape(name)}") - crawler = DivaPlaylistCrawler(playlist_id) - downloader = DivaDownloader(tmp_dir, organizer, download_strategy) + 
auth_type = AuthSection(section).type() + authenticator_constructor = AUTHENTICATORS.get(auth_type) + if authenticator_constructor is None: + raise ConfigOptionError(name, "type", f"Unknown authenticator type: {auth_type!r}") - info = crawler.crawl() + authenticator = authenticator_constructor(name, section, self._config) + self._authenticators[name] = authenticator - transformed = apply_transform(transform, info) - if self._test_run: - self._print_transformables(transformed) - return organizer + def _load_crawlers(self) -> None: + # Cookie sharing + kit_ilias_web_paths: dict[Authenticator, list[Path]] = {} - downloader.download_all(transformed) + for name, section in self._config.crawl_sections(): + log.print(f"[bold bright_cyan]Loading[/] {escape(name)}") - if clean: - organizer.cleanup() + crawl_type = CrawlerSection(section).type() + crawler_constructor = CRAWLERS.get(crawl_type) + if crawler_constructor is None: + raise ConfigOptionError(name, "type", f"Unknown crawler type: {crawl_type!r}") - return organizer + crawler = crawler_constructor(name, section, self._config, self._authenticators) + self._crawlers[name] = crawler + + if self._config.default_section.share_cookies() and isinstance(crawler, KitIliasWebCrawler): + crawler.share_cookies(kit_ilias_web_paths) + + def debug_transforms(self) -> None: + for name in self._crawlers_to_run: + crawler = self._crawlers[name] + log.print("") + log.print(f"[bold bright_cyan]Debugging transforms[/] for {escape(name)}") + crawler.debug_transforms() + + async def run(self, debug_transforms: bool) -> None: + """ + May throw ConfigOptionError. + """ + + # These two functions must run inside the same event loop as the + # crawlers, so that any new objects (like Conditions or Futures) can + # obtain the correct event loop. 
+ self._load_authenticators() + self._load_crawlers() + + if debug_transforms: + log.output_explain = True + log.output_report = False + self.debug_transforms() + return + + log.print("") + + for name in self._crawlers_to_run: + crawler = self._crawlers[name] + + log.print(f"[bold bright_cyan]Running[/] {escape(name)}") + + try: + await crawler.run() + except (CrawlError, AuthError) as e: + log.error(str(e)) + except Exception: + log.unexpected_exception() + + def print_report(self) -> None: + for name in self._crawlers_to_run: + crawlerOpt = self._crawlers.get(name) + if crawlerOpt is None: + continue # Crawler failed to load + crawler = crawlerOpt + + log.report("") + log.report(f"[bold bright_cyan]Report[/] for {escape(name)}") + + def fmt_path_link(relative_path: PurePath) -> str: + # We need to URL-encode the path because it might contain spaces or special characters + link = crawler.output_dir.resolve(relative_path).absolute().as_uri() + return f"[link={link}]{fmt_path(relative_path)}[/link]" + + something_changed = False + for path in sorted(crawler.report.added_files): + something_changed = True + log.report(f" [bold bright_green]Added[/] {fmt_path_link(path)}") + for path in sorted(crawler.report.changed_files): + something_changed = True + log.report(f" [bold bright_yellow]Changed[/] {fmt_path_link(path)}") + for path in sorted(crawler.report.deleted_files): + something_changed = True + log.report(f" [bold bright_magenta]Deleted[/] {fmt_path(path)}") + for path in sorted(crawler.report.not_deleted_files): + something_changed = True + log.report_not_deleted(f" [bold bright_magenta]Not deleted[/] {fmt_path_link(path)}") + + for warning in crawler.report.encountered_warnings: + something_changed = True + log.report(f" [bold bright_red]Warning[/] {warning}") + + for error in crawler.report.encountered_errors: + something_changed = True + log.report(f" [bold bright_red]Error[/] {error}") + + if not something_changed: + log.report(" Nothing changed") diff --git 
a/PFERD/progress.py b/PFERD/progress.py deleted file mode 100644 index 6ad098f..0000000 --- a/PFERD/progress.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -A small progress bar implementation. -""" -import sys -from dataclasses import dataclass -from types import TracebackType -from typing import Optional, Type - -import requests -from rich.console import Console -from rich.progress import (BarColumn, DownloadColumn, Progress, TaskID, - TextColumn, TimeRemainingColumn, - TransferSpeedColumn) - -_progress: Progress = Progress( - TextColumn("[bold blue]{task.fields[name]}", justify="right"), - BarColumn(bar_width=None), - "[progress.percentage]{task.percentage:>3.1f}%", - "•", - DownloadColumn(), - "•", - TransferSpeedColumn(), - "•", - TimeRemainingColumn(), - console=Console(file=sys.stdout), - transient=True -) - - -def size_from_headers(response: requests.Response) -> Optional[int]: - """ - Return the size of the download based on the response headers. - - Arguments: - response {requests.Response} -- the response - - Returns: - Optional[int] -- the size - """ - if "Content-Length" in response.headers: - return int(response.headers["Content-Length"]) - return None - - -@dataclass -class ProgressSettings: - """ - Settings you can pass to customize the progress bar. - """ - name: str - max_size: int - - -def progress_for(settings: Optional[ProgressSettings]) -> 'ProgressContextManager': - """ - Returns a context manager that displays progress - - Returns: - ProgressContextManager -- the progress manager - """ - return ProgressContextManager(settings) - - -class ProgressContextManager: - """ - A context manager used for displaying progress. 
- """ - - def __init__(self, settings: Optional[ProgressSettings]): - self._settings = settings - self._task_id: Optional[TaskID] = None - - def __enter__(self) -> 'ProgressContextManager': - """Context manager entry function.""" - if not self._settings: - return self - - _progress.start() - self._task_id = _progress.add_task( - self._settings.name, - total=self._settings.max_size, - name=self._settings.name - ) - return self - - # pylint: disable=useless-return - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> Optional[bool]: - """Context manager exit function. Removes the task.""" - if self._task_id is None: - return None - - _progress.remove_task(self._task_id) - - if len(_progress.task_ids) == 0: - # We need to clean up after ourselves, as we were the last one - _progress.stop() - _progress.refresh() - - return None - - def advance(self, amount: float) -> None: - """ - Advances the progress bar. - """ - if self._task_id is not None: - _progress.advance(self._task_id, amount) diff --git a/PFERD/report.py b/PFERD/report.py new file mode 100644 index 0000000..5b37c1c --- /dev/null +++ b/PFERD/report.py @@ -0,0 +1,229 @@ +import json +from pathlib import Path, PurePath +from typing import Any, Optional + + +class ReportLoadError(Exception): + pass + + +class MarkDuplicateError(Exception): + """ + Tried to mark a file that was already marked. + """ + + def __init__(self, path: PurePath): + super().__init__(f"A previous file already used path {path}") + self.path = path + + +class MarkConflictError(Exception): + """ + Marking the path would have caused a conflict. + + A conflict can have two reasons: Either the new file has the same path as + the parent directory of a known file, or a parent directory of the new file + has the same path as a known file. 
In either case, adding the new file + would require a file and a directory to share the same path, which is + usually not possible. + """ + + def __init__(self, path: PurePath, collides_with: PurePath): + super().__init__(f"File at {path} collides with previous file at {collides_with}") + self.path = path + self.collides_with = collides_with + + +class Report: + """ + A report of a synchronization. Includes all files found by the crawler, as + well as the set of changes made to local files. + """ + + def __init__(self) -> None: + # Paths found by the crawler, untransformed + self.found_paths: set[PurePath] = set() + + # Files reserved for metadata files (e. g. the report file or cookies) + # that can't be overwritten by user transforms and won't be cleaned up + # at the end. + self.reserved_files: set[PurePath] = set() + + # Files found by the crawler, transformed. Only includes files that + # were downloaded (or a download was attempted) + self.known_files: set[PurePath] = set() + + self.added_files: set[PurePath] = set() + self.changed_files: set[PurePath] = set() + self.deleted_files: set[PurePath] = set() + # Files that should have been deleted by the cleanup but weren't + self.not_deleted_files: set[PurePath] = set() + + # Custom crawler-specific data + self.custom: dict[str, Any] = dict() + + # Encountered errors and warnings + self.encountered_warnings: list[str] = [] + self.encountered_errors: list[str] = [] + + @staticmethod + def _get_list_of_strs(data: dict[str, Any], key: str) -> list[str]: + result: Any = data.get(key, []) + + if not isinstance(result, list): + raise ReportLoadError(f"Incorrect format: {key!r} is not a list") + + for elem in result: + if not isinstance(elem, str): + raise ReportLoadError(f"Incorrect format: {key!r} must contain only strings") + + return result + + @staticmethod + def _get_str_dictionary(data: dict[str, Any], key: str) -> dict[str, Any]: + result: dict[str, Any] = data.get(key, {}) + + if not isinstance(result, dict): + 
raise ReportLoadError(f"Incorrect format: {key!r} is not a dictionary") + + return result + + @classmethod + def load(cls, path: Path) -> "Report": + """ + May raise OSError, UnicodeDecodeError, json.JSONDecodeError, ReportLoadError. + """ + + with open(path, encoding="utf-8") as f: + data = json.load(f) + + if not isinstance(data, dict): + raise ReportLoadError("Incorrect format: Root is not an object") + + self = cls() + for elem in self._get_list_of_strs(data, "found"): + self.found(PurePath(elem)) + for elem in self._get_list_of_strs(data, "reserved"): + self.mark_reserved(PurePath(elem)) + for elem in self._get_list_of_strs(data, "known"): + self.mark(PurePath(elem)) + for elem in self._get_list_of_strs(data, "added"): + self.add_file(PurePath(elem)) + for elem in self._get_list_of_strs(data, "changed"): + self.change_file(PurePath(elem)) + for elem in self._get_list_of_strs(data, "deleted"): + self.delete_file(PurePath(elem)) + for elem in self._get_list_of_strs(data, "not_deleted"): + self.not_delete_file(PurePath(elem)) + self.custom = self._get_str_dictionary(data, "custom") + self.encountered_errors = self._get_list_of_strs(data, "encountered_errors") + self.encountered_warnings = self._get_list_of_strs(data, "encountered_warnings") + + return self + + def store(self, path: Path) -> None: + """ + May raise OSError. 
+ """ + + data = { + "found": [str(path) for path in sorted(self.found_paths)], + "reserved": [str(path) for path in sorted(self.reserved_files)], + "known": [str(path) for path in sorted(self.known_files)], + "added": [str(path) for path in sorted(self.added_files)], + "changed": [str(path) for path in sorted(self.changed_files)], + "deleted": [str(path) for path in sorted(self.deleted_files)], + "not_deleted": [str(path) for path in sorted(self.not_deleted_files)], + "custom": self.custom, + "encountered_warnings": self.encountered_warnings, + "encountered_errors": self.encountered_errors, + } + + with open(path, "w", encoding="utf-8") as f: + json.dump(data, f, indent=2, sort_keys=True) + f.write("\n") # json.dump doesn't do this + + def found(self, path: PurePath) -> None: + self.found_paths.add(path) + + def mark_reserved(self, path: PurePath) -> None: + if path in self.marked: + raise RuntimeError("Trying to reserve an already reserved file") + + self.reserved_files.add(path) + + def mark(self, path: PurePath) -> None: + """ + Mark a previously unknown file as known. + + May throw a MarkDuplicateError or a MarkConflictError. For more detail, + see the respective exception's docstring. + """ + + for other in self.marked: + if path == other: + raise MarkDuplicateError(path) + + if path.is_relative_to(other) or other.is_relative_to(path): + raise MarkConflictError(path, other) + + self.known_files.add(path) + + @property + def marked(self) -> set[PurePath]: + return self.known_files | self.reserved_files + + def is_marked(self, path: PurePath) -> bool: + return path in self.marked + + def add_file(self, path: PurePath) -> None: + """ + Unlike mark(), this function accepts any paths. + """ + + self.added_files.add(path) + + def change_file(self, path: PurePath) -> None: + """ + Unlike mark(), this function accepts any paths. 
+ """ + + self.changed_files.add(path) + + def delete_file(self, path: PurePath) -> None: + """ + Unlike mark(), this function accepts any paths. + """ + + self.deleted_files.add(path) + + def not_delete_file(self, path: PurePath) -> None: + """ + Unlike mark(), this function accepts any paths. + """ + + self.not_deleted_files.add(path) + + def add_custom_value(self, key: str, value: Any) -> None: + """ + Adds a custom value under the passed key, overwriting any existing + """ + self.custom[key] = value + + def get_custom_value(self, key: str) -> Optional[Any]: + """ + Retrieves a custom value for the given key. + """ + return self.custom.get(key) + + def add_error(self, error: str) -> None: + """ + Adds an error to this report's error list. + """ + self.encountered_errors.append(error) + + def add_warning(self, warning: str) -> None: + """ + Adds a warning to this report's warning list. + """ + self.encountered_warnings.append(warning) diff --git a/PFERD/tmp_dir.py b/PFERD/tmp_dir.py deleted file mode 100644 index 51ade2d..0000000 --- a/PFERD/tmp_dir.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Helper functions and classes for temporary folders.""" - -import logging -import shutil -from pathlib import Path -from types import TracebackType -from typing import Optional, Type - -from .location import Location - -LOGGER = logging.getLogger(__name__) - - -class TmpDir(Location): - """A temporary folder that can create files or nested temp folders.""" - - def __init__(self, path: Path): - """Create a new temporary folder for the given path.""" - super().__init__(path) - self._counter = 0 - self.cleanup() - self.path.mkdir(parents=True, exist_ok=True) - - def __str__(self) -> str: - """Format the folder as a string.""" - return f"Folder at {self.path}" - - def __enter__(self) -> 'TmpDir': - """Context manager entry function.""" - return self - - # pylint: disable=useless-return - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: 
Optional[BaseException], - traceback: Optional[TracebackType], - ) -> Optional[bool]: - """Context manager exit function. Calls cleanup().""" - self.cleanup() - return None - - def new_path(self, prefix: Optional[str] = None) -> Path: - """ - Return a unique path inside the directory. Doesn't create a file or - directory. - """ - - name = f"{prefix if prefix else 'tmp'}-{self._inc_and_get_counter():03}" - - LOGGER.debug("Creating temp file %s", name) - - return self.resolve(Path(name)) - - def new_subdir(self, prefix: Optional[str] = None) -> 'TmpDir': - """ - Create a new nested temporary folder and return it. - """ - - name = f"{prefix if prefix else 'tmp'}-{self._inc_and_get_counter():03}" - sub_path = self.resolve(Path(name)) - sub_path.mkdir(parents=True) - - LOGGER.debug("Creating temp dir %s at %s", name, sub_path) - - return TmpDir(sub_path) - - def cleanup(self) -> None: - """Delete this folder and all contained files.""" - LOGGER.debug("Deleting temp folder %s", self.path) - - if self.path.resolve().exists(): - shutil.rmtree(self.path.resolve()) - - def _inc_and_get_counter(self) -> int: - """Get and increment the counter by one.""" - counter = self._counter - self._counter += 1 - return counter diff --git a/PFERD/transform.py b/PFERD/transform.py deleted file mode 100644 index 16769df..0000000 --- a/PFERD/transform.py +++ /dev/null @@ -1,127 +0,0 @@ -""" -Transforms let the user define functions to decide where the downloaded files -should be placed locally. They let the user do more advanced things like moving -only files whose names match a regex, or renaming files from one numbering -scheme to another. -""" - -from dataclasses import dataclass -from pathlib import PurePath -from typing import Callable, List, Optional, TypeVar - -from .utils import PathLike, Regex, to_path, to_pattern - -Transform = Callable[[PurePath], Optional[PurePath]] - - -@dataclass -class Transformable: - """ - An object that can be transformed by a Transform. 
- """ - - path: PurePath - - -TF = TypeVar("TF", bound=Transformable) - - -def apply_transform( - transform: Transform, - transformables: List[TF], -) -> List[TF]: - """ - Apply a Transform to multiple Transformables, discarding those that were - not transformed by the Transform. - """ - - result: List[TF] = [] - for transformable in transformables: - new_path = transform(transformable.path) - if new_path: - transformable.path = new_path - result.append(transformable) - return result - -# Transform combinators - -keep = lambda path: path - -def attempt(*args: Transform) -> Transform: - def inner(path: PurePath) -> Optional[PurePath]: - for transform in args: - result = transform(path) - if result: - return result - return None - return inner - -def optionally(transform: Transform) -> Transform: - return attempt(transform, lambda path: path) - -def do(*args: Transform) -> Transform: - def inner(path: PurePath) -> Optional[PurePath]: - current = path - for transform in args: - result = transform(current) - if result: - current = result - else: - return None - return current - return inner - -def predicate(pred: Callable[[PurePath], bool]) -> Transform: - def inner(path: PurePath) -> Optional[PurePath]: - if pred(path): - return path - return None - return inner - -def glob(pattern: str) -> Transform: - return predicate(lambda path: path.match(pattern)) - -def move_dir(source_dir: PathLike, target_dir: PathLike) -> Transform: - source_path = to_path(source_dir) - target_path = to_path(target_dir) - def inner(path: PurePath) -> Optional[PurePath]: - if source_path in path.parents: - return target_path / path.relative_to(source_path) - return None - return inner - -def move(source: PathLike, target: PathLike) -> Transform: - source_path = to_path(source) - target_path = to_path(target) - def inner(path: PurePath) -> Optional[PurePath]: - if path == source_path: - return target_path - return None - return inner - -def rename(source: str, target: str) -> Transform: - def 
inner(path: PurePath) -> Optional[PurePath]: - if path.name == source: - return path.with_name(target) - return None - return inner - -def re_move(regex: Regex, target: str) -> Transform: - def inner(path: PurePath) -> Optional[PurePath]: - match = to_pattern(regex).fullmatch(str(path)) - if match: - groups = [match.group(0)] - groups.extend(match.groups()) - return PurePath(target.format(*groups)) - return None - return inner - -def re_rename(regex: Regex, target: str) -> Transform: - def inner(path: PurePath) -> Optional[PurePath]: - match = to_pattern(regex).fullmatch(path.name) - if match: - groups = [match.group(0)] - groups.extend(match.groups()) - return path.with_name(target.format(*groups)) - return None - return inner diff --git a/PFERD/transformer.py b/PFERD/transformer.py new file mode 100644 index 0000000..2cfb28d --- /dev/null +++ b/PFERD/transformer.py @@ -0,0 +1,447 @@ +import ast +import contextlib +import re +from abc import ABC, abstractmethod +from collections.abc import Callable, Sequence +from dataclasses import dataclass +from enum import Enum +from pathlib import PurePath +from typing import Optional, TypeVar + +from .logging import log +from .utils import fmt_path, str_path + + +class ArrowHead(Enum): + NORMAL = 0 + SEQUENCE = 1 + + +class Ignore: + pass + + +class Empty: + pass + + +RightSide = str | Ignore | Empty + + +@dataclass +class Transformed: + path: PurePath + + +class Ignored: + pass + + +TransformResult = Transformed | Ignored | None + + +@dataclass +class Rule: + left: str + left_index: int + name: str + head: ArrowHead + right: RightSide + right_index: int + + def right_result(self, path: PurePath) -> str | Transformed | Ignored: + if isinstance(self.right, str): + return self.right + elif isinstance(self.right, Ignore): + return Ignored() + elif isinstance(self.right, Empty): + return Transformed(path) + else: + raise RuntimeError(f"Right side has invalid type {type(self.right)}") + + +class Transformation(ABC): + def 
__init__(self, rule: Rule): + self.rule = rule + + @abstractmethod + def transform(self, path: PurePath) -> TransformResult: + pass + + +class ExactTf(Transformation): + def transform(self, path: PurePath) -> TransformResult: + if path != PurePath(self.rule.left): + return None + + right = self.rule.right_result(path) + if not isinstance(right, str): + return right + + return Transformed(PurePath(right)) + + +class ExactReTf(Transformation): + def transform(self, path: PurePath) -> TransformResult: + match = re.fullmatch(self.rule.left, str_path(path)) + if not match: + return None + + right = self.rule.right_result(path) + if not isinstance(right, str): + return right + + # For some reason, mypy thinks that "groups" has type List[str]. But + # since elements of "match.groups()" can be None, mypy is wrong. + groups: Sequence[Optional[str]] = [match[0]] + list(match.groups()) + + locals_dir: dict[str, str | int | float] = {} + for i, group in enumerate(groups): + if group is None: + continue + + locals_dir[f"g{i}"] = group + + with contextlib.suppress(ValueError): + locals_dir[f"i{i}"] = int(group) + + with contextlib.suppress(ValueError): + locals_dir[f"f{i}"] = float(group) + + named_groups: dict[str, str] = match.groupdict() + for name, capture in named_groups.items(): + locals_dir[name] = capture + + result = eval(f"f{right!r}", {}, locals_dir) + return Transformed(PurePath(result)) + + +class RenamingParentsTf(Transformation): + def __init__(self, sub_tf: Transformation): + super().__init__(sub_tf.rule) + self.sub_tf = sub_tf + + def transform(self, path: PurePath) -> TransformResult: + for i in range(len(path.parts), -1, -1): + parent = PurePath(*path.parts[:i]) + child = PurePath(*path.parts[i:]) + + transformed = self.sub_tf.transform(parent) + if not transformed: + continue + elif isinstance(transformed, Transformed): + return Transformed(transformed.path / child) + elif isinstance(transformed, Ignored): + return transformed + else: + raise 
RuntimeError(f"Invalid transform result of type {type(transformed)}: {transformed}") + + return None + + +class RenamingPartsTf(Transformation): + def __init__(self, sub_tf: Transformation): + super().__init__(sub_tf.rule) + self.sub_tf = sub_tf + + def transform(self, path: PurePath) -> TransformResult: + result = PurePath() + any_part_matched = False + for part in path.parts: + transformed = self.sub_tf.transform(PurePath(part)) + if not transformed: + result /= part + elif isinstance(transformed, Transformed): + result /= transformed.path + any_part_matched = True + elif isinstance(transformed, Ignored): + return transformed + else: + raise RuntimeError(f"Invalid transform result of type {type(transformed)}: {transformed}") + + if any_part_matched: + return Transformed(result) + else: + return None + + +class RuleParseError(Exception): + def __init__(self, line: "Line", reason: str): + super().__init__(f"Error in rule on line {line.line_nr}, column {line.index}: {reason}") + + self.line = line + self.reason = reason + + def pretty_print(self) -> None: + log.error(f"Error parsing rule on line {self.line.line_nr}:") + log.error_contd(self.line.line) + spaces = " " * self.line.index + log.error_contd(f"{spaces}^--- {self.reason}") + + +T = TypeVar("T") + + +class Line: + def __init__(self, line: str, line_nr: int): + self._line = line + self._line_nr = line_nr + self._index = 0 + + @property + def line(self) -> str: + return self._line + + @property + def line_nr(self) -> int: + return self._line_nr + + @property + def index(self) -> int: + return self._index + + @index.setter + def index(self, index: int) -> None: + self._index = index + + @property + def rest(self) -> str: + return self.line[self.index :] + + def peek(self, amount: int = 1) -> str: + return self.rest[:amount] + + def take(self, amount: int = 1) -> str: + string = self.peek(amount) + self.index += len(string) + return string + + def expect(self, string: str) -> str: + if self.peek(len(string)) == 
string: + return self.take(len(string)) + else: + raise RuleParseError(self, f"Expected {string!r}") + + def expect_with(self, string: str, value: T) -> T: + self.expect(string) + return value + + def one_of(self, parsers: list[Callable[[], T]], description: str) -> T: + for parser in parsers: + index = self.index + try: + return parser() + except RuleParseError: + self.index = index + + raise RuleParseError(self, description) + + +# RULE = LEFT SPACE '-' NAME '-' HEAD (SPACE RIGHT)? +# SPACE = ' '+ +# NAME = '' | 'exact' | 'name' | 're' | 'exact-re' | 'name-re' +# HEAD = '>' | '>>' +# LEFT = STR | QUOTED_STR +# RIGHT = STR | QUOTED_STR | '!' + + +def parse_zero_or_more_spaces(line: Line) -> None: + while line.peek() == " ": + line.take() + + +def parse_one_or_more_spaces(line: Line) -> None: + line.expect(" ") + parse_zero_or_more_spaces(line) + + +def parse_str(line: Line) -> str: + result = [] + while c := line.peek(): + if c == " ": + break + else: + line.take() + result.append(c) + + if result: + return "".join(result) + else: + raise RuleParseError(line, "Expected non-space character") + + +QUOTATION_MARKS = {'"', "'"} + + +def parse_quoted_str(line: Line) -> str: + escaped = False + + # Points to first character of string literal + start_index = line.index + + quotation_mark = line.peek() + if quotation_mark not in QUOTATION_MARKS: + raise RuleParseError(line, "Expected quotation mark") + line.take() + + while c := line.peek(): + if escaped: + escaped = False + line.take() + elif c == quotation_mark: + line.take() + stop_index = line.index + literal = line.line[start_index:stop_index] + try: + return ast.literal_eval(literal) + except SyntaxError as e: + line.index = start_index + raise RuleParseError(line, str(e)) from e + elif c == "\\": + escaped = True + line.take() + else: + line.take() + + raise RuleParseError(line, "Expected end of string literal") + + +def parse_left(line: Line) -> str: + if line.peek() in QUOTATION_MARKS: + return 
parse_quoted_str(line) + else: + return parse_str(line) + + +def parse_right(line: Line) -> str | Ignore: + c = line.peek() + if c in QUOTATION_MARKS: + return parse_quoted_str(line) + else: + string = parse_str(line) + if string == "!": + return Ignore() + return string + + +def parse_arrow_name(line: Line) -> str: + return line.one_of( + [ + lambda: line.expect("exact-re"), + lambda: line.expect("exact"), + lambda: line.expect("name-re"), + lambda: line.expect("name"), + lambda: line.expect("re"), + lambda: line.expect(""), + ], + "Expected arrow name", + ) + + +def parse_arrow_head(line: Line) -> ArrowHead: + return line.one_of( + [ + lambda: line.expect_with(">>", ArrowHead.SEQUENCE), + lambda: line.expect_with(">", ArrowHead.NORMAL), + ], + "Expected arrow head", + ) + + +def parse_eol(line: Line) -> None: + if line.peek(): + raise RuleParseError(line, "Expected end of line") + + +def parse_rule(line: Line) -> Rule: + parse_zero_or_more_spaces(line) + left_index = line.index + left = parse_left(line) + + parse_one_or_more_spaces(line) + + line.expect("-") + name = parse_arrow_name(line) + line.expect("-") + head = parse_arrow_head(line) + + right_index = line.index + right: RightSide + try: + parse_zero_or_more_spaces(line) + parse_eol(line) + right = Empty() + except RuleParseError: + line.index = right_index + parse_one_or_more_spaces(line) + right = parse_right(line) + parse_eol(line) + + return Rule(left, left_index, name, head, right, right_index) + + +def parse_transformation(line: Line) -> Transformation: + rule = parse_rule(line) + + if rule.name == "": + return RenamingParentsTf(ExactTf(rule)) + elif rule.name == "exact": + return ExactTf(rule) + elif rule.name == "name": + if len(PurePath(rule.left).parts) > 1: + line.index = rule.left_index + raise RuleParseError(line, "Expected name, not multiple segments") + return RenamingPartsTf(ExactTf(rule)) + elif rule.name == "re": + return RenamingParentsTf(ExactReTf(rule)) + elif rule.name == "exact-re": + 
return ExactReTf(rule) + elif rule.name == "name-re": + return RenamingPartsTf(ExactReTf(rule)) + else: + raise RuntimeError(f"Invalid arrow name {rule.name!r}") + + +class Transformer: + def __init__(self, rules: str): + """ + May throw a RuleParseException. + """ + + self._tfs = [] + for i, line in enumerate(rules.split("\n")): + line = line.strip() + if line: + tf = parse_transformation(Line(line, i)) + self._tfs.append((line, tf)) + + def transform(self, path: PurePath) -> Optional[PurePath]: + for i, (line, tf) in enumerate(self._tfs): + log.explain(f"Testing rule {i + 1}: {line}") + + try: + result = tf.transform(path) + except Exception as e: + log.warn(f"Error while testing rule {i + 1}: {line}") + log.warn_contd(str(e)) + continue + + if not result: + continue + + if isinstance(result, Ignored): + log.explain("Match found, path ignored") + return None + + if tf.rule.head == ArrowHead.NORMAL: + log.explain(f"Match found, transformed path to {fmt_path(result.path)}") + path = result.path + break + elif tf.rule.head == ArrowHead.SEQUENCE: + log.explain(f"Match found, updated path to {fmt_path(result.path)}") + path = result.path + else: + raise RuntimeError(f"Invalid transform result of type {type(result)}: {result}") + + log.explain(f"Final result: {fmt_path(path)}") + return path diff --git a/PFERD/utils.py b/PFERD/utils.py index 56c101a..1aa0585 100644 --- a/PFERD/utils.py +++ b/PFERD/utils.py @@ -1,98 +1,149 @@ -""" -A few utility bobs and bits. 
-""" - -import re +import asyncio +import getpass +import sys +import threading +from abc import ABC, abstractmethod +from collections.abc import Callable +from contextlib import AsyncExitStack from pathlib import Path, PurePath -from typing import Optional, Tuple, Union +from types import TracebackType +from typing import Any, Generic, Optional, TypeVar +from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit import bs4 -import requests -from .progress import ProgressSettings, progress_for, size_from_headers - -PathLike = Union[PurePath, str, Tuple[str, ...]] +T = TypeVar("T") -def to_path(pathlike: PathLike) -> Path: +async def in_daemon_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: + loop = asyncio.get_running_loop() + future: asyncio.Future[T] = asyncio.Future() + + def thread_func() -> None: + result = func() + loop.call_soon_threadsafe(future.set_result, result) + + threading.Thread(target=thread_func, daemon=True).start() + + return await future + + +async def ainput(prompt: str) -> str: + return await in_daemon_thread(lambda: input(prompt)) + + +async def agetpass(prompt: str) -> str: + return await in_daemon_thread(lambda: getpass.getpass(prompt)) + + +async def prompt_yes_no(query: str, default: Optional[bool]) -> bool: """ - Convert a given PathLike into a Path. - """ - if isinstance(pathlike, tuple): - return Path(*pathlike) - return Path(pathlike) - - -Regex = Union[str, re.Pattern] - - -def to_pattern(regex: Regex) -> re.Pattern: - """ - Convert a regex to a re.Pattern. - """ - if isinstance(regex, re.Pattern): - return regex - return re.compile(regex) - - -def soupify(response: requests.Response) -> bs4.BeautifulSoup: - """ - Wrap a requests response in a bs4 object. 
- """ - - return bs4.BeautifulSoup(response.text, "html.parser") - - -def stream_to_path( - response: requests.Response, - target: Path, - progress_name: Optional[str] = None, - chunk_size: int = 1024 ** 2 -) -> None: - """ - Download a requests response content to a file by streaming it. This - function avoids excessive memory usage when downloading large files. The - chunk_size is in bytes. - - If progress_name is None, no progress bar will be shown. Otherwise a progress - bar will appear, if the download is bigger than an internal threshold. - """ - - with response: - length = size_from_headers(response) - if progress_name and length and int(length) > 1024 * 1024 * 10: # 10 MiB - settings: Optional[ProgressSettings] = ProgressSettings(progress_name, length) - else: - settings = None - - with open(target, 'wb') as file_descriptor: - with progress_for(settings) as progress: - for chunk in response.iter_content(chunk_size=chunk_size): - file_descriptor.write(chunk) - progress.advance(len(chunk)) - - -def prompt_yes_no(question: str, default: Optional[bool] = None) -> bool: - """ - Prompts the user a yes/no question and returns their choice. + Asks the user a yes/no question and returns their choice. """ if default is True: - prompt = "[Y/n]" + query += " [Y/n] " elif default is False: - prompt = "[y/N]" + query += " [y/N] " else: - prompt = "[y/n]" - - text = f"{question} {prompt} " - wrong_reply = "Please reply with 'yes'/'y' or 'no'/'n'." + query += " [y/n] " while True: - response = input(text).strip().lower() - if response in {"yes", "ye", "y"}: + response = (await ainput(query)).strip().lower() + if response == "y": return True - if response in {"no", "n"}: + elif response == "n": return False - if response == "" and default is not None: + elif response == "" and default is not None: return default - print(wrong_reply) + + print("Please answer with 'y' or 'n'.") + + +def soupify(data: bytes) -> bs4.BeautifulSoup: + """ + Parses HTML to a beautifulsoup object. 
+ """ + + return bs4.BeautifulSoup(data, "html.parser") + + +def url_set_query_param(url: str, param: str, value: str) -> str: + """ + Set a query parameter in an url, overwriting existing ones with the same name. + """ + scheme, netloc, path, query, fragment = urlsplit(url) + query_parameters = parse_qs(query) + query_parameters[param] = [value] + new_query_string = urlencode(query_parameters, doseq=True) + + return urlunsplit((scheme, netloc, path, new_query_string, fragment)) + + +def url_set_query_params(url: str, params: dict[str, str]) -> str: + """ + Sets multiple query parameters in an url, overwriting existing ones. + """ + result = url + + for key, val in params.items(): + result = url_set_query_param(result, key, val) + + return result + + +def str_path(path: PurePath) -> str: + if not path.parts: + return "." + return "/".join(path.parts) + + +def fmt_path(path: PurePath) -> str: + return repr(str_path(path)) + + +def fmt_real_path(path: Path) -> str: + return repr(str(path.absolute())) + + +def sanitize_path_name(name: str) -> str: + return name.replace("/", "-").replace("\\", "-").strip() + + +class ReusableAsyncContextManager(ABC, Generic[T]): + def __init__(self) -> None: + self._active = False + self._stack = AsyncExitStack() + + @abstractmethod + async def _on_aenter(self) -> T: + pass + + async def __aenter__(self) -> T: + if self._active: + raise RuntimeError("Nested or otherwise concurrent usage is not allowed") + + self._active = True + await self._stack.__aenter__() + + # See https://stackoverflow.com/a/13075071 + try: + result: T = await self._on_aenter() + return result + except: + if not await self.__aexit__(*sys.exc_info()): + raise + raise + + async def __aexit__( + self, + exc_type: Optional[type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> Optional[bool]: + if not self._active: + raise RuntimeError("__aexit__ called too many times") + + result = await 
self._stack.__aexit__(exc_type, exc_value, traceback) + self._active = False + return result diff --git a/PFERD/version.py b/PFERD/version.py new file mode 100644 index 0000000..c6c8b14 --- /dev/null +++ b/PFERD/version.py @@ -0,0 +1,2 @@ +NAME = "PFERD" +VERSION = "3.8.3" diff --git a/README.md b/README.md index a1cd1dd..c96fea0 100644 --- a/README.md +++ b/README.md @@ -2,253 +2,157 @@ **P**rogramm zum **F**lotten, **E**infachen **R**unterladen von **D**ateien -- [Quickstart with `sync_url`](#quickstart-with-sync_url) -- [Installation](#installation) - - [Upgrading from 2.0.0 to 2.1.0+](#upgrading-from-200-to-210) -- [Example setup](#example-setup) -- [Usage](#usage) - - [General concepts](#general-concepts) - - [Constructing transforms](#constructing-transforms) - - [Transform creators](#transform-creators) - - [Transform combinators](#transform-combinators) - - [A short, but commented example](#a-short-but-commented-example) +Other resources: -## Quickstart with `sync_url` - -The `sync_url` program allows you to just synchronize a given ILIAS URL (of a -course, a folder, your personal desktop, etc.) without any extra configuration -or setting up. Download the program, open ILIAS, copy the URL from the address -bar and pass it to sync_url. - -It bundles everything it needs in one executable and is easy to -use, but doesn't expose all the configuration options and tweaks a full install -does. - -1. Download the `sync_url` binary from the [latest release](https://github.com/Garmelon/PFERD/releases/latest). -2. Run the binary in your terminal (`./sync_url` or `sync_url.exe` in the CMD) to see the help and use it. I'd recommend using the `--cookies` option. - If you are on **Linux/Mac**, you need to *make the file executable* using `chmod +x `. - If you are on **Mac**, you need to allow this unverified program to run (see e.g. 
[here](https://www.switchingtomac.com/tutorials/osx/how-to-run-unverified-apps-on-macos/)) +- [Config file format](CONFIG.md) +- [Changelog](CHANGELOG.md) +- [Development Guide](DEV.md) ## Installation -Ensure that you have at least Python 3.8 installed. +### Direct download + +Binaries for Linux, Windows and Mac can be downloaded directly from the +[latest release](https://github.com/Garmelon/PFERD/releases/latest). + +### With pip + +Ensure you have at least Python 3.11 installed. Run the following command to +install PFERD or upgrade it to the latest version: -To install PFERD or update your installation to the latest version, run this -wherever you want to install or have already installed PFERD: ``` -$ pip install git+https://github.com/Garmelon/PFERD@v2.4.1 +$ pip install --upgrade git+https://github.com/Garmelon/PFERD@latest ``` -The use of [venv] is recommended. +The use of [venv](https://docs.python.org/3/library/venv.html) is recommended. -[venv]: https://docs.python.org/3/library/venv.html +### With package managers -### Upgrading from 2.0.0 to 2.1.0+ +Unofficial packages are available for: +- [AUR](https://aur.archlinux.org/packages/pferd) +- [brew](https://formulae.brew.sh/formula/pferd) +- [conda-forge](https://github.com/conda-forge/pferd-feedstock) +- [nixpkgs](https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/misc/pferd/default.nix) +- [PyPi](https://pypi.org/project/pferd) -- The `IliasDirectoryType` type was renamed to `IliasElementType` and is now far more detailed. - The new values are: `REGULAR_FOLDER`, `VIDEO_FOLDER`, `EXERCISE_FOLDER`, `REGULAR_FILE`, `VIDEO_FILE`, `FORUM`, `EXTERNAL_LINK`. -- Forums and external links are skipped automatically if you use the `kit_ilias` helper. +See also PFERD's [repology page](https://repology.org/project/pferd/versions). -## Example setup +## Basic usage -In this example, `python3` refers to at least Python 3.8. +PFERD can be run directly from the command line with no config file. 
Run `pferd +-h` to get an overview of available commands and options. Run `pferd +-h` to see which options a command has. + +For example, you can download your personal desktop from the KIT ILIAS like +this: -A full example setup and initial use could look like: ``` -$ mkdir Vorlesungen -$ cd Vorlesungen -$ python3 -m venv .venv -$ .venv/bin/activate -$ pip install git+https://github.com/Garmelon/PFERD@v2.4.1 -$ curl -O https://raw.githubusercontent.com/Garmelon/PFERD/v2.4.1/example_config.py -$ python3 example_config.py -$ deactivate +$ pferd kit-ilias-web desktop ``` -Subsequent runs of the program might look like: +Also, you can download most ILIAS pages directly like this: + ``` -$ cd Vorlesungen -$ .venv/bin/activate -$ python3 example_config.py -$ deactivate +$ pferd kit-ilias-web ``` -If you just want to get started and crawl *your entire ILIAS Desktop* instead -of a given set of courses, please replace `example_config.py` with -`example_config_personal_desktop.py` in all of the instructions below (`curl` call and -`python3` run command). +PFERD supports other ILIAS instances as well, using the `ilias-web` crawler (see +the [config section on `ilias-web`](CONFIG.md#the-ilias-web-crawler) for more +detail on the `base-url` and `client-id` parameters): -## Usage - -### General concepts - -A PFERD config is a normal python file that starts multiple *synchronizers* -which do all the heavy lifting. While you can create and wire them up manually, -you are encouraged to use the helper methods provided in `PFERD.Pferd`. - -The synchronizers take some input arguments specific to their service and a -*transform*. The transform receives the computed path of an element in ILIAS and -can return either an output path (so you can rename files or move them around as -you wish) or `None` if you do not want to save the given file. - -Additionally the ILIAS synchronizer allows you to define a *crawl filter*. 
This -filter also receives the computed path as the input, but is only called for -*directories*. If you return `True`, the directory will be crawled and -searched. If you return `False` the directory will be ignored and nothing in it -will be passed to the transform. - -### Constructing transforms - -While transforms are just normal python functions, writing them by hand can -quickly become tedious. In order to help you with writing your own transforms -and filters, PFERD defines a few useful transform creators and combinators in -the `PFERD.transform` module: - -#### Transform creators - -These methods let you create a few basic transform building blocks: - -- **`glob(glob)`** - Creates a transform that returns the unchanged path if the glob matches the path and `None` otherwise. - See also [Path.match]. - Example: `glob("Übung/*.pdf")` -- **`predicate(pred)`** - Creates a transform that returns the unchanged path if `pred(path)` returns a truthy value. - Returns `None` otherwise. - Example: `predicate(lambda path: len(path.parts) == 3)` -- **`move_dir(source, target)`** - Creates a transform that moves all files from the `source` to the `target` directory. - Example: `move_dir("Übung/", "Blätter/")` -- **`move(source, target)`** - Creates a transform that moves the `source` file to `target`. - Example: `move("Vorlesung/VL02_Automten.pdf", "Vorlesung/VL02_Automaten.pdf")` -- **`rename(source, target)`** - Creates a transform that renames all files named `source` to `target`. - This transform works on the file names, not paths, and thus works no matter where the file is located. - Example: `rename("VL02_Automten.pdf", "VL02_Automaten.pdf")` -- **`re_move(regex, target)`** - Creates a transform that moves all files matching `regex` to `target`. - The transform `str.format` on the `target` string with the contents of the capturing groups before returning it. - The capturing groups can be accessed via their index. - See also [Match.group]. 
- Example: `re_move(r"Übung/Blatt (\d+)\.pdf", "Blätter/Blatt_{1:0>2}.pdf")` -- **`re_rename(regex, target)`** - Creates a transform that renames all files matching `regex` to `target`. - This transform works on the file names, not paths, and thus works no matter where the file is located. - Example: `re_rename(r"VL(\d+)(.*)\.pdf", "Vorlesung_Nr_{1}__{2}.pdf")` - -All movement or rename transforms above return `None` if a file doesn't match -their movement or renaming criteria. This enables them to be used as building -blocks to build up more complex transforms. - -In addition, `PFERD.transform` also defines the `keep` transform which returns its input path unchanged. -This behaviour can be very useful when creating more complex transforms. -See below for example usage. - -[Path.match]: https://docs.python.org/3/library/pathlib.html#pathlib.Path.match -[Match.group]: https://docs.python.org/3/library/re.html#re.Match.group - -#### Transform combinators - -These methods let you combine transforms into more complex transforms: - -- **`optionally(transform)`** - Wraps a given transform and returns its result if it is not `None`. - Otherwise returns the input path unchanged. - See below for example usage. -* **`do(transforms)`** - Accepts a series of transforms and applies them in the given order to the result of the previous one. - If any transform returns `None`, `do` short-circuits and also returns `None`. - This can be used to perform multiple renames in a row: - ```py - do( - # Move them - move_dir("Vorlesungsmaterial/Vorlesungsvideos/", "Vorlesung/Videos/"), - # Fix extensions (if they have any) - optionally(re_rename("(.*).m4v.mp4", "{1}.mp4")), - # Remove the 'dbs' prefix (if they have any) - optionally(re_rename("(?i)dbs-(.+)", "{1}")), - ) - ``` -- **`attempt(transforms)`** - Applies the passed transforms in the given order until it finds one that does not return `None`. - If it does not find any, it returns `None`. 
- This can be used to give a list of possible transformations and automatically pick the first one that fits: - ```py - attempt( - # Move all videos. If a video is passed in, this `re_move` will succeed - # and attempt short-circuits with the result. - re_move(r"Vorlesungsmaterial/.*/(.+?)\.mp4", "Vorlesung/Videos/{1}.mp4"), - # Move the whole folder to a nicer name - now without any mp4! - move_dir("Vorlesungsmaterial/", "Vorlesung/"), - # If we got another file, keep it. - keep, - ) - ``` - -All of these combinators are used in the provided example configs, if you want -to see some more real-life usages. - -### A short, but commented example - -```py -from pathlib import Path, PurePath -from PFERD import Pferd -from PFERD.ilias import IliasElementType -from PFERD.transform import * - -# This filter will later be used by the ILIAS crawler to decide whether it -# should crawl a directory (or directory-like structure). -def filter_course(path: PurePath, type: IliasElementType) -> bool: - # Note that glob returns a Transform, which is a function from PurePath -> - # Optional[PurePath]. Because of this, we need to apply the result of - # 'glob' to our input path. The returned value will be truthy (a Path) if - # the transform succeeded, or `None` if it failed. - - # We need to crawl the 'Tutorien' folder as it contains one that we want. - if glob("Tutorien/")(path): - return True - # If we found 'Tutorium 10', keep it! - if glob("Tutorien/Tutorium 10")(path): - return True - # Discard all other folders inside 'Tutorien' - if glob("Tutorien/*")(path): - return False - - # All other dirs (including subdirs of 'Tutorium 10') should be searched :) - return True - - -# This transform will later be used to rename a few files. It can also be used -# to ignore some files. -transform_course = attempt( - # We don't care about the other tuts and would instead prefer a cleaner - # directory structure. 
- move_dir("Tutorien/Tutorium 10/", "Tutorium/"), - # We don't want to modify any other files, so we're going to keep them - # exactly as they are. - keep -) - -# Enable and configure the text output. Needs to be called before calling any -# other PFERD methods. -Pferd.enable_logging() -# Create a Pferd instance rooted in the same directory as the script file. This -# is not a test run, so files will be downloaded (default, can be omitted). -pferd = Pferd(Path(__file__).parent, test_run=False) - -# Use the ilias_kit helper to synchronize an ILIAS course -pferd.ilias_kit( - # The directory that all of the downloaded files should be placed in - "My_cool_course/", - # The course ID (found in the URL when on the course page in ILIAS) - "course id", - # A path to a cookie jar. If you synchronize multiple ILIAS courses, - # setting this to a common value requires you to only log in once. - cookies=Path("ilias_cookies.txt"), - # A transform can rename, move or filter out certain files - transform=transform_course, - # A crawl filter limits what paths the cralwer searches - dir_filter=filter_course, -) +``` +$ pferd ilias-web \ + --base-url https://ilias.my-university.example \ + --client-id My_University desktop \ + +``` + +However, the CLI only lets you download a single thing at a time, and the +resulting command can grow long quite quickly. Because of this, PFERD can also +be used with a config file. + +To get started, just take a command you've been using and add `--dump-config` +directly after `pferd`, like this: + +``` +$ pferd --dump-config kit-ilias-web +``` + +This will make PFERD write its current configuration to its default config file +path. You can then run `pferd` without a command and it will execute the config +file. Alternatively, you can use `--dump-config-to` and specify a path yourself. +Using `--dump-config-to -` will print the configuration to stdout instead of a +file, which is a good way to see what is actually going on when using a CLI +command. 
+ +Another good way to see what PFERD is doing is the `--explain` option. When +enabled, PFERD explains in detail what it is doing and why. This can help with +debugging your own config. + +If you don't want to run all crawlers from your config file, you can specify the +crawlers you want to run with `--crawler` or `-C`, like this: + +``` +$ pferd -C crawler1 -C crawler2 +``` + +## Advanced usage + +PFERD supports lots of different options. For example, you can configure PFERD +to [use your system's keyring](CONFIG.md#the-keyring-authenticator) instead of +prompting you for your username and password. PFERD also supports +[transformation rules](CONFIG.md#transformation-rules) that let you rename or +exclude certain files. + +For more details, see the comprehensive [config format documentation](CONFIG.md). + +## Example + +This example downloads a few courses from the KIT ILIAS with a common keyring +authenticator. It reorganizes and ignores some files. + +```ini +[DEFAULT] +# All paths will be relative to this. +# The crawler output directories will be /Foo and /Bar. +working_dir = ~/stud +# If files vanish from ILIAS the local files are not deleted, allowing us to +# take a look at them before deleting them ourselves. +on_conflict = no-delete + +[auth:ilias] +type = keyring +username = foo + +[crawl:Foo] +type = kit-ilias-web +auth = auth:ilias +# Crawl a course by its ID (found as `ref_id=ID` in the URL) +target = 1234567 + +# Plaintext files are easier to read by other tools +links = plaintext + +transform = + # Ignore unneeded folders + Online-Tests --> ! + Vorlesungswerbung --> ! + + # Rename folders + Lehrbücher --> Vorlesung + # Note the ">>" arrow head which lets us apply further rules to files moved to "Übung" + Übungsunterlagen -->> Übung + + # Move exercises to own folder. Rename them to "Blatt-XX.pdf" to make them sort properly + "Übung/(\d+). Übungsblatt.pdf" -re-> Blätter/Blatt-{i1:02}.pdf + # Move solutions to own folder. 
Rename them to "Blatt-XX-Lösung.pdf" to make them sort properly + "Übung/(\d+). Übungsblatt.*Musterlösung.pdf" -re-> Blätter/Blatt-{i1:02}-Lösung.pdf + + # The course has nested folders with the same name - flatten them + "Übung/(.+?)/\\1" -re-> Übung/{g1} + +[crawl:Bar] +type = kit-ilias-web +auth = auth:ilias +target = 1337420 ``` diff --git a/example_config.py b/example_config.py deleted file mode 100644 index bffecfb..0000000 --- a/example_config.py +++ /dev/null @@ -1,131 +0,0 @@ -import argparse -from pathlib import Path, PurePath - -from PFERD import Pferd -from PFERD.ilias import IliasElementType -from PFERD.transform import (attempt, do, glob, keep, move, move_dir, - optionally, re_move, re_rename) - -tf_ss_2020_numerik = attempt( - re_move(r"Übungsblätter/(\d+)\. Übungsblatt/.*", "Blätter/Blatt_{1:0>2}.pdf"), - keep, -) - - -tf_ss_2020_db = attempt( - move_dir("Begrüßungsvideo/", "Vorlesung/Videos/"), - do( - move_dir("Vorlesungsmaterial/Vorlesungsvideos/", "Vorlesung/Videos/"), - optionally(re_rename("(.*).m4v.mp4", "{1}.mp4")), - optionally(re_rename("(?i)dbs-(.+)", "{1}")), - ), - move_dir("Vorlesungsmaterial/", "Vorlesung/"), - keep, -) - - -tf_ss_2020_rechnernetze = attempt( - re_move(r"Vorlesungsmaterial/.*/(.+?)\.mp4", "Vorlesung/Videos/{1}.mp4"), - move_dir("Vorlesungsmaterial/", "Vorlesung/"), - keep, -) - - -tf_ss_2020_sicherheit = attempt( - move_dir("Vorlesungsvideos/", "Vorlesung/Videos/"), - move_dir("Übungsvideos/", "Übung/Videos/"), - re_move(r"VL(.*)\.pdf", "Vorlesung/{1}.pdf"), - re_move(r"Übungsblatt (\d+)\.pdf", "Blätter/Blatt_{1:0>2}.pdf"), - move("Chiffrat.txt", "Blätter/Blatt_01_Chiffrat.txt"), - keep, -) - - -tf_ss_2020_pg = attempt( - move_dir("Vorlesungsaufzeichnungen/", "Vorlesung/Videos/"), - move_dir("Vorlesungsmaterial/", "Vorlesung/"), - re_move(r"Übungen/uebungsblatt(\d+).pdf", "Blätter/Blatt_{1:0>2}.pdf"), - keep, -) - - -def df_ss_2020_or1(path: PurePath, _type: IliasElementType) -> bool: - if glob("Tutorien/")(path): - 
return True - if glob("Tutorien/Tutorium 10, dienstags 15:45 Uhr/")(path): - return True - if glob("Tutorien/*")(path): - return False - return True - - -tf_ss_2020_or1 = attempt( - move_dir("Vorlesung/Unbeschriebene Folien/", "Vorlesung/Folien/"), - move_dir("Video zur Organisation/", "Vorlesung/Videos/"), - keep, -) - - -def main() -> None: - parser = argparse.ArgumentParser() - parser.add_argument("--test-run", action="store_true") - parser.add_argument("synchronizers", nargs="*") - args = parser.parse_args() - - pferd = Pferd(Path(__file__).parent, test_run=args.test_run) - pferd.enable_logging() - - if not args.synchronizers or "numerik" in args.synchronizers: - pferd.ilias_kit( - target="Numerik", - course_id="1083036", - transform=tf_ss_2020_numerik, - cookies="ilias_cookies.txt", - ) - - if not args.synchronizers or "db" in args.synchronizers: - pferd.ilias_kit( - target="DB", - course_id="1101554", - transform=tf_ss_2020_db, - cookies="ilias_cookies.txt", - ) - - if not args.synchronizers or "rechnernetze" in args.synchronizers: - pferd.ilias_kit( - target="Rechnernetze", - course_id="1099996", - transform=tf_ss_2020_rechnernetze, - cookies="ilias_cookies.txt", - ) - - if not args.synchronizers or "sicherheit" in args.synchronizers: - pferd.ilias_kit( - target="Sicherheit", - course_id="1101980", - transform=tf_ss_2020_sicherheit, - cookies="ilias_cookies.txt", - ) - - if not args.synchronizers or "pg" in args.synchronizers: - pferd.ilias_kit( - target="PG", - course_id="1106095", - transform=tf_ss_2020_pg, - cookies="ilias_cookies.txt", - ) - - if not args.synchronizers or "or1" in args.synchronizers: - pferd.ilias_kit( - target="OR1", - course_id="1105941", - dir_filter=df_ss_2020_or1, - transform=tf_ss_2020_or1, - cookies="ilias_cookies.txt", - ) - - # Prints a summary listing all new, modified or deleted files - pferd.print_summary() - -if __name__ == "__main__": - main() diff --git a/example_config_personal_desktop.py 
b/example_config_personal_desktop.py deleted file mode 100644 index 8d481b4..0000000 --- a/example_config_personal_desktop.py +++ /dev/null @@ -1,38 +0,0 @@ -""" -This is a small config that just crawls the ILIAS Personal Desktop. -It does not filter or rename anything, it just gobbles up everything it can find. - -Note that this still includes a test-run switch, so you can see what it *would* download. -You can enable that with the "--test-run" command line switch, -i. e. "python3 example_config_minimal.py --test-run". -""" - -import argparse -from pathlib import Path - -from PFERD import Pferd - - -def main() -> None: - # Parse command line arguments - parser = argparse.ArgumentParser() - parser.add_argument("--test-run", action="store_true") - args = parser.parse_args() - - # Create the Pferd helper instance - pferd = Pferd(Path(__file__).parent, test_run=args.test_run) - pferd.enable_logging() - - # Synchronize the personal desktop into the "ILIAS" directory. - # It saves the cookies, so you only need to log in again when the ILIAS cookies expire. 
- pferd.ilias_kit_personal_desktop( - "ILIAS", - cookies="ilias_cookies.txt", - ) - - # Prints a summary listing all new, modified or deleted files - pferd.print_summary() - - -if __name__ == "__main__": - main() diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..ae603f1 --- /dev/null +++ b/flake.lock @@ -0,0 +1,27 @@ +{ + "nodes": { + "nixpkgs": { + "locked": { + "lastModified": 1760725957, + "narHash": "sha256-tdoIhL/NlER290HfSjOkgi4jfmjeqmqrzgnmiMtGepE=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "81b927b14b7b3988334d5282ef9cba802e193fe1", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-25.05", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..7027e20 --- /dev/null +++ b/flake.nix @@ -0,0 +1,41 @@ +{ + description = "Tool for downloading course-related files from ILIAS"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05"; + }; + + outputs = { self, nixpkgs }: + let + # Helper function to generate an attrset '{ x86_64-linux = f "x86_64-linux"; ... }'. 
+ forAllSystems = nixpkgs.lib.genAttrs nixpkgs.lib.systems.flakeExposed; + in + { + packages = forAllSystems (system: + let pkgs = import nixpkgs { inherit system; }; + in + rec { + default = pkgs.python3Packages.buildPythonApplication rec { + pname = "pferd"; + # Performing black magic + # Don't worry, I sacrificed enough goats for the next few years + version = (pkgs.lib.importTOML ./PFERD/version.py).VERSION; + format = "pyproject"; + + src = ./.; + + nativeBuildInputs = with pkgs.python3Packages; [ + setuptools + ]; + + propagatedBuildInputs = with pkgs.python3Packages; [ + aiohttp + beautifulsoup4 + rich + keyring + certifi + ]; + }; + }); + }; +} diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 91792d8..0000000 --- a/mypy.ini +++ /dev/null @@ -1,7 +0,0 @@ -[mypy] -disallow_untyped_defs = True -disallow_incomplete_defs = True -no_implicit_optional = True - -[mypy-rich.*,bs4] -ignore_missing_imports = True diff --git a/pferd.py b/pferd.py new file mode 100644 index 0000000..dfea7c2 --- /dev/null +++ b/pferd.py @@ -0,0 +1,6 @@ +# File used by pyinstaller to create the executable + +from PFERD.__main__ import main + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..96aa4a9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,52 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "PFERD" +dependencies = [ + "aiohttp>=3.8.1", + "beautifulsoup4>=4.10.0", + "rich>=11.0.0", + "keyring>=23.5.0", + "certifi>=2021.10.8" +] +dynamic = ["version"] +requires-python = ">=3.11" + +[project.scripts] +pferd = "PFERD.__main__:main" + +[tool.setuptools.dynamic] +version = {attr = "PFERD.version.VERSION"} + +[tool.ruff] +line-length = 110 + +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", +] +ignore = [ + "UP045", + "SIM114", 
+ "B023" +] + +[dependency-groups] +dev = [ + "pyinstaller>=6.16.0", + "pyright>=1.1.406", + "ruff>=0.14.1", +] diff --git a/scripts/build b/scripts/build new file mode 100755 index 0000000..65746c7 --- /dev/null +++ b/scripts/build @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +set -e + +uv run pyinstaller --onefile pferd.py diff --git a/scripts/bump-version b/scripts/bump-version new file mode 100755 index 0000000..e341a4e --- /dev/null +++ b/scripts/bump-version @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 + +import argparse +import re +import time +from subprocess import run + + +def load_changelog(): + with open("CHANGELOG.md") as f: + return list(f) + + +def extract_changes(lines): + lines = iter(lines) + changes = [] + + # Find "Unreleased" section + for line in lines: + if line.strip() == "## Unreleased": + break + next(lines) + + # Read all lines from that section + for line in lines: + if line.startswith("## "): + # Found the beginning of the next section + break + elif line.startswith("### "): + # Found a heading in the current section + # Remove "#" symbols so git doesn't interpret the line as a comment later + changes.append(line[4:]) + else: + changes.append(line) + + # Remove trailing empty lines + while changes and not changes[-1].strip(): + changes.pop() + + return changes + + +def update_version(version): + with open("PFERD/version.py") as f: + text = f.read() + + text = re.sub(r'VERSION = ".*"', f'VERSION = "{version}"', text) + + with open("PFERD/version.py", "w") as f: + f.write(text) + + +def update_changelog(lines, version, date): + lines = iter(lines) + new_lines = [] + + # Find "Unreleased" section + for line in lines: + new_lines.append(line) + if line.strip() == "## Unreleased": + break + + # Add new heading below that + new_lines.append("\n") + new_lines.append(f"## {version} - {date}\n") + + # Add remaining lines + for line in lines: + new_lines.append(line) + + with open("CHANGELOG.md", "w") as f: + f.write("".join(new_lines)) + + +def 
commit_changes(version): + run(["git", "add", "CHANGELOG.md", "PFERD/version.py"]) + run(["git", "commit", "-m", f"Bump version to {version}"]) + + +def create_tag(version, annotation): + run(["git", "tag", "-am", annotation, f"v{version}"]) + + +def fastforward_latest(): + run(["git", "branch", "-f", "latest", "HEAD"]) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("version") + args = parser.parse_args() + + version = args.version + date = time.strftime("%Y-%m-%d") + changelog = load_changelog() + changes = extract_changes(changelog) + annotation = f"Version {version} - {date}\n\n{''.join(changes)}" + + update_version(version) + update_changelog(changelog, version, date) + commit_changes(version) + create_tag(version, annotation) + fastforward_latest() + + print() + print("Now the only thing left is to publish the changes:") + print(f" $ git push origin master latest v{version}") + + +if __name__ == "__main__": + main() diff --git a/scripts/check b/scripts/check new file mode 100755 index 0000000..0552f07 --- /dev/null +++ b/scripts/check @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -e + +uv run pyright . +uv run ruff check diff --git a/scripts/format b/scripts/format new file mode 100755 index 0000000..6e814b5 --- /dev/null +++ b/scripts/format @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +set -e + +uv run ruff format diff --git a/scripts/setup b/scripts/setup new file mode 100755 index 0000000..0114266 --- /dev/null +++ b/scripts/setup @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +set -e + +# Updating pip and setuptools because some older versions don't recognize the +# project setup correctly +if [[ $1 != '--no-pip' ]]; then + pip install --upgrade pip +fi +pip install --upgrade setuptools + +# Installing PFERD itself +pip install --editable . 
+ +# Installing tools and type hints +pip install --upgrade mypy flake8 flake8-pyproject autopep8 isort pyinstaller +pip install --upgrade types-chardet types-certifi diff --git a/setup.py b/setup.py deleted file mode 100644 index bac40d9..0000000 --- a/setup.py +++ /dev/null @@ -1,16 +0,0 @@ -from setuptools import find_packages, setup - -setup( - name="PFERD", - version="2.4.1", - packages=find_packages(), - install_requires=[ - "requests>=2.21.0", - "beautifulsoup4>=4.7.1", - "rich>=2.1.0" - ], -) - -# When updating the version, also: -# - update the README.md installation instructions -# - set a tag on the update commit diff --git a/sync_url.py b/sync_url.py deleted file mode 100755 index d2dce94..0000000 --- a/sync_url.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python - -""" -A simple script to download a course by name from ILIAS. -""" - -import argparse -from pathlib import Path -from urllib.parse import urlparse - -from PFERD import Pferd -from PFERD.cookie_jar import CookieJar -from PFERD.ilias import (IliasCrawler, IliasElementType, - KitShibbolethAuthenticator) -from PFERD.utils import to_path - - -def main() -> None: - parser = argparse.ArgumentParser() - parser.add_argument("--test-run", action="store_true") - parser.add_argument('-c', '--cookies', nargs='?', default=None, help="File to store cookies in") - parser.add_argument('--no-videos', nargs='?', default=None, help="Don't download videos") - parser.add_argument('url', help="URL to the course page") - parser.add_argument('folder', nargs='?', default=None, help="Folder to put stuff into") - args = parser.parse_args() - - url = urlparse(args.url) - - cookie_jar = CookieJar(to_path(args.cookies) if args.cookies else None) - session = cookie_jar.create_session() - authenticator = KitShibbolethAuthenticator() - crawler = IliasCrawler(url.scheme + '://' + url.netloc, session, - authenticator, lambda x, y: True) - - cookie_jar.load_cookies() - - if args.folder is not None: - folder = args.folder - 
# Initialize pferd at the *parent of the passed folder* - # This is needed so Pferd's internal protections against escaping the working directory - # do not trigger (e.g. if somebody names a file in ILIAS '../../bad thing.txt') - pferd = Pferd(Path(Path(__file__).parent, folder).parent, test_run=args.test_run) - else: - # fetch course name from ilias - folder = crawler.find_element_name(args.url) - cookie_jar.save_cookies() - - # Initialize pferd at the location of the script - pferd = Pferd(Path(__file__).parent, test_run=args.test_run) - - def dir_filter(_: Path, element: IliasElementType) -> bool: - if args.no_videos: - return element not in [IliasElementType.VIDEO_FILE, IliasElementType.VIDEO_FOLDER] - return True - - pferd.enable_logging() - # fetch - pferd.ilias_kit_folder( - target=folder, - full_url=args.url, - cookies=args.cookies, - dir_filter=dir_filter - ) - - -if __name__ == "__main__": - main() diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..9c2a58e --- /dev/null +++ b/uv.lock @@ -0,0 +1,1056 @@ +version = 1 +revision = 3 +requires-python = ">=3.11" + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" 
}, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ba/fa/3ae643cd525cf6844d3dc810481e5748107368eb49563c15a5fb9f680750/aiohttp-3.13.1.tar.gz", hash = "sha256:4b7ee9c355015813a6aa085170b96ec22315dabc3d866fd77d147927000e9464", size = 7835344, upload-time = "2025-10-17T14:03:29.337Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/2c/739d03730ffce57d2093e2e611e1541ac9a4b3bb88288c33275058b9ffc2/aiohttp-3.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9eefa0a891e85dca56e2d00760945a6325bd76341ec386d3ad4ff72eb97b7e64", size = 742004, upload-time = "2025-10-17T13:59:29.73Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f8/7f5b7f7184d7c80e421dbaecbd13e0b2a0bb8663fd0406864f9a167a438c/aiohttp-3.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c20eb646371a5a57a97de67e52aac6c47badb1564e719b3601bbb557a2e8fd0", size = 495601, upload-time = "2025-10-17T13:59:31.312Z" }, + { url = "https://files.pythonhosted.org/packages/3e/af/fb78d028b9642dd33ff127d9a6a151586f33daff631b05250fecd0ab23f8/aiohttp-3.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfc28038cd86fb1deed5cc75c8fda45c6b0f5c51dfd76f8c63d3d22dc1ab3d1b", size = 491790, upload-time = "2025-10-17T13:59:33.304Z" }, + { url = "https://files.pythonhosted.org/packages/1e/ae/e40e422ee995e4f91f7f087b86304e3dd622d3a5b9ca902a1e94ebf9a117/aiohttp-3.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b22eeffca2e522451990c31a36fe0e71079e6112159f39a4391f1c1e259a795", size = 1746350, upload-time = "2025-10-17T13:59:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/28/a5/fe6022bb869bf2d2633b155ed8348d76358c22d5ff9692a15016b2d1019f/aiohttp-3.13.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:65782b2977c05ebd78787e3c834abe499313bf69d6b8be4ff9c340901ee7541f", size = 1703046, upload-time = "2025-10-17T13:59:37.077Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a5/c4ef3617d7cdc49f2d5af077f19794946f0f2d94b93c631ace79047361a2/aiohttp-3.13.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dacba54f9be3702eb866b0b9966754b475e1e39996e29e442c3cd7f1117b43a9", size = 1806161, upload-time = "2025-10-17T13:59:38.837Z" }, + { url = "https://files.pythonhosted.org/packages/ad/45/b87d2430aee7e7d00b24e3dff2c5bd69f21017f6edb19cfd91e514664fc8/aiohttp-3.13.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:aa878da718e8235302c365e376b768035add36b55177706d784a122cb822a6a4", size = 1894546, upload-time = "2025-10-17T13:59:40.741Z" }, + { url = "https://files.pythonhosted.org/packages/e8/a2/79eb466786a7f11a0292c353a8a9b95e88268c48c389239d7531d66dbb48/aiohttp-3.13.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e4b4e607fbd4964d65945a7b9d1e7f98b0d5545736ea613f77d5a2a37ff1e46", size = 1745683, upload-time = "2025-10-17T13:59:42.59Z" }, + { url = "https://files.pythonhosted.org/packages/93/1a/153b0ad694f377e94eacc85338efe03ed4776a396c8bb47bd9227135792a/aiohttp-3.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0c3db2d0e5477ad561bf7ba978c3ae5f8f78afda70daa05020179f759578754f", size = 1605418, upload-time = "2025-10-17T13:59:45.229Z" }, + { url = "https://files.pythonhosted.org/packages/3f/4e/18605b1bfeb4b00d3396d833647cdb213118e2a96862e5aebee62ad065b4/aiohttp-3.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9739d34506fdf59bf2c092560d502aa728b8cdb33f34ba15fb5e2852c35dd829", size = 1722379, upload-time = "2025-10-17T13:59:46.969Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/13/0a38ad385d547fb283e0e1fe1ff1dff8899bd4ed0aaceeb13ec14abbf136/aiohttp-3.13.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:b902e30a268a85d50197b4997edc6e78842c14c0703450f632c2d82f17577845", size = 1716693, upload-time = "2025-10-17T13:59:49.217Z" }, + { url = "https://files.pythonhosted.org/packages/55/65/7029d7573ab9009adde380052c6130d02c8db52195fda112db35e914fe7b/aiohttp-3.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1bbfc04c8de7def6504cce0a97f9885a5c805fd2395a0634bc10f9d6ecb42524", size = 1784174, upload-time = "2025-10-17T13:59:51.439Z" }, + { url = "https://files.pythonhosted.org/packages/2d/36/fd46e39cb85418e45b0e4a8bfc39651ee0b8f08ea006adf217a221cdb269/aiohttp-3.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:6941853405a38a5eeb7d9776db77698df373ff7fa8c765cb81ea14a344fccbeb", size = 1593716, upload-time = "2025-10-17T13:59:53.367Z" }, + { url = "https://files.pythonhosted.org/packages/85/b8/188e0cb1be37b4408373171070fda17c3bf9c67c0d3d4fd5ee5b1fa108e1/aiohttp-3.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7764adcd2dc8bd21c8228a53dda2005428498dc4d165f41b6086f0ac1c65b1c9", size = 1799254, upload-time = "2025-10-17T13:59:55.352Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/fdf768764eb427b0cc9ebb2cebddf990f94d98b430679f8383c35aa114be/aiohttp-3.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c09e08d38586fa59e5a2f9626505a0326fadb8e9c45550f029feeb92097a0afc", size = 1738122, upload-time = "2025-10-17T13:59:57.263Z" }, + { url = "https://files.pythonhosted.org/packages/94/84/fce7a4d575943394d7c0e632273838eb6f39de8edf25386017bf5f0de23b/aiohttp-3.13.1-cp311-cp311-win32.whl", hash = "sha256:ce1371675e74f6cf271d0b5530defb44cce713fd0ab733713562b3a2b870815c", size = 430491, upload-time = "2025-10-17T13:59:59.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/d2/d21b8ab6315a5d588c550ab285b4f02ae363edf012920e597904c5a56608/aiohttp-3.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:77a2f5cc28cf4704cc157be135c6a6cfb38c9dea478004f1c0fd7449cf445c28", size = 454808, upload-time = "2025-10-17T14:00:01.247Z" }, + { url = "https://files.pythonhosted.org/packages/1a/72/d463a10bf29871f6e3f63bcf3c91362dc4d72ed5917a8271f96672c415ad/aiohttp-3.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0760bd9a28efe188d77b7c3fe666e6ef74320d0f5b105f2e931c7a7e884c8230", size = 736218, upload-time = "2025-10-17T14:00:03.51Z" }, + { url = "https://files.pythonhosted.org/packages/26/13/f7bccedbe52ea5a6eef1e4ebb686a8d7765319dfd0a5939f4238cb6e79e6/aiohttp-3.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7129a424b441c3fe018a414401bf1b9e1d49492445f5676a3aecf4f74f67fcdb", size = 491251, upload-time = "2025-10-17T14:00:05.756Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7c/7ea51b5aed6cc69c873f62548da8345032aa3416336f2d26869d4d37b4a2/aiohttp-3.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e1cb04ae64a594f6ddf5cbb024aba6b4773895ab6ecbc579d60414f8115e9e26", size = 490394, upload-time = "2025-10-17T14:00:07.504Z" }, + { url = "https://files.pythonhosted.org/packages/31/05/1172cc4af4557f6522efdee6eb2b9f900e1e320a97e25dffd3c5a6af651b/aiohttp-3.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:782d656a641e755decd6bd98d61d2a8ea062fd45fd3ff8d4173605dd0d2b56a1", size = 1737455, upload-time = "2025-10-17T14:00:09.403Z" }, + { url = "https://files.pythonhosted.org/packages/24/3d/ce6e4eca42f797d6b1cd3053cf3b0a22032eef3e4d1e71b9e93c92a3f201/aiohttp-3.13.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f92ad8169767429a6d2237331726c03ccc5f245222f9373aa045510976af2b35", size = 1699176, upload-time = "2025-10-17T14:00:11.314Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/04/7127ba55653e04da51477372566b16ae786ef854e06222a1c96b4ba6c8ef/aiohttp-3.13.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e778f634ca50ec005eefa2253856921c429581422d887be050f2c1c92e5ce12", size = 1767216, upload-time = "2025-10-17T14:00:13.668Z" }, + { url = "https://files.pythonhosted.org/packages/b8/3b/43bca1e75847e600f40df829a6b2f0f4e1d4c70fb6c4818fdc09a462afd5/aiohttp-3.13.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9bc36b41cf4aab5d3b34d22934a696ab83516603d1bc1f3e4ff9930fe7d245e5", size = 1865870, upload-time = "2025-10-17T14:00:15.852Z" }, + { url = "https://files.pythonhosted.org/packages/9e/69/b204e5d43384197a614c88c1717c324319f5b4e7d0a1b5118da583028d40/aiohttp-3.13.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3fd4570ea696aee27204dd524f287127ed0966d14d309dc8cc440f474e3e7dbd", size = 1751021, upload-time = "2025-10-17T14:00:18.297Z" }, + { url = "https://files.pythonhosted.org/packages/1c/af/845dc6b6fdf378791d720364bf5150f80d22c990f7e3a42331d93b337cc7/aiohttp-3.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7bda795f08b8a620836ebfb0926f7973972a4bf8c74fdf9145e489f88c416811", size = 1561448, upload-time = "2025-10-17T14:00:20.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/91/d2ab08cd77ed76a49e4106b1cfb60bce2768242dd0c4f9ec0cb01e2cbf94/aiohttp-3.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:055a51d90e351aae53dcf324d0eafb2abe5b576d3ea1ec03827d920cf81a1c15", size = 1698196, upload-time = "2025-10-17T14:00:22.131Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d1/082f0620dc428ecb8f21c08a191a4694915cd50f14791c74a24d9161cc50/aiohttp-3.13.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d4131df864cbcc09bb16d3612a682af0db52f10736e71312574d90f16406a867", size = 1719252, upload-time 
= "2025-10-17T14:00:24.453Z" }, + { url = "https://files.pythonhosted.org/packages/fc/78/2af2f44491be7b08e43945b72d2b4fd76f0a14ba850ba9e41d28a7ce716a/aiohttp-3.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:163d3226e043f79bf47c87f8dfc89c496cc7bc9128cb7055ce026e435d551720", size = 1736529, upload-time = "2025-10-17T14:00:26.567Z" }, + { url = "https://files.pythonhosted.org/packages/b0/34/3e919ecdc93edaea8d140138049a0d9126141072e519535e2efa38eb7a02/aiohttp-3.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:a2370986a3b75c1a5f3d6f6d763fc6be4b430226577b0ed16a7c13a75bf43d8f", size = 1553723, upload-time = "2025-10-17T14:00:28.592Z" }, + { url = "https://files.pythonhosted.org/packages/21/4b/d8003aeda2f67f359b37e70a5a4b53fee336d8e89511ac307ff62aeefcdb/aiohttp-3.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d7c14de0c7c9f1e6e785ce6cbe0ed817282c2af0012e674f45b4e58c6d4ea030", size = 1763394, upload-time = "2025-10-17T14:00:31.051Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7b/1dbe6a39e33af9baaafc3fc016a280663684af47ba9f0e5d44249c1f72ec/aiohttp-3.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb611489cf0db10b99beeb7280bd39e0ef72bc3eb6d8c0f0a16d8a56075d1eb7", size = 1718104, upload-time = "2025-10-17T14:00:33.407Z" }, + { url = "https://files.pythonhosted.org/packages/5c/88/bd1b38687257cce67681b9b0fa0b16437be03383fa1be4d1a45b168bef25/aiohttp-3.13.1-cp312-cp312-win32.whl", hash = "sha256:f90fe0ee75590f7428f7c8b5479389d985d83c949ea10f662ab928a5ed5cf5e6", size = 425303, upload-time = "2025-10-17T14:00:35.829Z" }, + { url = "https://files.pythonhosted.org/packages/0e/e3/4481f50dd6f27e9e58c19a60cff44029641640237e35d32b04aaee8cf95f/aiohttp-3.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:3461919a9dca272c183055f2aab8e6af0adc810a1b386cce28da11eb00c859d9", size = 452071, upload-time = "2025-10-17T14:00:37.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/6d/d267b132342e1080f4c1bb7e1b4e96b168b3cbce931ec45780bff693ff95/aiohttp-3.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:55785a7f8f13df0c9ca30b5243d9909bd59f48b274262a8fe78cee0828306e5d", size = 730727, upload-time = "2025-10-17T14:00:39.681Z" }, + { url = "https://files.pythonhosted.org/packages/92/c8/1cf495bac85cf71b80fad5f6d7693e84894f11b9fe876b64b0a1e7cbf32f/aiohttp-3.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bef5b83296cebb8167707b4f8d06c1805db0af632f7a72d7c5288a84667e7c3", size = 488678, upload-time = "2025-10-17T14:00:41.541Z" }, + { url = "https://files.pythonhosted.org/packages/a8/19/23c6b81cca587ec96943d977a58d11d05a82837022e65cd5502d665a7d11/aiohttp-3.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27af0619c33f9ca52f06069ec05de1a357033449ab101836f431768ecfa63ff5", size = 487637, upload-time = "2025-10-17T14:00:43.527Z" }, + { url = "https://files.pythonhosted.org/packages/48/58/8f9464afb88b3eed145ad7c665293739b3a6f91589694a2bb7e5778cbc72/aiohttp-3.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a47fe43229a8efd3764ef7728a5c1158f31cdf2a12151fe99fde81c9ac87019c", size = 1718975, upload-time = "2025-10-17T14:00:45.496Z" }, + { url = "https://files.pythonhosted.org/packages/e1/8b/c3da064ca392b2702f53949fd7c403afa38d9ee10bf52c6ad59a42537103/aiohttp-3.13.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6e68e126de5b46e8b2bee73cab086b5d791e7dc192056916077aa1e2e2b04437", size = 1686905, upload-time = "2025-10-17T14:00:47.707Z" }, + { url = "https://files.pythonhosted.org/packages/0a/a4/9c8a3843ecf526daee6010af1a66eb62579be1531d2d5af48ea6f405ad3c/aiohttp-3.13.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e65ef49dd22514329c55970d39079618a8abf856bae7147913bb774a3ab3c02f", size = 1754907, upload-time = 
"2025-10-17T14:00:49.702Z" }, + { url = "https://files.pythonhosted.org/packages/a4/80/1f470ed93e06436e3fc2659a9fc329c192fa893fb7ed4e884d399dbfb2a8/aiohttp-3.13.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e425a7e0511648b3376839dcc9190098671a47f21a36e815b97762eb7d556b0", size = 1857129, upload-time = "2025-10-17T14:00:51.822Z" }, + { url = "https://files.pythonhosted.org/packages/cc/e6/33d305e6cce0a8daeb79c7d8d6547d6e5f27f4e35fa4883fc9c9eb638596/aiohttp-3.13.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:010dc9b7110f055006acd3648d5d5955bb6473b37c3663ec42a1b4cba7413e6b", size = 1738189, upload-time = "2025-10-17T14:00:53.976Z" }, + { url = "https://files.pythonhosted.org/packages/ac/42/8df03367e5a64327fe0c39291080697795430c438fc1139c7cc1831aa1df/aiohttp-3.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b5c722d0ca5f57d61066b5dfa96cdb87111e2519156b35c1f8dd17c703bee7a", size = 1553608, upload-time = "2025-10-17T14:00:56.144Z" }, + { url = "https://files.pythonhosted.org/packages/96/17/6d5c73cd862f1cf29fddcbb54aac147037ff70a043a2829d03a379e95742/aiohttp-3.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:93029f0e9b77b714904a281b5aa578cdc8aa8ba018d78c04e51e1c3d8471b8ec", size = 1681809, upload-time = "2025-10-17T14:00:58.603Z" }, + { url = "https://files.pythonhosted.org/packages/be/31/8926c8ab18533f6076ce28d2c329a203b58c6861681906e2d73b9c397588/aiohttp-3.13.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:d1824c7d08d8ddfc8cb10c847f696942e5aadbd16fd974dfde8bd2c3c08a9fa1", size = 1711161, upload-time = "2025-10-17T14:01:01.744Z" }, + { url = "https://files.pythonhosted.org/packages/f2/36/2f83e1ca730b1e0a8cf1c8ab9559834c5eec9f5da86e77ac71f0d16b521d/aiohttp-3.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8f47d0ff5b3eb9c1278a2f56ea48fda667da8ebf28bd2cb378b7c453936ce003", size = 1731999, upload-time = 
"2025-10-17T14:01:04.626Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ec/1f818cc368dfd4d5ab4e9efc8f2f6f283bfc31e1c06d3e848bcc862d4591/aiohttp-3.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8a396b1da9b51ded79806ac3b57a598f84e0769eaa1ba300655d8b5e17b70c7b", size = 1548684, upload-time = "2025-10-17T14:01:06.828Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ad/33d36efd16e4fefee91b09a22a3a0e1b830f65471c3567ac5a8041fac812/aiohttp-3.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d9c52a65f54796e066b5d674e33b53178014752d28bca555c479c2c25ffcec5b", size = 1756676, upload-time = "2025-10-17T14:01:09.517Z" }, + { url = "https://files.pythonhosted.org/packages/3c/c4/4a526d84e77d464437713ca909364988ed2e0cd0cdad2c06cb065ece9e08/aiohttp-3.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a89da72d18d6c95a653470b78d8ee5aa3c4b37212004c103403d0776cbea6ff0", size = 1715577, upload-time = "2025-10-17T14:01:11.958Z" }, + { url = "https://files.pythonhosted.org/packages/a2/21/e39638b7d9c7f1362c4113a91870f89287e60a7ea2d037e258b81e8b37d5/aiohttp-3.13.1-cp313-cp313-win32.whl", hash = "sha256:02e0258b7585ddf5d01c79c716ddd674386bfbf3041fbbfe7bdf9c7c32eb4a9b", size = 424468, upload-time = "2025-10-17T14:01:14.344Z" }, + { url = "https://files.pythonhosted.org/packages/cc/00/f3a92c592a845ebb2f47d102a67f35f0925cb854c5e7386f1a3a1fdff2ab/aiohttp-3.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:ef56ffe60e8d97baac123272bde1ab889ee07d3419606fae823c80c2b86c403e", size = 450806, upload-time = "2025-10-17T14:01:16.437Z" }, + { url = "https://files.pythonhosted.org/packages/97/be/0f6c41d2fd0aab0af133c509cabaf5b1d78eab882cb0ceb872e87ceeabf7/aiohttp-3.13.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:77f83b3dc5870a2ea79a0fcfdcc3fc398187ec1675ff61ec2ceccad27ecbd303", size = 733828, upload-time = "2025-10-17T14:01:18.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/14/24e2ac5efa76ae30e05813e0f50737005fd52da8ddffee474d4a5e7f38a6/aiohttp-3.13.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9cafd2609ebb755e47323306c7666283fbba6cf82b5f19982ea627db907df23a", size = 489320, upload-time = "2025-10-17T14:01:20.644Z" }, + { url = "https://files.pythonhosted.org/packages/da/5a/4cbe599358d05ea7db4869aff44707b57d13f01724d48123dc68b3288d5a/aiohttp-3.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9c489309a2ca548d5f11131cfb4092f61d67954f930bba7e413bcdbbb82d7fae", size = 489899, upload-time = "2025-10-17T14:01:22.638Z" }, + { url = "https://files.pythonhosted.org/packages/67/96/3aec9d9cfc723273d4386328a1e2562cf23629d2f57d137047c49adb2afb/aiohttp-3.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79ac15fe5fdbf3c186aa74b656cd436d9a1e492ba036db8901c75717055a5b1c", size = 1716556, upload-time = "2025-10-17T14:01:25.406Z" }, + { url = "https://files.pythonhosted.org/packages/b9/99/39a3d250595b5c8172843831221fa5662884f63f8005b00b4034f2a7a836/aiohttp-3.13.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:095414be94fce3bc080684b4cd50fb70d439bc4662b2a1984f45f3bf9ede08aa", size = 1665814, upload-time = "2025-10-17T14:01:27.683Z" }, + { url = "https://files.pythonhosted.org/packages/3b/96/8319e7060a85db14a9c178bc7b3cf17fad458db32ba6d2910de3ca71452d/aiohttp-3.13.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c68172e1a2dca65fa1272c85ca72e802d78b67812b22827df01017a15c5089fa", size = 1755767, upload-time = "2025-10-17T14:01:29.914Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c6/0a2b3d886b40aa740fa2294cd34ed46d2e8108696748492be722e23082a7/aiohttp-3.13.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3751f9212bcd119944d4ea9de6a3f0fee288c177b8ca55442a2cdff0c8201eb3", size = 
1836591, upload-time = "2025-10-17T14:01:32.28Z" }, + { url = "https://files.pythonhosted.org/packages/fb/34/8ab5904b3331c91a58507234a1e2f662f837e193741609ee5832eb436251/aiohttp-3.13.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8619dca57d98a8353abdc7a1eeb415548952b39d6676def70d9ce76d41a046a9", size = 1714915, upload-time = "2025-10-17T14:01:35.138Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d3/d36077ca5f447649112189074ac6c192a666bf68165b693e48c23b0d008c/aiohttp-3.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97795a0cb0a5f8a843759620e9cbd8889f8079551f5dcf1ccd99ed2f056d9632", size = 1546579, upload-time = "2025-10-17T14:01:38.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/14/dbc426a1bb1305c4fc78ce69323498c9e7c699983366ef676aa5d3f949fa/aiohttp-3.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1060e058da8f9f28a7026cdfca9fc886e45e551a658f6a5c631188f72a3736d2", size = 1680633, upload-time = "2025-10-17T14:01:40.902Z" }, + { url = "https://files.pythonhosted.org/packages/29/83/1e68e519aff9f3ef6d4acb6cdda7b5f592ef5c67c8f095dc0d8e06ce1c3e/aiohttp-3.13.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:f48a2c26333659101ef214907d29a76fe22ad7e912aa1e40aeffdff5e8180977", size = 1678675, upload-time = "2025-10-17T14:01:43.779Z" }, + { url = "https://files.pythonhosted.org/packages/38/b9/7f3e32a81c08b6d29ea15060c377e1f038ad96cd9923a85f30e817afff22/aiohttp-3.13.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f1dfad638b9c91ff225162b2824db0e99ae2d1abe0dc7272b5919701f0a1e685", size = 1726829, upload-time = "2025-10-17T14:01:46.546Z" }, + { url = "https://files.pythonhosted.org/packages/23/ce/610b1f77525a0a46639aea91377b12348e9f9412cc5ddcb17502aa4681c7/aiohttp-3.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:8fa09ab6dd567cb105db4e8ac4d60f377a7a94f67cf669cac79982f626360f32", size = 1542985, upload-time = 
"2025-10-17T14:01:49.082Z" }, + { url = "https://files.pythonhosted.org/packages/53/39/3ac8dfdad5de38c401846fa071fcd24cb3b88ccfb024854df6cbd9b4a07e/aiohttp-3.13.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4159fae827f9b5f655538a4f99b7cbc3a2187e5ca2eee82f876ef1da802ccfa9", size = 1741556, upload-time = "2025-10-17T14:01:51.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/48/b1948b74fea7930b0f29595d1956842324336de200593d49a51a40607fdc/aiohttp-3.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ad671118c19e9cfafe81a7a05c294449fe0ebb0d0c6d5bb445cd2190023f5cef", size = 1696175, upload-time = "2025-10-17T14:01:54.232Z" }, + { url = "https://files.pythonhosted.org/packages/96/26/063bba38e4b27b640f56cc89fe83cc3546a7ae162c2e30ca345f0ccdc3d1/aiohttp-3.13.1-cp314-cp314-win32.whl", hash = "sha256:c5c970c148c48cf6acb65224ca3c87a47f74436362dde75c27bc44155ccf7dfc", size = 430254, upload-time = "2025-10-17T14:01:56.451Z" }, + { url = "https://files.pythonhosted.org/packages/88/aa/25fd764384dc4eab714023112d3548a8dd69a058840d61d816ea736097a2/aiohttp-3.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:748a00167b7a88385756fa615417d24081cba7e58c8727d2e28817068b97c18c", size = 456256, upload-time = "2025-10-17T14:01:58.752Z" }, + { url = "https://files.pythonhosted.org/packages/d4/9f/9ba6059de4bad25c71cd88e3da53f93e9618ea369cf875c9f924b1c167e2/aiohttp-3.13.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:390b73e99d7a1f0f658b3f626ba345b76382f3edc65f49d6385e326e777ed00e", size = 765956, upload-time = "2025-10-17T14:02:01.515Z" }, + { url = "https://files.pythonhosted.org/packages/1f/30/b86da68b494447d3060f45c7ebb461347535dab4af9162a9267d9d86ca31/aiohttp-3.13.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e83abb330e687e019173d8fc1fd6a1cf471769624cf89b1bb49131198a810a", size = 503206, upload-time = "2025-10-17T14:02:03.818Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/21/d27a506552843ff9eeb9fcc2d45f943b09eefdfdf205aab044f4f1f39f6a/aiohttp-3.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2b20eed07131adbf3e873e009c2869b16a579b236e9d4b2f211bf174d8bef44a", size = 507719, upload-time = "2025-10-17T14:02:05.947Z" }, + { url = "https://files.pythonhosted.org/packages/58/23/4042230ec7e4edc7ba43d0342b5a3d2fe0222ca046933c4251a35aaf17f5/aiohttp-3.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:58fee9ef8477fd69e823b92cfd1f590ee388521b5ff8f97f3497e62ee0656212", size = 1862758, upload-time = "2025-10-17T14:02:08.469Z" }, + { url = "https://files.pythonhosted.org/packages/df/88/525c45bea7cbb9f65df42cadb4ff69f6a0dbf95931b0ff7d1fdc40a1cb5f/aiohttp-3.13.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f62608fcb7b3d034d5e9496bea52d94064b7b62b06edba82cd38191336bbeda", size = 1717790, upload-time = "2025-10-17T14:02:11.37Z" }, + { url = "https://files.pythonhosted.org/packages/1d/80/21e9b5eb77df352a5788713f37359b570a793f0473f3a72db2e46df379b9/aiohttp-3.13.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fdc4d81c3dfc999437f23e36d197e8b557a3f779625cd13efe563a9cfc2ce712", size = 1842088, upload-time = "2025-10-17T14:02:13.872Z" }, + { url = "https://files.pythonhosted.org/packages/d2/bf/d1738f6d63fe8b2a0ad49533911b3347f4953cd001bf3223cb7b61f18dff/aiohttp-3.13.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:601d7ec812f746fd80ff8af38eeb3f196e1bab4a4d39816ccbc94c222d23f1d0", size = 1934292, upload-time = "2025-10-17T14:02:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/04/e6/26cab509b42610ca49573f2fc2867810f72bd6a2070182256c31b14f2e98/aiohttp-3.13.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:47c3f21c469b840d9609089435c0d9918ae89f41289bf7cc4afe5ff7af5458db", size = 1791328, upload-time = "2025-10-17T14:02:19.051Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6d/baf7b462852475c9d045bee8418d9cdf280efb687752b553e82d0c58bcc2/aiohttp-3.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6c6cdc0750db88520332d4aaa352221732b0cafe89fd0e42feec7cb1b5dc236", size = 1622663, upload-time = "2025-10-17T14:02:21.397Z" }, + { url = "https://files.pythonhosted.org/packages/c8/48/396a97318af9b5f4ca8b3dc14a67976f71c6400a9609c622f96da341453f/aiohttp-3.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:58a12299eeb1fca2414ee2bc345ac69b0f765c20b82c3ab2a75d91310d95a9f6", size = 1787791, upload-time = "2025-10-17T14:02:24.212Z" }, + { url = "https://files.pythonhosted.org/packages/a8/e2/6925f6784134ce3ff3ce1a8502ab366432a3b5605387618c1a939ce778d9/aiohttp-3.13.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:0989cbfc195a4de1bb48f08454ef1cb47424b937e53ed069d08404b9d3c7aea1", size = 1775459, upload-time = "2025-10-17T14:02:26.971Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e3/b372047ba739fc39f199b99290c4cc5578ce5fd125f69168c967dac44021/aiohttp-3.13.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:feb5ee664300e2435e0d1bc3443a98925013dfaf2cae9699c1f3606b88544898", size = 1789250, upload-time = "2025-10-17T14:02:29.686Z" }, + { url = "https://files.pythonhosted.org/packages/02/8c/9f48b93d7d57fc9ef2ad4adace62e4663ea1ce1753806c4872fb36b54c39/aiohttp-3.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:58a6f8702da0c3606fb5cf2e669cce0ca681d072fe830968673bb4c69eb89e88", size = 1616139, upload-time = "2025-10-17T14:02:32.151Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c6/c64e39d61aaa33d7de1be5206c0af3ead4b369bf975dac9fdf907a4291c1/aiohttp-3.13.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a417ceb433b9d280e2368ffea22d4bc6e3e0d894c4bc7768915124d57d0964b6", size = 
1815829, upload-time = "2025-10-17T14:02:34.635Z" }, + { url = "https://files.pythonhosted.org/packages/22/75/e19e93965ea675f1151753b409af97a14f1d888588a555e53af1e62b83eb/aiohttp-3.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8ac8854f7b0466c5d6a9ea49249b3f6176013859ac8f4bb2522ad8ed6b94ded2", size = 1760923, upload-time = "2025-10-17T14:02:37.364Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a4/06ed38f1dabd98ea136fd116cba1d02c9b51af5a37d513b6850a9a567d86/aiohttp-3.13.1-cp314-cp314t-win32.whl", hash = "sha256:be697a5aeff42179ed13b332a411e674994bcd406c81642d014ace90bf4bb968", size = 463318, upload-time = "2025-10-17T14:02:39.924Z" }, + { url = "https://files.pythonhosted.org/packages/04/0f/27e4fdde899e1e90e35eeff56b54ed63826435ad6cdb06b09ed312d1b3fa/aiohttp-3.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:f1d6aa90546a4e8f20c3500cb68ab14679cd91f927fa52970035fd3207dfb3da", size = 496721, upload-time = "2025-10-17T14:02:42.199Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "altgraph" +version = "0.17.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/de/a8/7145824cf0b9e3c28046520480f207df47e927df83aa9555fb47f8505922/altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406", size = 48418, upload-time = "2023-09-25T09:04:52.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/3f/3bc3f1d83f6e4a7fcb834d3720544ca597590425be5ba9db032b2bf322a2/altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff", size = 21212, upload-time = "2023-09-25T09:04:50.691Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, 
+] + +[[package]] +name = "beautifulsoup4" +version = "4.14.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", size = 625822, upload-time = "2025-09-29T10:05:42.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", size = 106392, upload-time = "2025-09-29T10:05:43.771Z" }, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, 
+ { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = 
"sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = 
"sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = 
"2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = 
"2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = 
"2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = 
"2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = 
"2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = 
"2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = 
"sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, +] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912, upload-time = "2024-08-20T03:39:27.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825, upload-time = 
"2024-08-20T03:39:25.966Z" }, +] + +[[package]] +name = "jaraco-functools" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/ed/1aa2d585304ec07262e1a83a9889880701079dde796ac7b1d1826f40c63d/jaraco_functools-4.3.0.tar.gz", hash = "sha256:cfd13ad0dd2c47a3600b439ef72d8615d482cedcff1632930d6f28924d92f294", size = 19755, upload-time = "2025-08-18T20:05:09.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/09/726f168acad366b11e420df31bf1c702a54d373a83f968d94141a8c3fde0/jaraco_functools-4.3.0-py3-none-any.whl", hash = "sha256:227ff8ed6f7b8f62c56deff101545fa7543cf2c8e7b82a7c2116e672f29c26e8", size = 10408, upload-time = "2025-08-18T20:05:08.69Z" }, +] + +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, +] + +[[package]] +name = "keyring" +version = "25.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, + { name = "jaraco-classes" }, + { name = "jaraco-context" }, + { name = "jaraco-functools" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", marker = "sys_platform == 'linux'" }, +] +sdist 
= { url = "https://files.pythonhosted.org/packages/70/09/d904a6e96f76ff214be59e7aa6ef7190008f52a0ab6689760a98de0bf37d/keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66", size = 62750, upload-time = "2024-12-25T15:26:45.782Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" }, +] + +[[package]] +name = "macholib" +version = "1.16.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "altgraph" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/ee/af1a3842bdd5902ce133bd246eb7ffd4375c38642aeb5dc0ae3a0329dfa2/macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30", size = 59309, upload-time = "2023-09-25T09:10:16.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/5d/c059c180c84f7962db0aeae7c3b9303ed1d73d76f2bfbc32bc231c8be314/macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c", size = 38094, upload-time = "2023-09-25T09:10:14.188Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = 
"sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "more-itertools" +version = "10.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { 
url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = 
"2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { 
url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = 
"2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, + { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, + { url 
= "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, + { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, + { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = 
"2025-10-06T14:50:10.714Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, + { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, + { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, + { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, + { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, + { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, + { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, + { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, + { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, 
+ { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, + { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, + { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = 
"2025-10-06T14:50:39.574Z" }, + { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, + { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, + { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, + { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, + { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" }, + { url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" }, + { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" }, + { 
url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" }, + { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" }, + { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" }, + { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520, upload-time = "2025-10-06T14:51:08.091Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = 
"2025-10-06T14:51:10.365Z" }, + { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, + { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, + { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, + { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, + { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, + { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, + { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, + { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" }, + { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" }, 
+ { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" }, + { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" }, + { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" }, + { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376, upload-time = "2025-10-06T14:51:43.55Z" }, + { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = 
"2025-10-06T14:51:45.265Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, + { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, + { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" }, + { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pefile" +version = "2023.2.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/78/c5/3b3c62223f72e2360737fd2a57c30e5b2adecd85e70276879609a7403334/pefile-2023.2.7.tar.gz", hash = 
"sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc", size = 74854, upload-time = "2023-02-07T12:23:55.958Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/26/d0ad8b448476d0a1e8d3ea5622dc77b916db84c6aa3cb1e1c0965af948fc/pefile-2023.2.7-py3-none-any.whl", hash = "sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6", size = 71791, upload-time = "2023-02-07T12:28:36.678Z" }, +] + +[[package]] +name = "pferd" +source = { editable = "." } +dependencies = [ + { name = "aiohttp" }, + { name = "beautifulsoup4" }, + { name = "certifi" }, + { name = "keyring" }, + { name = "rich" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyinstaller" }, + { name = "pyright" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", specifier = ">=3.8.1" }, + { name = "beautifulsoup4", specifier = ">=4.10.0" }, + { name = "certifi", specifier = ">=2021.10.8" }, + { name = "keyring", specifier = ">=23.5.0" }, + { name = "rich", specifier = ">=11.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyinstaller", specifier = ">=6.16.0" }, + { name = "pyright", specifier = ">=1.1.406" }, + { name = "ruff", specifier = ">=0.14.1" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 
210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = 
"2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 
204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = 
"2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 
201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = 
"2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyinstaller" +version = "6.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "altgraph" }, + { name = "macholib", marker = "sys_platform == 'darwin'" }, + { name = "packaging" }, + { name = "pefile", marker = "sys_platform == 'win32'" }, + { name = "pyinstaller-hooks-contrib" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/94/1f62e95e4a28b64cfbb5b922ef3046f968b47170d37a1e1a029f56ac9cb4/pyinstaller-6.16.0.tar.gz", hash = "sha256:53559fe1e041a234f2b4dcc3288ea8bdd57f7cad8a6644e422c27bb407f3edef", size = 4008473, upload-time = 
"2025-09-13T20:07:01.733Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/0a/c42ce6e5d3de287f2e9432a074fb209f1fb72a86a72f3903849fdb5e4829/pyinstaller-6.16.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:7fd1c785219a87ca747c21fa92f561b0d2926a7edc06d0a0fe37f3736e00bd7a", size = 1027899, upload-time = "2025-09-13T20:05:59.2Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d0/f18fedde32835d5a758f464c75924e2154065625f09d5456c3c303527654/pyinstaller-6.16.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:b756ddb9007b8141c5476b553351f9d97559b8af5d07f9460869bfae02be26b0", size = 727990, upload-time = "2025-09-13T20:06:03.583Z" }, + { url = "https://files.pythonhosted.org/packages/7a/db/c8bb47514ce857b24bf9294cf1ff74844b6a489fa0ab4ef6f923288c4e38/pyinstaller-6.16.0-py3-none-manylinux2014_i686.whl", hash = "sha256:0a48f55b85ff60f83169e10050f2759019cf1d06773ad1c4da3a411cd8751058", size = 739238, upload-time = "2025-09-13T20:06:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/c6/3e/451dc784a8fcca0fe9f9b6b802d58555364a95b60f253613a2c83fc6b023/pyinstaller-6.16.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:73ba72e04fcece92e32518bbb1e1fb5ac2892677943dfdff38e01a06e8742851", size = 737142, upload-time = "2025-09-13T20:06:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/71/37/2f457479ef8fa2821cdb448acee2421dfb19fbe908bf5499d1930c164084/pyinstaller-6.16.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:b1752488248f7899281b17ca3238eefb5410521291371a686a4f5830f29f52b3", size = 734133, upload-time = "2025-09-13T20:06:15.477Z" }, + { url = "https://files.pythonhosted.org/packages/63/c4/0f7daac4d062a4d1ac2571d8a8b9b5d6812094fcd914d139af591ca5e1ba/pyinstaller-6.16.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ba618a61627ee674d6d68e5de084ba17c707b59a4f2a856084b3999bdffbd3f0", size = 733817, upload-time = "2025-09-13T20:06:19.683Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/e4/b6127265b42bef883e8873d850becadf748bc5652e5a7029b059328f3c31/pyinstaller-6.16.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:c8b7ef536711617e12fef4673806198872033fa06fa92326ad7fd1d84a9fa454", size = 732912, upload-time = "2025-09-13T20:06:23.46Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/c6663107bdf814b2916e71563beabd09f693c47712213bc228994cb2cc65/pyinstaller-6.16.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:d1ebf84d02c51fed19b82a8abb4df536923abd55bb684d694e1356e4ae2a0ce5", size = 732773, upload-time = "2025-09-13T20:06:27.352Z" }, + { url = "https://files.pythonhosted.org/packages/a3/14/cabe9bc5f60b95d2e70e7d045ab94b0015ff8f6c8b16e2142d3597e30749/pyinstaller-6.16.0-py3-none-win32.whl", hash = "sha256:6d5f8617f3650ff9ef893e2ab4ddbf3c0d23d0c602ef74b5df8fbef4607840c8", size = 1313878, upload-time = "2025-09-13T20:06:33.234Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/2005efbc297e7813c1d6f18484aa94a1a81ce87b6a5b497c563681f4c4ea/pyinstaller-6.16.0-py3-none-win_amd64.whl", hash = "sha256:bc10eb1a787f99fea613509f55b902fbd2d8b73ff5f51ff245ea29a481d97d41", size = 1374706, upload-time = "2025-09-13T20:06:39.95Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f4/4dfcf69b86d60fcaae05a42bbff1616d48a91e71726e5ed795d773dae9b3/pyinstaller-6.16.0-py3-none-win_arm64.whl", hash = "sha256:d0af8a401de792c233c32c44b16d065ca9ab8262ee0c906835c12bdebc992a64", size = 1315923, upload-time = "2025-09-13T20:06:45.846Z" }, +] + +[[package]] +name = "pyinstaller-hooks-contrib" +version = "2025.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/83/be0f57c0b77b66c33c2283ebd4ea341022b5a743e97c5fb3bebab82b38b9/pyinstaller_hooks_contrib-2025.9.tar.gz", hash = "sha256:56e972bdaad4e9af767ed47d132362d162112260cbe488c9da7fee01f228a5a6", size = 165189, upload-time 
= "2025-09-24T11:21:35.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/26/23b4cfc77d7f808c69f59070e1e8293a579ec281a547c61562357160b346/pyinstaller_hooks_contrib-2025.9-py3-none-any.whl", hash = "sha256:ccbfaa49399ef6b18486a165810155e5a8d4c59b41f20dc5da81af7482aaf038", size = 444283, upload-time = "2025-09-24T11:21:33.67Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.406" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/16/6b4fbdd1fef59a0292cbb99f790b44983e390321eccbc5921b4d161da5d1/pyright-1.1.406.tar.gz", hash = "sha256:c4872bc58c9643dac09e8a2e74d472c62036910b3bd37a32813989ef7576ea2c", size = 4113151, upload-time = "2025-10-02T01:04:45.488Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/a2/e309afbb459f50507103793aaef85ca4348b66814c86bc73908bdeb66d12/pyright-1.1.406-py3-none-any.whl", hash = "sha256:1d81fb43c2407bf566e97e57abb01c811973fdb21b2df8df59f870f688bdca71", size = 5980982, upload-time = "2025-10-02T01:04:43.137Z" }, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, +] + +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/58/6ca66896635352812de66f71cdf9ff86b3a4f79071ca5730088c0cd0fc8d/ruff-0.14.1.tar.gz", hash = "sha256:1dd86253060c4772867c61791588627320abcb6ed1577a90ef432ee319729b69", size = 5513429, upload-time = "2025-10-16T18:05:41.766Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/39/9cc5ab181478d7a18adc1c1e051a84ee02bec94eb9bdfd35643d7c74ca31/ruff-0.14.1-py3-none-linux_armv6l.whl", hash = "sha256:083bfc1f30f4a391ae09c6f4f99d83074416b471775b59288956f5bc18e82f8b", size = 12445415, upload-time = "2025-10-16T18:04:48.227Z" }, + { url = "https://files.pythonhosted.org/packages/ef/2e/1226961855ccd697255988f5a2474890ac7c5863b080b15bd038df820818/ruff-0.14.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f6fa757cd717f791009f7669fefb09121cc5f7d9bd0ef211371fad68c2b8b224", size = 12784267, upload-time = "2025-10-16T18:04:52.515Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ea/fd9e95863124ed159cd0667ec98449ae461de94acda7101f1acb6066da00/ruff-0.14.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6191903d39ac156921398e9c86b7354d15e3c93772e7dbf26c9fcae59ceccd5", size = 11781872, upload-time = "2025-10-16T18:04:55.396Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/5a/e890f7338ff537dba4589a5e02c51baa63020acfb7c8cbbaea4831562c96/ruff-0.14.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed04f0e04f7a4587244e5c9d7df50e6b5bf2705d75059f409a6421c593a35896", size = 12226558, upload-time = "2025-10-16T18:04:58.166Z" }, + { url = "https://files.pythonhosted.org/packages/a6/7a/8ab5c3377f5bf31e167b73651841217542bcc7aa1c19e83030835cc25204/ruff-0.14.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c9e6cf6cd4acae0febbce29497accd3632fe2025c0c583c8b87e8dbdeae5f61", size = 12187898, upload-time = "2025-10-16T18:05:01.455Z" }, + { url = "https://files.pythonhosted.org/packages/48/8d/ba7c33aa55406955fc124e62c8259791c3d42e3075a71710fdff9375134f/ruff-0.14.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fa2458527794ecdfbe45f654e42c61f2503a230545a91af839653a0a93dbc6", size = 12939168, upload-time = "2025-10-16T18:05:04.397Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c2/70783f612b50f66d083380e68cbd1696739d88e9b4f6164230375532c637/ruff-0.14.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:39f1c392244e338b21d42ab29b8a6392a722c5090032eb49bb4d6defcdb34345", size = 14386942, upload-time = "2025-10-16T18:05:07.102Z" }, + { url = "https://files.pythonhosted.org/packages/48/44/cd7abb9c776b66d332119d67f96acf15830d120f5b884598a36d9d3f4d83/ruff-0.14.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7382fa12a26cce1f95070ce450946bec357727aaa428983036362579eadcc5cf", size = 13990622, upload-time = "2025-10-16T18:05:09.882Z" }, + { url = "https://files.pythonhosted.org/packages/eb/56/4259b696db12ac152fe472764b4f78bbdd9b477afd9bc3a6d53c01300b37/ruff-0.14.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0bf2be3ae8521e1093a487c4aa3b455882f139787770698530d28ed3fbb37c", size = 13431143, upload-time = "2025-10-16T18:05:13.46Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/35/266a80d0eb97bd224b3265b9437bd89dde0dcf4faf299db1212e81824e7e/ruff-0.14.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabcaa9ccf8089fb4fdb78d17cc0e28241520f50f4c2e88cb6261ed083d85151", size = 13132844, upload-time = "2025-10-16T18:05:16.1Z" }, + { url = "https://files.pythonhosted.org/packages/65/6e/d31ce218acc11a8d91ef208e002a31acf315061a85132f94f3df7a252b18/ruff-0.14.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:747d583400f6125ec11a4c14d1c8474bf75d8b419ad22a111a537ec1a952d192", size = 13401241, upload-time = "2025-10-16T18:05:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b5/dbc4221bf0b03774b3b2f0d47f39e848d30664157c15b965a14d890637d2/ruff-0.14.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5a6e74c0efd78515a1d13acbfe6c90f0f5bd822aa56b4a6d43a9ffb2ae6e56cd", size = 12132476, upload-time = "2025-10-16T18:05:22.163Z" }, + { url = "https://files.pythonhosted.org/packages/98/4b/ac99194e790ccd092d6a8b5f341f34b6e597d698e3077c032c502d75ea84/ruff-0.14.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0ea6a864d2fb41a4b6d5b456ed164302a0d96f4daac630aeba829abfb059d020", size = 12139749, upload-time = "2025-10-16T18:05:25.162Z" }, + { url = "https://files.pythonhosted.org/packages/47/26/7df917462c3bb5004e6fdfcc505a49e90bcd8a34c54a051953118c00b53a/ruff-0.14.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0826b8764f94229604fa255918d1cc45e583e38c21c203248b0bfc9a0e930be5", size = 12544758, upload-time = "2025-10-16T18:05:28.018Z" }, + { url = "https://files.pythonhosted.org/packages/64/d0/81e7f0648e9764ad9b51dd4be5e5dac3fcfff9602428ccbae288a39c2c22/ruff-0.14.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cbc52160465913a1a3f424c81c62ac8096b6a491468e7d872cb9444a860bc33d", size = 13221811, upload-time = "2025-10-16T18:05:30.707Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/07/3c45562c67933cc35f6d5df4ca77dabbcd88fddaca0d6b8371693d29fd56/ruff-0.14.1-py3-none-win32.whl", hash = "sha256:e037ea374aaaff4103240ae79168c0945ae3d5ae8db190603de3b4012bd1def6", size = 12319467, upload-time = "2025-10-16T18:05:33.261Z" }, + { url = "https://files.pythonhosted.org/packages/02/88/0ee4ca507d4aa05f67e292d2e5eb0b3e358fbcfe527554a2eda9ac422d6b/ruff-0.14.1-py3-none-win_amd64.whl", hash = "sha256:59d599cdff9c7f925a017f6f2c256c908b094e55967f93f2821b1439928746a1", size = 13401123, upload-time = "2025-10-16T18:05:35.984Z" }, + { url = "https://files.pythonhosted.org/packages/b8/81/4b6387be7014858d924b843530e1b2a8e531846807516e9bea2ee0936bf7/ruff-0.14.1-py3-none-win_arm64.whl", hash = "sha256:e3b443c4c9f16ae850906b8d0a707b2a4c16f8d2f0a7fe65c475c5886665ce44", size = 12436636, upload-time = "2025-10-16T18:05:38.995Z" }, +] + +[[package]] +name = "secretstorage" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/9f/11ef35cf1027c1339552ea7bfe6aaa74a8516d8b5caf6e7d338daf54fd80/secretstorage-3.4.0.tar.gz", hash = "sha256:c46e216d6815aff8a8a18706a2fbfd8d53fcbb0dce99301881687a1b0289ef7c", size = 19748, upload-time = "2025-09-09T16:42:13.859Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/ff/2e2eed29e02c14a5cb6c57f09b2d5b40e65d6cc71f45b52e0be295ccbc2f/secretstorage-3.4.0-py3-none-any.whl", hash = "sha256:0e3b6265c2c63509fb7415717607e4b2c9ab767b7f344a57473b779ca13bd02e", size = 15272, upload-time = "2025-09-09T16:42:12.744Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = 
"sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "yarl" +version = "1.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name 
= "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, + { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, + { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, 
+ { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, + { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, + { 
url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, + { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, + { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, + { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, + { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, + { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, + { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, + { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, + { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, + { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, + { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, + { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, + { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, + { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, + { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, + { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, + { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, + { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]