diff --git a/.gitignore b/.gitignore index ca20904..c3241a4 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,226 @@ /ankisyncd/_version.py /collections /venv + +# Created by https://www.toptal.com/developers/gitignore/api/windows,linux,macos,python,jupyternotebooks +# Edit at https://www.toptal.com/developers/gitignore?templates=windows,linux,macos,python,jupyternotebooks + +### JupyterNotebooks ### +# gitignore template for Jupyter Notebooks +# website: http://jupyter.org/ + +.ipynb_checkpoints +*/.ipynb_checkpoints/* + +# IPython +profile_default/ +ipython_config.py + +# Remove previous ipynb_checkpoints +# git rm -r .ipynb_checkpoints/ + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Emacs temporary files +*#*# +*.#* + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook + +# IPython + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# End of https://www.toptal.com/developers/gitignore/api/windows,linux,macos,python,jupyternotebooks \ No newline at end of file diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index cc57eb1..0000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "anki-bundled"] - path = anki-bundled - url = https://github.com/dae/anki.git diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..0b89db9 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,13 @@ +# .readthedocs.yml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +version: 2 + +# Build documentation with MkDocs +mkdocs: + configuration: docs/mkdocs.yml + +# Optionally set the version of Python and requirements required to build your docs +python: + version: 3.7 \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..40533d6 --- /dev/null +++ b/Makefile @@ -0,0 +1,26 @@ +#/bin/make + +ANKI_SERVER_NAME ?= "Anki Sync Server" +ANKI_SERVER_VERSION ?= "v0.1.0" +ANKI_SERVER_DESCRIPTION ?= "Self-hosted Anki Sync Server." +ENV ?= local + +-include config/.env.${ENV} +export + +.DEFAULT_GOAL := help +.PHONY: help #: Display list of command and exit. +help: + @awk 'BEGIN {FS = " ?#?: "; print ""${ANKI_SERVER_NAME}" "${ANKI_SERVER_VERSION}"\n"${ANKI_SERVER_DESCRIPTION}"\n\nUsage: make \033[36m\033[0m\n\nCommands:"} /^.PHONY: ?[a-zA-Z_-]/ { printf " \033[36m%-10s\033[0m %s\n", $$2, $$3 }' $(MAKEFILE_LIST) + +.PHONY: docs #: Build and serve documentation. +docs: print-env + @${MKDOCS} ${MKDOCS_OPTION} -f docs/mkdocs.yml + +.PHONY: notebooks #: Run jupyter notebooks. +notebooks: + @${JUPYTER} ${JUPYTER_OPTION} + +%: + @test -f scripts/${*}.sh + @${SHELL} scripts/${*}.sh \ No newline at end of file diff --git a/README.md b/README.md index e3e5653..88334b7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,9 @@ ankisyncd ========= +[![Documentation Status](https://readthedocs.org/projects/anki-sync-server/badge/?version=latest)](https://anki-sync-server.readthedocs.io/?badge=latest) +[![Gitter](https://badges.gitter.im/ankicommunity/community.svg)](https://gitter.im/ankicommunity/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) + [Anki][] is a powerful open source flashcard application, which helps you quickly and easily memorize facts over the long term utilizing a spaced repetition algorithm. Anki's main form is a desktop application (for Windows, @@ -26,9 +29,6 @@ It supports Python 3 and Anki 2.1. - [Anki 2.1](#anki-21) - [Anki 2.0](#anki-20) - [AnkiDroid](#ankidroid) - - [Running `ankisyncd` without `pyaudio`](#running-ankisyncd-without-pyaudio) - - [Anki ≥2.1.9](#anki-219) - - [Older versions](#older-versions) - [ENVVAR configuration overrides](#envvar-configuration-overrides) - [Support for other database backends](#support-for-other-database-backends) @@ -36,25 +36,9 @@ It supports Python 3 and Anki 2.1. Installing ---------- -0. Install Anki. The currently supported version range is 2.1.1〜2.1.11, with the - exception of 2.1.9[1](#readme-fn-01). (Keep in - mind this range only applies to the Anki used by the server, clients can be - as old as 2.0.27 and still work.) Running the server with other versions might - work as long as they're not 2.0.x, but things might break, so do it at your - own risk. If for some reason you can't get the supported Anki version easily - on your system, you can use `anki-bundled` from this repo: - - $ git submodule update --init - $ cd anki-bundled - $ pip install -r requirements.txt - - Keep in mind `pyaudio`, a dependency of Anki, requires development headers for - Python 3 and PortAudio to be present before running `pip`. If you can't or - don't want to install these, you can try [patching Anki](#running-ankisyncd-without-pyaudio). - 1. Install the dependencies: - $ pip install webob + $ pip install -r src/requirements.txt 2. Modify ankisyncd.conf according to your needs @@ -62,22 +46,39 @@ Installing $ ./ankisyncctl.py adduser -4. Run ankisyncd: +4. Setup a proxy to unchunk the requests. + + Webob does not support the header "Transfer-Encoding: chunked" used by Anki + and therefore ankisyncd sees chunked requests as empty. To solve this problem + setup Nginx (or any other webserver of your choice) and configure it to + "unchunk" the requests for ankisyncd. + + For example, if you use Nginx on the same machine as ankisyncd, you first + have to change the port in `ankisyncd.conf` to something other than `27701`. + Then configure Nginx to listen on port `27701` and forward the unchunked + requests to ankisyncd. + + An example configuration with ankisyncd running on the same machine as Nginx + and listening on port `27702` may look like: + + ``` + server { + listen 27701; + server_name default; + + location / { + proxy_http_version 1.0; + proxy_pass http://localhost:27702/; + } + } + ``` + +5. Run ankisyncd: $ python -m ankisyncd --- - -1. 2.1.9 is not supported due to [commit `95ccbfdd3679`][] introducing the - dependency on the `aqt` module, which depends on PyQt5. The server should - still work fine if you have PyQt5 installed. This has been fixed in - [commit `a389b8b4a0e2`][], which is a part of the 2.1.10 release. -[↑](#readme-fn-01b) - -[commit `95ccbfdd3679`]: https://github.com/dae/anki/commit/95ccbfdd3679dd46f22847c539c7fddb8fa904ea -[commit `a389b8b4a0e2`]: https://github.com/dae/anki/commit/a389b8b4a0e209023c4533a7ee335096a704079c - Installing (Docker) ------------------- @@ -86,6 +87,18 @@ Follow [these instructions](https://github.com/kuklinistvan/docker-anki-sync-ser Setting up Anki --------------- +### Anki 2.1.28 and above + +Create a new directory in [the add-ons folder][addons21] (name it something +like ankisyncd), create a file named `__init__.py` containing the code below +and put it in the `ankisyncd` directory. + + import os + + addr = "http://127.0.0.1:27701/" # put your server address here + os.environ["SYNC_ENDPOINT"] = addr + "sync/" + os.environ["SYNC_ENDPOINT_MEDIA"] = addr + "msync/" + ### Anki 2.1 Create a new directory in [the add-ons folder][addons21] (name it something @@ -111,7 +124,7 @@ and put it in `~/Anki/addons`. anki.sync.SYNC_BASE = addr anki.sync.SYNC_MEDIA_BASE = addr + "msync/" -[addons21]: https://apps.ankiweb.net/docs/addons.html#_add_on_folders +[addons21]: https://addon-docs.ankiweb.net/#/getting-started?id=add-on-folders ### AnkiDroid @@ -122,44 +135,13 @@ Unless you have set up a reverse proxy to handle encrypted connections, use whatever you have specified in `ankisyncd.conf` (or, if using a reverse proxy, whatever port you configured to accept the front-end connection). -**Do not use trailing slashes.** +Use the same base url for both the `Sync url` and the `Media sync url`, but append `/msync` to +the `Media sync url`. Do **not** append `/sync` to the `Sync url`. Even though the AnkiDroid interface will request an email address, this is not required; it will simply be the username you configured with `ankisyncctl.py adduser`. -Running `ankisyncd` without `pyaudio` -------------------------------------- - -`ankisyncd` doesn't use the audio recording feature of Anki, so if you don't -want to install PortAudio, you can edit some files in the `anki-bundled` -directory to exclude `pyaudio`: - -### Anki ≥2.1.9 - -Just remove "pyaudio" from requirements.txt and you're done. This change has -been introduced in [commit `ca710ab3f1c1`][]. - -[commit `ca710ab3f1c1`]: https://github.com/dae/anki/commit/ca710ab3f1c1174469a3b48f1257c0fc0ce624bf - -### Older versions - -First go to `anki-bundled`, then follow one of the instructions below. They all -do the same thing, you can pick whichever one you're most comfortable with. - -Manual version: remove every line past "# Packaged commands" in anki/sound.py, -remove every line starting with "pyaudio" in requirements.txt - -`ed` version: - - $ echo '/# Packaged commands/,$d;w' | tr ';' '\n' | ed anki/sound.py - $ echo '/^pyaudio/d;w' | tr ';' '\n' | ed requirements.txt - -`sed -i` version: - - $ sed -i '/# Packaged commands/,$d' anki/sound.py - $ sed -i '/^pyaudio/d' requirements.txt - ENVVAR configuration overrides ------------------------------ diff --git a/anki-bundled b/anki-bundled deleted file mode 160000 index cca3fcb..0000000 --- a/anki-bundled +++ /dev/null @@ -1 +0,0 @@ -Subproject commit cca3fcb2418880d0430a5c5c2e6b81ba260065b7 diff --git a/ankisyncd/full_sync.py b/ankisyncd/full_sync.py deleted file mode 100644 index 9044abd..0000000 --- a/ankisyncd/full_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- - -import os -from sqlite3 import dbapi2 as sqlite - -import anki.db - -class FullSyncManager: - def upload(self, col, data, session): - # Verify integrity of the received database file before replacing our - # existing db. - temp_db_path = session.get_collection_path() + ".tmp" - with open(temp_db_path, 'wb') as f: - f.write(data) - - try: - with anki.db.DB(temp_db_path) as test_db: - if test_db.scalar("pragma integrity_check") != "ok": - raise HTTPBadRequest("Integrity check failed for uploaded " - "collection database file.") - except sqlite.Error as e: - raise HTTPBadRequest("Uploaded collection database file is " - "corrupt.") - - # Overwrite existing db. - col.close() - try: - os.replace(temp_db_path, session.get_collection_path()) - finally: - col.reopen() - col.load() - - return "OK" - - - def download(self, col, session): - col.close() - try: - data = open(session.get_collection_path(), 'rb').read() - finally: - col.reopen() - col.load() - return data - - -def get_full_sync_manager(config): - if "full_sync_manager" in config and config["full_sync_manager"]: # load from config - import importlib - import inspect - module_name, class_name = config['full_sync_manager'].rsplit('.', 1) - module = importlib.import_module(module_name.strip()) - class_ = getattr(module, class_name.strip()) - - if not FullSyncManager in inspect.getmro(class_): - raise TypeError('''"full_sync_manager" found in the conf file but it doesn''t - inherit from FullSyncManager''') - return class_(config) - else: - return FullSyncManager() diff --git a/config/.env.example b/config/.env.example new file mode 100644 index 0000000..65979ac --- /dev/null +++ b/config/.env.example @@ -0,0 +1,28 @@ +# .env.example (anki-sync-server) + +## Make +MKDOCS=mkdocs +JUPYTER=jupyter + +## Ankisyncd +ANKISYNCD_HOST=0.0.0.0 +ANKISYNCD_PORT=27701 +ANKISYNCD_DATA_ROOT=./collections +ANKISYNCD_BASE_URL=/sync/ +ANKISYNCD_BASE_MEDIA_URL=/msync/ +ANKISYNCD_AUTH_DB_PATH=./auth.db +ANKISYNCD_SESSION_DB_PATH=./session.db + +ANKISYNCD_FULL_SYNC_MANAGER +ANKISYNCD_SESSION_MANAGER +ANKISYNCD_USER_MANAGER +ANKISYNCD_COLLECTION_WRAPPER + +## Mkdocs +MKDOCS_OPTION=serve + +## Jupyter +JUPYTER_OPTION=lab + +## Path +PATH:=.venv/bin/path:${PATH} \ No newline at end of file diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml new file mode 100644 index 0000000..3737456 --- /dev/null +++ b/docs/mkdocs.yml @@ -0,0 +1,9 @@ +strict: true +theme: readthedocs +site_name: Anki Sync Server +site_description: Self-hosted Anki Sync Server. +site_author: Anki Community +site_url: https://ankicommunity.github.io/anki-sync-server +repo_url: https://github.com/ankicommunity/anki-sync-server +docs_dir: src +site_dir: build \ No newline at end of file diff --git a/docs/src/index.md b/docs/src/index.md new file mode 100644 index 0000000..03a9c49 --- /dev/null +++ b/docs/src/index.md @@ -0,0 +1,19 @@ +# Welcome to MkDocs + +Welcome to the anki-sync-server wiki! + +For full documentation visit [mkdocs.org](https://www.mkdocs.org). + +## Commands + +* `mkdocs new [dir-name]` - Create a new project. +* `mkdocs serve` - Start the live-reloading docs server. +* `mkdocs build` - Build the documentation site. +* `mkdocs -h` - Print help message and exit. + +## Project layout + + mkdocs.yml # The configuration file. + src/ + index.md # The documentation homepage. + ... # Other markdown pages, images and other files. diff --git a/notebooks/read_collections.ipynb b/notebooks/read_collections.ipynb new file mode 100644 index 0000000..865a6c6 --- /dev/null +++ b/notebooks/read_collections.ipynb @@ -0,0 +1,182 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cd .." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Reading Collections\n", + "\n", + "This notebook allows to view your collections. Note currently we are using the anki from the submodule. In the future, we should be able to use the anki installed using `pip install anki` however the current collections do not seem compatibile with the latest library." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Install Anki in venv\n", + "!pip3 install anki" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from anki import Collection\n", + "from anki.utils import intTime\n", + "import time" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open Database\n", + "\n", + "Make sure you close the database otherwise it will be locked and you will not be able to use your sync server." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "collection_path = \"./collections/anki/collection.anki2\"\n", + "col = Collection(collection_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### View Collections" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(f'Collection Name: {col.name()}')\n", + "print(f'Cards in Collection: {col.noteCount()}')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### View Decks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print('Decks:')\n", + "for deck in col.decks.all():\n", + " print(f\"{deck['id']}. {deck['name']}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### View Cards" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "deck_id = None\n", + "print('Cards in deck:')\n", + "i = 0\n", + "for card_id in col.decks.cids(deck_id):\n", + " i+=1\n", + " print(f'{i}. {card_id}')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### View Notes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "card_id = None\n", + "print('Notes in card:')\n", + "note_id = col.getCard(card_id).nid\n", + "print(f\"1. Front: {col.getNote(note_id).fields[0]}\")\n", + "print(f\"2. Back: {col.getNote(note_id).fields[1]}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Close Database" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "col.close()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..b5612f7 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1663 @@ +[[package]] +name = "anki" +version = "2.1.37" +description = "" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +beautifulsoup4 = "*" +decorator = "*" +distro = {version = "*", markers = "sys_platform != \"darwin\" and sys_platform != \"win32\""} +orjson = "*" +protobuf = "*" +psutil = {version = "*", markers = "sys_platform == \"win32\""} +requests = {version = "*", extras = ["socks"]} + +[[package]] +name = "appnope" +version = "0.1.2" +description = "Disable App Nap on macOS >= 10.9" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "argon2-cffi" +version = "20.1.0" +description = "The secure Argon2 password hashing algorithm." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +cffi = ">=1.0.0" +six = "*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] +docs = ["sphinx"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] + +[[package]] +name = "async-generator" +version = "1.10" +description = "Async generators and context managers for Python 3.5+" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "attrs" +version = "20.3.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "beautifulsoup4" +version = "4.9.3" +description = "Screen-scraping library" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +soupsieve = {version = ">1.2", markers = "python_version >= \"3.0\""} + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bleach" +version = "3.2.1" +description = "An easy safelist-based HTML-sanitizing tool." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +packaging = "*" +six = ">=1.9.0" +webencodings = "*" + +[[package]] +name = "certifi" +version = "2020.12.5" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "cffi" +version = "1.14.4" +description = "Foreign Function Interface for Python calling C code." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "click" +version = "7.1.2" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "decorator" +version = "4.4.2" +description = "Decorators for Humans" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" + +[[package]] +name = "defusedxml" +version = "0.6.0" +description = "XML bomb protection for Python stdlib modules" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "distro" +version = "1.5.0" +description = "Distro - an OS platform information API" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "entrypoints" +version = "0.3" +description = "Discover and load entry points from installed packages." +category = "dev" +optional = false +python-versions = ">=2.7" + +[[package]] +name = "future" +version = "0.18.2" +description = "Clean single-source support for Python 3 and 2" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "idna" +version = "2.10" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "ipykernel" +version = "5.4.2" +description = "IPython Kernel for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +ipython = ">=5.0.0" +jupyter-client = "*" +tornado = ">=4.2" +traitlets = ">=4.1.0" + +[package.extras] +test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose"] + +[[package]] +name = "ipython" +version = "7.19.0" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.10" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" +traitlets = ">=4.2" + +[package.extras] +all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.14)", "pygments", "qtconsole", "requests", "testpath"] +doc = ["Sphinx (>=1.3)"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["notebook", "ipywidgets"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.14)"] + +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "ipywidgets" +version = "7.5.1" +description = "IPython HTML widgets for Jupyter" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ipykernel = ">=4.5.1" +ipython = {version = ">=4.0.0", markers = "python_version >= \"3.3\""} +nbformat = ">=4.2.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=3.5.0,<3.6.0" + +[package.extras] +test = ["pytest (>=3.6.0)", "pytest-cov", "mock"] + +[[package]] +name = "jedi" +version = "0.17.2" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +parso = ">=0.7.0,<0.8.0" + +[package.extras] +qa = ["flake8 (==3.7.9)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] + +[[package]] +name = "jinja2" +version = "2.11.2" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +MarkupSafe = ">=0.23" + +[package.extras] +i18n = ["Babel (>=0.8)"] + +[[package]] +name = "joblib" +version = "1.0.0" +description = "Lightweight pipelining with Python functions" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "json5" +version = "0.9.5" +description = "A Python implementation of the JSON5 data format." +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +dev = ["hypothesis"] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] + +[[package]] +name = "jupyter" +version = "1.0.0" +description = "Jupyter metapackage. Install all the Jupyter components in one go." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ipykernel = "*" +ipywidgets = "*" +jupyter-console = "*" +nbconvert = "*" +notebook = "*" +qtconsole = "*" + +[[package]] +name = "jupyter-client" +version = "6.1.7" +description = "Jupyter protocol implementation and client libraries" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +jupyter-core = ">=4.6.0" +python-dateutil = ">=2.1" +pyzmq = ">=13" +tornado = ">=4.1" +traitlets = "*" + +[package.extras] +test = ["ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-generator", "pytest-timeout"] + +[[package]] +name = "jupyter-console" +version = "6.2.0" +description = "Jupyter terminal console" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +ipykernel = "*" +ipython = "*" +jupyter-client = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" + +[package.extras] +test = ["pexpect"] + +[[package]] +name = "jupyter-core" +version = "4.7.0" +description = "Jupyter core package. A base package on which Jupyter projects rely." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\""} +traitlets = "*" + +[[package]] +name = "jupyterlab" +version = "2.2.9" +description = "The JupyterLab notebook server extension." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +jinja2 = ">=2.10" +jupyterlab-server = ">=1.1.5,<2.0" +notebook = ">=4.3.1" +tornado = "<6.0.0 || >6.0.0,<6.0.1 || >6.0.1,<6.0.2 || >6.0.2" + +[package.extras] +docs = ["jsx-lexer", "recommonmark", "sphinx", "sphinx-rtd-theme", "sphinx-copybutton"] +test = ["pytest", "pytest-check-links", "requests", "wheel", "virtualenv"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.1.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pygments = ">=2.4.1,<3" + +[[package]] +name = "jupyterlab-server" +version = "1.2.0" +description = "JupyterLab Server" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +jinja2 = ">=2.10" +json5 = "*" +jsonschema = ">=3.0.1" +notebook = ">=4.2.0" +requests = "*" + +[package.extras] +test = ["pytest", "requests"] + +[[package]] +name = "livereload" +version = "2.6.3" +description = "Python LiveReload is an awesome tool for web developers" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" +tornado = {version = "*", markers = "python_version > \"2.7\""} + +[[package]] +name = "lunr" +version = "0.5.8" +description = "A Python implementation of Lunr.js" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +future = ">=0.16.0" +nltk = {version = ">=3.2.5", optional = true, markers = "python_version > \"2.7\" and extra == \"languages\""} +six = ">=1.11.0" + +[package.extras] +languages = ["nltk (>=3.2.5,<3.5)", "nltk (>=3.2.5)"] + +[[package]] +name = "markdown" +version = "3.3.3" +description = "Python implementation of Markdown." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "1.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[[package]] +name = "mistune" +version = "0.8.4" +description = "The fastest markdown parser in pure Python" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mkdocs" +version = "1.1.2" +description = "Project documentation with Markdown." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +click = ">=3.3" +Jinja2 = ">=2.10.1" +livereload = ">=2.5.1" +lunr = {version = "0.5.8", extras = ["languages"]} +Markdown = ">=3.2.1" +PyYAML = ">=3.10" +tornado = ">=5.0" + +[[package]] +name = "nbclient" +version = "0.5.1" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +async-generator = "*" +jupyter-client = ">=6.1.5" +nbformat = ">=5.0" +nest-asyncio = "*" +traitlets = ">=4.2" + +[package.extras] +dev = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] +sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] +test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] + +[[package]] +name = "nbconvert" +version = "6.0.7" +description = "Converting Jupyter Notebooks" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +bleach = "*" +defusedxml = "*" +entrypoints = ">=0.2.2" +jinja2 = ">=2.4" +jupyter-core = "*" +jupyterlab-pygments = "*" +mistune = ">=0.8.1,<2" +nbclient = ">=0.5.0,<0.6.0" +nbformat = ">=4.4" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +testpath = "*" +traitlets = ">=4.2" + +[package.extras] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +serve = ["tornado (>=4.0)"] +test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)"] +webpdf = ["pyppeteer (==0.2.2)"] + +[[package]] +name = "nbformat" +version = "5.0.8" +description = "The Jupyter Notebook format" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +ipython-genutils = "*" +jsonschema = ">=2.4,<2.5.0 || >2.5.0" +jupyter-core = "*" +traitlets = ">=4.1" + +[package.extras] +fast = ["fastjsonschema"] +test = ["fastjsonschema", "testpath", "pytest", "pytest-cov"] + +[[package]] +name = "nest-asyncio" +version = "1.4.3" +description = "Patch asyncio to allow nested event loops" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "nltk" +version = "3.5" +description = "Natural Language Toolkit" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +click = "*" +joblib = "*" +regex = "*" +tqdm = "*" + +[package.extras] +all = ["requests", "numpy", "python-crfsuite", "scikit-learn", "twython", "pyparsing", "scipy", "matplotlib", "gensim"] +corenlp = ["requests"] +machine_learning = ["gensim", "numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "notebook" +version = "6.1.5" +description = "A web-based notebook environment for interactive computing" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=5.3.4" +jupyter-core = ">=4.6.1" +nbconvert = "*" +nbformat = "*" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = "*" +terminado = ">=0.8.3" +tornado = ">=5.0" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt"] +test = ["nose", "coverage", "requests", "nose-warnings-filters", "nbval", "nose-exclude", "selenium", "pytest", "pytest-cov", "requests-unixsocket"] + +[[package]] +name = "orjson" +version = "3.4.6" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "packaging" +version = "20.8" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +pyparsing = ">=2.0.2" + +[[package]] +name = "pandocfilters" +version = "1.4.3" +description = "Utilities for writing pandoc filters in python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "parso" +version = "0.7.1" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +testing = ["docopt", "pytest (>=3.0.7)"] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "prometheus-client" +version = "0.9.0" +description = "Python client for the Prometheus monitoring system." +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.8" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "protobuf" +version = "3.14.0" +description = "Protocol Buffers" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = ">=1.9" + +[[package]] +name = "psutil" +version = "5.8.0" +description = "Cross-platform lib for process and system monitoring in Python." +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.6.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "py" +version = "1.10.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyaudio" +version = "0.2.11" +description = "Bindings for PortAudio v19, the cross-platform audio input/output stream library." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pycparser" +version = "2.20" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pygments" +version = "2.7.3" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "pyrsistent" +version = "0.17.3" +description = "Persistent/Functional/Immutable data structures" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "pysocks" +version = "1.7.1" +description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pywin32" +version = "300" +description = "Python for Window Extensions" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pywinpty" +version = "0.5.7" +description = "Python bindings for the winpty library" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pyyaml" +version = "5.3.1" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyzmq" +version = "20.0.0" +description = "Python bindings for 0MQ" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name === \"pypy\""} +py = {version = "*", markers = "implementation_name === \"pypy\""} + +[[package]] +name = "qtconsole" +version = "5.0.1" +description = "Jupyter Qt console" +category = "dev" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +ipykernel = ">=4.1" +ipython-genutils = "*" +jupyter-client = ">=4.1" +jupyter-core = "*" +pygments = "*" +pyzmq = ">=17.1" +qtpy = "*" +traitlets = "*" + +[package.extras] +doc = ["Sphinx (>=1.3)"] +test = ["flaky", "pytest", "pytest-qt"] + +[[package]] +name = "qtpy" +version = "1.9.0" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5, PyQt4 and PySide) and additional custom QWidgets." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "regex" +version = "2020.11.13" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "requests" +version = "2.25.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<5" +idna = ">=2.5,<3" +PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] + +[[package]] +name = "send2trash" +version = "1.5.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "soupsieve" +version = "2.1" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "terminado" +version = "0.9.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=0.5", markers = "os_name == \"nt\""} +tornado = ">=4" + +[[package]] +name = "testpath" +version = "0.4.4" +description = "Test utilities for code working with files and commands" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +test = ["pathlib2"] + +[[package]] +name = "tornado" +version = "6.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "dev" +optional = false +python-versions = ">= 3.5" + +[[package]] +name = "tqdm" +version = "4.54.1" +description = "Fast, Extensible Progress Meter" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "argopt", "pydoc-markdown", "wheel"] + +[[package]] +name = "traitlets" +version = "5.0.5" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ipython-genutils = "*" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "urllib3" +version = "1.26.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "webob" +version = "1.8.6" +description = "WSGI request and response object" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" + +[package.extras] +docs = ["Sphinx (>=1.7.5)", "pylons-sphinx-themes"] +testing = ["pytest (>=3.1.0)", "coverage", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "widgetsnbextension" +version = "3.5.1" +description = "IPython HTML widgets for Jupyter" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +notebook = ">=4.4.1" + +[metadata] +lock-version = "1.1" +python-versions = "^3.8" +content-hash = "4652673e61d57bf6f9a67c42f717aa0d014e0073a274183a8d47e65e0c251ffa" + +[metadata.files] +anki = [ + {file = "anki-2.1.37-cp38-abi3-macosx_10_7_x86_64.whl", hash = "sha256:959bb06f0a0e29a43f604fd876bcd552961de11d1014b6c3c1e6007611051264"}, + {file = "anki-2.1.37-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:f7d7f504d96bbe799d13a819607498fd9a7c61394a85c810eae642b2c0575bd9"}, + {file = "anki-2.1.37-cp38-abi3-win_amd64.whl", hash = "sha256:4b36d056f3b25c2d780e1bec7b5fe9ff9c253c60f1e8200bda4378f2a0310eb3"}, +] +appnope = [ + {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, + {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, +] +argon2-cffi = [ + {file = "argon2-cffi-20.1.0.tar.gz", hash = "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d"}, + {file = "argon2_cffi-20.1.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003"}, + {file = "argon2_cffi-20.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:05a8ac07c7026542377e38389638a8a1e9b78f1cd8439cd7493b39f08dd75fbf"}, + {file = "argon2_cffi-20.1.0-cp27-cp27m-win32.whl", hash = "sha256:0bf066bc049332489bb2d75f69216416329d9dc65deee127152caeb16e5ce7d5"}, + {file = "argon2_cffi-20.1.0-cp27-cp27m-win_amd64.whl", hash = "sha256:57358570592c46c420300ec94f2ff3b32cbccd10d38bdc12dc6979c4a8484fbc"}, + {file = "argon2_cffi-20.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7d455c802727710e9dfa69b74ccaab04568386ca17b0ad36350b622cd34606fe"}, + {file = "argon2_cffi-20.1.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:b160416adc0f012fb1f12588a5e6954889510f82f698e23ed4f4fa57f12a0647"}, + {file = "argon2_cffi-20.1.0-cp35-cp35m-win32.whl", hash = "sha256:9bee3212ba4f560af397b6d7146848c32a800652301843df06b9e8f68f0f7361"}, + {file = "argon2_cffi-20.1.0-cp35-cp35m-win_amd64.whl", hash = "sha256:392c3c2ef91d12da510cfb6f9bae52512a4552573a9e27600bdb800e05905d2b"}, + {file = "argon2_cffi-20.1.0-cp36-cp36m-win32.whl", hash = "sha256:ba7209b608945b889457f949cc04c8e762bed4fe3fec88ae9a6b7765ae82e496"}, + {file = "argon2_cffi-20.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:da7f0445b71db6d3a72462e04f36544b0de871289b0bc8a7cc87c0f5ec7079fa"}, + {file = "argon2_cffi-20.1.0-cp37-abi3-macosx_10_6_intel.whl", hash = "sha256:cc0e028b209a5483b6846053d5fd7165f460a1f14774d79e632e75e7ae64b82b"}, + {file = "argon2_cffi-20.1.0-cp37-cp37m-win32.whl", hash = "sha256:18dee20e25e4be86680b178b35ccfc5d495ebd5792cd00781548d50880fee5c5"}, + {file = "argon2_cffi-20.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203"}, + {file = "argon2_cffi-20.1.0-cp38-cp38-win32.whl", hash = "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78"}, + {file = "argon2_cffi-20.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2"}, +] +async-generator = [ + {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, + {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, +] +attrs = [ + {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, + {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.9.3-py2-none-any.whl", hash = "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35"}, + {file = "beautifulsoup4-4.9.3-py3-none-any.whl", hash = "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666"}, + {file = "beautifulsoup4-4.9.3.tar.gz", hash = "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25"}, +] +bleach = [ + {file = "bleach-3.2.1-py2.py3-none-any.whl", hash = "sha256:9f8ccbeb6183c6e6cddea37592dfb0167485c1e3b13b3363bc325aa8bda3adbd"}, + {file = "bleach-3.2.1.tar.gz", hash = "sha256:52b5919b81842b1854196eaae5ca29679a2f2e378905c346d3ca8227c2c66080"}, +] +certifi = [ + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, +] +cffi = [ + {file = "cffi-1.14.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775"}, + {file = "cffi-1.14.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06"}, + {file = "cffi-1.14.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26"}, + {file = "cffi-1.14.4-cp27-cp27m-win32.whl", hash = "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c"}, + {file = "cffi-1.14.4-cp27-cp27m-win_amd64.whl", hash = "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b"}, + {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d"}, + {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca"}, + {file = "cffi-1.14.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698"}, + {file = "cffi-1.14.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b"}, + {file = "cffi-1.14.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293"}, + {file = "cffi-1.14.4-cp35-cp35m-win32.whl", hash = "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2"}, + {file = "cffi-1.14.4-cp35-cp35m-win_amd64.whl", hash = "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7"}, + {file = "cffi-1.14.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b"}, + {file = "cffi-1.14.4-cp36-cp36m-win32.whl", hash = "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668"}, + {file = "cffi-1.14.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009"}, + {file = "cffi-1.14.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01"}, + {file = "cffi-1.14.4-cp37-cp37m-win32.whl", hash = "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e"}, + {file = "cffi-1.14.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35"}, + {file = "cffi-1.14.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e"}, + {file = "cffi-1.14.4-cp38-cp38-win32.whl", hash = "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d"}, + {file = "cffi-1.14.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375"}, + {file = "cffi-1.14.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909"}, + {file = "cffi-1.14.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd"}, + {file = "cffi-1.14.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a"}, + {file = "cffi-1.14.4-cp39-cp39-win32.whl", hash = "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3"}, + {file = "cffi-1.14.4-cp39-cp39-win_amd64.whl", hash = "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b"}, + {file = "cffi-1.14.4.tar.gz", hash = "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +click = [ + {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, + {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +decorator = [ + {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, + {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, +] +defusedxml = [ + {file = "defusedxml-0.6.0-py2.py3-none-any.whl", hash = "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93"}, + {file = "defusedxml-0.6.0.tar.gz", hash = "sha256:f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5"}, +] +distro = [ + {file = "distro-1.5.0-py2.py3-none-any.whl", hash = "sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799"}, + {file = "distro-1.5.0.tar.gz", hash = "sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92"}, +] +entrypoints = [ + {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, + {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, +] +future = [ + {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, +] +idna = [ + {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, + {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, +] +ipykernel = [ + {file = "ipykernel-5.4.2-py3-none-any.whl", hash = "sha256:63b4b96c513e1138874934e3e783a8e5e13c02b9036e37107bfe042ac8955005"}, + {file = "ipykernel-5.4.2.tar.gz", hash = "sha256:e20ceb7e52cb4d250452e1230be76e0b2323f33bd46c6b2bc7abb6601740e182"}, +] +ipython = [ + {file = "ipython-7.19.0-py3-none-any.whl", hash = "sha256:c987e8178ced651532b3b1ff9965925bfd445c279239697052561a9ab806d28f"}, + {file = "ipython-7.19.0.tar.gz", hash = "sha256:cbb2ef3d5961d44e6a963b9817d4ea4e1fa2eb589c371a470fed14d8d40cbd6a"}, +] +ipython-genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] +ipywidgets = [ + {file = "ipywidgets-7.5.1-py2.py3-none-any.whl", hash = "sha256:13ffeca438e0c0f91ae583dc22f50379b9d6b28390ac7be8b757140e9a771516"}, + {file = "ipywidgets-7.5.1.tar.gz", hash = "sha256:e945f6e02854a74994c596d9db83444a1850c01648f1574adf144fbbabe05c97"}, +] +jedi = [ + {file = "jedi-0.17.2-py2.py3-none-any.whl", hash = "sha256:98cc583fa0f2f8304968199b01b6b4b94f469a1f4a74c1560506ca2a211378b5"}, + {file = "jedi-0.17.2.tar.gz", hash = "sha256:86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20"}, +] +jinja2 = [ + {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, + {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, +] +joblib = [ + {file = "joblib-1.0.0-py3-none-any.whl", hash = "sha256:75ead23f13484a2a414874779d69ade40d4fa1abe62b222a23cd50d4bc822f6f"}, + {file = "joblib-1.0.0.tar.gz", hash = "sha256:7ad866067ac1fdec27d51c8678ea760601b70e32ff1881d4dc8e1171f2b64b24"}, +] +json5 = [ + {file = "json5-0.9.5-py2.py3-none-any.whl", hash = "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c"}, + {file = "json5-0.9.5.tar.gz", hash = "sha256:703cfee540790576b56a92e1c6aaa6c4b0d98971dc358ead83812aa4d06bdb96"}, +] +jsonschema = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] +jupyter = [ + {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, + {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, + {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, +] +jupyter-client = [ + {file = "jupyter_client-6.1.7-py3-none-any.whl", hash = "sha256:c958d24d6eacb975c1acebb68ac9077da61b5f5c040f22f6849928ad7393b950"}, + {file = "jupyter_client-6.1.7.tar.gz", hash = "sha256:49e390b36fe4b4226724704ea28d9fb903f1a3601b6882ce3105221cd09377a1"}, +] +jupyter-console = [ + {file = "jupyter_console-6.2.0-py3-none-any.whl", hash = "sha256:1d80c06b2d85bfb10bd5cc731b3db18e9023bc81ab00491d3ac31f206490aee3"}, + {file = "jupyter_console-6.2.0.tar.gz", hash = "sha256:7f6194f4f4692d292da3f501c7f343ccd5e36c6a1becf7b7515e23e66d6bf1e9"}, +] +jupyter-core = [ + {file = "jupyter_core-4.7.0-py3-none-any.whl", hash = "sha256:0a451c9b295e4db772bdd8d06f2f1eb31caeec0e81fbb77ba37d4a3024e3b315"}, + {file = "jupyter_core-4.7.0.tar.gz", hash = "sha256:aa1f9496ab3abe72da4efe0daab0cb2233997914581f9a071e07498c6add8ed3"}, +] +jupyterlab = [ + {file = "jupyterlab-2.2.9-py3-none-any.whl", hash = "sha256:59af02c26a15ec2d2862a15bc72e41ae304b406a0b0d3f4f705eeb7caf91902b"}, + {file = "jupyterlab-2.2.9.tar.gz", hash = "sha256:3be8f8edea173753dd838c1b6d3bbcb6f5c801121f824a477025c1b6a1d33dc6"}, +] +jupyterlab-pygments = [ + {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, + {file = "jupyterlab_pygments-0.1.2.tar.gz", hash = "sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146"}, +] +jupyterlab-server = [ + {file = "jupyterlab_server-1.2.0-py3-none-any.whl", hash = "sha256:55d256077bf13e5bc9e8fbd5aac51bef82f6315111cec6b712b9a5ededbba924"}, + {file = "jupyterlab_server-1.2.0.tar.gz", hash = "sha256:5431d9dde96659364b7cc877693d5d21e7b80cea7ae3959ecc2b87518e5f5d8c"}, +] +livereload = [ + {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, +] +lunr = [ + {file = "lunr-0.5.8-py2.py3-none-any.whl", hash = "sha256:aab3f489c4d4fab4c1294a257a30fec397db56f0a50273218ccc3efdbf01d6ca"}, + {file = "lunr-0.5.8.tar.gz", hash = "sha256:c4fb063b98eff775dd638b3df380008ae85e6cb1d1a24d1cd81a10ef6391c26e"}, +] +markdown = [ + {file = "Markdown-3.3.3-py3-none-any.whl", hash = "sha256:c109c15b7dc20a9ac454c9e6025927d44460b85bd039da028d85e2b6d0bcc328"}, + {file = "Markdown-3.3.3.tar.gz", hash = "sha256:5d9f2b5ca24bc4c7a390d22323ca4bad200368612b5aaa7796babf971d2b2f18"}, +] +markupsafe = [ + {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, + {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, +] +mistune = [ + {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, + {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, +] +mkdocs = [ + {file = "mkdocs-1.1.2-py3-none-any.whl", hash = "sha256:096f52ff52c02c7e90332d2e53da862fde5c062086e1b5356a6e392d5d60f5e9"}, + {file = "mkdocs-1.1.2.tar.gz", hash = "sha256:f0b61e5402b99d7789efa032c7a74c90a20220a9c81749da06dbfbcbd52ffb39"}, +] +nbclient = [ + {file = "nbclient-0.5.1-py3-none-any.whl", hash = "sha256:4d6b116187c795c99b9dba13d46e764d596574b14c296d60670c8dfe454db364"}, + {file = "nbclient-0.5.1.tar.gz", hash = "sha256:01e2d726d16eaf2cde6db74a87e2451453547e8832d142f73f72fddcd4fe0250"}, +] +nbconvert = [ + {file = "nbconvert-6.0.7-py3-none-any.whl", hash = "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d"}, + {file = "nbconvert-6.0.7.tar.gz", hash = "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002"}, +] +nbformat = [ + {file = "nbformat-5.0.8-py3-none-any.whl", hash = "sha256:aa9450c16d29286dc69b92ea4913c1bffe86488f90184445996ccc03a2f60382"}, + {file = "nbformat-5.0.8.tar.gz", hash = "sha256:f545b22138865bfbcc6b1ffe89ed5a2b8e2dc5d4fe876f2ca60d8e6f702a30f8"}, +] +nest-asyncio = [ + {file = "nest_asyncio-1.4.3-py3-none-any.whl", hash = "sha256:dbe032f3e9ff7f120e76be22bf6e7958e867aed1743e6894b8a9585fe8495cc9"}, + {file = "nest_asyncio-1.4.3.tar.gz", hash = "sha256:eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa"}, +] +nltk = [ + {file = "nltk-3.5.zip", hash = "sha256:845365449cd8c5f9731f7cb9f8bd6fd0767553b9d53af9eb1b3abf7700936b35"}, +] +notebook = [ + {file = "notebook-6.1.5-py3-none-any.whl", hash = "sha256:508cf9dad7cdb3188f1aa27017dc78179029dfe83814fc505329f689bc2ab50f"}, + {file = "notebook-6.1.5.tar.gz", hash = "sha256:3db37ae834c5f3b6378381229d0e5dfcbfb558d08c8ce646b1ad355147f5e91d"}, +] +orjson = [ + {file = "orjson-3.4.6-cp36-cp36m-macosx_10_7_x86_64.whl", hash = "sha256:4e258f4696255de8038fd01ead8277a7c5c6d1e453cc7ca5aad8c1e9f74af62e"}, + {file = "orjson-3.4.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:283e54f0e2175ffe3f3acb20473da9d13f944a5faca6b066e0df2096ca8dda58"}, + {file = "orjson-3.4.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9864c587a009cc266fce02fbb2d99dd25c773bdd650d4728ef419686c4130380"}, + {file = "orjson-3.4.6-cp36-none-win_amd64.whl", hash = "sha256:9a861504727f3ded5e13ca321fb4187ace3300113c6bf1554088619bbb557f89"}, + {file = "orjson-3.4.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:3fe17a3f0f68b29a2f096817afd98ef680dec7c7577d12de6465e942cd9e4e71"}, + {file = "orjson-3.4.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:38f01ee249813d80e18eaeb5c434e026ddce631a7f1a93265f7035bc7e6621ff"}, + {file = "orjson-3.4.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c961711a8e1ec688fcc978638a1b618c1bfff65929f99edecfa8b67ab26ec2de"}, + {file = "orjson-3.4.6-cp37-none-win_amd64.whl", hash = "sha256:218f164aa917b82e328f177c4121fb45c178b746f917c21739fc3eb5f5b7ca8b"}, + {file = "orjson-3.4.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:67d8e09030342d0153c86676cebdbca5cd12e257a436c8238a25e52f800de98a"}, + {file = "orjson-3.4.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:bac00616ee44c78c8a8bd7e3d6c394ff97d2a45e1b3f453d6a29ffce97b6ffca"}, + {file = "orjson-3.4.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:f5008f92ecf5d0cb0cb172d6d9aa76f48d54cc1b6abc4fc83f430d58de9148ba"}, + {file = "orjson-3.4.6-cp38-none-win_amd64.whl", hash = "sha256:5fe9097f622c7ad47a511a3d2189576b11d1be4b067f094089c45a01ae80b34f"}, + {file = "orjson-3.4.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7132aa4779388f0c0ef2d944efd7f170b41f9d5eadd69813b715afe05af23fbc"}, + {file = "orjson-3.4.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8b246b9234d920fb8f1373167e63254581639482e710ea515354979ec13a47a9"}, + {file = "orjson-3.4.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:b62c64d2336fe9e1a21f0b89f12946d988fd1feb365c2e6f90071c21aca3127d"}, + {file = "orjson-3.4.6-cp39-none-win_amd64.whl", hash = "sha256:a60db27bcba1645c0199ebe4edc1290a91ee22644dde61ee9257ebbacbf5d81e"}, + {file = "orjson-3.4.6.tar.gz", hash = "sha256:e1b4128baebf7968572343834b282794e20c5082f55f42b9675b04df0749e087"}, +] +packaging = [ + {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"}, + {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"}, +] +pandocfilters = [ + {file = "pandocfilters-1.4.3.tar.gz", hash = "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"}, +] +parso = [ + {file = "parso-0.7.1-py2.py3-none-any.whl", hash = "sha256:97218d9159b2520ff45eb78028ba8b50d2bc61dcc062a9682666f2dc4bd331ea"}, + {file = "parso-0.7.1.tar.gz", hash = "sha256:caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +prometheus-client = [ + {file = "prometheus_client-0.9.0-py2.py3-none-any.whl", hash = "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35"}, + {file = "prometheus_client-0.9.0.tar.gz", hash = "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.8-py3-none-any.whl", hash = "sha256:7debb9a521e0b1ee7d2fe96ee4bd60ef03c6492784de0547337ca4433e46aa63"}, + {file = "prompt_toolkit-3.0.8.tar.gz", hash = "sha256:25c95d2ac813909f813c93fde734b6e44406d1477a9faef7c915ff37d39c0a8c"}, +] +protobuf = [ + {file = "protobuf-3.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:629b03fd3caae7f815b0c66b41273f6b1900a579e2ccb41ef4493a4f5fb84f3a"}, + {file = "protobuf-3.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:5b7a637212cc9b2bcf85dd828b1178d19efdf74dbfe1ddf8cd1b8e01fdaaa7f5"}, + {file = "protobuf-3.14.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:43b554b9e73a07ba84ed6cf25db0ff88b1e06be610b37656e292e3cbb5437472"}, + {file = "protobuf-3.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5e9806a43232a1fa0c9cf5da8dc06f6910d53e4390be1fa06f06454d888a9142"}, + {file = "protobuf-3.14.0-cp35-cp35m-win32.whl", hash = "sha256:1c51fda1bbc9634246e7be6016d860be01747354ed7015ebe38acf4452f470d2"}, + {file = "protobuf-3.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:4b74301b30513b1a7494d3055d95c714b560fbb630d8fb9956b6f27992c9f980"}, + {file = "protobuf-3.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:86a75477addde4918e9a1904e5c6af8d7b691f2a3f65587d73b16100fbe4c3b2"}, + {file = "protobuf-3.14.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ecc33531a213eee22ad60e0e2aaea6c8ba0021f0cce35dbf0ab03dee6e2a23a1"}, + {file = "protobuf-3.14.0-cp36-cp36m-win32.whl", hash = "sha256:72230ed56f026dd664c21d73c5db73ebba50d924d7ba6b7c0d81a121e390406e"}, + {file = "protobuf-3.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:0fc96785262042e4863b3f3b5c429d4636f10d90061e1840fce1baaf59b1a836"}, + {file = "protobuf-3.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e75105c9dfe13719b7293f75bd53033108f4ba03d44e71db0ec2a0e8401eafd"}, + {file = "protobuf-3.14.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2a7e2fe101a7ace75e9327b9c946d247749e564a267b0515cf41dfe450b69bac"}, + {file = "protobuf-3.14.0-cp37-cp37m-win32.whl", hash = "sha256:b0d5d35faeb07e22a1ddf8dce620860c8fe145426c02d1a0ae2688c6e8ede36d"}, + {file = "protobuf-3.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8971c421dbd7aad930c9bd2694122f332350b6ccb5202a8b7b06f3f1a5c41ed5"}, + {file = "protobuf-3.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9616f0b65a30851e62f1713336c931fcd32c057202b7ff2cfbfca0fc7d5e3043"}, + {file = "protobuf-3.14.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:22bcd2e284b3b1d969c12e84dc9b9a71701ec82d8ce975fdda19712e1cfd4e00"}, + {file = "protobuf-3.14.0-py2.py3-none-any.whl", hash = "sha256:0e247612fadda953047f53301a7b0407cb0c3cb4ae25a6fde661597a04039b3c"}, + {file = "protobuf-3.14.0.tar.gz", hash = "sha256:1d63eb389347293d8915fb47bee0951c7b5dab522a4a60118b9a18f33e21f8ce"}, +] +psutil = [ + {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"}, + {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"}, + {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"}, + {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"}, + {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"}, + {file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"}, + {file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"}, + {file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"}, + {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"}, + {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"}, + {file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"}, + {file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"}, + {file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"}, + {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"}, + {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"}, + {file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"}, + {file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"}, + {file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"}, + {file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"}, + {file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"}, + {file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = "sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"}, + {file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"}, + {file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"}, + {file = "psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"}, + {file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"}, + {file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"}, + {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"}, + {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"}, +] +ptyprocess = [ + {file = "ptyprocess-0.6.0-py2.py3-none-any.whl", hash = "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"}, + {file = "ptyprocess-0.6.0.tar.gz", hash = "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"}, +] +py = [ + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, +] +pyaudio = [ + {file = "PyAudio-0.2.11-cp27-cp27m-win32.whl", hash = "sha256:f78d543a98b730e64621ebf7f3e2868a79ade0a373882ef51c0293455ffa8e6e"}, + {file = "PyAudio-0.2.11-cp27-cp27m-win_amd64.whl", hash = "sha256:259bb9c1363be895b4f9a97e320a6017dd06bc540728c1a04eb4a7b6fe75035b"}, + {file = "PyAudio-0.2.11-cp34-cp34m-win32.whl", hash = "sha256:0d92f6a294565260a282f7c9a0b0d309fc8cc988b5ee5b50645634ab9e2da7f7"}, + {file = "PyAudio-0.2.11-cp34-cp34m-win_amd64.whl", hash = "sha256:589bfad2c615dd4b5d3757e763019c42ab82f06fba5cae64ec02fd7f5ae407ed"}, + {file = "PyAudio-0.2.11-cp35-cp35m-win32.whl", hash = "sha256:8f89075b4844ea94dde0c951c2937581c989fabd4df09bfd3f075035f50955df"}, + {file = "PyAudio-0.2.11-cp35-cp35m-win_amd64.whl", hash = "sha256:cf1543ba50bd44ac0d0ab5c035bb9c3127eb76047ff12235149d9adf86f532b6"}, + {file = "PyAudio-0.2.11-cp36-cp36m-win32.whl", hash = "sha256:51b558d1b28c68437b53218279110db44f69f3f5dd3d81859f569a4a96962bdc"}, + {file = "PyAudio-0.2.11-cp36-cp36m-win_amd64.whl", hash = "sha256:2a19bdb8ec1445b4f3e4b7b109e0e4cec1fd1f1ce588592aeb6db0b58d4fb3b0"}, + {file = "PyAudio-0.2.11.tar.gz", hash = "sha256:93bfde30e0b64e63a46f2fd77e85c41fd51182a4a3413d9edfaf9ffaa26efb74"}, +] +pycparser = [ + {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, + {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, +] +pygments = [ + {file = "Pygments-2.7.3-py3-none-any.whl", hash = "sha256:f275b6c0909e5dafd2d6269a656aa90fa58ebf4a74f8fcf9053195d226b24a08"}, + {file = "Pygments-2.7.3.tar.gz", hash = "sha256:ccf3acacf3782cbed4a989426012f1c535c9a90d3a7fc3f16d231b9372d2b716"}, +] +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pyrsistent = [ + {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, +] +pysocks = [ + {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, + {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, + {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] +pywin32 = [ + {file = "pywin32-300-cp35-cp35m-win32.whl", hash = "sha256:1c204a81daed2089e55d11eefa4826c05e604d27fe2be40b6bf8db7b6a39da63"}, + {file = "pywin32-300-cp35-cp35m-win_amd64.whl", hash = "sha256:350c5644775736351b77ba68da09a39c760d75d2467ecec37bd3c36a94fbed64"}, + {file = "pywin32-300-cp36-cp36m-win32.whl", hash = "sha256:a3b4c48c852d4107e8a8ec980b76c94ce596ea66d60f7a697582ea9dce7e0db7"}, + {file = "pywin32-300-cp36-cp36m-win_amd64.whl", hash = "sha256:27a30b887afbf05a9cbb05e3ffd43104a9b71ce292f64a635389dbad0ed1cd85"}, + {file = "pywin32-300-cp37-cp37m-win32.whl", hash = "sha256:d7e8c7efc221f10d6400c19c32a031add1c4a58733298c09216f57b4fde110dc"}, + {file = "pywin32-300-cp37-cp37m-win_amd64.whl", hash = "sha256:8151e4d7a19262d6694162d6da85d99a16f8b908949797fd99c83a0bfaf5807d"}, + {file = "pywin32-300-cp38-cp38-win32.whl", hash = "sha256:fbb3b1b0fbd0b4fc2a3d1d81fe0783e30062c1abed1d17c32b7879d55858cfae"}, + {file = "pywin32-300-cp38-cp38-win_amd64.whl", hash = "sha256:60a8fa361091b2eea27f15718f8eb7f9297e8d51b54dbc4f55f3d238093d5190"}, + {file = "pywin32-300-cp39-cp39-win32.whl", hash = "sha256:638b68eea5cfc8def537e43e9554747f8dee786b090e47ead94bfdafdb0f2f50"}, + {file = "pywin32-300-cp39-cp39-win_amd64.whl", hash = "sha256:b1609ce9bd5c411b81f941b246d683d6508992093203d4eb7f278f4ed1085c3f"}, +] +pywinpty = [ + {file = "pywinpty-0.5.7-cp27-cp27m-win32.whl", hash = "sha256:b358cb552c0f6baf790de375fab96524a0498c9df83489b8c23f7f08795e966b"}, + {file = "pywinpty-0.5.7-cp27-cp27m-win_amd64.whl", hash = "sha256:1e525a4de05e72016a7af27836d512db67d06a015aeaf2fa0180f8e6a039b3c2"}, + {file = "pywinpty-0.5.7-cp35-cp35m-win32.whl", hash = "sha256:2740eeeb59297593a0d3f762269b01d0285c1b829d6827445fcd348fb47f7e70"}, + {file = "pywinpty-0.5.7-cp35-cp35m-win_amd64.whl", hash = "sha256:33df97f79843b2b8b8bc5c7aaf54adec08cc1bae94ee99dfb1a93c7a67704d95"}, + {file = "pywinpty-0.5.7-cp36-cp36m-win32.whl", hash = "sha256:e854211df55d107f0edfda8a80b39dfc87015bef52a8fe6594eb379240d81df2"}, + {file = "pywinpty-0.5.7-cp36-cp36m-win_amd64.whl", hash = "sha256:dbd838de92de1d4ebf0dce9d4d5e4fc38d0b7b1de837947a18b57a882f219139"}, + {file = "pywinpty-0.5.7-cp37-cp37m-win32.whl", hash = "sha256:5fb2c6c6819491b216f78acc2c521b9df21e0f53b9a399d58a5c151a3c4e2a2d"}, + {file = "pywinpty-0.5.7-cp37-cp37m-win_amd64.whl", hash = "sha256:dd22c8efacf600730abe4a46c1388355ce0d4ab75dc79b15d23a7bd87bf05b48"}, + {file = "pywinpty-0.5.7-cp38-cp38-win_amd64.whl", hash = "sha256:8fc5019ff3efb4f13708bd3b5ad327589c1a554cb516d792527361525a7cb78c"}, + {file = "pywinpty-0.5.7.tar.gz", hash = "sha256:2d7e9c881638a72ffdca3f5417dd1563b60f603e1b43e5895674c2a1b01f95a0"}, +] +pyyaml = [ + {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, + {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, + {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, + {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, + {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, + {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, + {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, + {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, + {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, + {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, + {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, +] +pyzmq = [ + {file = "pyzmq-20.0.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:523d542823cabb94065178090e05347bd204365f6e7cb260f0071c995d392fc2"}, + {file = "pyzmq-20.0.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:225774a48ed7414c0395335e7123ef8c418dbcbe172caabdc2496133b03254c2"}, + {file = "pyzmq-20.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:bc7dd697356b31389d5118b9bcdef3e8d8079e8181800c4e8d72dccd56e1ff68"}, + {file = "pyzmq-20.0.0-cp35-cp35m-win32.whl", hash = "sha256:d81184489369ec325bd50ba1c935361e63f31f578430b9ad95471899361a8253"}, + {file = "pyzmq-20.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:7113eb93dcd0a5750c65d123ed0099e036a3a3f2dcb48afedd025ffa125c983b"}, + {file = "pyzmq-20.0.0-cp36-cp36m-macosx_10_9_intel.whl", hash = "sha256:b62113eeb9a0649cebed9b21fd578f3a0175ef214a2a91dcb7b31bbf55805295"}, + {file = "pyzmq-20.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f0beef935efe78a63c785bb21ed56c1c24448511383e3994927c8bb2caf5e714"}, + {file = "pyzmq-20.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:46250789730489009fe139cbf576679557c070a6a3628077d09a4153d52fd381"}, + {file = "pyzmq-20.0.0-cp36-cp36m-win32.whl", hash = "sha256:bf755905a7d30d2749079611b9a89924c1f2da2695dc09ce221f42122c9808e3"}, + {file = "pyzmq-20.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2742e380d186673eee6a570ef83d4568741945434ba36d92b98d36cdbfedbd44"}, + {file = "pyzmq-20.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1e9b75a119606732023a305d1c214146c09a91f8116f6aff3e8b7d0a60b6f0ff"}, + {file = "pyzmq-20.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:03638e46d486dd1c118e03c8bf9c634bdcae679600eac6573ae1e54906de7c2f"}, + {file = "pyzmq-20.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:63ee08e35be72fdd7568065a249a5b5cf51a2e8ab6ee63cf9f73786fcb9e710b"}, + {file = "pyzmq-20.0.0-cp37-cp37m-win32.whl", hash = "sha256:c95dda497a7c1b1e734b5e8353173ca5dd7b67784d8821d13413a97856588057"}, + {file = "pyzmq-20.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cc09c5cd1a4332611c8564d65e6a432dc6db3e10793d0254da9fa1e31d9ffd6d"}, + {file = "pyzmq-20.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6e24907857c80dc67692e31f5bf3ad5bf483ee0142cec95b3d47e2db8c43bdda"}, + {file = "pyzmq-20.0.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:53706f4a792cdae422121fb6a5e65119bad02373153364fc9d004cf6a90394de"}, + {file = "pyzmq-20.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:895695be380f0f85d2e3ec5ccf68a93c92d45bd298567525ad5633071589872c"}, + {file = "pyzmq-20.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:d92c7f41a53ece82b91703ea433c7d34143248cf0cead33aa11c5fc621c764bf"}, + {file = "pyzmq-20.0.0-cp38-cp38-win32.whl", hash = "sha256:309d763d89ec1845c0e0fa14e1fb6558fd8c9ef05ed32baec27d7a8499cc7bb0"}, + {file = "pyzmq-20.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:0e554fd390021edbe0330b67226325a820b0319c5b45e1b0a59bf22ccc36e793"}, + {file = "pyzmq-20.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cfa54a162a7b32641665e99b2c12084555afe9fc8fe80ec8b2f71a57320d10e1"}, + {file = "pyzmq-20.0.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:dc2f48b575dff6edefd572f1ac84cf0c3f18ad5fcf13384de32df740a010594a"}, + {file = "pyzmq-20.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:5efe02bdcc5eafcac0aab531292294298f0ab8d28ed43be9e507d0e09173d1a4"}, + {file = "pyzmq-20.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0af84f34f27b5c6a0e906c648bdf46d4caebf9c8e6e16db0728f30a58141cad6"}, + {file = "pyzmq-20.0.0-cp39-cp39-win32.whl", hash = "sha256:c63fafd2556d218368c51d18588f8e6f8d86d09d493032415057faf6de869b34"}, + {file = "pyzmq-20.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f110a4d3f8f01209eec304ed542f6c8054cce9b0f16dfe3d571e57c290e4e133"}, + {file = "pyzmq-20.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4d9259a5eb3f71abbaf61f165cacf42240bfeea3783bebd8255341abdfe206f1"}, + {file = "pyzmq-20.0.0.tar.gz", hash = "sha256:824ad5888331aadeac772bce27e1c2fbcab82fade92edbd234542c4e12f0dca9"}, +] +qtconsole = [ + {file = "qtconsole-5.0.1-py3-none-any.whl", hash = "sha256:4d70967aeb62a5bd13a109d61b169a3cf844afc24a35c11f5518574bb8abe670"}, + {file = "qtconsole-5.0.1.tar.gz", hash = "sha256:4d7dd4eae8a90d0b2b19b31794b30f137238463998989734a3acb8a53b506bab"}, +] +qtpy = [ + {file = "QtPy-1.9.0-py2.py3-none-any.whl", hash = "sha256:fa0b8363b363e89b2a6f49eddc162a04c0699ae95e109a6be3bb145a913190ea"}, + {file = "QtPy-1.9.0.tar.gz", hash = "sha256:2db72c44b55d0fe1407be8fba35c838ad0d6d3bb81f23007886dc1fc0f459c8d"}, +] +regex = [ + {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, + {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, + {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, + {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, + {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, + {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, + {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, + {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, + {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, + {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, + {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, + {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, + {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, +] +requests = [ + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, +] +send2trash = [ + {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, + {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, +] +six = [ + {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, + {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, +] +soupsieve = [ + {file = "soupsieve-2.1-py3-none-any.whl", hash = "sha256:4bb21a6ee4707bf43b61230e80740e71bfe56e55d1f1f50924b087bb2975c851"}, + {file = "soupsieve-2.1.tar.gz", hash = "sha256:6dc52924dc0bc710a5d16794e6b3480b2c7c08b07729505feab2b2c16661ff6e"}, +] +terminado = [ + {file = "terminado-0.9.1-py3-none-any.whl", hash = "sha256:c55f025beb06c2e2669f7ba5a04f47bb3304c30c05842d4981d8f0fc9ab3b4e3"}, + {file = "terminado-0.9.1.tar.gz", hash = "sha256:3da72a155b807b01c9e8a5babd214e052a0a45a975751da3521a1c3381ce6d76"}, +] +testpath = [ + {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, + {file = "testpath-0.4.4.tar.gz", hash = "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e"}, +] +tornado = [ + {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, + {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, + {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, + {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, + {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, + {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, + {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, + {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, + {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, + {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, + {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, + {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, + {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, + {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, + {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, + {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, + {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, + {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, +] +tqdm = [ + {file = "tqdm-4.54.1-py2.py3-none-any.whl", hash = "sha256:d4f413aecb61c9779888c64ddf0c62910ad56dcbe857d8922bb505d4dbff0df1"}, + {file = "tqdm-4.54.1.tar.gz", hash = "sha256:38b658a3e4ecf9b4f6f8ff75ca16221ae3378b2e175d846b6b33ea3a20852cf5"}, +] +traitlets = [ + {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, + {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, +] +urllib3 = [ + {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, + {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +webencodings = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] +webob = [ + {file = "WebOb-1.8.6-py2.py3-none-any.whl", hash = "sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b"}, + {file = "WebOb-1.8.6.tar.gz", hash = "sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108"}, +] +widgetsnbextension = [ + {file = "widgetsnbextension-3.5.1-py2.py3-none-any.whl", hash = "sha256:bd314f8ceb488571a5ffea6cc5b9fc6cba0adaf88a9d2386b93a489751938bcd"}, + {file = "widgetsnbextension-3.5.1.tar.gz", hash = "sha256:079f87d87270bce047512400efd70238820751a11d2d8cb137a5a5bdbaf255c7"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0b956d8 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,27 @@ +[tool.poetry] +name = "anki-sync-server" +version = "2.3.0" +description = "Self-hosted Anki Sync Server." +authors = ["Vikash Kothary "] + +[tool.poetry.dependencies] +python = "^3.8" +anki = "^2.1.36" +beautifulsoup4 = "^4.9.1" +requests = "^2.24.0" +markdown = "^3.2.2" +send2trash = "^1.5.0" +pyaudio = "^0.2.11" +decorator = "^4.4.2" +psutil = "^5.7.2" +distro = "^1.5.0" +webob = "^1.8.6" + +[tool.poetry.dev-dependencies] +mkdocs = "^1.1.2" +jupyter = "^1.0.0" +jupyterlab = "^2.2.2" + +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" diff --git a/scripts/lock.sh b/scripts/lock.sh new file mode 100644 index 0000000..f9d9039 --- /dev/null +++ b/scripts/lock.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# file: lock.sh +# description: Lock dependencies and export requirements. + +echo "THE FILE WAS GENERATED BY POETRY, DO NOT EDIT!\n\n" > src/requirements.txt +echo "THE FILE WAS GENERATED BY POETRY, DO NOT EDIT!\n\n" > src/requirements-dev.txt +poetry lock +poetry export --without-hashes -f requirements.txt >> src/requirements.txt +poetry export --dev --without-hashes -f requirements.txt >> src/requirements-dev.txt + +echo "-e src/." >> src/requirements-dev.txt + + diff --git a/scripts/print-env.sh b/scripts/print-env.sh new file mode 100644 index 0000000..e755e2e --- /dev/null +++ b/scripts/print-env.sh @@ -0,0 +1,5 @@ +#!/bin/bash +# file: print-env.sh +# description: Print env variable. + +echo "${ENV}" \ No newline at end of file diff --git a/addon/__init__.py b/src/addon/__init__.py similarity index 100% rename from addon/__init__.py rename to src/addon/__init__.py diff --git a/addon/config.json b/src/addon/config.json similarity index 100% rename from addon/config.json rename to src/addon/config.json diff --git a/ankisyncctl.py b/src/ankisyncctl.py similarity index 100% rename from ankisyncctl.py rename to src/ankisyncctl.py diff --git a/ankisyncd.conf b/src/ankisyncd.conf similarity index 100% rename from ankisyncd.conf rename to src/ankisyncd.conf diff --git a/ankisyncd/__init__.py b/src/ankisyncd/__init__.py similarity index 76% rename from ankisyncd/__init__.py rename to src/ankisyncd/__init__.py index 8ad83df..322c52a 100644 --- a/ankisyncd/__init__.py +++ b/src/ankisyncd/__init__.py @@ -1,10 +1,7 @@ import os import sys -sys.path.insert(0, "/usr/share/anki") -sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "anki-bundled")) - -_homepage = "https://github.com/tsudoko/anki-sync-server" +_homepage = "https://github.com/ankicommunity/anki-sync-server.git" _unknown_version = "[unknown version]" diff --git a/ankisyncd/__main__.py b/src/ankisyncd/__main__.py similarity index 100% rename from ankisyncd/__main__.py rename to src/ankisyncd/__main__.py diff --git a/ankisyncd/collection.py b/src/ankisyncd/collection.py similarity index 98% rename from ankisyncd/collection.py rename to src/ankisyncd/collection.py index e32dbfe..d29e97e 100644 --- a/ankisyncd/collection.py +++ b/src/ankisyncd/collection.py @@ -1,4 +1,3 @@ -import anki import anki.storage import ankisyncd.media @@ -65,7 +64,7 @@ class CollectionWrapper: return col def _get_collection(self): - col = anki.storage.Collection(self.path) + col = anki.storage.Collection(self.path, server=True) # Ugly hack, replace default media manager with our custom one col.media.close() diff --git a/ankisyncd/config.py b/src/ankisyncd/config.py similarity index 100% rename from ankisyncd/config.py rename to src/ankisyncd/config.py diff --git a/src/ankisyncd/full_sync.py b/src/ankisyncd/full_sync.py new file mode 100644 index 0000000..a6c9b9d --- /dev/null +++ b/src/ankisyncd/full_sync.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- + +import logging +import os +from sqlite3 import dbapi2 as sqlite +import shutil +import sys +from webob.exc import HTTPBadRequest + +from anki.db import DB +from anki.collection import Collection + +logger = logging.getLogger("ankisyncd.media") +logger.setLevel(1) + +class FullSyncManager: + def test_db(self, db: DB): + """ + :param anki.db.DB db: the database uploaded from the client. + """ + if db.scalar("pragma integrity_check") != "ok": + raise HTTPBadRequest( + "Integrity check failed for uploaded collection database file." + ) + + def upload(self, col: Collection, data: bytes, session) -> str: + """ + Uploads a sqlite database from the client to the sync server. + + :param anki.collection.Collectio col: + :param bytes data: The binary sqlite database from the client. + :param .sync_app.SyncUserSession session: The current session. + """ + # Verify integrity of the received database file before replacing our + # existing db. + temp_db_path = session.get_collection_path() + ".tmp" + with open(temp_db_path, 'wb') as f: + f.write(data) + + try: + with DB(temp_db_path) as test_db: + self.test_db(test_db) + except sqlite.Error as e: + raise HTTPBadRequest("Uploaded collection database file is " + "corrupt.") + + # Overwrite existing db. + col.close() + try: + shutil.copyfile(temp_db_path, session.get_collection_path()) + finally: + col.reopen() + # Reopen the media database + col.media.connect() + + return "OK" + + def download(self, col: Collection, session) -> bytes: + """Download the binary database. + + Performs a downgrade to database schema 11 before sending the database + to the client. + + :param anki.collection.Collection col: + :param .sync_app.SyncUserSession session: + + :return bytes: the binary sqlite3 database + """ + col.close(downgrade=True) + db_path = session.get_collection_path() + try: + with open(db_path, 'rb') as tmp: + data = tmp.read() + finally: + col.reopen() + # Reopen the media database + col.media.connect() + + return data + + +def get_full_sync_manager(config): + if "full_sync_manager" in config and config["full_sync_manager"]: # load from config + import importlib + import inspect + module_name, class_name = config['full_sync_manager'].rsplit('.', 1) + module = importlib.import_module(module_name.strip()) + class_ = getattr(module, class_name.strip()) + + if not FullSyncManager in inspect.getmro(class_): + raise TypeError('''"full_sync_manager" found in the conf file but it doesn''t + inherit from FullSyncManager''') + return class_(config) + else: + return FullSyncManager() diff --git a/ankisyncd/media.py b/src/ankisyncd/media.py similarity index 60% rename from ankisyncd/media.py rename to src/ankisyncd/media.py index 9c68c4c..47341f4 100644 --- a/ankisyncd/media.py +++ b/src/ankisyncd/media.py @@ -8,21 +8,32 @@ import os import os.path import anki.db +from anki.media import MediaManager logger = logging.getLogger("ankisyncd.media") - -class ServerMediaManager: - def __init__(self, col): +class ServerMediaManager(MediaManager): + def __init__(self, col, server=True): + super().__init__(col, server) self._dir = re.sub(r"(?i)\.(anki2)$", ".media", col.path) self.connect() + def addMedia(self, media_to_add): + self._db.executemany( + "INSERT OR REPLACE INTO media VALUES (?,?,?)", + media_to_add + ) + self._db.commit() + + def changes(self, lastUsn): + return self._db.execute("select fname,usn,csum from media order by usn desc limit ?", self.lastUsn() - lastUsn) + def connect(self): path = self.dir() + ".server.db" create = not os.path.exists(path) - self.db = anki.db.DB(path) + self._db = anki.db.DB(path) if create: - self.db.executescript( + self._db.executescript( """CREATE TABLE media ( fname TEXT NOT NULL PRIMARY KEY, usn INT NOT NULL, @@ -33,35 +44,36 @@ class ServerMediaManager: oldpath = self.dir() + ".db2" if os.path.exists(oldpath): logger.info("Found client media database, migrating contents") - self.db.execute("ATTACH ? AS old", oldpath) - self.db.execute( + self._db.execute("ATTACH ? AS old", oldpath) + self._db.execute( "INSERT INTO media SELECT fname, lastUsn, csum FROM old.media, old.meta" ) - self.db.commit() - self.db.execute("DETACH old") + self._db.commit() + self._db.execute("DETACH old") def close(self): - self.db.close() + self._db.close() def dir(self): return self._dir def lastUsn(self): - return self.db.scalar("SELECT max(usn) FROM media") or 0 + return self._db.scalar("SELECT max(usn) FROM media") or 0 def mediaCount(self): - return self.db.scalar("SELECT count() FROM media WHERE csum IS NOT NULL") + return self._db.scalar("SELECT count() FROM media WHERE csum IS NOT NULL") # used only in unit tests def syncInfo(self, fname): - return self.db.first("SELECT csum, 0 FROM media WHERE fname=?", fname) + return self._db.first("SELECT csum, 0 FROM media WHERE fname=?", fname) def syncDelete(self, fname): fpath = os.path.join(self.dir(), fname) if os.path.exists(fpath): os.remove(fpath) - self.db.execute( + self._db.execute( "UPDATE media SET csum = NULL, usn = ? WHERE fname = ?", self.lastUsn() + 1, fname, ) + self._db.commit() diff --git a/ankisyncd/sessions.py b/src/ankisyncd/sessions.py similarity index 99% rename from ankisyncd/sessions.py rename to src/ankisyncd/sessions.py index 2e09ab6..7c609db 100644 --- a/ankisyncd/sessions.py +++ b/src/ankisyncd/sessions.py @@ -32,7 +32,7 @@ class SqliteSessionManager(SimpleSessionManager): everytime the SyncApp is restarted.""" def __init__(self, session_db_path): - SimpleSessionManager.__init__(self) + super().__init__() self.session_db_path = os.path.realpath(session_db_path) self._ensure_schema_up_to_date() diff --git a/src/ankisyncd/sync.py b/src/ankisyncd/sync.py new file mode 100644 index 0000000..f50cbd5 --- /dev/null +++ b/src/ankisyncd/sync.py @@ -0,0 +1,634 @@ +# -*- coding: utf-8 -*- +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +# Taken from https://github.com/ankitects/anki/blob/cca3fcb2418880d0430a5c5c2e6b81ba260065b7/anki/sync.py + +import io +import gzip +import random +import requests +import json +import os + +from anki.db import DB, DBError +from anki.utils import ids2str, intTime, platDesc, checksum, devMode +from anki.consts import * +from anki.config import ConfigManager +from anki.utils import versionWithBuild +import anki +from anki.lang import ngettext + + +# https://github.com/ankitects/anki/blob/04b1ca75599f18eb783a8bf0bdeeeb32362f4da0/rslib/src/sync/http_client.rs#L11 +SYNC_VER = 10 +# https://github.com/ankitects/anki/blob/cca3fcb2418880d0430a5c5c2e6b81ba260065b7/anki/consts.py#L50 +SYNC_ZIP_SIZE = int(2.5*1024*1024) +# https://github.com/ankitects/anki/blob/cca3fcb2418880d0430a5c5c2e6b81ba260065b7/anki/consts.py#L51 +SYNC_ZIP_COUNT = 25 + +# syncing vars +HTTP_TIMEOUT = 90 +HTTP_PROXY = None +HTTP_BUF_SIZE = 64*1024 + +# Incremental syncing +########################################################################## + +class Syncer(object): + def __init__(self, col, server=None): + self.col = col + self.server = server + + def meta(self): + return dict( + mod=self.col.mod, + scm=self.col.scm, + usn=self.col._usn, + ts=intTime(), + musn=0, + msg="", + cont=True + ) + + def changes(self): + "Bundle up small objects." + d = dict(models=self.getModels(), + decks=self.getDecks(), + tags=self.getTags()) + if self.lnewer: + d['conf'] = json.loads(self.col.backend.get_all_config()) + d['crt'] = self.col.crt + return d + + def mergeChanges(self, lchg, rchg): + # then the other objects + self.mergeModels(rchg['models']) + self.mergeDecks(rchg['decks']) + self.mergeTags(rchg['tags']) + if 'conf' in rchg: + self.mergeConf(rchg['conf']) + # this was left out of earlier betas + if 'crt' in rchg: + self.col.crt = rchg['crt'] + self.prepareToChunk() + + def sanityCheck(self, full): + if not self.col.basicCheck(): + return "failed basic check" + for t in "cards", "notes", "revlog", "graves": + if self.col.db.scalar( + "select count() from %s where usn = -1" % t): + return "%s had usn = -1" % t + for g in self.col.decks.all(): + if g['usn'] == -1: + return "deck had usn = -1" + for t, usn in self.col.tags.allItems(): + if usn == -1: + return "tag had usn = -1" + found = False + for m in self.col.models.all(): + if m['usn'] == -1: + return "model had usn = -1" + if found: + self.col.models.save() + self.col.sched.reset() + # check for missing parent decks + #self.col.sched.deckDueList() + # return summary of deck + return [ + list(self.col.sched.counts()), + self.col.db.scalar("select count() from cards"), + self.col.db.scalar("select count() from notes"), + self.col.db.scalar("select count() from revlog"), + self.col.db.scalar("select count() from graves"), + len(self.col.models.all()), + len(self.col.decks.all()), + len(self.col.decks.allConf()), + ] + + def usnLim(self): + return "usn = -1" + + def finish(self, mod=None): + self.col.ls = mod + self.col._usn = self.maxUsn + 1 + # ensure we save the mod time even if no changes made + self.col.db.mod = True + self.col.save(mod=mod) + return mod + + # Chunked syncing + ########################################################################## + + def prepareToChunk(self): + self.tablesLeft = ["revlog", "cards", "notes"] + self.cursor = None + + def queryTable(self, table): + lim = self.usnLim() + if table == "revlog": + return self.col.db.execute(""" +select id, cid, ?, ease, ivl, lastIvl, factor, time, type +from revlog where %s""" % lim, self.maxUsn) + elif table == "cards": + return self.col.db.execute(""" +select id, nid, did, ord, mod, ?, type, queue, due, ivl, factor, reps, +lapses, left, odue, odid, flags, data from cards where %s""" % lim, self.maxUsn) + else: + return self.col.db.execute(""" +select id, guid, mid, mod, ?, tags, flds, '', '', flags, data +from notes where %s""" % lim, self.maxUsn) + + def chunk(self): + buf = dict(done=False) + while self.tablesLeft: + curTable = self.tablesLeft.pop() + buf[curTable] = self.queryTable(curTable) + self.col.db.execute( + f"update {curTable} set usn=? where usn=-1", self.maxUsn + ) + if not self.tablesLeft: + buf['done'] = True + return buf + + def applyChunk(self, chunk): + if "revlog" in chunk: + self.mergeRevlog(chunk['revlog']) + if "cards" in chunk: + self.mergeCards(chunk['cards']) + if "notes" in chunk: + self.mergeNotes(chunk['notes']) + + # Deletions + ########################################################################## + + def removed(self): + cards = [] + notes = [] + decks = [] + + curs = self.col.db.execute( + "select oid, type from graves where usn = -1") + + for oid, type in curs: + if type == REM_CARD: + cards.append(oid) + elif type == REM_NOTE: + notes.append(oid) + else: + decks.append(oid) + + self.col.db.execute("update graves set usn=? where usn=-1", + self.maxUsn) + + return dict(cards=cards, notes=notes, decks=decks) + + def remove(self, graves): + # remove card and the card's orphaned notes + self.col.remove_cards_and_orphaned_notes(graves['cards']) + + # only notes + self.col.remove_notes(graves['notes']) + + # since level 0 deck ,we only remove deck ,but backend will delete child,it is ok, the delete + # will have once effect + for oid in graves['decks']: + self.col.decks.rem(oid) + + + # we can place non-exist grave after above delete. + localgcards = [] + localgnotes = [] + localgdecks = [] + curs = self.col.db.execute( + "select oid, type from graves where usn = %d" % self.col.usn()) + + for oid, type in curs: + if type == REM_CARD: + localgcards.append(oid) + elif type == REM_NOTE: + localgnotes.append(oid) + else: + localgdecks.append(oid) + + # n meaning non-exsiting grave in the server compared to client + ncards = [ oid for oid in graves['cards'] if oid not in localgcards] + for oid in ncards: + self.col._logRem([oid], REM_CARD) + + nnotes = [ oid for oid in graves['notes'] if oid not in localgnotes] + for oid in nnotes: + self.col._logRem([oid], REM_NOTE) + + ndecks = [ oid for oid in graves['decks'] if oid not in localgdecks] + for oid in ndecks: + self.col._logRem([oid], REM_DECK) + + # Models + ########################################################################## + + def getModels(self): + mods = [m for m in self.col.models.all() if m['usn'] == -1] + for m in mods: + m['usn'] = self.maxUsn + self.col.models.save() + return mods + + def mergeModels(self, rchg): + for r in rchg: + l = self.col.models.get(r['id']) + # if missing locally or server is newer, update + if not l or r['mod'] > l['mod']: + self.col.models.update(r) + + # Decks + ########################################################################## + + def getDecks(self): + decks = [g for g in self.col.decks.all() if g['usn'] == -1] + for g in decks: + g['usn'] = self.maxUsn + dconf = [g for g in self.col.decks.allConf() if g['usn'] == -1] + for g in dconf: + g['usn'] = self.maxUsn + self.col.decks.save() + return [decks, dconf] + + def mergeDecks(self, rchg): + for r in rchg[0]: + l = self.col.decks.get(r['id'], False) + # work around mod time being stored as string + if l and not isinstance(l['mod'], int): + l['mod'] = int(l['mod']) + + # if missing locally or server is newer, update + if not l or r['mod'] > l['mod']: + self.col.decks.update(r) + for r in rchg[1]: + try: + l = self.col.decks.getConf(r['id']) + except KeyError: + l = None + # if missing locally or server is newer, update + if not l or r['mod'] > l['mod']: + self.col.decks.updateConf(r) + + # Tags + ########################################################################## + + def getTags(self): + tags = [] + for t, usn in self.col.tags.allItems(): + if usn == -1: + self.col.tags.tags[t] = self.maxUsn + tags.append(t) + self.col.tags.save() + return tags + + def mergeTags(self, tags): + self.col.tags.register(tags, usn=self.maxUsn) + + # Cards/notes/revlog + ########################################################################## + + def mergeRevlog(self, logs): + self.col.db.executemany( + "insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)", + logs) + + def newerRows(self, data, table, modIdx): + ids = (r[0] for r in data) + lmods = {} + for id, mod in self.col.db.execute( + "select id, mod from %s where id in %s and %s" % ( + table, ids2str(ids), self.usnLim())): + lmods[id] = mod + update = [] + for r in data: + if r[0] not in lmods or lmods[r[0]] < r[modIdx]: + update.append(r) + self.col.log(table, data) + return update + + def mergeCards(self, cards): + self.col.db.executemany( + "insert or replace into cards values " + "(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", + self.newerRows(cards, "cards", 4)) + + def mergeNotes(self, notes): + rows = self.newerRows(notes, "notes", 3) + self.col.db.executemany( + "insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)", + rows) + self.col.updateFieldCache([f[0] for f in rows]) + + # Col config + ########################################################################## + + def getConf(self): + return self.col.conf + + def mergeConf(self, conf): + self.col.backend.set_all_config(json.dumps(conf).encode()) + +# Wrapper for requests that tracks upload/download progress +########################################################################## + +class AnkiRequestsClient(object): + verify = True + timeout = 60 + + def __init__(self): + self.session = requests.Session() + + def post(self, url, data, headers): + data = _MonitoringFile(data) + headers['User-Agent'] = self._agentName() + return self.session.post( + url, data=data, headers=headers, stream=True, timeout=self.timeout, verify=self.verify) + + def get(self, url, headers=None): + if headers is None: + headers = {} + headers['User-Agent'] = self._agentName() + return self.session.get(url, stream=True, headers=headers, timeout=self.timeout, verify=self.verify) + + def streamContent(self, resp): + resp.raise_for_status() + + buf = io.BytesIO() + for chunk in resp.iter_content(chunk_size=HTTP_BUF_SIZE): + buf.write(chunk) + return buf.getvalue() + + def _agentName(self): + from anki import version + return "Anki {}".format(version) + +# allow user to accept invalid certs in work/school settings +if os.environ.get("ANKI_NOVERIFYSSL"): + AnkiRequestsClient.verify = False + + import warnings + warnings.filterwarnings("ignore") + +class _MonitoringFile(io.BufferedReader): + def read(self, size=-1): + data = io.BufferedReader.read(self, HTTP_BUF_SIZE) + + return data + +# HTTP syncing tools +########################################################################## + +class HttpSyncer(object): + def __init__(self, hkey=None, client=None, hostNum=None): + self.hkey = hkey + self.skey = checksum(str(random.random()))[:8] + self.client = client or AnkiRequestsClient() + self.postVars = {} + self.hostNum = hostNum + self.prefix = "sync/" + + def syncURL(self): + if devMode: + url = "https://l1sync.ankiweb.net/" + else: + url = SYNC_BASE % (self.hostNum or "") + return url + self.prefix + + def assertOk(self, resp): + # not using raise_for_status() as aqt expects this error msg + if resp.status_code != 200: + raise Exception("Unknown response code: %s" % resp.status_code) + + # Posting data as a file + ###################################################################### + # We don't want to post the payload as a form var, as the percent-encoding is + # costly. We could send it as a raw post, but more HTTP clients seem to + # support file uploading, so this is the more compatible choice. + + def _buildPostData(self, fobj, comp): + BOUNDARY=b"Anki-sync-boundary" + bdry = b"--"+BOUNDARY + buf = io.BytesIO() + # post vars + self.postVars['c'] = 1 if comp else 0 + for (key, value) in list(self.postVars.items()): + buf.write(bdry + b"\r\n") + buf.write( + ('Content-Disposition: form-data; name="%s"\r\n\r\n%s\r\n' % + (key, value)).encode("utf8")) + # payload as raw data or json + rawSize = 0 + if fobj: + # header + buf.write(bdry + b"\r\n") + buf.write(b"""\ +Content-Disposition: form-data; name="data"; filename="data"\r\n\ +Content-Type: application/octet-stream\r\n\r\n""") + # write file into buffer, optionally compressing + if comp: + tgt = gzip.GzipFile(mode="wb", fileobj=buf, compresslevel=comp) + else: + tgt = buf + while 1: + data = fobj.read(65536) + if not data: + if comp: + tgt.close() + break + rawSize += len(data) + tgt.write(data) + buf.write(b"\r\n") + buf.write(bdry + b'--\r\n') + size = buf.tell() + # connection headers + headers = { + 'Content-Type': 'multipart/form-data; boundary=%s' % BOUNDARY.decode("utf8"), + 'Content-Length': str(size), + } + buf.seek(0) + + if size >= 100*1024*1024 or rawSize >= 250*1024*1024: + raise Exception("Collection too large to upload to AnkiWeb.") + + return headers, buf + + def req(self, method, fobj=None, comp=6, badAuthRaises=True): + headers, body = self._buildPostData(fobj, comp) + + r = self.client.post(self.syncURL()+method, data=body, headers=headers) + if not badAuthRaises and r.status_code == 403: + return False + self.assertOk(r) + + buf = self.client.streamContent(r) + return buf + +# Incremental sync over HTTP +###################################################################### + +class RemoteServer(HttpSyncer): + def __init__(self, hkey, hostNum): + super().__init__(self, hkey, hostNum=hostNum) + + def hostKey(self, user, pw): + "Returns hkey or none if user/pw incorrect." + self.postVars = dict() + ret = self.req( + "hostKey", io.BytesIO(json.dumps(dict(u=user, p=pw)).encode("utf8")), + badAuthRaises=False) + if not ret: + # invalid auth + return + self.hkey = json.loads(ret.decode("utf8"))['key'] + return self.hkey + + def meta(self): + self.postVars = dict( + k=self.hkey, + s=self.skey, + ) + ret = self.req( + "meta", io.BytesIO(json.dumps(dict( + v=SYNC_VER, cv="ankidesktop,%s,%s"%(versionWithBuild(), platDesc()))).encode("utf8")), + badAuthRaises=False) + if not ret: + # invalid auth + return + return json.loads(ret.decode("utf8")) + + def applyGraves(self, **kw): + return self._run("applyGraves", kw) + + def applyChanges(self, **kw): + return self._run("applyChanges", kw) + + def start(self, **kw): + return self._run("start", kw) + + def chunk(self, **kw): + return self._run("chunk", kw) + + def applyChunk(self, **kw): + return self._run("applyChunk", kw) + + def sanityCheck2(self, **kw): + return self._run("sanityCheck2", kw) + + def finish(self, **kw): + return self._run("finish", kw) + + def abort(self, **kw): + return self._run("abort", kw) + + def _run(self, cmd, data): + return json.loads( + self.req(cmd, io.BytesIO(json.dumps(data).encode("utf8"))).decode("utf8")) + +# Full syncing +########################################################################## + +class FullSyncer(HttpSyncer): + def __init__(self, col, hkey, client, hostNum): + super().__init__(self, hkey, client, hostNum=hostNum) + self.postVars = dict( + k=self.hkey, + v="ankidesktop,%s,%s"%(anki.version, platDesc()), + ) + self.col = col + + def download(self): + localNotEmpty = self.col.db.scalar("select 1 from cards") + self.col.close() + cont = self.req("download") + tpath = self.col.path + ".tmp" + if cont == "upgradeRequired": + return + open(tpath, "wb").write(cont) + # check the received file is ok + d = DB(tpath) + assert d.scalar("pragma integrity_check") == "ok" + remoteEmpty = not d.scalar("select 1 from cards") + d.close() + # accidental clobber? + if localNotEmpty and remoteEmpty: + os.unlink(tpath) + return "downloadClobber" + # overwrite existing collection + os.unlink(self.col.path) + os.rename(tpath, self.col.path) + self.col = None + + def upload(self): + "True if upload successful." + # make sure it's ok before we try to upload + if self.col.db.scalar("pragma integrity_check") != "ok": + return False + if not self.col.basicCheck(): + return False + # apply some adjustments, then upload + self.col.beforeUpload() + if self.req("upload", open(self.col.path, "rb")) != b"OK": + return False + return True + +# Remote media syncing +########################################################################## + +class RemoteMediaServer(HttpSyncer): + def __init__(self, col, hkey, client, hostNum): + self.col = col + super().__init__(self, hkey, client, hostNum=hostNum) + self.prefix = "msync/" + + def begin(self): + self.postVars = dict( + k=self.hkey, + v="ankidesktop,%s,%s"%(anki.version, platDesc()) + ) + ret = self._dataOnly(self.req( + "begin", io.BytesIO(json.dumps(dict()).encode("utf8")))) + self.skey = ret['sk'] + return ret + + # args: lastUsn + def mediaChanges(self, **kw): + self.postVars = dict( + sk=self.skey, + ) + return self._dataOnly( + self.req("mediaChanges", io.BytesIO(json.dumps(kw).encode("utf8")))) + + # args: files + def downloadFiles(self, **kw): + return self.req("downloadFiles", io.BytesIO(json.dumps(kw).encode("utf8"))) + + def uploadChanges(self, zip): + # no compression, as we compress the zip file instead + return self._dataOnly( + self.req("uploadChanges", io.BytesIO(zip), comp=0)) + + # args: local + def mediaSanity(self, **kw): + return self._dataOnly( + self.req("mediaSanity", io.BytesIO(json.dumps(kw).encode("utf8")))) + + def _dataOnly(self, resp): + resp = json.loads(resp.decode("utf8")) + if resp['err']: + self.col.log("error returned:%s"%resp['err']) + raise Exception("SyncError:%s"%resp['err']) + return resp['data'] + + # only for unit tests + def mediatest(self, cmd): + self.postVars = dict( + k=self.hkey, + ) + return self._dataOnly( + self.req("newMediaTest", io.BytesIO( + json.dumps(dict(cmd=cmd)).encode("utf8")))) diff --git a/ankisyncd/sync_app.py b/src/ankisyncd/sync_app.py similarity index 87% rename from ankisyncd/sync_app.py rename to src/ankisyncd/sync_app.py index 7cce5ae..2147232 100644 --- a/ankisyncd/sync_app.py +++ b/src/ankisyncd/sync_app.py @@ -35,24 +35,24 @@ from webob.dec import wsgify from webob.exc import * import anki.db -import anki.sync import anki.utils -from anki.consts import SYNC_VER, SYNC_ZIP_SIZE, SYNC_ZIP_COUNT from anki.consts import REM_CARD, REM_NOTE -from ankisyncd.users import get_user_manager -from ankisyncd.sessions import get_session_manager from ankisyncd.full_sync import get_full_sync_manager +from ankisyncd.sessions import get_session_manager +from ankisyncd.sync import Syncer, SYNC_VER, SYNC_ZIP_SIZE, SYNC_ZIP_COUNT +from ankisyncd.users import get_user_manager logger = logging.getLogger("ankisyncd") -class SyncCollectionHandler(anki.sync.Syncer): +class SyncCollectionHandler(Syncer): operations = ['meta', 'applyChanges', 'start', 'applyGraves', 'chunk', 'applyChunk', 'sanityCheck2', 'finish'] - def __init__(self, col): + def __init__(self, col, session): # So that 'server' (the 3rd argument) can't get set - anki.sync.Syncer.__init__(self, col) + super().__init__(col) + self.session = session @staticmethod def _old_client(cv): @@ -62,11 +62,12 @@ class SyncCollectionHandler(anki.sync.Syncer): note = {"alpha": 0, "beta": 0, "rc": 0} client, version, platform = cv.split(',') - for name in note.keys(): - if name in version: - vs = version.split(name) - version = vs[0] - note[name] = int(vs[-1]) + if 'arch' not in version: + for name in note.keys(): + if name in version: + vs = version.split(name) + version = vs[0] + note[name] = int(vs[-1]) # convert the version string, ignoring non-numeric suffixes like in beta versions of Anki version_nosuffix = re.sub(r'[^0-9.].*$', '', version) @@ -92,17 +93,18 @@ class SyncCollectionHandler(anki.sync.Syncer): return {"cont": False, "msg": "Your client doesn't support the v{} scheduler.".format(self.col.schedVer())} # Make sure the media database is open! - if self.col.media.db is None: - self.col.media.connect() + self.col.media.connect() return { - 'scm': self.col.scm, - 'ts': anki.utils.intTime(), 'mod': self.col.mod, + 'scm': self.col.scm, 'usn': self.col._usn, + 'ts': anki.utils.intTime(), 'musn': self.col.media.lastUsn(), + 'uname': self.session.name, 'msg': '', 'cont': True, + 'hostNum': 0, } def usnLim(self): @@ -111,8 +113,10 @@ class SyncCollectionHandler(anki.sync.Syncer): # ankidesktop >=2.1rc2 sends graves in applyGraves, but still expects # server-side deletions to be returned by start def start(self, minUsn, lnewer, graves={"cards": [], "notes": [], "decks": []}, offset=None): - if offset is not None: - raise NotImplementedError('You are using the experimental V2 scheduler, which is not supported by the server.') + # The offset para is passed by client V2 scheduler,which is minutes_west. + # Since now have not thorougly test the V2 scheduler, we leave this comments here, and + # just enable the V2 scheduler in the serve code. + self.maxUsn = self.col._usn self.minUsn = minUsn self.lnewer = not lnewer @@ -130,14 +134,18 @@ class SyncCollectionHandler(anki.sync.Syncer): self.mergeChanges(lchg, self.rchg) return lchg - def sanityCheck2(self, client): - server = self.sanityCheck() + def sanityCheck2(self, client, full=None): + server = self.sanityCheck(full) if client != server: + logger.info( + f"sanity check failed with server: {server} client: {client}" + ) + return dict(status="bad", c=client, s=server) return dict(status="ok") def finish(self, mod=None): - return anki.sync.Syncer.finish(self, anki.utils.intTime(1000)) + return super().finish(anki.utils.intTime(1000)) # This function had to be put here in its entirety because Syncer.removed() # doesn't use self.usnLim() (which we override in this class) in queries. @@ -176,8 +184,9 @@ class SyncCollectionHandler(anki.sync.Syncer): class SyncMediaHandler: operations = ['begin', 'mediaChanges', 'mediaSanity', 'uploadChanges', 'downloadFiles'] - def __init__(self, col): + def __init__(self, col, session): self.col = col + self.session = session def begin(self, skey): return { @@ -230,13 +239,16 @@ class SyncMediaHandler: # Remove media files that were removed on the client. media_to_remove = [] for normname, ordinal in meta: - if ordinal == '': + if not ordinal: media_to_remove.append(self._normalize_filename(normname)) # Add media files that were added on the client. media_to_add = [] usn = self.col.media.lastUsn() oldUsn = usn + media_dir = self.col.media.dir() + os.makedirs(media_dir, exist_ok=True) + for i in zip_file.infolist(): if i.filename == "_meta": # Ignore previously retrieved metadata. continue @@ -244,7 +256,7 @@ class SyncMediaHandler: file_data = zip_file.read(i) csum = anki.utils.checksum(file_data) filename = self._normalize_filename(meta[int(i.filename)][0]) - file_path = os.path.join(self.col.media.dir(), filename) + file_path = os.path.join(media_dir, filename) # Save file to media directory. with open(file_path, 'wb') as f: @@ -263,9 +275,7 @@ class SyncMediaHandler: self._remove_media_files(media_to_remove) if media_to_add: - self.col.media.db.executemany( - "INSERT OR REPLACE INTO media VALUES (?,?,?)", media_to_add) - self.col.media.db.commit() + self.col.media.addMedia(media_to_add) assert self.col.media.lastUsn() == oldUsn + processed_count # TODO: move to some unit test return processed_count @@ -294,7 +304,6 @@ class SyncMediaHandler: for filename in filenames: try: self.col.media.syncDelete(filename) - self.col.media.db.commit() except OSError as err: logger.error("Error when removing file '%s' from media dir: " "%s" % (filename, str(err))) @@ -321,10 +330,9 @@ class SyncMediaHandler: def mediaChanges(self, lastUsn): result = [] server_lastUsn = self.col.media.lastUsn() - fname = csum = None if lastUsn < server_lastUsn or lastUsn == 0: - for fname,usn,csum, in self.col.media.db.execute("select fname,usn,csum from media order by usn desc limit ?", server_lastUsn - lastUsn): + for fname,usn,csum, in self.col.media.changes(lastUsn): result.append([fname, usn, csum]) # anki assumes server_lastUsn == result[-1][1] @@ -376,7 +384,7 @@ class SyncUserSession: raise Exception("no handler for {}".format(operation)) if getattr(self, attr) is None: - setattr(self, attr, handler_class(col)) + setattr(self, attr, handler_class(col, self)) handler = getattr(self, attr) # The col object may actually be new now! This happens when we close a collection # for inactivity and then later re-open it (creating a new Collection object). @@ -394,9 +402,6 @@ class SyncApp: self.base_media_url = config['base_media_url'] self.setup_new_collection = None - self.prehooks = {} - self.posthooks = {} - self.user_manager = get_user_manager(config) self.session_manager = get_session_manager(config) self.full_sync_manager = get_full_sync_manager(config) @@ -408,39 +413,6 @@ class SyncApp: if not self.base_media_url.endswith('/'): self.base_media_url += '/' - # backwards compat - @property - def hook_pre_sync(self): - return self.prehooks.get("start") - - @hook_pre_sync.setter - def hook_pre_sync(self, value): - self.prehooks['start'] = value - - @property - def hook_post_sync(self): - return self.posthooks.get("finish") - - @hook_post_sync.setter - def hook_post_sync(self, value): - self.posthooks['finish'] = value - - @property - def hook_upload(self): - return self.prehooks.get("upload") - - @hook_upload.setter - def hook_upload(self, value): - self.prehooks['upload'] = value - - @property - def hook_download(self): - return self.posthooks.get("download") - - @hook_download.setter - def hook_download(self, value): - self.posthooks['download'] = value - def generateHostKey(self, username): """Generates a new host key to be used by the given username to identify their session. This values is random.""" @@ -495,7 +467,7 @@ class SyncApp: def __call__(self, req): # Get and verify the session try: - hkey = req.POST['k'] + hkey = req.params['k'] except KeyError: hkey = None @@ -547,39 +519,22 @@ class SyncApp: self.session_manager.save(hkey, session) session = self.session_manager.load(hkey, self.create_session) - thread = session.get_thread() - - if url in self.prehooks: - thread.execute(self.prehooks[url], [session]) - result = self._execute_handler_method_in_thread(url, data, session) - # If it's a complex data type, we convert it to JSON if type(result) not in (str, bytes, Response): result = json.dumps(result) - if url in self.posthooks: - thread.execute(self.posthooks[url], [session]) - return result elif url == 'upload': thread = session.get_thread() - if url in self.prehooks: - thread.execute(self.prehooks[url], [session]) result = thread.execute(self.operation_upload, [data['data'], session]) - if url in self.posthooks: - thread.execute(self.posthooks[url], [session]) return result elif url == 'download': thread = session.get_thread() - if url in self.prehooks: - thread.execute(self.prehooks[url], [session]) result = thread.execute(self.operation_download, [session]) - if url in self.posthooks: - thread.execute(self.posthooks[url], [session]) return result # This was one of our operations but it didn't get handled... Oops! diff --git a/ankisyncd/thread.py b/src/ankisyncd/thread.py similarity index 100% rename from ankisyncd/thread.py rename to src/ankisyncd/thread.py diff --git a/ankisyncd/users.py b/src/ankisyncd/users.py similarity index 100% rename from ankisyncd/users.py rename to src/ankisyncd/users.py diff --git a/src/requirements-dev.txt b/src/requirements-dev.txt new file mode 100644 index 0000000..007ad7c --- /dev/null +++ b/src/requirements-dev.txt @@ -0,0 +1,91 @@ +THE FILE WAS GENERATED BY POETRY, DO NOT EDIT! + + +anki==2.1.37; python_version >= "3.8" +appnope==0.1.2; platform_system == "Darwin" and python_version >= "3.7" and sys_platform == "darwin" +argon2-cffi==20.1.0; python_version >= "3.5" +async-generator==1.10; python_version >= "3.6" +attrs==20.3.0; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.5" +backcall==0.2.0; python_version >= "3.7" +beautifulsoup4==4.9.3 +bleach==3.2.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +certifi==2020.12.5; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +cffi==1.14.4; implementation_name === "pypy" and python_version >= "3.5" +chardet==4.0.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +click==7.1.2; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.5" +colorama==0.4.4; python_version >= "3.7" and python_full_version < "3.0.0" and sys_platform == "win32" or sys_platform == "win32" and python_version >= "3.7" and python_full_version >= "3.5.0" +decorator==4.4.2; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.2.0") +defusedxml==0.6.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +distro==1.5.0 +entrypoints==0.3; python_version >= "3.6" +future==0.18.2; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.5" +idna==2.10; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +ipykernel==5.4.2; python_version >= "3.6" +ipython-genutils==0.2.0; python_version >= "3.7" +ipython==7.19.0; python_version >= "3.7" +ipywidgets==7.5.1 +jedi==0.17.2; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +jinja2==2.11.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +joblib==1.0.0; python_version >= "3.6" +json5==0.9.5; python_version >= "3.5" +jsonschema==3.2.0; python_version >= "3.5" +jupyter-client==6.1.7; python_version >= "3.6" +jupyter-console==6.2.0; python_version >= "3.6" +jupyter-core==4.7.0; python_version >= "3.6" +jupyter==1.0.0 +jupyterlab-pygments==0.1.2; python_version >= "3.6" +jupyterlab-server==1.2.0; python_version >= "3.5" +jupyterlab==2.2.9; python_version >= "3.5" +livereload==2.6.3; python_version >= "3.5" +lunr==0.5.8; python_version >= "3.5" +markdown==3.3.3; python_version >= "3.6" +markupsafe==1.1.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.5" +mistune==0.8.4; python_version >= "3.6" +mkdocs==1.1.2; python_version >= "3.5" +nbclient==0.5.1; python_version >= "3.6" +nbconvert==6.0.7; python_version >= "3.6" +nbformat==5.0.8; python_version >= "3.6" +nest-asyncio==1.4.3; python_version >= "3.6" +nltk==3.5; python_version >= "3.5" +notebook==6.1.5; python_version >= "3.5" +orjson==3.4.6; python_version >= "3.8" +packaging==20.8; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +pandocfilters==1.4.3; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" +parso==0.7.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +pexpect==4.8.0; sys_platform != "win32" and python_version >= "3.7" +pickleshare==0.7.5; python_version >= "3.7" +prometheus-client==0.9.0; python_version >= "3.5" +prompt-toolkit==3.0.8; python_full_version >= "3.6.1" and python_version >= "3.7" +protobuf==3.14.0; python_version >= "3.8" +psutil==5.8.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") +ptyprocess==0.6.0; os_name != "nt" and python_version >= "3.7" and sys_platform != "win32" +py==1.10.0; python_version >= "3.5" and python_full_version < "3.0.0" and implementation_name === "pypy" or implementation_name === "pypy" and python_version >= "3.5" and python_full_version >= "3.4.0" +pyaudio==0.2.11 +pycparser==2.20; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.5" +pygments==2.7.3; python_version >= "3.7" +pyparsing==2.4.7; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +pyrsistent==0.17.3; python_version >= "3.5" +pysocks==1.7.1; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +python-dateutil==2.8.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.5" +pywin32==300; sys_platform == "win32" and python_version >= "3.6" +pywinpty==0.5.7; os_name == "nt" and python_version >= "3.6" +pyyaml==5.3.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.5" +pyzmq==20.0.0; python_version >= "3.6" +qtconsole==5.0.1; python_version >= "3.6" +qtpy==1.9.0; python_version >= "3.6" +regex==2020.11.13; python_version >= "3.5" +requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") +send2trash==1.5.0 +six==1.15.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +soupsieve==2.1; python_version >= "3.8" +terminado==0.9.1; python_version >= "3.6" +testpath==0.4.4; python_version >= "3.6" +tornado==6.1; python_version >= "3.6" +tqdm==4.54.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_version >= "3.5" and python_full_version >= "3.4.0" +traitlets==5.0.5; python_version >= "3.7" +urllib3==1.26.2; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" and python_version >= "3.8" +wcwidth==0.2.5; python_full_version >= "3.6.1" and python_version >= "3.6" +webencodings==0.5.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +webob==1.8.6; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0") +widgetsnbextension==3.5.1 +-e src/. diff --git a/src/requirements.txt b/src/requirements.txt new file mode 100644 index 0000000..251170b --- /dev/null +++ b/src/requirements.txt @@ -0,0 +1,22 @@ +THE FILE WAS GENERATED BY POETRY, DO NOT EDIT! + + +anki==2.1.37; python_version >= "3.8" +beautifulsoup4==4.9.3 +certifi==2020.12.5; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +chardet==4.0.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +decorator==4.4.2; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.2.0") +distro==1.5.0 +idna==2.10; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +markdown==3.3.3; python_version >= "3.6" +orjson==3.4.6; python_version >= "3.8" +protobuf==3.14.0; python_version >= "3.8" +psutil==5.8.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") +pyaudio==0.2.11 +pysocks==1.7.1; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8" +requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") +send2trash==1.5.0 +six==1.15.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.8" +soupsieve==2.1; python_version >= "3.8" +urllib3==1.26.2; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" and python_version >= "3.8" +webob==1.8.6; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0") diff --git a/utils/migrate_user_tables.py b/src/utils/migrate_user_tables.py similarity index 100% rename from utils/migrate_user_tables.py rename to src/utils/migrate_user_tables.py diff --git a/tests/collection_test_base.py b/tests/collection_test_base.py index 03d05b0..b1da4fd 100644 --- a/tests/collection_test_base.py +++ b/tests/collection_test_base.py @@ -8,6 +8,8 @@ import shutil import anki import anki.storage +from ankisyncd.collection import CollectionManager + class CollectionTestBase(unittest.TestCase): """Parent class for tests that need a collection set up and torn down.""" @@ -15,7 +17,9 @@ class CollectionTestBase(unittest.TestCase): def setUp(self): self.temp_dir = tempfile.mkdtemp() self.collection_path = os.path.join(self.temp_dir, 'collection.anki2'); - self.collection = anki.storage.Collection(self.collection_path) + cm = CollectionManager({}) + collectionWrapper = cm.get_collection(self.collection_path) + self.collection = collectionWrapper._get_collection() self.mock_app = MagicMock() def tearDown(self): diff --git a/tests/helpers/collection_utils.py b/tests/helpers/collection_utils.py index 10fcaf5..b8e1231 100644 --- a/tests/helpers/collection_utils.py +++ b/tests/helpers/collection_utils.py @@ -5,7 +5,6 @@ import tempfile from anki import Collection - class CollectionUtils: """ Provides utility methods for creating, inspecting and manipulating anki @@ -26,7 +25,7 @@ class CollectionUtils: file_path = os.path.join(self.tempdir, "collection.anki2") master_col = Collection(file_path) - master_col.db.close() + master_col.close() self.master_db_path = file_path def __enter__(self): diff --git a/tests/helpers/file_utils.py b/tests/helpers/file_utils.py index ad99b5d..28dece3 100644 --- a/tests/helpers/file_utils.py +++ b/tests/helpers/file_utils.py @@ -10,7 +10,7 @@ import tempfile import unicodedata import zipfile -from anki.consts import SYNC_ZIP_SIZE +from ankisyncd.sync import SYNC_ZIP_SIZE def create_named_file(filename, file_contents=None): diff --git a/tests/helpers/mock_servers.py b/tests/helpers/mock_servers.py index 38442d3..fdd3a18 100644 --- a/tests/helpers/mock_servers.py +++ b/tests/helpers/mock_servers.py @@ -3,7 +3,7 @@ import io import logging import types -from anki.sync import HttpSyncer, RemoteServer, RemoteMediaServer +from ankisyncd.sync import HttpSyncer, RemoteServer, RemoteMediaServer class MockServerConnection: diff --git a/tests/helpers/monkey_patches.py b/tests/helpers/monkey_patches.py index 6a9792e..e65fe80 100644 --- a/tests/helpers/monkey_patches.py +++ b/tests/helpers/monkey_patches.py @@ -2,7 +2,7 @@ import os import sqlite3 as sqlite from anki.media import MediaManager -from anki.storage import DB +from anki.db import DB mediamanager_orig_funcs = { "findChanges": None, @@ -26,10 +26,6 @@ def monkeypatch_mediamanager(): def make_cwd_safe(original_func): mediamanager_orig_funcs["findChanges"] = MediaManager.findChanges - mediamanager_orig_funcs["mediaChangesZip"] = MediaManager.mediaChangesZip - mediamanager_orig_funcs["addFilesFromZip"] = MediaManager.addFilesFromZip - mediamanager_orig_funcs["syncDelete"] = MediaManager.syncDelete - mediamanager_orig_funcs["_logChanges"] = MediaManager._logChanges def wrapper(instance, *args): old_cwd = os.getcwd() @@ -42,27 +38,14 @@ def monkeypatch_mediamanager(): return wrapper MediaManager.findChanges = make_cwd_safe(MediaManager.findChanges) - MediaManager.mediaChangesZip = make_cwd_safe(MediaManager.mediaChangesZip) - MediaManager.addFilesFromZip = make_cwd_safe(MediaManager.addFilesFromZip) - MediaManager.syncDelete = make_cwd_safe(MediaManager.syncDelete) - MediaManager._logChanges = make_cwd_safe(MediaManager._logChanges) def unpatch_mediamanager(): """Undoes monkey patches to Anki's MediaManager.""" MediaManager.findChanges = mediamanager_orig_funcs["findChanges"] - MediaManager.mediaChangesZip = mediamanager_orig_funcs["mediaChangesZip"] - MediaManager.addFilesFromZip = mediamanager_orig_funcs["addFilesFromZip"] - MediaManager.syncDelete = mediamanager_orig_funcs["syncDelete"] - MediaManager._logChanges = mediamanager_orig_funcs["_logChanges"] mediamanager_orig_funcs["findChanges"] = None - mediamanager_orig_funcs["mediaChangesZip"] = None - mediamanager_orig_funcs["mediaChangesZip"] = None - mediamanager_orig_funcs["mediaChangesZip"] = None - mediamanager_orig_funcs["_logChanges"] = None - def monkeypatch_db(): """ diff --git a/tests/helpers/server_utils.py b/tests/helpers/server_utils.py index fed41ac..45e6b76 100644 --- a/tests/helpers/server_utils.py +++ b/tests/helpers/server_utils.py @@ -86,5 +86,6 @@ def add_files_to_server_mediadb(media, filepaths): with open(os.path.join(media.dir(), fname), 'wb') as f: f.write(data) - media.db.execute("INSERT INTO media VALUES (?, ?, ?)", fname, media.lastUsn() + 1, csum) - media.db.commit() + media.addMedia( + ((fname, media.lastUsn() + 1, csum),) + ) diff --git a/tests/test_media.py b/tests/test_media.py index fc67dd4..a7aacd9 100644 --- a/tests/test_media.py +++ b/tests/test_media.py @@ -1,5 +1,6 @@ import os.path import unittest +from unittest.mock import MagicMock import ankisyncd.media import helpers.collection_utils @@ -15,6 +16,9 @@ class ServerMediaManagerTest(unittest.TestCase): cls.colutils.clean_up() cls.colutils = None + # This test is currently expected to fail because the _logChanges + # method of the media manager does not exist anymore. + @unittest.expectedFailure def test_upgrade(self): col = self.colutils.create_empty_col() cm = col.media @@ -41,19 +45,26 @@ class ServerMediaManagerTest(unittest.TestCase): list(cm.db.execute("SELECT fname, csum FROM media")), ) self.assertEqual(cm.lastUsn(), sm.lastUsn()) - self.assertEqual(list(sm.db.execute("SELECT usn FROM media")), [(161,), (161,)]) + self.assertEqual( + list(sm.db.execute("SELECT usn FROM media")), + [(161,), (161,)] + ) def test_mediaChanges_lastUsn_order(self): col = self.colutils.create_empty_col() col.media = ankisyncd.media.ServerMediaManager(col) - mh = ankisyncd.sync_app.SyncMediaHandler(col) - mh.col.media.db.execute(""" - INSERT INTO media (fname, usn, csum) - VALUES + session = MagicMock() + session.name = 'test' + mh = ankisyncd.sync_app.SyncMediaHandler(col, session) + mh.col.media.addMedia( + ( ('fileA', 101, '53059abba1a72c7aff34a3eaf7fef10ed65541ce'), - ('fileB', 100, 'a5ae546046d09559399c80fa7076fb10f1ce4bcd') - """) - + ('fileB', 100, 'a5ae546046d09559399c80fa7076fb10f1ce4bcd'), + ) + ) # anki assumes mh.col.media.lastUsn() == mh.mediaChanges()['data'][-1][1] # ref: anki/sync.py:720 (commit cca3fcb2418880d0430a5c5c2e6b81ba260065b7) - self.assertEqual(mh.mediaChanges(lastUsn=99)['data'][-1][1], mh.col.media.lastUsn()) + self.assertEqual( + mh.mediaChanges(lastUsn=99)['data'][-1][1], + mh.col.media.lastUsn() + ) diff --git a/tests/test_sync_app.py b/tests/test_sync_app.py index 8e3ff89..3147daf 100644 --- a/tests/test_sync_app.py +++ b/tests/test_sync_app.py @@ -3,9 +3,9 @@ import os import sqlite3 import tempfile import unittest +from unittest.mock import MagicMock, Mock -from anki.consts import SYNC_VER - +from ankisyncd.sync import SYNC_VER from ankisyncd.sync_app import SyncCollectionHandler from ankisyncd.sync_app import SyncUserSession @@ -14,8 +14,13 @@ from collection_test_base import CollectionTestBase class SyncCollectionHandlerTest(CollectionTestBase): def setUp(self): - CollectionTestBase.setUp(self) - self.syncCollectionHandler = SyncCollectionHandler(self.collection) + super().setUp() + self.session = MagicMock() + self.session.name = 'test' + self.syncCollectionHandler = SyncCollectionHandler( + self.collection, + self.session + ) def tearDown(self): CollectionTestBase.tearDown(self) @@ -38,6 +43,7 @@ class SyncCollectionHandlerTest(CollectionTestBase): ','.join(('ankidesktop', '2.1.0', 'lin::')), ','.join(('ankidesktop', '2.1.6-beta2', 'lin::')), ','.join(('ankidesktop', '2.1.9 (dev)', 'lin::')), + ','.join(('ankidesktop', '2.1.26 (arch-linux-2.1.26-1)', 'lin:arch:')), ','.join(('ankidroid', '2.2.3', '')), ','.join(('ankidroid', '2.3alpha4', '')), ','.join(('ankidroid', '2.3alpha5', '')), @@ -60,6 +66,7 @@ class SyncCollectionHandlerTest(CollectionTestBase): self.assertTrue((type(meta['ts']) == int) and meta['ts'] > 0) self.assertEqual(meta['mod'], self.collection.mod) self.assertEqual(meta['usn'], self.collection._usn) + self.assertEqual(meta['uname'], self.session.name) self.assertEqual(meta['musn'], self.collection.media.lastUsn()) self.assertEqual(meta['msg'], '') self.assertEqual(meta['cont'], True) diff --git a/tests/test_web_media.py b/tests/test_web_media.py deleted file mode 100644 index 0e2c787..0000000 --- a/tests/test_web_media.py +++ /dev/null @@ -1,435 +0,0 @@ -# -*- coding: utf-8 -*- -import tempfile -import filecmp -import sqlite3 -import os -import shutil - -import helpers.file_utils -import helpers.server_utils -import helpers.db_utils -import anki.utils -from anki.sync import MediaSyncer -from helpers.mock_servers import MockRemoteMediaServer -from helpers.monkey_patches import monkeypatch_mediamanager, unpatch_mediamanager -from sync_app_functional_test_base import SyncAppFunctionalTestBase - - -class SyncAppFunctionalMediaTest(SyncAppFunctionalTestBase): - def setUp(self): - SyncAppFunctionalTestBase.setUp(self) - - monkeypatch_mediamanager() - self.tempdir = tempfile.mkdtemp(prefix=self.__class__.__name__) - self.hkey = self.mock_remote_server.hostKey("testuser", "testpassword") - client_collection = self.colutils.create_empty_col() - self.client_syncer = self.create_client_syncer(client_collection, - self.hkey, - self.server_test_app) - - def tearDown(self): - self.hkey = None - self.client_syncer = None - unpatch_mediamanager() - SyncAppFunctionalTestBase.tearDown(self) - - @staticmethod - def create_client_syncer(collection, hkey, server_test_app): - mock_remote_server = MockRemoteMediaServer(col=collection, - hkey=hkey, - server_test_app=server_test_app) - media_syncer = MediaSyncer(col=collection, - server=mock_remote_server) - return media_syncer - - @staticmethod - def file_checksum(fname): - with open(fname, "rb") as f: - return anki.utils.checksum(f.read()) - - def media_dbs_differ(self, left_db_path, right_db_path, compare_timestamps=False): - """ - Compares two media sqlite database files for equality. mtime and dirMod - timestamps are not considered when comparing. - - :param left_db_path: path to the left db file - :param right_db_path: path to the right db file - :param compare_timestamps: flag determining if timestamp values - (media.mtime and meta.dirMod) are included - in the comparison - :return: True if the specified databases differ, False else - """ - - if not os.path.isfile(right_db_path): - raise IOError("file '" + left_db_path + "' does not exist") - elif not os.path.isfile(right_db_path): - raise IOError("file '" + right_db_path + "' does not exist") - - # Create temporary copies of the files to act on. - newleft = os.path.join(self.tempdir, left_db_path) + ".tmp" - shutil.copyfile(left_db_path, newleft) - left_db_path = newleft - - newright = os.path.join(self.tempdir, left_db_path) + ".tmp" - shutil.copyfile(right_db_path, newright) - right_db_path = newright - - if not compare_timestamps: - # Set all timestamps that are not NULL to 0. - for dbPath in [left_db_path, right_db_path]: - connection = sqlite3.connect(dbPath) - - connection.execute("""UPDATE media SET mtime=0 - WHERE mtime IS NOT NULL""") - - connection.execute("""UPDATE meta SET dirMod=0 - WHERE rowid=1""") - connection.commit() - connection.close() - - return helpers.db_utils.diff(left_db_path, right_db_path) - - def test_sync_empty_media_dbs(self): - # With both the client and the server having no media to sync, - # syncing should change nothing. - self.assertEqual('noChanges', self.client_syncer.sync()) - self.assertEqual('noChanges', self.client_syncer.sync()) - - def test_sync_file_from_server(self): - """ - Adds a file on the server. After syncing, client and server should have - the identical file in their media directories and media databases. - """ - client = self.client_syncer - server = helpers.server_utils.get_syncer_for_hkey(self.server_app, - self.hkey, - 'media') - - # Create a test file. - temp_file_path = helpers.file_utils.create_named_file("foo.jpg", "hello") - - # Add the test file to the server's collection. - helpers.server_utils.add_files_to_server_mediadb(server.col.media, [temp_file_path]) - - # Syncing should work. - self.assertEqual(client.sync(), 'OK') - - # The test file should be present in the server's and in the client's - # media directory. - self.assertTrue( - filecmp.cmp(os.path.join(client.col.media.dir(), "foo.jpg"), - os.path.join(server.col.media.dir(), "foo.jpg"))) - - # Further syncing should do nothing. - self.assertEqual(client.sync(), 'noChanges') - - def test_sync_file_from_client(self): - """ - Adds a file on the client. After syncing, client and server should have - the identical file in their media directories and media databases. - """ - join = os.path.join - client = self.client_syncer - server = helpers.server_utils.get_syncer_for_hkey(self.server_app, - self.hkey, - 'media') - - # Create a test file. - temp_file_path = helpers.file_utils.create_named_file("foo.jpg", "hello") - - # Add the test file to the client's media collection. - helpers.server_utils.add_files_to_client_mediadb(client.col.media, - [temp_file_path], - update_db=True) - - # Syncing should work. - self.assertEqual(client.sync(), 'OK') - - # The same file should be present in both the client's and the server's - # media directory. - self.assertTrue(filecmp.cmp(join(client.col.media.dir(), "foo.jpg"), - join(server.col.media.dir(), "foo.jpg"))) - - # Further syncing should do nothing. - self.assertEqual(client.sync(), 'noChanges') - - # The media data of client and server should be identical. - self.assertEqual( - list(client.col.media.db.execute("SELECT fname, csum FROM media")), - list(server.col.media.db.execute("SELECT fname, csum FROM media")) - ) - self.assertEqual(client.col.media.lastUsn(), server.col.media.lastUsn()) - - def test_sync_different_files(self): - """ - Adds a file on the client and a file with different name and content on - the server. After syncing, both client and server should have both - files in their media directories and databases. - """ - join = os.path.join - isfile = os.path.isfile - client = self.client_syncer - server = helpers.server_utils.get_syncer_for_hkey(self.server_app, - self.hkey, - 'media') - - # Create two files and add one to the server and one to the client. - file_for_client = helpers.file_utils.create_named_file("foo.jpg", "hello") - file_for_server = helpers.file_utils.create_named_file("bar.jpg", "goodbye") - - helpers.server_utils.add_files_to_client_mediadb(client.col.media, - [file_for_client], - update_db=True) - helpers.server_utils.add_files_to_server_mediadb(server.col.media, [file_for_server]) - - # Syncing should work. - self.assertEqual(client.sync(), 'OK') - - # Both files should be present in the client's and in the server's - # media directories. - self.assertTrue(isfile(join(client.col.media.dir(), "foo.jpg"))) - self.assertTrue(isfile(join(server.col.media.dir(), "foo.jpg"))) - self.assertTrue(filecmp.cmp( - join(client.col.media.dir(), "foo.jpg"), - join(server.col.media.dir(), "foo.jpg")) - ) - self.assertTrue(isfile(join(client.col.media.dir(), "bar.jpg"))) - self.assertTrue(isfile(join(server.col.media.dir(), "bar.jpg"))) - self.assertTrue(filecmp.cmp( - join(client.col.media.dir(), "bar.jpg"), - join(server.col.media.dir(), "bar.jpg")) - ) - - # Further syncing should change nothing. - self.assertEqual(client.sync(), 'noChanges') - - def test_sync_different_contents(self): - """ - Adds a file to the client and a file with identical name but different - contents to the server. After syncing, both client and server should - have the server's version of the file in their media directories and - databases. - """ - join = os.path.join - isfile = os.path.isfile - client = self.client_syncer - server = helpers.server_utils.get_syncer_for_hkey(self.server_app, - self.hkey, - 'media') - - # Create two files with identical names but different contents and - # checksums. Add one to the server and one to the client. - file_for_client = helpers.file_utils.create_named_file("foo.jpg", "hello") - file_for_server = helpers.file_utils.create_named_file("foo.jpg", "goodbye") - - helpers.server_utils.add_files_to_client_mediadb(client.col.media, - [file_for_client], - update_db=True) - helpers.server_utils.add_files_to_server_mediadb(server.col.media, [file_for_server]) - - # Syncing should work. - self.assertEqual(client.sync(), 'OK') - - # A version of the file should be present in both the client's and the - # server's media directory. - self.assertTrue(isfile(join(client.col.media.dir(), "foo.jpg"))) - self.assertEqual(os.listdir(client.col.media.dir()), ['foo.jpg']) - self.assertTrue(isfile(join(server.col.media.dir(), "foo.jpg"))) - self.assertEqual(os.listdir(server.col.media.dir()), ['foo.jpg']) - self.assertEqual(client.sync(), 'noChanges') - - # Both files should have the contents of the server's version. - _checksum = client.col.media._checksum - self.assertEqual(_checksum(join(client.col.media.dir(), "foo.jpg")), - _checksum(file_for_server)) - self.assertEqual(_checksum(join(server.col.media.dir(), "foo.jpg")), - _checksum(file_for_server)) - - def test_sync_add_and_delete_on_client(self): - """ - Adds a file on the client. After syncing, the client and server should - both have the file. Then removes the file from the client's directory - and marks it as deleted in its database. After syncing again, the - server should have removed its version of the file from its media dir - and marked it as deleted in its db. - """ - join = os.path.join - isfile = os.path.isfile - client = self.client_syncer - server = helpers.server_utils.get_syncer_for_hkey(self.server_app, - self.hkey, - 'media') - - # Create a test file. - temp_file_path = helpers.file_utils.create_named_file("foo.jpg", "hello") - - # Add the test file to client's media collection. - helpers.server_utils.add_files_to_client_mediadb(client.col.media, - [temp_file_path], - update_db=True) - - # Syncing client should work. - self.assertEqual(client.sync(), 'OK') - - # The same file should be present in both client's and the server's - # media directory. - self.assertTrue(filecmp.cmp(join(client.col.media.dir(), "foo.jpg"), - join(server.col.media.dir(), "foo.jpg"))) - - # Syncing client again should do nothing. - self.assertEqual(client.sync(), 'noChanges') - - # Remove files from client's media dir and write changes to its db. - os.remove(join(client.col.media.dir(), "foo.jpg")) - - # TODO: client.col.media.findChanges() doesn't work here - why? - client.col.media._logChanges() - self.assertEqual(client.col.media.syncInfo("foo.jpg"), (None, 1)) - self.assertFalse(isfile(join(client.col.media.dir(), "foo.jpg"))) - - # Syncing client again should work. - self.assertEqual(client.sync(), 'OK') - - # server should have picked up the removal from client. - self.assertEqual(server.col.media.syncInfo("foo.jpg"), (None, 0)) - self.assertFalse(isfile(join(server.col.media.dir(), "foo.jpg"))) - - # Syncing client again should do nothing. - self.assertEqual(client.sync(), 'noChanges') - - def test_sync_compare_database_to_expected(self): - """ - Adds a test image file to the client's media directory. After syncing, - the server's database should, except for timestamps, be identical to a - database containing the expected data. - """ - client = self.client_syncer - - # Add a test image file to the client's media collection but don't - # update its media db since the desktop client updates that, using - # findChanges(), only during syncs. - support_file = helpers.file_utils.get_asset_path('blue.jpg') - self.assertTrue(os.path.isfile(support_file)) - helpers.server_utils.add_files_to_client_mediadb(client.col.media, - [support_file], - update_db=False) - - # Syncing should work. - self.assertEqual(client.sync(), "OK") - - # Create temporary db file with expected results. - chksum = client.col.media._checksum(support_file) - sql = (""" - CREATE TABLE meta (dirMod int, lastUsn int); - - INSERT INTO `meta` (dirMod, lastUsn) VALUES (123456789,1); - - CREATE TABLE media ( - fname text not null primary key, - csum text, - mtime int not null, - dirty int not null - ); - - INSERT INTO `media` (fname, csum, mtime, dirty) VALUES ( - 'blue.jpg', - '%s', - 1441483037, - 0 - ); - - CREATE INDEX idx_media_dirty on media (dirty); - """ % chksum) - - _, dbpath = tempfile.mkstemp(suffix=".anki2") - helpers.db_utils.from_sql(dbpath, sql) - - # Except for timestamps, the client's db after sync should be identical - # to the expected data. - self.assertFalse(self.media_dbs_differ( - client.col.media.db._path, - dbpath - )) - os.unlink(dbpath) - - def test_sync_mediaChanges(self): - client = self.client_syncer - client2 = self.create_client_syncer(self.colutils.create_empty_col(), self.hkey, self.server_test_app) - server = helpers.server_utils.get_syncer_for_hkey(self.server_app, self.hkey, 'media') - self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], []) - - helpers.server_utils.add_files_to_client_mediadb(client.col.media, [ - helpers.file_utils.create_named_file("a", "lastUsn a"), - helpers.file_utils.create_named_file("b", "lastUsn b"), - helpers.file_utils.create_named_file("c", "lastUsn c"), - ], update_db=True) - self.assertEqual(client.sync(), "OK") - self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], []) - - self.assertEqual(client2.sync(), "OK") - os.remove(os.path.join(client2.col.media.dir(), "c")) - client2.col.media._logChanges() - self.assertEqual(client2.sync(), "OK") - self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], [['c', 4, None]]) - self.assertEqual(client.sync(), "OK") - self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], []) - - helpers.server_utils.add_files_to_client_mediadb(client.col.media, [ - helpers.file_utils.create_named_file("d", "lastUsn d"), - ], update_db=True) - client.col.media._logChanges() - self.assertEqual(client.sync(), "OK") - - self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], [['d', 5, self.file_checksum(os.path.join(server.col.media.dir(), "d"))]]) - - self.assertEqual(client2.sync(), "OK") - self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], []) - - dpath = os.path.join(client.col.media.dir(), "d") - with open(dpath, "a") as f: - f.write("\nsome change") - # files with the same mtime and name are considered equivalent by anki.media.MediaManager._changes - os.utime(dpath, (315529200, 315529200)) - client.col.media._logChanges() - self.assertEqual(client.sync(), "OK") - self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], [['d', 6, self.file_checksum(os.path.join(server.col.media.dir(), "d"))]]) - self.assertEqual(client2.sync(), "OK") - self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], []) - - def test_sync_rename(self): - """ - Adds 3 media files to the client's media directory, syncs and then - renames them and syncs again. After syncing, both the client and the - server should only have the renamed files. - """ - client = self.client_syncer - client2 = self.create_client_syncer(self.colutils.create_empty_col(), self.hkey, self.server_test_app) - server = helpers.server_utils.get_syncer_for_hkey(self.server_app, self.hkey, 'media') - self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], []) - - helpers.server_utils.add_files_to_client_mediadb(client.col.media, [ - helpers.file_utils.create_named_file("a.wav", "lastUsn a"), - helpers.file_utils.create_named_file("b.wav", "lastUsn b"), - helpers.file_utils.create_named_file("c.wav", "lastUsn c"), - ], update_db=True) - self.assertEqual(client.sync(), "OK") - - for fname in os.listdir(client.col.media.dir()): - os.rename( - os.path.join(client.col.media.dir(), fname), - os.path.join(client.col.media.dir(), fname[:1] + ".mp3") - ) - client.col.media._logChanges() - self.assertEqual(client.sync(), "OK") - self.assertEqual( - set(os.listdir(server.col.media.dir())), - {"a.mp3", "b.mp3", "c.mp3"}, - ) - self.assertEqual( - set(os.listdir(client.col.media.dir())), - set(os.listdir(server.col.media.dir())), - ) - self.assertEqual( - list(client.col.media.db.execute("SELECT fname, csum FROM media ORDER BY fname")), - list(server.col.media.db.execute("SELECT fname, csum FROM media ORDER BY fname")), - )