Merge pull request #65 from ankicommunity/release/2.3.0
.gitignore (vendored): 223 changes

@@ -4,3 +4,226 @@
/ankisyncd/_version.py
/collections
/venv

# Created by https://www.toptal.com/developers/gitignore/api/windows,linux,macos,python,jupyternotebooks
# Edit at https://www.toptal.com/developers/gitignore?templates=windows,linux,macos,python,jupyternotebooks

### JupyterNotebooks ###
# gitignore template for Jupyter Notebooks
# website: http://jupyter.org/

.ipynb_checkpoints
*/.ipynb_checkpoints/*

# IPython
profile_default/
ipython_config.py

# Remove previous ipynb_checkpoints
# git rm -r .ipynb_checkpoints/

### Linux ###
*~

# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*

# KDE directory preferences
.directory

# Linux trash folder which might appear on any partition or disk
.Trash-*

# .nfs files are created when an open file is removed but is still being accessed
.nfs*

### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two \r
Icon

# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# Emacs temporary files
*#*#
*.#*

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook

# IPython

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db

# Dump file
*.stackdump

# Folder config file
[Dd]esktop.ini

# Recycle Bin used on file shares
$RECYCLE.BIN/

# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp

# Windows shortcuts
*.lnk

# End of https://www.toptal.com/developers/gitignore/api/windows,linux,macos,python,jupyternotebooks
.gitmodules (vendored): 3 changes

@@ -1,3 +0,0 @@
[submodule "anki-bundled"]
	path = anki-bundled
	url = https://github.com/dae/anki.git
.readthedocs.yml (new file): 13 lines

@@ -0,0 +1,13 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

version: 2

# Build documentation with MkDocs
mkdocs:
  configuration: docs/mkdocs.yml

# Optionally set the version of Python and requirements required to build your docs
python:
  version: 3.7
Makefile (new file): 26 lines

@@ -0,0 +1,26 @@
#!/bin/make

ANKI_SERVER_NAME ?= "Anki Sync Server"
ANKI_SERVER_VERSION ?= "v0.1.0"
ANKI_SERVER_DESCRIPTION ?= "Self-hosted Anki Sync Server."
ENV ?= local

-include config/.env.${ENV}
export

.DEFAULT_GOAL := help
.PHONY: help #: Display list of commands and exit.
help:
	@awk 'BEGIN {FS = " ?#?: "; print ""${ANKI_SERVER_NAME}" "${ANKI_SERVER_VERSION}"\n"${ANKI_SERVER_DESCRIPTION}"\n\nUsage: make \033[36m<command>\033[0m\n\nCommands:"} /^.PHONY: ?[a-zA-Z_-]/ { printf " \033[36m%-10s\033[0m %s\n", $$2, $$3 }' $(MAKEFILE_LIST)

.PHONY: docs #: Build and serve documentation.
docs: print-env
	@${MKDOCS} ${MKDOCS_OPTION} -f docs/mkdocs.yml

.PHONY: notebooks #: Run jupyter notebooks.
notebooks:
	@${JUPYTER} ${JUPYTER_OPTION}

%:
	@test -f scripts/${*}.sh
	@${SHELL} scripts/${*}.sh
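With an env file in place (see config/.env.example below), the `help` target doubles as the command index. A sketch of typical invocations, assuming the defaults shipped in this commit:

```sh
$ make help         # prints the command list parsed from the .PHONY comments
$ make docs         # runs ${MKDOCS} ${MKDOCS_OPTION} against docs/mkdocs.yml
$ make notebooks    # runs ${JUPYTER} ${JUPYTER_OPTION}
$ make print-env    # no explicit rule, so the catch-all %: rule runs scripts/print-env.sh
```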
README.md: 112 changes

@@ -1,6 +1,9 @@
ankisyncd
=========

+[](https://anki-sync-server.readthedocs.io/?badge=latest)
+[](https://gitter.im/ankicommunity/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)

[Anki][] is a powerful open source flashcard application, which helps you
quickly and easily memorize facts over the long term utilizing a spaced
repetition algorithm. Anki's main form is a desktop application (for Windows,

@@ -26,9 +29,6 @@ It supports Python 3 and Anki 2.1.
- [Anki 2.1](#anki-21)
- [Anki 2.0](#anki-20)
- [AnkiDroid](#ankidroid)
-- [Running `ankisyncd` without `pyaudio`](#running-ankisyncd-without-pyaudio)
-- [Anki ≥2.1.9](#anki-219)
-- [Older versions](#older-versions)
- [ENVVAR configuration overrides](#envvar-configuration-overrides)
- [Support for other database backends](#support-for-other-database-backends)
</details>
@@ -36,25 +36,9 @@ It supports Python 3 and Anki 2.1.
Installing
----------

0. Install Anki. The currently supported version range is 2.1.1〜2.1.11, with the
   exception of 2.1.9<sup id="readme-fn-01b">[1](#readme-fn-01)</sup>. (Keep in
   mind this range only applies to the Anki used by the server; clients can be
   as old as 2.0.27 and still work.) Running the server with other versions might
   work as long as they're not 2.0.x, but things might break, so do it at your
   own risk. If for some reason you can't get the supported Anki version easily
   on your system, you can use `anki-bundled` from this repo:

       $ git submodule update --init
       $ cd anki-bundled
       $ pip install -r requirements.txt

   Keep in mind `pyaudio`, a dependency of Anki, requires development headers for
   Python 3 and PortAudio to be present before running `pip`. If you can't or
   don't want to install these, you can try [patching Anki](#running-ankisyncd-without-pyaudio).

1. Install the dependencies:

       $ pip install webob
       $ pip install -r src/requirements.txt

2. Modify ankisyncd.conf according to your needs
@@ -62,22 +46,39 @@ Installing

       $ ./ankisyncctl.py adduser <username>

-4. Run ankisyncd:
+4. Set up a proxy to unchunk the requests.

   Webob does not support the header "Transfer-Encoding: chunked" used by Anki,
   and therefore ankisyncd sees chunked requests as empty. To solve this problem,
   set up Nginx (or any other webserver of your choice) and configure it to
   "unchunk" the requests for ankisyncd.

   For example, if you use Nginx on the same machine as ankisyncd, you first
   have to change the port in `ankisyncd.conf` to something other than `27701`.
   Then configure Nginx to listen on port `27701` and forward the unchunked
   requests to ankisyncd.

   An example configuration with ankisyncd running on the same machine as Nginx
   and listening on port `27702` may look like:

   ```
   server {
       listen 27701;
       server_name default;

       location / {
           proxy_http_version 1.0;
           proxy_pass http://localhost:27702/;
       }
   }
   ```
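   One way to check that the proxy is actually unchunking traffic is to force a
   chunked request through the front port and confirm ankisyncd no longer sees an
   empty body. The host, port, and payload below are placeholders, and the path
   assumes the default `base_url` of `/sync/`:

   ```sh
   $ curl -v -H 'Transfer-Encoding: chunked' --data-binary '{}' http://localhost:27701/sync/meta
   ```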
5. Run ankisyncd:

       $ python -m ankisyncd

---

<span id="readme-fn-01"></span>
1. 2.1.9 is not supported due to [commit `95ccbfdd3679`][] introducing the
   dependency on the `aqt` module, which depends on PyQt5. The server should
   still work fine if you have PyQt5 installed. This has been fixed in
   [commit `a389b8b4a0e2`][], which is part of the 2.1.10 release.
   [↑](#readme-fn-01b)

[commit `95ccbfdd3679`]: https://github.com/dae/anki/commit/95ccbfdd3679dd46f22847c539c7fddb8fa904ea
[commit `a389b8b4a0e2`]: https://github.com/dae/anki/commit/a389b8b4a0e209023c4533a7ee335096a704079c

Installing (Docker)
-------------------
@@ -86,6 +87,18 @@ Follow [these instructions](https://github.com/kuklinistvan/docker-anki-sync-ser
Setting up Anki
---------------

### Anki 2.1.28 and above

Create a new directory in [the add-ons folder][addons21] (name it something
like ankisyncd), create a file named `__init__.py` containing the code below
and put it in the `ankisyncd` directory.

    import os

    addr = "http://127.0.0.1:27701/" # put your server address here
    os.environ["SYNC_ENDPOINT"] = addr + "sync/"
    os.environ["SYNC_ENDPOINT_MEDIA"] = addr + "msync/"

### Anki 2.1

Create a new directory in [the add-ons folder][addons21] (name it something

@@ -111,7 +124,7 @@ and put it in `~/Anki/addons`.
    anki.sync.SYNC_BASE = addr
    anki.sync.SYNC_MEDIA_BASE = addr + "msync/"

-[addons21]: https://apps.ankiweb.net/docs/addons.html#_add_on_folders
+[addons21]: https://addon-docs.ankiweb.net/#/getting-started?id=add-on-folders

### AnkiDroid
@@ -122,44 +135,13 @@ Unless you have set up a reverse proxy to handle encrypted connections, use
whatever you have specified in `ankisyncd.conf` (or, if using a reverse proxy,
whatever port you configured to accept the front-end connection).

**Do not use trailing slashes.**
Use the same base url for both the `Sync url` and the `Media sync url`, but append `/msync` to
the `Media sync url`. Do **not** append `/sync` to the `Sync url`.

Even though the AnkiDroid interface will request an email address, this is not
required; it will simply be the username you configured with `ankisyncctl.py
adduser`.
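For example, with a server reachable at 192.168.0.2 (a placeholder address) on
the default port, the two AnkiDroid fields would be:

    Sync url:       http://192.168.0.2:27701
    Media sync url: http://192.168.0.2:27701/msync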
Running `ankisyncd` without `pyaudio`
-------------------------------------

`ankisyncd` doesn't use the audio recording feature of Anki, so if you don't
want to install PortAudio, you can edit some files in the `anki-bundled`
directory to exclude `pyaudio`:

### Anki ≥2.1.9

Just remove "pyaudio" from requirements.txt and you're done. This change was
introduced in [commit `ca710ab3f1c1`][].

[commit `ca710ab3f1c1`]: https://github.com/dae/anki/commit/ca710ab3f1c1174469a3b48f1257c0fc0ce624bf

### Older versions

First go to `anki-bundled`, then follow one of the instructions below. They all
do the same thing; pick whichever one you're most comfortable with.

Manual version: remove every line past "# Packaged commands" in anki/sound.py,
then remove every line starting with "pyaudio" in requirements.txt.

`ed` version:

    $ echo '/# Packaged commands/,$d;w' | tr ';' '\n' | ed anki/sound.py
    $ echo '/^pyaudio/d;w' | tr ';' '\n' | ed requirements.txt

`sed -i` version:

    $ sed -i '/# Packaged commands/,$d' anki/sound.py
    $ sed -i '/^pyaudio/d' requirements.txt

ENVVAR configuration overrides
------------------------------

Submodule anki-bundled deleted from cca3fcb241
full_sync.py (deleted; superseded by src/ankisyncd/full_sync.py below)
@@ -1,59 +0,0 @@
# -*- coding: utf-8 -*-

import os
from sqlite3 import dbapi2 as sqlite

import anki.db

class FullSyncManager:
    def upload(self, col, data, session):
        # Verify integrity of the received database file before replacing our
        # existing db.
        temp_db_path = session.get_collection_path() + ".tmp"
        with open(temp_db_path, 'wb') as f:
            f.write(data)

        try:
            with anki.db.DB(temp_db_path) as test_db:
                if test_db.scalar("pragma integrity_check") != "ok":
                    raise HTTPBadRequest("Integrity check failed for uploaded "
                                         "collection database file.")
        except sqlite.Error as e:
            raise HTTPBadRequest("Uploaded collection database file is "
                                 "corrupt.")

        # Overwrite existing db.
        col.close()
        try:
            os.replace(temp_db_path, session.get_collection_path())
        finally:
            col.reopen()
            col.load()

        return "OK"

    def download(self, col, session):
        col.close()
        try:
            data = open(session.get_collection_path(), 'rb').read()
        finally:
            col.reopen()
            col.load()
        return data


def get_full_sync_manager(config):
    if "full_sync_manager" in config and config["full_sync_manager"]:  # load from config
        import importlib
        import inspect
        module_name, class_name = config['full_sync_manager'].rsplit('.', 1)
        module = importlib.import_module(module_name.strip())
        class_ = getattr(module, class_name.strip())

        if not FullSyncManager in inspect.getmro(class_):
            raise TypeError('''"full_sync_manager" found in the conf file but it doesn't
                            inherit from FullSyncManager''')
        return class_(config)
    else:
        return FullSyncManager()
config/.env.example (new file): 28 lines

@@ -0,0 +1,28 @@
# .env.example (anki-sync-server)

## Make
MKDOCS=mkdocs
JUPYTER=jupyter

## Ankisyncd
ANKISYNCD_HOST=0.0.0.0
ANKISYNCD_PORT=27701
ANKISYNCD_DATA_ROOT=./collections
ANKISYNCD_BASE_URL=/sync/
ANKISYNCD_BASE_MEDIA_URL=/msync/
ANKISYNCD_AUTH_DB_PATH=./auth.db
ANKISYNCD_SESSION_DB_PATH=./session.db

ANKISYNCD_FULL_SYNC_MANAGER
ANKISYNCD_SESSION_MANAGER
ANKISYNCD_USER_MANAGER
ANKISYNCD_COLLECTION_WRAPPER

## Mkdocs
MKDOCS_OPTION=serve

## Jupyter
JUPYTER_OPTION=lab

## Path
PATH:=.venv/bin/path:${PATH}
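The Makefile loads this file via `-include config/.env.${ENV}`, with `ENV` defaulting to `local`, so a minimal setup is just a copy of the example (the env names below follow that convention; pick your own):

```sh
$ cp config/.env.example config/.env.local
$ make print-env           # prints "local"
$ ENV=staging make docs    # would load config/.env.staging instead
```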
docs/mkdocs.yml (new file): 9 lines

@@ -0,0 +1,9 @@
strict: true
theme: readthedocs
site_name: Anki Sync Server
site_description: Self-hosted Anki Sync Server.
site_author: Anki Community
site_url: https://ankicommunity.github.io/anki-sync-server
repo_url: https://github.com/ankicommunity/anki-sync-server
docs_dir: src
site_dir: build
docs/src/index.md (new file): 19 lines

@@ -0,0 +1,19 @@
# Welcome to MkDocs

Welcome to the anki-sync-server wiki!

For full documentation visit [mkdocs.org](https://www.mkdocs.org).

## Commands

* `mkdocs new [dir-name]` - Create a new project.
* `mkdocs serve` - Start the live-reloading docs server.
* `mkdocs build` - Build the documentation site.
* `mkdocs -h` - Print help message and exit.

## Project layout

    mkdocs.yml    # The configuration file.
    src/
        index.md  # The documentation homepage.
        ...       # Other markdown pages, images and other files.
notebooks/read_collections.ipynb (new file): 182 lines

@@ -0,0 +1,182 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "cd .."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Reading Collections\n",
    "\n",
    "This notebook lets you view your collections. Note that currently we are using the anki from the submodule. In the future, we should be able to use the anki installed with `pip install anki`; however, the current collections do not seem compatible with the latest library."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Install Anki in venv\n",
    "!pip3 install anki"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from anki import Collection\n",
    "from anki.utils import intTime\n",
    "import time"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Open Database\n",
    "\n",
    "Make sure you close the database; otherwise it will be locked and you will not be able to use your sync server."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "collection_path = \"./collections/anki/collection.anki2\"\n",
    "col = Collection(collection_path)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### View Collections"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "print(f'Collection Name: {col.name()}')\n",
    "print(f'Cards in Collection: {col.noteCount()}')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### View Decks"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "print('Decks:')\n",
    "for deck in col.decks.all():\n",
    "    print(f\"{deck['id']}. {deck['name']}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### View Cards"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "deck_id = None\n",
    "print('Cards in deck:')\n",
    "i = 0\n",
    "for card_id in col.decks.cids(deck_id):\n",
    "    i += 1\n",
    "    print(f'{i}. {card_id}')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### View Notes"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "card_id = None\n",
    "print('Notes in card:')\n",
    "note_id = col.getCard(card_id).nid\n",
    "print(f\"1. Front: {col.getNote(note_id).fields[0]}\")\n",
    "print(f\"2. Back: {col.getNote(note_id).fields[1]}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Close Database"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "col.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
poetry.lock (generated, new file): 1663 lines
(File diff suppressed because it is too large.)
pyproject.toml (new file): 27 lines

@@ -0,0 +1,27 @@
[tool.poetry]
name = "anki-sync-server"
version = "2.3.0"
description = "Self-hosted Anki Sync Server."
authors = ["Vikash Kothary <kothary.vikash@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.8"
anki = "^2.1.36"
beautifulsoup4 = "^4.9.1"
requests = "^2.24.0"
markdown = "^3.2.2"
send2trash = "^1.5.0"
pyaudio = "^0.2.11"
decorator = "^4.4.2"
psutil = "^5.7.2"
distro = "^1.5.0"
webob = "^1.8.6"

[tool.poetry.dev-dependencies]
mkdocs = "^1.1.2"
jupyter = "^1.0.0"
jupyterlab = "^2.2.2"

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
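Since the project is Poetry-managed, a plausible way to get a development environment, assuming Poetry ≥0.12 is installed, is:

```sh
$ poetry install                   # resolves pyproject.toml against poetry.lock into a virtualenv
$ poetry run python -m ankisyncd  # start the server inside that environment
```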
scripts/lock.sh (new file): 13 lines

@@ -0,0 +1,13 @@
#!/bin/bash
# file: lock.sh
# description: Lock dependencies and export requirements.

echo "THE FILE WAS GENERATED BY POETRY, DO NOT EDIT!\n\n" > src/requirements.txt
echo "THE FILE WAS GENERATED BY POETRY, DO NOT EDIT!\n\n" > src/requirements-dev.txt
poetry lock
poetry export --without-hashes -f requirements.txt >> src/requirements.txt
poetry export --dev --without-hashes -f requirements.txt >> src/requirements-dev.txt

echo "-e src/." >> src/requirements-dev.txt
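Thanks to the Makefile's catch-all `%:` rule, this script does not need its own make target; invoking it through make is enough:

```sh
$ make lock    # checks that scripts/lock.sh exists, then runs it with ${SHELL}
```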
scripts/print-env.sh (new file): 5 lines

@@ -0,0 +1,5 @@
#!/bin/bash
# file: print-env.sh
# description: Print env variable.

echo "${ENV}"
@@ -1,10 +1,7 @@
import os
import sys

-sys.path.insert(0, "/usr/share/anki")
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "anki-bundled"))

-_homepage = "https://github.com/tsudoko/anki-sync-server"
+_homepage = "https://github.com/ankicommunity/anki-sync-server.git"
_unknown_version = "[unknown version]"
@@ -1,4 +1,3 @@
-import anki
import anki.storage

import ankisyncd.media

@@ -65,7 +64,7 @@ class CollectionWrapper:
        return col

    def _get_collection(self):
-        col = anki.storage.Collection(self.path)
+        col = anki.storage.Collection(self.path, server=True)

        # Ugly hack, replace default media manager with our custom one
        col.media.close()
src/ankisyncd/full_sync.py (new file): 95 lines

@@ -0,0 +1,95 @@
# -*- coding: utf-8 -*-

import logging
import os
from sqlite3 import dbapi2 as sqlite
import shutil
import sys
from webob.exc import HTTPBadRequest

from anki.db import DB
from anki.collection import Collection

logger = logging.getLogger("ankisyncd.media")
logger.setLevel(1)

class FullSyncManager:
    def test_db(self, db: DB):
        """
        :param anki.db.DB db: the database uploaded from the client.
        """
        if db.scalar("pragma integrity_check") != "ok":
            raise HTTPBadRequest(
                "Integrity check failed for uploaded collection database file."
            )

    def upload(self, col: Collection, data: bytes, session) -> str:
        """
        Uploads a sqlite database from the client to the sync server.

        :param anki.collection.Collection col:
        :param bytes data: The binary sqlite database from the client.
        :param .sync_app.SyncUserSession session: The current session.
        """
        # Verify integrity of the received database file before replacing our
        # existing db.
        temp_db_path = session.get_collection_path() + ".tmp"
        with open(temp_db_path, 'wb') as f:
            f.write(data)

        try:
            with DB(temp_db_path) as test_db:
                self.test_db(test_db)
        except sqlite.Error as e:
            raise HTTPBadRequest("Uploaded collection database file is "
                                 "corrupt.")

        # Overwrite existing db.
        col.close()
        try:
            shutil.copyfile(temp_db_path, session.get_collection_path())
        finally:
            col.reopen()
            # Reopen the media database
            col.media.connect()

        return "OK"

    def download(self, col: Collection, session) -> bytes:
        """Download the binary database.

        Performs a downgrade to database schema 11 before sending the database
        to the client.

        :param anki.collection.Collection col:
        :param .sync_app.SyncUserSession session:

        :return bytes: the binary sqlite3 database
        """
        col.close(downgrade=True)
        db_path = session.get_collection_path()
        try:
            with open(db_path, 'rb') as tmp:
                data = tmp.read()
        finally:
            col.reopen()
            # Reopen the media database
            col.media.connect()

        return data


def get_full_sync_manager(config):
    if "full_sync_manager" in config and config["full_sync_manager"]:  # load from config
        import importlib
        import inspect
        module_name, class_name = config['full_sync_manager'].rsplit('.', 1)
        module = importlib.import_module(module_name.strip())
        class_ = getattr(module, class_name.strip())

        if not FullSyncManager in inspect.getmro(class_):
            raise TypeError('''"full_sync_manager" found in the conf file but it doesn't
                            inherit from FullSyncManager''')
        return class_(config)
    else:
        return FullSyncManager()
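As the loader above shows, a deployment can point the `full_sync_manager` config key at a dotted `module.Class` path; the class must inherit from `FullSyncManager` and is instantiated as `class_(config)`. A minimal sketch, with hypothetical module and class names:

```python
# my_sync.py -- hypothetical custom manager; module/class names are examples only
from ankisyncd.full_sync import FullSyncManager

class AuditingFullSyncManager(FullSyncManager):
    def __init__(self, config):
        # get_full_sync_manager() passes the whole config dict to custom classes
        self.config = config

    def upload(self, col, data, session):
        # log the size of every full upload, then defer to the stock behaviour
        print("full upload: %d bytes" % len(data))
        return super().upload(col, data, session)
```

The corresponding config entry would then be `full_sync_manager = my_sync.AuditingFullSyncManager`.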
@@ -8,21 +8,32 @@ import os
import os.path

import anki.db
+from anki.media import MediaManager

logger = logging.getLogger("ankisyncd.media")


-class ServerMediaManager:
-    def __init__(self, col):
+class ServerMediaManager(MediaManager):
+    def __init__(self, col, server=True):
+        super().__init__(col, server)
        self._dir = re.sub(r"(?i)\.(anki2)$", ".media", col.path)
        self.connect()

+    def addMedia(self, media_to_add):
+        self._db.executemany(
+            "INSERT OR REPLACE INTO media VALUES (?,?,?)",
+            media_to_add
+        )
+        self._db.commit()
+
+    def changes(self, lastUsn):
+        return self._db.execute("select fname,usn,csum from media order by usn desc limit ?", self.lastUsn() - lastUsn)
+
    def connect(self):
        path = self.dir() + ".server.db"
        create = not os.path.exists(path)
-        self.db = anki.db.DB(path)
+        self._db = anki.db.DB(path)
        if create:
-            self.db.executescript(
+            self._db.executescript(
                """CREATE TABLE media (
                       fname TEXT NOT NULL PRIMARY KEY,
                       usn INT NOT NULL,

@@ -33,35 +44,36 @@ class ServerMediaManager:
        oldpath = self.dir() + ".db2"
        if os.path.exists(oldpath):
            logger.info("Found client media database, migrating contents")
-            self.db.execute("ATTACH ? AS old", oldpath)
-            self.db.execute(
+            self._db.execute("ATTACH ? AS old", oldpath)
+            self._db.execute(
                "INSERT INTO media SELECT fname, lastUsn, csum FROM old.media, old.meta"
            )
-            self.db.commit()
-            self.db.execute("DETACH old")
+            self._db.commit()
+            self._db.execute("DETACH old")

    def close(self):
-        self.db.close()
+        self._db.close()

    def dir(self):
        return self._dir

    def lastUsn(self):
-        return self.db.scalar("SELECT max(usn) FROM media") or 0
+        return self._db.scalar("SELECT max(usn) FROM media") or 0

    def mediaCount(self):
-        return self.db.scalar("SELECT count() FROM media WHERE csum IS NOT NULL")
+        return self._db.scalar("SELECT count() FROM media WHERE csum IS NOT NULL")

    # used only in unit tests
    def syncInfo(self, fname):
-        return self.db.first("SELECT csum, 0 FROM media WHERE fname=?", fname)
+        return self._db.first("SELECT csum, 0 FROM media WHERE fname=?", fname)

    def syncDelete(self, fname):
        fpath = os.path.join(self.dir(), fname)
        if os.path.exists(fpath):
            os.remove(fpath)
-        self.db.execute(
+        self._db.execute(
            "UPDATE media SET csum = NULL, usn = ? WHERE fname = ?",
            self.lastUsn() + 1,
            fname,
        )
+        self._db.commit()
@@ -32,7 +32,7 @@ class SqliteSessionManager(SimpleSessionManager):
    everytime the SyncApp is restarted."""

    def __init__(self, session_db_path):
-        SimpleSessionManager.__init__(self)
+        super().__init__()

        self.session_db_path = os.path.realpath(session_db_path)
        self._ensure_schema_up_to_date()
src/ankisyncd/sync.py (new file): 634 lines

@@ -0,0 +1,634 @@
# -*- coding: utf-8 -*-
# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

# Taken from https://github.com/ankitects/anki/blob/cca3fcb2418880d0430a5c5c2e6b81ba260065b7/anki/sync.py

import io
import gzip
import random
import requests
import json
import os

from anki.db import DB, DBError
from anki.utils import ids2str, intTime, platDesc, checksum, devMode
from anki.consts import *
from anki.config import ConfigManager
from anki.utils import versionWithBuild
import anki
from anki.lang import ngettext


# https://github.com/ankitects/anki/blob/04b1ca75599f18eb783a8bf0bdeeeb32362f4da0/rslib/src/sync/http_client.rs#L11
SYNC_VER = 10
# https://github.com/ankitects/anki/blob/cca3fcb2418880d0430a5c5c2e6b81ba260065b7/anki/consts.py#L50
SYNC_ZIP_SIZE = int(2.5*1024*1024)
# https://github.com/ankitects/anki/blob/cca3fcb2418880d0430a5c5c2e6b81ba260065b7/anki/consts.py#L51
SYNC_ZIP_COUNT = 25

# syncing vars
HTTP_TIMEOUT = 90
HTTP_PROXY = None
HTTP_BUF_SIZE = 64*1024

# Incremental syncing
##########################################################################

class Syncer(object):
    def __init__(self, col, server=None):
        self.col = col
        self.server = server

    def meta(self):
        return dict(
            mod=self.col.mod,
            scm=self.col.scm,
            usn=self.col._usn,
            ts=intTime(),
            musn=0,
            msg="",
            cont=True
        )

    def changes(self):
        "Bundle up small objects."
        d = dict(models=self.getModels(),
                 decks=self.getDecks(),
                 tags=self.getTags())
        if self.lnewer:
            d['conf'] = json.loads(self.col.backend.get_all_config())
            d['crt'] = self.col.crt
        return d

    def mergeChanges(self, lchg, rchg):
        # then the other objects
        self.mergeModels(rchg['models'])
        self.mergeDecks(rchg['decks'])
        self.mergeTags(rchg['tags'])
        if 'conf' in rchg:
            self.mergeConf(rchg['conf'])
        # this was left out of earlier betas
        if 'crt' in rchg:
            self.col.crt = rchg['crt']
        self.prepareToChunk()

    def sanityCheck(self, full):
        if not self.col.basicCheck():
            return "failed basic check"
        for t in "cards", "notes", "revlog", "graves":
            if self.col.db.scalar(
                    "select count() from %s where usn = -1" % t):
                return "%s had usn = -1" % t
        for g in self.col.decks.all():
            if g['usn'] == -1:
                return "deck had usn = -1"
        for t, usn in self.col.tags.allItems():
            if usn == -1:
                return "tag had usn = -1"
        found = False
        for m in self.col.models.all():
            if m['usn'] == -1:
                return "model had usn = -1"
        if found:
            self.col.models.save()
        self.col.sched.reset()
        # check for missing parent decks
        #self.col.sched.deckDueList()
        # return summary of deck
        return [
            list(self.col.sched.counts()),
            self.col.db.scalar("select count() from cards"),
            self.col.db.scalar("select count() from notes"),
            self.col.db.scalar("select count() from revlog"),
            self.col.db.scalar("select count() from graves"),
            len(self.col.models.all()),
            len(self.col.decks.all()),
            len(self.col.decks.allConf()),
        ]

    def usnLim(self):
        return "usn = -1"

    def finish(self, mod=None):
        self.col.ls = mod
        self.col._usn = self.maxUsn + 1
        # ensure we save the mod time even if no changes made
        self.col.db.mod = True
        self.col.save(mod=mod)
        return mod

    # Chunked syncing
    ##########################################################################

    def prepareToChunk(self):
        self.tablesLeft = ["revlog", "cards", "notes"]
        self.cursor = None

    def queryTable(self, table):
        lim = self.usnLim()
        if table == "revlog":
            return self.col.db.execute("""
select id, cid, ?, ease, ivl, lastIvl, factor, time, type
from revlog where %s""" % lim, self.maxUsn)
        elif table == "cards":
            return self.col.db.execute("""
select id, nid, did, ord, mod, ?, type, queue, due, ivl, factor, reps,
lapses, left, odue, odid, flags, data from cards where %s""" % lim, self.maxUsn)
        else:
            return self.col.db.execute("""
select id, guid, mid, mod, ?, tags, flds, '', '', flags, data
from notes where %s""" % lim, self.maxUsn)

    def chunk(self):
        buf = dict(done=False)
        while self.tablesLeft:
            curTable = self.tablesLeft.pop()
            buf[curTable] = self.queryTable(curTable)
            self.col.db.execute(
                f"update {curTable} set usn=? where usn=-1", self.maxUsn
            )
        if not self.tablesLeft:
            buf['done'] = True
        return buf

    def applyChunk(self, chunk):
        if "revlog" in chunk:
            self.mergeRevlog(chunk['revlog'])
        if "cards" in chunk:
            self.mergeCards(chunk['cards'])
        if "notes" in chunk:
            self.mergeNotes(chunk['notes'])

    # Deletions
    ##########################################################################

    def removed(self):
        cards = []
        notes = []
        decks = []

        curs = self.col.db.execute(
            "select oid, type from graves where usn = -1")

        for oid, type in curs:
            if type == REM_CARD:
                cards.append(oid)
            elif type == REM_NOTE:
                notes.append(oid)
            else:
                decks.append(oid)

        self.col.db.execute("update graves set usn=? where usn=-1",
                            self.maxUsn)

        return dict(cards=cards, notes=notes, decks=decks)

    def remove(self, graves):
        # remove card and the card's orphaned notes
        self.col.remove_cards_and_orphaned_notes(graves['cards'])

        # only notes
        self.col.remove_notes(graves['notes'])

        # since level 0 deck ,we only remove deck ,but backend will delete child,it is ok, the delete
        # will have once effect
        for oid in graves['decks']:
            self.col.decks.rem(oid)


        # we can place non-exist grave after above delete.
        localgcards = []
        localgnotes = []
        localgdecks = []
        curs = self.col.db.execute(
            "select oid, type from graves where usn = %d" % self.col.usn())

        for oid, type in curs:
            if type == REM_CARD:
                localgcards.append(oid)
            elif type == REM_NOTE:
                localgnotes.append(oid)
            else:
                localgdecks.append(oid)

        # n meaning non-exsiting grave in the server compared to client
        ncards = [ oid for oid in graves['cards'] if oid not in localgcards]
        for oid in ncards:
            self.col._logRem([oid], REM_CARD)

        nnotes = [ oid for oid in graves['notes'] if oid not in localgnotes]
        for oid in nnotes:
            self.col._logRem([oid], REM_NOTE)

        ndecks = [ oid for oid in graves['decks'] if oid not in localgdecks]
        for oid in ndecks:
            self.col._logRem([oid], REM_DECK)

    # Models
    ##########################################################################

    def getModels(self):
        mods = [m for m in self.col.models.all() if m['usn'] == -1]
        for m in mods:
            m['usn'] = self.maxUsn
        self.col.models.save()
        return mods

    def mergeModels(self, rchg):
        for r in rchg:
            l = self.col.models.get(r['id'])
            # if missing locally or server is newer, update
            if not l or r['mod'] > l['mod']:
                self.col.models.update(r)

    # Decks
    ##########################################################################

    def getDecks(self):
        decks = [g for g in self.col.decks.all() if g['usn'] == -1]
        for g in decks:
            g['usn'] = self.maxUsn
        dconf = [g for g in self.col.decks.allConf() if g['usn'] == -1]
        for g in dconf:
            g['usn'] = self.maxUsn
        self.col.decks.save()
        return [decks, dconf]

    def mergeDecks(self, rchg):
        for r in rchg[0]:
            l = self.col.decks.get(r['id'], False)
            # work around mod time being stored as string
            if l and not isinstance(l['mod'], int):
                l['mod'] = int(l['mod'])

            # if missing locally or server is newer, update
            if not l or r['mod'] > l['mod']:
                self.col.decks.update(r)
        for r in rchg[1]:
            try:
                l = self.col.decks.getConf(r['id'])
            except KeyError:
                l = None
            # if missing locally or server is newer, update
            if not l or r['mod'] > l['mod']:
                self.col.decks.updateConf(r)

    # Tags
    ##########################################################################

    def getTags(self):
        tags = []
        for t, usn in self.col.tags.allItems():
            if usn == -1:
                self.col.tags.tags[t] = self.maxUsn
                tags.append(t)
        self.col.tags.save()
        return tags

    def mergeTags(self, tags):
        self.col.tags.register(tags, usn=self.maxUsn)

    # Cards/notes/revlog
    ##########################################################################

    def mergeRevlog(self, logs):
        self.col.db.executemany(
            "insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)",
            logs)

    def newerRows(self, data, table, modIdx):
        ids = (r[0] for r in data)
        lmods = {}
        for id, mod in self.col.db.execute(
                "select id, mod from %s where id in %s and %s" % (
                    table, ids2str(ids), self.usnLim())):
            lmods[id] = mod
        update = []
        for r in data:
            if r[0] not in lmods or lmods[r[0]] < r[modIdx]:
                update.append(r)
        self.col.log(table, data)
        return update

    def mergeCards(self, cards):
        self.col.db.executemany(
            "insert or replace into cards values "
            "(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
            self.newerRows(cards, "cards", 4))

    def mergeNotes(self, notes):
        rows = self.newerRows(notes, "notes", 3)
        self.col.db.executemany(
            "insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)",
            rows)
        self.col.updateFieldCache([f[0] for f in rows])

    # Col config
    ##########################################################################

    def getConf(self):
        return self.col.conf

    def mergeConf(self, conf):
        self.col.backend.set_all_config(json.dumps(conf).encode())

# Wrapper for requests that tracks upload/download progress
##########################################################################

class AnkiRequestsClient(object):
    verify = True
    timeout = 60

    def __init__(self):
        self.session = requests.Session()

    def post(self, url, data, headers):
        data = _MonitoringFile(data)
        headers['User-Agent'] = self._agentName()
        return self.session.post(
            url, data=data, headers=headers, stream=True, timeout=self.timeout, verify=self.verify)

    def get(self, url, headers=None):
        if headers is None:
            headers = {}
        headers['User-Agent'] = self._agentName()
        return self.session.get(url, stream=True, headers=headers, timeout=self.timeout, verify=self.verify)

    def streamContent(self, resp):
        resp.raise_for_status()

        buf = io.BytesIO()
        for chunk in resp.iter_content(chunk_size=HTTP_BUF_SIZE):
            buf.write(chunk)
        return buf.getvalue()

    def _agentName(self):
        from anki import version
        return "Anki {}".format(version)

# allow user to accept invalid certs in work/school settings
if os.environ.get("ANKI_NOVERIFYSSL"):
    AnkiRequestsClient.verify = False

    import warnings
    warnings.filterwarnings("ignore")

class _MonitoringFile(io.BufferedReader):
    def read(self, size=-1):
        data = io.BufferedReader.read(self, HTTP_BUF_SIZE)

        return data

# HTTP syncing tools
##########################################################################

class HttpSyncer(object):
    def __init__(self, hkey=None, client=None, hostNum=None):
        self.hkey = hkey
        self.skey = checksum(str(random.random()))[:8]
        self.client = client or AnkiRequestsClient()
        self.postVars = {}
        self.hostNum = hostNum
        self.prefix = "sync/"

    def syncURL(self):
        if devMode:
            url = "https://l1sync.ankiweb.net/"
        else:
            url = SYNC_BASE % (self.hostNum or "")
        return url + self.prefix

    def assertOk(self, resp):
        # not using raise_for_status() as aqt expects this error msg
        if resp.status_code != 200:
            raise Exception("Unknown response code: %s" % resp.status_code)

    # Posting data as a file
    ######################################################################
    # We don't want to post the payload as a form var, as the percent-encoding is
    # costly. We could send it as a raw post, but more HTTP clients seem to
    # support file uploading, so this is the more compatible choice.

    def _buildPostData(self, fobj, comp):
        BOUNDARY=b"Anki-sync-boundary"
        bdry = b"--"+BOUNDARY
        buf = io.BytesIO()
        # post vars
        self.postVars['c'] = 1 if comp else 0
        for (key, value) in list(self.postVars.items()):
            buf.write(bdry + b"\r\n")
            buf.write(
                ('Content-Disposition: form-data; name="%s"\r\n\r\n%s\r\n' %
                 (key, value)).encode("utf8"))
        # payload as raw data or json
        rawSize = 0
        if fobj:
            # header
            buf.write(bdry + b"\r\n")
            buf.write(b"""\
Content-Disposition: form-data; name="data"; filename="data"\r\n\
Content-Type: application/octet-stream\r\n\r\n""")
            # write file into buffer, optionally compressing
            if comp:
                tgt = gzip.GzipFile(mode="wb", fileobj=buf, compresslevel=comp)
            else:
                tgt = buf
            while 1:
                data = fobj.read(65536)
                if not data:
                    if comp:
                        tgt.close()
                    break
                rawSize += len(data)
                tgt.write(data)
            buf.write(b"\r\n")
        buf.write(bdry + b'--\r\n')
        size = buf.tell()
        # connection headers
        headers = {
            'Content-Type': 'multipart/form-data; boundary=%s' % BOUNDARY.decode("utf8"),
            'Content-Length': str(size),
        }
        buf.seek(0)

        if size >= 100*1024*1024 or rawSize >= 250*1024*1024:
            raise Exception("Collection too large to upload to AnkiWeb.")

        return headers, buf

    def req(self, method, fobj=None, comp=6, badAuthRaises=True):
        headers, body = self._buildPostData(fobj, comp)

        r = self.client.post(self.syncURL()+method, data=body, headers=headers)
        if not badAuthRaises and r.status_code == 403:
            return False
        self.assertOk(r)

        buf = self.client.streamContent(r)
        return buf

# Incremental sync over HTTP
######################################################################

class RemoteServer(HttpSyncer):
    def __init__(self, hkey, hostNum):
        super().__init__(self, hkey, hostNum=hostNum)

    def hostKey(self, user, pw):
        "Returns hkey or none if user/pw incorrect."
        self.postVars = dict()
        ret = self.req(
            "hostKey", io.BytesIO(json.dumps(dict(u=user, p=pw)).encode("utf8")),
            badAuthRaises=False)
        if not ret:
            # invalid auth
            return
        self.hkey = json.loads(ret.decode("utf8"))['key']
        return self.hkey

    def meta(self):
        self.postVars = dict(
            k=self.hkey,
            s=self.skey,
        )
        ret = self.req(
            "meta", io.BytesIO(json.dumps(dict(
                v=SYNC_VER, cv="ankidesktop,%s,%s"%(versionWithBuild(), platDesc()))).encode("utf8")),
            badAuthRaises=False)
        if not ret:
            # invalid auth
            return
        return json.loads(ret.decode("utf8"))

    def applyGraves(self, **kw):
        return self._run("applyGraves", kw)

    def applyChanges(self, **kw):
        return self._run("applyChanges", kw)

    def start(self, **kw):
        return self._run("start", kw)

    def chunk(self, **kw):
        return self._run("chunk", kw)

    def applyChunk(self, **kw):
        return self._run("applyChunk", kw)

    def sanityCheck2(self, **kw):
        return self._run("sanityCheck2", kw)

    def finish(self, **kw):
        return self._run("finish", kw)

    def abort(self, **kw):
        return self._run("abort", kw)

    def _run(self, cmd, data):
        return json.loads(
            self.req(cmd, io.BytesIO(json.dumps(data).encode("utf8"))).decode("utf8"))

# Full syncing
##########################################################################

class FullSyncer(HttpSyncer):
    def __init__(self, col, hkey, client, hostNum):
        super().__init__(self, hkey, client, hostNum=hostNum)
        self.postVars = dict(
            k=self.hkey,
            v="ankidesktop,%s,%s"%(anki.version, platDesc()),
        )
        self.col = col

    def download(self):
        localNotEmpty = self.col.db.scalar("select 1 from cards")
        self.col.close()
        cont = self.req("download")
        tpath = self.col.path + ".tmp"
        if cont == "upgradeRequired":
            return
        open(tpath, "wb").write(cont)
        # check the received file is ok
        d = DB(tpath)
        assert d.scalar("pragma integrity_check") == "ok"
        remoteEmpty = not d.scalar("select 1 from cards")
        d.close()
        # accidental clobber?
        if localNotEmpty and remoteEmpty:
            os.unlink(tpath)
            return "downloadClobber"
        # overwrite existing collection
        os.unlink(self.col.path)
        os.rename(tpath, self.col.path)
        self.col = None

    def upload(self):
        "True if upload successful."
        # make sure it's ok before we try to upload
        if self.col.db.scalar("pragma integrity_check") != "ok":
            return False
        if not self.col.basicCheck():
            return False
        # apply some adjustments, then upload
        self.col.beforeUpload()
        if self.req("upload", open(self.col.path, "rb")) != b"OK":
            return False
        return True

# Remote media syncing
##########################################################################

class RemoteMediaServer(HttpSyncer):
    def __init__(self, col, hkey, client, hostNum):
        self.col = col
        super().__init__(self, hkey, client, hostNum=hostNum)
        self.prefix = "msync/"

    def begin(self):
        self.postVars = dict(
            k=self.hkey,
            v="ankidesktop,%s,%s"%(anki.version, platDesc())
        )
        ret = self._dataOnly(self.req(
            "begin", io.BytesIO(json.dumps(dict()).encode("utf8"))))
        self.skey = ret['sk']
        return ret

    # args: lastUsn
    def mediaChanges(self, **kw):
        self.postVars = dict(
            sk=self.skey,
        )
        return self._dataOnly(
            self.req("mediaChanges", io.BytesIO(json.dumps(kw).encode("utf8"))))

    # args: files
    def downloadFiles(self, **kw):
        return self.req("downloadFiles", io.BytesIO(json.dumps(kw).encode("utf8")))

    def uploadChanges(self, zip):
        # no compression, as we compress the zip file instead
        return self._dataOnly(
            self.req("uploadChanges", io.BytesIO(zip), comp=0))

    # args: local
    def mediaSanity(self, **kw):
        return self._dataOnly(
            self.req("mediaSanity", io.BytesIO(json.dumps(kw).encode("utf8"))))

    def _dataOnly(self, resp):
        resp = json.loads(resp.decode("utf8"))
        if resp['err']:
            self.col.log("error returned:%s"%resp['err'])
            raise Exception("SyncError:%s"%resp['err'])
        return resp['data']

    # only for unit tests
    def mediatest(self, cmd):
        self.postVars = dict(
            k=self.hkey,
        )
        return self._dataOnly(
            self.req("newMediaTest", io.BytesIO(
                json.dumps(dict(cmd=cmd)).encode("utf8"))))
@@ -35,24 +35,24 @@ from webob.dec import wsgify
|
||||
from webob.exc import *
|
||||
|
||||
import anki.db
|
||||
import anki.sync
|
||||
import anki.utils
|
||||
from anki.consts import SYNC_VER, SYNC_ZIP_SIZE, SYNC_ZIP_COUNT
|
||||
from anki.consts import REM_CARD, REM_NOTE
|
||||
|
||||
from ankisyncd.users import get_user_manager
|
||||
from ankisyncd.sessions import get_session_manager
|
||||
from ankisyncd.full_sync import get_full_sync_manager
|
||||
from ankisyncd.sessions import get_session_manager
|
||||
from ankisyncd.sync import Syncer, SYNC_VER, SYNC_ZIP_SIZE, SYNC_ZIP_COUNT
|
||||
from ankisyncd.users import get_user_manager
|
||||
|
||||
logger = logging.getLogger("ankisyncd")
|
||||
|
||||
|
||||
class SyncCollectionHandler(anki.sync.Syncer):
|
||||
class SyncCollectionHandler(Syncer):
|
||||
operations = ['meta', 'applyChanges', 'start', 'applyGraves', 'chunk', 'applyChunk', 'sanityCheck2', 'finish']
|
||||
|
||||
def __init__(self, col):
|
||||
def __init__(self, col, session):
|
||||
# So that 'server' (the 3rd argument) can't get set
|
||||
anki.sync.Syncer.__init__(self, col)
|
||||
super().__init__(col)
|
||||
self.session = session
|
||||
|
||||
@staticmethod
|
||||
def _old_client(cv):
|
||||
@@ -62,11 +62,12 @@ class SyncCollectionHandler(anki.sync.Syncer):
|
||||
note = {"alpha": 0, "beta": 0, "rc": 0}
|
||||
client, version, platform = cv.split(',')
|
||||
|
||||
for name in note.keys():
|
||||
if name in version:
|
||||
vs = version.split(name)
|
||||
version = vs[0]
|
||||
note[name] = int(vs[-1])
|
||||
if 'arch' not in version:
|
||||
for name in note.keys():
|
||||
if name in version:
|
||||
vs = version.split(name)
|
||||
version = vs[0]
|
||||
note[name] = int(vs[-1])
|
||||
|
||||
# convert the version string, ignoring non-numeric suffixes like in beta versions of Anki
|
||||
version_nosuffix = re.sub(r'[^0-9.].*$', '', version)
|
||||
@@ -92,17 +93,18 @@ class SyncCollectionHandler(anki.sync.Syncer):
|
||||
return {"cont": False, "msg": "Your client doesn't support the v{} scheduler.".format(self.col.schedVer())}
|
||||
|
||||
# Make sure the media database is open!
|
||||
if self.col.media.db is None:
|
||||
self.col.media.connect()
|
||||
self.col.media.connect()
|
||||
|
||||
return {
|
||||
'scm': self.col.scm,
|
||||
'ts': anki.utils.intTime(),
|
||||
'mod': self.col.mod,
|
||||
'scm': self.col.scm,
|
||||
'usn': self.col._usn,
|
||||
'ts': anki.utils.intTime(),
|
||||
'musn': self.col.media.lastUsn(),
|
||||
'uname': self.session.name,
|
||||
'msg': '',
|
||||
'cont': True,
|
||||
'hostNum': 0,
|
||||
}
|
||||
|
||||
def usnLim(self):
|
||||
@@ -111,8 +113,10 @@ class SyncCollectionHandler(anki.sync.Syncer):
|
||||
# ankidesktop >=2.1rc2 sends graves in applyGraves, but still expects
|
||||
# server-side deletions to be returned by start
|
||||
def start(self, minUsn, lnewer, graves={"cards": [], "notes": [], "decks": []}, offset=None):
|
||||
if offset is not None:
|
||||
raise NotImplementedError('You are using the experimental V2 scheduler, which is not supported by the server.')
|
||||
# The offset para is passed by client V2 scheduler,which is minutes_west.
|
||||
# Since now have not thorougly test the V2 scheduler, we leave this comments here, and
|
||||
# just enable the V2 scheduler in the serve code.
|
||||
|
||||
self.maxUsn = self.col._usn
|
||||
self.minUsn = minUsn
|
||||
self.lnewer = not lnewer
@@ -130,14 +134,18 @@ class SyncCollectionHandler(anki.sync.Syncer):
        self.mergeChanges(lchg, self.rchg)
        return lchg

    def sanityCheck2(self, client):
        server = self.sanityCheck()
    def sanityCheck2(self, client, full=None):
        server = self.sanityCheck(full)
        if client != server:
            logger.info(
                f"sanity check failed with server: {server} client: {client}"
            )

            return dict(status="bad", c=client, s=server)
        return dict(status="ok")

    def finish(self, mod=None):
        return anki.sync.Syncer.finish(self, anki.utils.intTime(1000))
        return super().finish(anki.utils.intTime(1000))

    # This function had to be put here in its entirety because Syncer.removed()
    # doesn't use self.usnLim() (which we override in this class) in queries.
@@ -176,8 +184,9 @@ class SyncCollectionHandler(anki.sync.Syncer):
class SyncMediaHandler:
    operations = ['begin', 'mediaChanges', 'mediaSanity', 'uploadChanges', 'downloadFiles']

    def __init__(self, col):
    def __init__(self, col, session):
        self.col = col
        self.session = session

    def begin(self, skey):
        return {
@@ -230,13 +239,16 @@ class SyncMediaHandler:
        # Remove media files that were removed on the client.
        media_to_remove = []
        for normname, ordinal in meta:
            if ordinal == '':
            if not ordinal:
                media_to_remove.append(self._normalize_filename(normname))

        # Add media files that were added on the client.
        media_to_add = []
        usn = self.col.media.lastUsn()
        oldUsn = usn
        media_dir = self.col.media.dir()
        os.makedirs(media_dir, exist_ok=True)

        for i in zip_file.infolist():
            if i.filename == "_meta":  # Ignore previously retrieved metadata.
                continue
@@ -244,7 +256,7 @@ class SyncMediaHandler:
            file_data = zip_file.read(i)
            csum = anki.utils.checksum(file_data)
            filename = self._normalize_filename(meta[int(i.filename)][0])
            file_path = os.path.join(self.col.media.dir(), filename)
            file_path = os.path.join(media_dir, filename)

            # Save file to media directory.
            with open(file_path, 'wb') as f:
@@ -263,9 +275,7 @@ class SyncMediaHandler:
        self._remove_media_files(media_to_remove)

        if media_to_add:
            self.col.media.db.executemany(
                "INSERT OR REPLACE INTO media VALUES (?,?,?)", media_to_add)
            self.col.media.db.commit()
            self.col.media.addMedia(media_to_add)

        assert self.col.media.lastUsn() == oldUsn + processed_count  # TODO: move to some unit test
        return processed_count
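For reference, a zip of the shape that uploadChanges consumes can be sketched as follows; the member names and the structure of the _meta entry are inferred from the loop above (a falsy second field marks a client-side deletion), and the file contents are invented:

import io
import json
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    # Numbered members hold the raw file data; "_meta" maps each member
    # back to its real filename.
    zf.writestr('0', b'hello')
    zf.writestr('_meta', json.dumps([
        ['foo.jpg', '0'],   # added or changed on the client
        ['old.jpg', ''],    # empty ordinal: deleted on the client
    ]))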
@@ -294,7 +304,6 @@ class SyncMediaHandler:
        for filename in filenames:
            try:
                self.col.media.syncDelete(filename)
                self.col.media.db.commit()
            except OSError as err:
                logger.error("Error when removing file '%s' from media dir: "
                             "%s" % (filename, str(err)))
@@ -321,10 +330,9 @@ class SyncMediaHandler:
    def mediaChanges(self, lastUsn):
        result = []
        server_lastUsn = self.col.media.lastUsn()
        fname = csum = None

        if lastUsn < server_lastUsn or lastUsn == 0:
            for fname, usn, csum, in self.col.media.db.execute("select fname,usn,csum from media order by usn desc limit ?", server_lastUsn - lastUsn):
            for fname, usn, csum, in self.col.media.changes(lastUsn):
                result.append([fname, usn, csum])

        # anki assumes server_lastUsn == result[-1][1]
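To make the USN bookkeeping concrete (values invented): if a client last synced at usn 7 while the server is at usn 10, the loop above should yield the rows with usn 8, 9 and 10 so that the last row carries the server's lastUsn:

# Invented values illustrating the contract noted above.
lastUsn, server_lastUsn = 7, 10
result = [['a.jpg', 8, 'c8'], ['b.jpg', 9, 'c9'], ['c.jpg', 10, 'c10']]
assert result[-1][1] == server_lastUsn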
@@ -376,7 +384,7 @@ class SyncUserSession:
            raise Exception("no handler for {}".format(operation))

        if getattr(self, attr) is None:
            setattr(self, attr, handler_class(col))
            setattr(self, attr, handler_class(col, self))
        handler = getattr(self, attr)
        # The col object may actually be new now! This happens when we close a collection
        # for inactivity and then later re-open it (creating a new Collection object).
@@ -394,9 +402,6 @@ class SyncApp:
        self.base_media_url = config['base_media_url']
        self.setup_new_collection = None

        self.prehooks = {}
        self.posthooks = {}

        self.user_manager = get_user_manager(config)
        self.session_manager = get_session_manager(config)
        self.full_sync_manager = get_full_sync_manager(config)
@@ -408,39 +413,6 @@ class SyncApp:
        if not self.base_media_url.endswith('/'):
            self.base_media_url += '/'

    # backwards compat
    @property
    def hook_pre_sync(self):
        return self.prehooks.get("start")

    @hook_pre_sync.setter
    def hook_pre_sync(self, value):
        self.prehooks['start'] = value

    @property
    def hook_post_sync(self):
        return self.posthooks.get("finish")

    @hook_post_sync.setter
    def hook_post_sync(self, value):
        self.posthooks['finish'] = value

    @property
    def hook_upload(self):
        return self.prehooks.get("upload")

    @hook_upload.setter
    def hook_upload(self, value):
        self.prehooks['upload'] = value

    @property
    def hook_download(self):
        return self.posthooks.get("download")

    @hook_download.setter
    def hook_download(self, value):
        self.posthooks['download'] = value
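These dictionaries are keyed by operation name ('start', 'finish', 'upload', 'download'), so the properties above are thin aliases; a hypothetical registration, where app stands for an existing SyncApp instance:

# Hypothetical illustration; 'app' is an existing SyncApp instance.
def log_sync_start(session):
    print("sync started for", session.name)

app.prehooks['start'] = log_sync_start  # same effect as: app.hook_pre_sync = log_sync_start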

    def generateHostKey(self, username):
        """Generates a new host key to be used by the given username to identify their session.
        This value is random."""
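The docstring leaves the generation scheme open; one plausible sketch, an assumption rather than the implementation used here, mints a random hex token:

import binascii
import os

def generate_host_key_sketch():
    # Assumption for illustration: 16 random bytes rendered as hex.
    return binascii.hexlify(os.urandom(16)).decode('ascii')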
@@ -495,7 +467,7 @@ class SyncApp:
    def __call__(self, req):
        # Get and verify the session
        try:
            hkey = req.POST['k']
            hkey = req.params['k']
        except KeyError:
            hkey = None

@@ -547,39 +519,22 @@ class SyncApp:

            self.session_manager.save(hkey, session)
            session = self.session_manager.load(hkey, self.create_session)

            thread = session.get_thread()

            if url in self.prehooks:
                thread.execute(self.prehooks[url], [session])

            result = self._execute_handler_method_in_thread(url, data, session)

            # If it's a complex data type, we convert it to JSON
            if type(result) not in (str, bytes, Response):
                result = json.dumps(result)

            if url in self.posthooks:
                thread.execute(self.posthooks[url], [session])

            return result

        elif url == 'upload':
            thread = session.get_thread()
            if url in self.prehooks:
                thread.execute(self.prehooks[url], [session])
            result = thread.execute(self.operation_upload, [data['data'], session])
            if url in self.posthooks:
                thread.execute(self.posthooks[url], [session])
            return result

        elif url == 'download':
            thread = session.get_thread()
            if url in self.prehooks:
                thread.execute(self.prehooks[url], [session])
            result = thread.execute(self.operation_download, [session])
            if url in self.posthooks:
                thread.execute(self.posthooks[url], [session])
            return result

        # This was one of our operations but it didn't get handled... Oops!
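Schematically, all three branches above follow one pattern: run the operation on the session's thread, bracketed by its optional pre- and post-hooks. A condensed sketch of that shared shape (an illustration, not a drop-in replacement for the code above):

# Condensed sketch of the dispatch pattern shared by the branches above.
def run_hooked(thread, prehooks, posthooks, url, session, operation, args):
    if url in prehooks:
        thread.execute(prehooks[url], [session])
    result = thread.execute(operation, args)
    if url in posthooks:
        thread.execute(posthooks[url], [session])
    return result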

91
src/requirements-dev.txt
Normal file
@@ -0,0 +1,91 @@
THE FILE WAS GENERATED BY POETRY, DO NOT EDIT!

anki==2.1.37; python_version >= "3.8"
appnope==0.1.2; platform_system == "Darwin" and python_version >= "3.7" and sys_platform == "darwin"
argon2-cffi==20.1.0; python_version >= "3.5"
async-generator==1.10; python_version >= "3.6"
attrs==20.3.0; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.5"
backcall==0.2.0; python_version >= "3.7"
beautifulsoup4==4.9.3
bleach==3.2.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"
certifi==2020.12.5; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
cffi==1.14.4; implementation_name === "pypy" and python_version >= "3.5"
chardet==4.0.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
click==7.1.2; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.5"
colorama==0.4.4; python_version >= "3.7" and python_full_version < "3.0.0" and sys_platform == "win32" or sys_platform == "win32" and python_version >= "3.7" and python_full_version >= "3.5.0"
decorator==4.4.2; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.2.0")
defusedxml==0.6.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"
distro==1.5.0
entrypoints==0.3; python_version >= "3.6"
future==0.18.2; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.5"
idna==2.10; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
ipykernel==5.4.2; python_version >= "3.6"
ipython-genutils==0.2.0; python_version >= "3.7"
ipython==7.19.0; python_version >= "3.7"
ipywidgets==7.5.1
jedi==0.17.2; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7"
jinja2==2.11.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"
joblib==1.0.0; python_version >= "3.6"
json5==0.9.5; python_version >= "3.5"
jsonschema==3.2.0; python_version >= "3.5"
jupyter-client==6.1.7; python_version >= "3.6"
jupyter-console==6.2.0; python_version >= "3.6"
jupyter-core==4.7.0; python_version >= "3.6"
jupyter==1.0.0
jupyterlab-pygments==0.1.2; python_version >= "3.6"
jupyterlab-server==1.2.0; python_version >= "3.5"
jupyterlab==2.2.9; python_version >= "3.5"
livereload==2.6.3; python_version >= "3.5"
lunr==0.5.8; python_version >= "3.5"
markdown==3.3.3; python_version >= "3.6"
markupsafe==1.1.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.5"
mistune==0.8.4; python_version >= "3.6"
mkdocs==1.1.2; python_version >= "3.5"
nbclient==0.5.1; python_version >= "3.6"
nbconvert==6.0.7; python_version >= "3.6"
nbformat==5.0.8; python_version >= "3.6"
nest-asyncio==1.4.3; python_version >= "3.6"
nltk==3.5; python_version >= "3.5"
notebook==6.1.5; python_version >= "3.5"
orjson==3.4.6; python_version >= "3.8"
packaging==20.8; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"
pandocfilters==1.4.3; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6"
parso==0.7.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7"
pexpect==4.8.0; sys_platform != "win32" and python_version >= "3.7"
pickleshare==0.7.5; python_version >= "3.7"
prometheus-client==0.9.0; python_version >= "3.5"
prompt-toolkit==3.0.8; python_full_version >= "3.6.1" and python_version >= "3.7"
protobuf==3.14.0; python_version >= "3.8"
psutil==5.8.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0")
ptyprocess==0.6.0; os_name != "nt" and python_version >= "3.7" and sys_platform != "win32"
py==1.10.0; python_version >= "3.5" and python_full_version < "3.0.0" and implementation_name === "pypy" or implementation_name === "pypy" and python_version >= "3.5" and python_full_version >= "3.4.0"
pyaudio==0.2.11
pycparser==2.20; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.5"
pygments==2.7.3; python_version >= "3.7"
pyparsing==2.4.7; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"
pyrsistent==0.17.3; python_version >= "3.5"
pysocks==1.7.1; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
python-dateutil==2.8.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.5"
pywin32==300; sys_platform == "win32" and python_version >= "3.6"
pywinpty==0.5.7; os_name == "nt" and python_version >= "3.6"
pyyaml==5.3.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.5"
pyzmq==20.0.0; python_version >= "3.6"
qtconsole==5.0.1; python_version >= "3.6"
qtpy==1.9.0; python_version >= "3.6"
regex==2020.11.13; python_version >= "3.5"
requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0")
send2trash==1.5.0
six==1.15.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
soupsieve==2.1; python_version >= "3.8"
terminado==0.9.1; python_version >= "3.6"
testpath==0.4.4; python_version >= "3.6"
tornado==6.1; python_version >= "3.6"
tqdm==4.54.1; python_version >= "3.5" and python_full_version < "3.0.0" or python_version >= "3.5" and python_full_version >= "3.4.0"
traitlets==5.0.5; python_version >= "3.7"
urllib3==1.26.2; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" and python_version >= "3.8"
wcwidth==0.2.5; python_full_version >= "3.6.1" and python_version >= "3.6"
webencodings==0.5.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6"
webob==1.8.6; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0")
widgetsnbextension==3.5.1
-e src/.

22
src/requirements.txt
Normal file
@@ -0,0 +1,22 @@
THE FILE WAS GENERATED BY POETRY, DO NOT EDIT!

anki==2.1.37; python_version >= "3.8"
beautifulsoup4==4.9.3
certifi==2020.12.5; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
chardet==4.0.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
decorator==4.4.2; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.2.0")
distro==1.5.0
idna==2.10; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
markdown==3.3.3; python_version >= "3.6"
orjson==3.4.6; python_version >= "3.8"
protobuf==3.14.0; python_version >= "3.8"
psutil==5.8.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0")
pyaudio==0.2.11
pysocks==1.7.1; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.8"
requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0")
send2trash==1.5.0
six==1.15.0; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.8"
soupsieve==2.1; python_version >= "3.8"
urllib3==1.26.2; python_version >= "3.8" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" and python_version >= "3.8"
webob==1.8.6; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0")
@@ -8,6 +8,8 @@ import shutil
import anki
import anki.storage

from ankisyncd.collection import CollectionManager


class CollectionTestBase(unittest.TestCase):
    """Parent class for tests that need a collection set up and torn down."""
@@ -15,7 +17,9 @@ class CollectionTestBase(unittest.TestCase):
    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.collection_path = os.path.join(self.temp_dir, 'collection.anki2')
        self.collection = anki.storage.Collection(self.collection_path)
        cm = CollectionManager({})
        collectionWrapper = cm.get_collection(self.collection_path)
        self.collection = collectionWrapper._get_collection()
        self.mock_app = MagicMock()

    def tearDown(self):

@@ -5,7 +5,6 @@ import tempfile

from anki import Collection


class CollectionUtils:
    """
    Provides utility methods for creating, inspecting and manipulating anki
@@ -26,7 +25,7 @@ class CollectionUtils:

        file_path = os.path.join(self.tempdir, "collection.anki2")
        master_col = Collection(file_path)
        master_col.db.close()
        master_col.close()
        self.master_db_path = file_path

    def __enter__(self):

@@ -10,7 +10,7 @@ import tempfile
import unicodedata
import zipfile

from anki.consts import SYNC_ZIP_SIZE
from ankisyncd.sync import SYNC_ZIP_SIZE


def create_named_file(filename, file_contents=None):
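The body of create_named_file is not part of this diff; a minimal sketch of what such a helper might look like, assuming (from the call sites below) that it writes the given contents to a file with that name and returns the path:

import os
import tempfile

def create_named_file_sketch(filename, file_contents=None):
    # Assumption: place the file in a fresh temporary directory so the
    # requested name is preserved without clashing with earlier files.
    path = os.path.join(tempfile.mkdtemp(), filename)
    if file_contents is not None:
        with open(path, 'w') as f:
            f.write(file_contents)
    return path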
@@ -3,7 +3,7 @@ import io
import logging
import types

from anki.sync import HttpSyncer, RemoteServer, RemoteMediaServer
from ankisyncd.sync import HttpSyncer, RemoteServer, RemoteMediaServer


class MockServerConnection:

@@ -2,7 +2,7 @@
import os
import sqlite3 as sqlite
from anki.media import MediaManager
from anki.storage import DB
from anki.db import DB

mediamanager_orig_funcs = {
    "findChanges": None,
@@ -26,10 +26,6 @@ def monkeypatch_mediamanager():

    def make_cwd_safe(original_func):
        mediamanager_orig_funcs["findChanges"] = MediaManager.findChanges
        mediamanager_orig_funcs["mediaChangesZip"] = MediaManager.mediaChangesZip
        mediamanager_orig_funcs["addFilesFromZip"] = MediaManager.addFilesFromZip
        mediamanager_orig_funcs["syncDelete"] = MediaManager.syncDelete
        mediamanager_orig_funcs["_logChanges"] = MediaManager._logChanges

        def wrapper(instance, *args):
            old_cwd = os.getcwd()
@@ -42,27 +38,14 @@ def monkeypatch_mediamanager():
            return wrapper

    MediaManager.findChanges = make_cwd_safe(MediaManager.findChanges)
    MediaManager.mediaChangesZip = make_cwd_safe(MediaManager.mediaChangesZip)
    MediaManager.addFilesFromZip = make_cwd_safe(MediaManager.addFilesFromZip)
    MediaManager.syncDelete = make_cwd_safe(MediaManager.syncDelete)
    MediaManager._logChanges = make_cwd_safe(MediaManager._logChanges)
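The patch and its inverse below are meant to be used as a symmetric pair; typical usage in a test, for illustration:

monkeypatch_mediamanager()
try:
    pass  # exercise MediaManager methods from any working directory
finally:
    unpatch_mediamanager()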


def unpatch_mediamanager():
    """Undoes monkey patches to Anki's MediaManager."""

    MediaManager.findChanges = mediamanager_orig_funcs["findChanges"]
    MediaManager.mediaChangesZip = mediamanager_orig_funcs["mediaChangesZip"]
    MediaManager.addFilesFromZip = mediamanager_orig_funcs["addFilesFromZip"]
    MediaManager.syncDelete = mediamanager_orig_funcs["syncDelete"]
    MediaManager._logChanges = mediamanager_orig_funcs["_logChanges"]

    mediamanager_orig_funcs["findChanges"] = None
    mediamanager_orig_funcs["mediaChangesZip"] = None
    mediamanager_orig_funcs["addFilesFromZip"] = None
    mediamanager_orig_funcs["syncDelete"] = None
    mediamanager_orig_funcs["_logChanges"] = None


def monkeypatch_db():
    """
@@ -86,5 +86,6 @@ def add_files_to_server_mediadb(media, filepaths):

        with open(os.path.join(media.dir(), fname), 'wb') as f:
            f.write(data)
        media.db.execute("INSERT INTO media VALUES (?, ?, ?)", fname, media.lastUsn() + 1, csum)
        media.db.commit()
        media.addMedia(
            ((fname, media.lastUsn() + 1, csum),)
        )
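The addMedia call above takes an iterable of (fname, usn, csum) rows; for illustration (values invented, and a None checksum marks a deletion, as in the mediaChanges rows elsewhere in this diff):

rows = (
    ("foo.jpg", 1, "5d41402abc4b2a76b9719d911017c592"),  # added file
    ("old.jpg", 2, None),                                # deleted file
)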

@@ -1,5 +1,6 @@
import os.path
import unittest
from unittest.mock import MagicMock

import ankisyncd.media
import helpers.collection_utils
@@ -15,6 +16,9 @@ class ServerMediaManagerTest(unittest.TestCase):
        cls.colutils.clean_up()
        cls.colutils = None

    # This test is currently expected to fail because the _logChanges
    # method of the media manager does not exist anymore.
    @unittest.expectedFailure
    def test_upgrade(self):
        col = self.colutils.create_empty_col()
        cm = col.media
@@ -41,19 +45,26 @@ class ServerMediaManagerTest(unittest.TestCase):
            list(cm.db.execute("SELECT fname, csum FROM media")),
        )
        self.assertEqual(cm.lastUsn(), sm.lastUsn())
        self.assertEqual(list(sm.db.execute("SELECT usn FROM media")), [(161,), (161,)])
        self.assertEqual(
            list(sm.db.execute("SELECT usn FROM media")),
            [(161,), (161,)]
        )

    def test_mediaChanges_lastUsn_order(self):
        col = self.colutils.create_empty_col()
        col.media = ankisyncd.media.ServerMediaManager(col)
        mh = ankisyncd.sync_app.SyncMediaHandler(col)
        mh.col.media.db.execute("""
            INSERT INTO media (fname, usn, csum)
            VALUES
        session = MagicMock()
        session.name = 'test'
        mh = ankisyncd.sync_app.SyncMediaHandler(col, session)
        mh.col.media.addMedia(
            (
                ('fileA', 101, '53059abba1a72c7aff34a3eaf7fef10ed65541ce'),
                ('fileB', 100, 'a5ae546046d09559399c80fa7076fb10f1ce4bcd')
        """)

                ('fileB', 100, 'a5ae546046d09559399c80fa7076fb10f1ce4bcd'),
            )
        )
        # anki assumes mh.col.media.lastUsn() == mh.mediaChanges()['data'][-1][1]
        # ref: anki/sync.py:720 (commit cca3fcb2418880d0430a5c5c2e6b81ba260065b7)
        self.assertEqual(mh.mediaChanges(lastUsn=99)['data'][-1][1], mh.col.media.lastUsn())
        self.assertEqual(
            mh.mediaChanges(lastUsn=99)['data'][-1][1],
            mh.col.media.lastUsn()
        )

@@ -3,9 +3,9 @@ import os
import sqlite3
import tempfile
import unittest
from unittest.mock import MagicMock, Mock

from anki.consts import SYNC_VER

from ankisyncd.sync import SYNC_VER
from ankisyncd.sync_app import SyncCollectionHandler
from ankisyncd.sync_app import SyncUserSession

@@ -14,8 +14,13 @@ from collection_test_base import CollectionTestBase

class SyncCollectionHandlerTest(CollectionTestBase):
    def setUp(self):
        CollectionTestBase.setUp(self)
        self.syncCollectionHandler = SyncCollectionHandler(self.collection)
        super().setUp()
        self.session = MagicMock()
        self.session.name = 'test'
        self.syncCollectionHandler = SyncCollectionHandler(
            self.collection,
            self.session
        )

    def tearDown(self):
        CollectionTestBase.tearDown(self)
@@ -38,6 +43,7 @@ class SyncCollectionHandlerTest(CollectionTestBase):
            ','.join(('ankidesktop', '2.1.0', 'lin::')),
            ','.join(('ankidesktop', '2.1.6-beta2', 'lin::')),
            ','.join(('ankidesktop', '2.1.9 (dev)', 'lin::')),
            ','.join(('ankidesktop', '2.1.26 (arch-linux-2.1.26-1)', 'lin:arch:')),
            ','.join(('ankidroid', '2.2.3', '')),
            ','.join(('ankidroid', '2.3alpha4', '')),
            ','.join(('ankidroid', '2.3alpha5', '')),
@@ -60,6 +66,7 @@ class SyncCollectionHandlerTest(CollectionTestBase):
        self.assertTrue((type(meta['ts']) == int) and meta['ts'] > 0)
        self.assertEqual(meta['mod'], self.collection.mod)
        self.assertEqual(meta['usn'], self.collection._usn)
        self.assertEqual(meta['uname'], self.session.name)
        self.assertEqual(meta['musn'], self.collection.media.lastUsn())
        self.assertEqual(meta['msg'], '')
        self.assertEqual(meta['cont'], True)

@@ -1,435 +0,0 @@
# -*- coding: utf-8 -*-
import tempfile
import filecmp
import sqlite3
import os
import shutil

import helpers.file_utils
import helpers.server_utils
import helpers.db_utils
import anki.utils
from anki.sync import MediaSyncer
from helpers.mock_servers import MockRemoteMediaServer
from helpers.monkey_patches import monkeypatch_mediamanager, unpatch_mediamanager
from sync_app_functional_test_base import SyncAppFunctionalTestBase


class SyncAppFunctionalMediaTest(SyncAppFunctionalTestBase):
    def setUp(self):
        SyncAppFunctionalTestBase.setUp(self)

        monkeypatch_mediamanager()
        self.tempdir = tempfile.mkdtemp(prefix=self.__class__.__name__)
        self.hkey = self.mock_remote_server.hostKey("testuser", "testpassword")
        client_collection = self.colutils.create_empty_col()
        self.client_syncer = self.create_client_syncer(client_collection,
                                                       self.hkey,
                                                       self.server_test_app)

    def tearDown(self):
        self.hkey = None
        self.client_syncer = None
        unpatch_mediamanager()
        SyncAppFunctionalTestBase.tearDown(self)

    @staticmethod
    def create_client_syncer(collection, hkey, server_test_app):
        mock_remote_server = MockRemoteMediaServer(col=collection,
                                                   hkey=hkey,
                                                   server_test_app=server_test_app)
        media_syncer = MediaSyncer(col=collection,
                                   server=mock_remote_server)
        return media_syncer

    @staticmethod
    def file_checksum(fname):
        with open(fname, "rb") as f:
            return anki.utils.checksum(f.read())

    def media_dbs_differ(self, left_db_path, right_db_path, compare_timestamps=False):
        """
        Compares two media sqlite database files for equality. mtime and dirMod
        timestamps are not considered when comparing.

        :param left_db_path: path to the left db file
        :param right_db_path: path to the right db file
        :param compare_timestamps: flag determining if timestamp values
                                   (media.mtime and meta.dirMod) are included
                                   in the comparison
        :return: True if the specified databases differ, False else
        """

        if not os.path.isfile(left_db_path):
            raise IOError("file '" + left_db_path + "' does not exist")
        elif not os.path.isfile(right_db_path):
            raise IOError("file '" + right_db_path + "' does not exist")

        # Create temporary copies of the files to act on.
        newleft = os.path.join(self.tempdir, os.path.basename(left_db_path)) + ".left.tmp"
        shutil.copyfile(left_db_path, newleft)
        left_db_path = newleft

        newright = os.path.join(self.tempdir, os.path.basename(right_db_path)) + ".right.tmp"
        shutil.copyfile(right_db_path, newright)
        right_db_path = newright

        if not compare_timestamps:
            # Set all timestamps that are not NULL to 0.
            for dbPath in [left_db_path, right_db_path]:
                connection = sqlite3.connect(dbPath)

                connection.execute("""UPDATE media SET mtime=0
                                      WHERE mtime IS NOT NULL""")

                connection.execute("""UPDATE meta SET dirMod=0
                                      WHERE rowid=1""")
                connection.commit()
                connection.close()

        return helpers.db_utils.diff(left_db_path, right_db_path)

    def test_sync_empty_media_dbs(self):
        # With both the client and the server having no media to sync,
        # syncing should change nothing.
        self.assertEqual('noChanges', self.client_syncer.sync())
        self.assertEqual('noChanges', self.client_syncer.sync())

    def test_sync_file_from_server(self):
        """
        Adds a file on the server. After syncing, client and server should have
        the identical file in their media directories and media databases.
        """
        client = self.client_syncer
        server = helpers.server_utils.get_syncer_for_hkey(self.server_app,
                                                          self.hkey,
                                                          'media')

        # Create a test file.
        temp_file_path = helpers.file_utils.create_named_file("foo.jpg", "hello")

        # Add the test file to the server's collection.
        helpers.server_utils.add_files_to_server_mediadb(server.col.media, [temp_file_path])

        # Syncing should work.
        self.assertEqual(client.sync(), 'OK')

        # The test file should be present in the server's and in the client's
        # media directory.
        self.assertTrue(
            filecmp.cmp(os.path.join(client.col.media.dir(), "foo.jpg"),
                        os.path.join(server.col.media.dir(), "foo.jpg")))

        # Further syncing should do nothing.
        self.assertEqual(client.sync(), 'noChanges')

    def test_sync_file_from_client(self):
        """
        Adds a file on the client. After syncing, client and server should have
        the identical file in their media directories and media databases.
        """
        join = os.path.join
        client = self.client_syncer
        server = helpers.server_utils.get_syncer_for_hkey(self.server_app,
                                                          self.hkey,
                                                          'media')

        # Create a test file.
        temp_file_path = helpers.file_utils.create_named_file("foo.jpg", "hello")

        # Add the test file to the client's media collection.
        helpers.server_utils.add_files_to_client_mediadb(client.col.media,
                                                         [temp_file_path],
                                                         update_db=True)

        # Syncing should work.
        self.assertEqual(client.sync(), 'OK')

        # The same file should be present in both the client's and the server's
        # media directory.
        self.assertTrue(filecmp.cmp(join(client.col.media.dir(), "foo.jpg"),
                                    join(server.col.media.dir(), "foo.jpg")))

        # Further syncing should do nothing.
        self.assertEqual(client.sync(), 'noChanges')

        # The media data of client and server should be identical.
        self.assertEqual(
            list(client.col.media.db.execute("SELECT fname, csum FROM media")),
            list(server.col.media.db.execute("SELECT fname, csum FROM media"))
        )
        self.assertEqual(client.col.media.lastUsn(), server.col.media.lastUsn())

    def test_sync_different_files(self):
        """
        Adds a file on the client and a file with different name and content on
        the server. After syncing, both client and server should have both
        files in their media directories and databases.
        """
        join = os.path.join
        isfile = os.path.isfile
        client = self.client_syncer
        server = helpers.server_utils.get_syncer_for_hkey(self.server_app,
                                                          self.hkey,
                                                          'media')

        # Create two files and add one to the server and one to the client.
        file_for_client = helpers.file_utils.create_named_file("foo.jpg", "hello")
        file_for_server = helpers.file_utils.create_named_file("bar.jpg", "goodbye")

        helpers.server_utils.add_files_to_client_mediadb(client.col.media,
                                                         [file_for_client],
                                                         update_db=True)
        helpers.server_utils.add_files_to_server_mediadb(server.col.media, [file_for_server])

        # Syncing should work.
        self.assertEqual(client.sync(), 'OK')

        # Both files should be present in the client's and in the server's
        # media directories.
        self.assertTrue(isfile(join(client.col.media.dir(), "foo.jpg")))
        self.assertTrue(isfile(join(server.col.media.dir(), "foo.jpg")))
        self.assertTrue(filecmp.cmp(
            join(client.col.media.dir(), "foo.jpg"),
            join(server.col.media.dir(), "foo.jpg"))
        )
        self.assertTrue(isfile(join(client.col.media.dir(), "bar.jpg")))
        self.assertTrue(isfile(join(server.col.media.dir(), "bar.jpg")))
        self.assertTrue(filecmp.cmp(
            join(client.col.media.dir(), "bar.jpg"),
            join(server.col.media.dir(), "bar.jpg"))
        )

        # Further syncing should change nothing.
        self.assertEqual(client.sync(), 'noChanges')

    def test_sync_different_contents(self):
        """
        Adds a file to the client and a file with identical name but different
        contents to the server. After syncing, both client and server should
        have the server's version of the file in their media directories and
        databases.
        """
        join = os.path.join
        isfile = os.path.isfile
        client = self.client_syncer
        server = helpers.server_utils.get_syncer_for_hkey(self.server_app,
                                                          self.hkey,
                                                          'media')

        # Create two files with identical names but different contents and
        # checksums. Add one to the server and one to the client.
        file_for_client = helpers.file_utils.create_named_file("foo.jpg", "hello")
        file_for_server = helpers.file_utils.create_named_file("foo.jpg", "goodbye")

        helpers.server_utils.add_files_to_client_mediadb(client.col.media,
                                                         [file_for_client],
                                                         update_db=True)
        helpers.server_utils.add_files_to_server_mediadb(server.col.media, [file_for_server])

        # Syncing should work.
        self.assertEqual(client.sync(), 'OK')

        # A version of the file should be present in both the client's and the
        # server's media directory.
        self.assertTrue(isfile(join(client.col.media.dir(), "foo.jpg")))
        self.assertEqual(os.listdir(client.col.media.dir()), ['foo.jpg'])
        self.assertTrue(isfile(join(server.col.media.dir(), "foo.jpg")))
        self.assertEqual(os.listdir(server.col.media.dir()), ['foo.jpg'])
        self.assertEqual(client.sync(), 'noChanges')

        # Both files should have the contents of the server's version.
        _checksum = client.col.media._checksum
        self.assertEqual(_checksum(join(client.col.media.dir(), "foo.jpg")),
                         _checksum(file_for_server))
        self.assertEqual(_checksum(join(server.col.media.dir(), "foo.jpg")),
                         _checksum(file_for_server))

    def test_sync_add_and_delete_on_client(self):
        """
        Adds a file on the client. After syncing, the client and server should
        both have the file. Then removes the file from the client's directory
        and marks it as deleted in its database. After syncing again, the
        server should have removed its version of the file from its media dir
        and marked it as deleted in its db.
        """
        join = os.path.join
        isfile = os.path.isfile
        client = self.client_syncer
        server = helpers.server_utils.get_syncer_for_hkey(self.server_app,
                                                          self.hkey,
                                                          'media')

        # Create a test file.
        temp_file_path = helpers.file_utils.create_named_file("foo.jpg", "hello")

        # Add the test file to client's media collection.
        helpers.server_utils.add_files_to_client_mediadb(client.col.media,
                                                         [temp_file_path],
                                                         update_db=True)

        # Syncing client should work.
        self.assertEqual(client.sync(), 'OK')

        # The same file should be present in both client's and the server's
        # media directory.
        self.assertTrue(filecmp.cmp(join(client.col.media.dir(), "foo.jpg"),
                                    join(server.col.media.dir(), "foo.jpg")))

        # Syncing client again should do nothing.
        self.assertEqual(client.sync(), 'noChanges')

        # Remove files from client's media dir and write changes to its db.
        os.remove(join(client.col.media.dir(), "foo.jpg"))

        # TODO: client.col.media.findChanges() doesn't work here - why?
        client.col.media._logChanges()
        self.assertEqual(client.col.media.syncInfo("foo.jpg"), (None, 1))
        self.assertFalse(isfile(join(client.col.media.dir(), "foo.jpg")))

        # Syncing client again should work.
        self.assertEqual(client.sync(), 'OK')

        # server should have picked up the removal from client.
        self.assertEqual(server.col.media.syncInfo("foo.jpg"), (None, 0))
        self.assertFalse(isfile(join(server.col.media.dir(), "foo.jpg")))

        # Syncing client again should do nothing.
        self.assertEqual(client.sync(), 'noChanges')

    def test_sync_compare_database_to_expected(self):
        """
        Adds a test image file to the client's media directory. After syncing,
        the server's database should, except for timestamps, be identical to a
        database containing the expected data.
        """
        client = self.client_syncer

        # Add a test image file to the client's media collection but don't
        # update its media db since the desktop client updates that, using
        # findChanges(), only during syncs.
        support_file = helpers.file_utils.get_asset_path('blue.jpg')
        self.assertTrue(os.path.isfile(support_file))
        helpers.server_utils.add_files_to_client_mediadb(client.col.media,
                                                         [support_file],
                                                         update_db=False)

        # Syncing should work.
        self.assertEqual(client.sync(), "OK")

        # Create temporary db file with expected results.
        chksum = client.col.media._checksum(support_file)
        sql = ("""
            CREATE TABLE meta (dirMod int, lastUsn int);

            INSERT INTO `meta` (dirMod, lastUsn) VALUES (123456789,1);

            CREATE TABLE media (
                fname text not null primary key,
                csum text,
                mtime int not null,
                dirty int not null
            );

            INSERT INTO `media` (fname, csum, mtime, dirty) VALUES (
                'blue.jpg',
                '%s',
                1441483037,
                0
            );

            CREATE INDEX idx_media_dirty on media (dirty);
        """ % chksum)

        _, dbpath = tempfile.mkstemp(suffix=".anki2")
        helpers.db_utils.from_sql(dbpath, sql)

        # Except for timestamps, the client's db after sync should be identical
        # to the expected data.
        self.assertFalse(self.media_dbs_differ(
            client.col.media.db._path,
            dbpath
        ))
        os.unlink(dbpath)

    def test_sync_mediaChanges(self):
        client = self.client_syncer
        client2 = self.create_client_syncer(self.colutils.create_empty_col(), self.hkey, self.server_test_app)
        server = helpers.server_utils.get_syncer_for_hkey(self.server_app, self.hkey, 'media')
        self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], [])

        helpers.server_utils.add_files_to_client_mediadb(client.col.media, [
            helpers.file_utils.create_named_file("a", "lastUsn a"),
            helpers.file_utils.create_named_file("b", "lastUsn b"),
            helpers.file_utils.create_named_file("c", "lastUsn c"),
        ], update_db=True)
        self.assertEqual(client.sync(), "OK")
        self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], [])

        self.assertEqual(client2.sync(), "OK")
        os.remove(os.path.join(client2.col.media.dir(), "c"))
        client2.col.media._logChanges()
        self.assertEqual(client2.sync(), "OK")
        self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], [['c', 4, None]])
        self.assertEqual(client.sync(), "OK")
        self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], [])

        helpers.server_utils.add_files_to_client_mediadb(client.col.media, [
            helpers.file_utils.create_named_file("d", "lastUsn d"),
        ], update_db=True)
        client.col.media._logChanges()
        self.assertEqual(client.sync(), "OK")

        self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], [['d', 5, self.file_checksum(os.path.join(server.col.media.dir(), "d"))]])

        self.assertEqual(client2.sync(), "OK")
        self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], [])

        dpath = os.path.join(client.col.media.dir(), "d")
        with open(dpath, "a") as f:
            f.write("\nsome change")
        # files with the same mtime and name are considered equivalent by anki.media.MediaManager._changes
        os.utime(dpath, (315529200, 315529200))
        client.col.media._logChanges()
        self.assertEqual(client.sync(), "OK")
        self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], [['d', 6, self.file_checksum(os.path.join(server.col.media.dir(), "d"))]])
        self.assertEqual(client2.sync(), "OK")
        self.assertEqual(server.mediaChanges(lastUsn=client2.col.media.lastUsn())['data'], [])

    def test_sync_rename(self):
        """
        Adds 3 media files to the client's media directory, syncs and then
        renames them and syncs again. After syncing, both the client and the
        server should only have the renamed files.
        """
        client = self.client_syncer
        client2 = self.create_client_syncer(self.colutils.create_empty_col(), self.hkey, self.server_test_app)
        server = helpers.server_utils.get_syncer_for_hkey(self.server_app, self.hkey, 'media')
        self.assertEqual(server.mediaChanges(lastUsn=client.col.media.lastUsn())['data'], [])

        helpers.server_utils.add_files_to_client_mediadb(client.col.media, [
            helpers.file_utils.create_named_file("a.wav", "lastUsn a"),
            helpers.file_utils.create_named_file("b.wav", "lastUsn b"),
            helpers.file_utils.create_named_file("c.wav", "lastUsn c"),
        ], update_db=True)
        self.assertEqual(client.sync(), "OK")

        for fname in os.listdir(client.col.media.dir()):
            os.rename(
                os.path.join(client.col.media.dir(), fname),
                os.path.join(client.col.media.dir(), fname[:1] + ".mp3")
            )
        client.col.media._logChanges()
        self.assertEqual(client.sync(), "OK")
        self.assertEqual(
            set(os.listdir(server.col.media.dir())),
            {"a.mp3", "b.mp3", "c.mp3"},
        )
        self.assertEqual(
            set(os.listdir(client.col.media.dir())),
            set(os.listdir(server.col.media.dir())),
        )
        self.assertEqual(
            list(client.col.media.db.execute("SELECT fname, csum FROM media ORDER BY fname")),
            list(server.col.media.db.execute("SELECT fname, csum FROM media ORDER BY fname")),
        )