initial commit

Stijn Van Campenhout
2019-09-18 06:57:23 +02:00
commit c53c511b57
18 changed files with 1459 additions and 0 deletions

133
.gitignore vendored Normal file

@@ -0,0 +1,133 @@
# Created by https://www.gitignore.io/api/python
# Edit at https://www.gitignore.io/?templates=python
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# End of https://www.gitignore.io/api/python

14
Pipfile Normal file

@@ -0,0 +1,14 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
[packages]
requests = "*"
pyaml = "*"
sphinx = "*"
[requires]
python_version = "3.7"

250
Pipfile.lock generated Normal file

@@ -0,0 +1,250 @@
{
"_meta": {
"hash": {
"sha256": "880b0f4dc28652bfa8674e7ebeaae37eb25ec47d136a03791bcef5b6f7a5518d"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.7"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"alabaster": {
"hashes": [
"sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
"sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
],
"version": "==0.7.12"
},
"attrs": {
"hashes": [
"sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
"sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
],
"version": "==19.1.0"
},
"babel": {
"hashes": [
"sha256:af92e6106cb7c55286b25b38ad7695f8b4efb36a90ba483d7f7a6628c46158ab",
"sha256:e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28"
],
"version": "==2.7.0"
},
"certifi": {
"hashes": [
"sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
"sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
],
"version": "==2019.9.11"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"docutils": {
"hashes": [
"sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0",
"sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827",
"sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"
],
"version": "==0.15.2"
},
"idna": {
"hashes": [
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
],
"version": "==2.8"
},
"imagesize": {
"hashes": [
"sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8",
"sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5"
],
"version": "==1.1.0"
},
"jinja2": {
"hashes": [
"sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013",
"sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b"
],
"version": "==2.10.1"
},
"markupsafe": {
"hashes": [
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
"sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
"sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
"sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
"sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
"sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"
],
"version": "==1.1.1"
},
"packaging": {
"hashes": [
"sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
"sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
],
"version": "==19.1"
},
"pyaml": {
"hashes": [
"sha256:a2dcbc4a8bb00b541efd1c5a064d93474d4f41ded1484fbb08bec9d236523931",
"sha256:c79ae98ececda136a034115ca178ee8bf3aa7df236c488c2f55d12f177b88f1e"
],
"index": "pypi",
"version": "==19.4.1"
},
"pygments": {
"hashes": [
"sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127",
"sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
],
"version": "==2.4.2"
},
"pyparsing": {
"hashes": [
"sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
"sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
],
"version": "==2.4.2"
},
"pytz": {
"hashes": [
"sha256:26c0b32e437e54a18161324a2fca3c4b9846b74a8dccddd843113109e1116b32",
"sha256:c894d57500a4cd2d5c71114aaab77dbab5eabd9022308ce5ac9bb93a60a6f0c7"
],
"version": "==2019.2"
},
"pyyaml": {
"hashes": [
"sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9",
"sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4",
"sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8",
"sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696",
"sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34",
"sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9",
"sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73",
"sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299",
"sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b",
"sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae",
"sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681",
"sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41",
"sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8"
],
"version": "==5.1.2"
},
"requests": {
"hashes": [
"sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
"sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
],
"index": "pypi",
"version": "==2.22.0"
},
"six": {
"hashes": [
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
],
"version": "==1.12.0"
},
"snowballstemmer": {
"hashes": [
"sha256:713e53b79cbcf97bc5245a06080a33d54a77e7cce2f789c835a143bcdb5c033e"
],
"version": "==1.9.1"
},
"sphinx": {
"hashes": [
"sha256:0d586b0f8c2fc3cc6559c5e8fd6124628110514fda0e5d7c82e682d749d2e845",
"sha256:839a3ed6f6b092bb60f492024489cc9e6991360fb9f52ed6361acd510d261069"
],
"index": "pypi",
"version": "==2.2.0"
},
"sphinxcontrib-applehelp": {
"hashes": [
"sha256:edaa0ab2b2bc74403149cb0209d6775c96de797dfd5b5e2a71981309efab3897",
"sha256:fb8dee85af95e5c30c91f10e7eb3c8967308518e0f7488a2828ef7bc191d0d5d"
],
"version": "==1.0.1"
},
"sphinxcontrib-devhelp": {
"hashes": [
"sha256:6c64b077937330a9128a4da74586e8c2130262f014689b4b89e2d08ee7294a34",
"sha256:9512ecb00a2b0821a146736b39f7aeb90759834b07e81e8cc23a9c70bacb9981"
],
"version": "==1.0.1"
},
"sphinxcontrib-htmlhelp": {
"hashes": [
"sha256:4670f99f8951bd78cd4ad2ab962f798f5618b17675c35c5ac3b2132a14ea8422",
"sha256:d4fd39a65a625c9df86d7fa8a2d9f3cd8299a3a4b15db63b50aac9e161d8eff7"
],
"version": "==1.0.2"
},
"sphinxcontrib-jsmath": {
"hashes": [
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
],
"version": "==1.0.1"
},
"sphinxcontrib-qthelp": {
"hashes": [
"sha256:513049b93031beb1f57d4daea74068a4feb77aa5630f856fcff2e50de14e9a20",
"sha256:79465ce11ae5694ff165becda529a600c754f4bc459778778c7017374d4d406f"
],
"version": "==1.0.2"
},
"sphinxcontrib-serializinghtml": {
"hashes": [
"sha256:c0efb33f8052c04fd7a26c0a07f1678e8512e0faec19f4aa8f2473a8b81d5227",
"sha256:db6615af393650bf1151a6cd39120c29abaf93cc60db8c48eb2dddbfdc3a9768"
],
"version": "==1.1.3"
},
"urllib3": {
"hashes": [
"sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
"sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
],
"version": "==1.25.3"
}
},
"develop": {}
}

20
docs/Makefile Normal file

@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

35
docs/make.bat Normal file

@@ -0,0 +1,35 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

56
docs/source/conf.py Normal file

@@ -0,0 +1,56 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- Project information -----------------------------------------------------
project = 'rmapi'
copyright = '2019, Stijn Van Campenhout'
author = 'Stijn Van Campenhout'
# The full version, including alpha/beta/rc tags
release = '0.1'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.napoleon']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

20
docs/source/index.rst Normal file

@@ -0,0 +1,20 @@
.. rmapi documentation master file, created by
sphinx-quickstart on Tue Sep 17 19:24:29 2019.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to rmapi's documentation!
=================================
.. toctree::
:maxdepth: 2
:caption: Contents:
.. automodule:: rmapi
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

7
docs/source/modules.rst Normal file

@@ -0,0 +1,7 @@
rmapipy
=======
.. toctree::
:maxdepth: 4
rmapi

78
docs/source/rmapi.rst Normal file

@@ -0,0 +1,78 @@
rmapi package
=============
Submodules
----------
rmapi.api module
----------------
.. automodule:: rmapi.api
:members:
:undoc-members:
:show-inheritance:
rmapi.collections module
------------------------
.. automodule:: rmapi.collections
:members:
:undoc-members:
:show-inheritance:
rmapi.config module
-------------------
.. automodule:: rmapi.config
:members:
:undoc-members:
:show-inheritance:
rmapi.const module
------------------
.. automodule:: rmapi.const
:members:
:undoc-members:
:show-inheritance:
rmapi.document module
---------------------
.. automodule:: rmapi.document
:members:
:undoc-members:
:show-inheritance:
rmapi.exceptions module
-----------------------
.. automodule:: rmapi.exceptions
:members:
:undoc-members:
:show-inheritance:
rmapi.folder module
-------------------
.. automodule:: rmapi.folder
:members:
:undoc-members:
:show-inheritance:
rmapi.types module
------------------
.. automodule:: rmapi.types
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: rmapi
:members:
:undoc-members:
:show-inheritance:

0
rmapi/__init__.py Normal file

363
rmapi/api.py Normal file

@@ -0,0 +1,363 @@
import requests
from logging import getLogger
from datetime import datetime
import json
from typing import TypeVar
from uuid import uuid4
from .collections import Collection
from .config import load, dump
from .document import Document, ZipDocument, from_request_stream
from .folder import Folder
from .exceptions import AuthError, DocumentNotFound, ApiError
from .const import (RFC3339Nano,
USER_AGENT,
BASE_URL,
DEVICE_TOKEN_URL,
USER_TOKEN_URL,
DEVICE,)
log = getLogger("rmapipy.rmapi")
DocOrFolder = TypeVar('DocOrFolder', Document, Folder)
class Client(object):
"""API Client for Remarkable Cloud
This allows you to authenticate & communicate with the Remarkable Cloud
and does all the heavy lifting for you.
Attributes:
token_set: the authentication tokens
"""
token_set = {
"devicetoken": None,
"usertoken": None
}
def __init__(self):
config = load()
if "devicetoken" in config:
self.token_set["devicetoken"] = config["devicetoken"]
if "usertoken" in config:
self.token_set["usertoken"] = config["usertoken"]
def request(self, method: str, path: str,
data=None,
body=None, headers={},
params=None, stream=False) -> requests.Response:
"""Creates a request against the Remarkable Cloud API
This function automatically fills in the blanks of base
url & authentication.
Args:
method: The request method.
path: complete url or path to request.
data: raw data to put/post/...
body: the body to request with. This will be converted to json.
headers: a dict of additional headers to add to the request.
params: Query params to append to the request.
stream: Should the response be a stream?
Returns:
A Response instance containing most likely the response from
the server.
"""
if not path.startswith("http"):
if not path.startswith('/'):
path = '/' + path
url = f"{BASE_URL}{path}"
else:
url = path
_headers = {
"user-agent": USER_AGENT,
}
if self.token_set["usertoken"]:
token = self.token_set["usertoken"]
_headers["Authorization"] = f"Bearer {token}"
for k in headers.keys():
_headers[k] = headers[k]
log.debug("%s %s", url, _headers)
print(method, url, json.dumps(body))
r = requests.request(method, url,
json=body,
data=data,
headers=_headers,
params=params,
stream=stream)
print(r.status_code, r.text)
return r
def register_device(self, code: str) -> True:
"""Registers a device on the Remarkable Cloud.
This uses a unique code the user gets from
https://my.remarkable.com/connect/remarkable to register a new device
or client to be able to execute api calls.
Args:
code: A unique one-time code the user can get
at https://my.remarkable.com/connect/remarkable.
Returns:
True
Raises:
AuthError: We didn't receive a devicetoken from the Remarkable
Cloud.
"""
uuid = str(uuid4())
body = {
"code": code,
"deviceDesc": DEVICE,
"deviceID": uuid,
}
response = self.request("POST", DEVICE_TOKEN_URL, body=body)
if response.ok:
self.token_set["devicetoken"] = response.text
dump(self.token_set)
return True
else:
raise AuthError("Can't register device")
def renew_token(self) -> True:
"""Fetches a new user_token.
This is the second step of the authentication of the Remarkable Cloud.
Before each new session, you should fetch a new user token.
User tokens have an unknown expiration date.
Returns:
True
Raises:
AuthError: An error occurred while renewing the user token.
"""
if not self.token_set["devicetoken"]:
raise AuthError("Please register a device first")
token = self.token_set["devicetoken"]
response = self.request("POST", USER_TOKEN_URL, None, headers={
"Authorization": f"Bearer {token}"
})
if response.ok:
self.token_set["usertoken"] = response.text
dump(self.token_set)
return True
else:
raise AuthError("Can't renew token: {e}".format(
e=response.status_code))
def is_auth(self) -> bool:
"""Is the client authenticated
Returns:
bool: True if the client is authenticated
"""
if self.token_set["devicetoken"] and self.token_set["usertoken"]:
return True
else:
return False
def get_meta_items(self) -> Collection:
"""Returns a new collection from meta items.
It fetches all meta items from the Remarkable Cloud and stores them
in a collection, wrapping them in the correct class.
Returns:
Collection: a collection of Documents & Folders from the Remarkable
Cloud
"""
response = self.request("GET", "/document-storage/json/2/docs")
collection = Collection()
log.debug(response.text)
for item in response.json():
collection.add(item)
return collection
def get_doc(self, ID: str) -> DocOrFolder:
"""Get a meta item by ID
Fetch a meta item from the Remarkable Cloud by ID.
Args:
ID: The id of the meta item.
Returns:
A Document or Folder instance of the requested ID.
Raises:
DocumentNotFound: When a document cannot be found.
"""
log.debug(f"GETTING DOC {ID}")
response = self.request("GET", "/document-storage/json/2/docs",
params={
"doc": ID,
"withBlob": True
})
log.debug(response.url)
data_response = response.json()
log.debug(data_response)
if len(data_response) > 0:
if data_response[0]["Type"] == "CollectionType":
return Folder(**data_response[0])
elif data_response[0]["Type"] == "DocumentType":
return Document(**data_response[0])
else:
raise DocumentNotFound(f"Could not find document {ID}")
def download(self, document: Document) -> ZipDocument:
"""Download a ZipDocument
This will download a raw document from the Remarkable Cloud containing
the real document. See the documentation for ZipDocument for more
information.
Args:
document: A Document instance we should download
Returns:
A ZipDocument instance, containing the raw data files from a
document.
"""
if not document.BlobURLGet:
document = self.get_doc(document.ID)
log.debug("BLOB %s", document.BlobURLGet)
r = self.request("GET", document.BlobURLGet, stream=True)
return from_request_stream(document.ID, r)
def upload(self, zipDoc: ZipDocument, document: Document) -> True:
"""Upload a document to the cloud.
Add a new document to the Remarkable Cloud.
Args:
zipDoc: A ZipDocument instance containing the data of a Document.
document: the meta item that the zipDoc belongs to.
Raises:
ApiError: an error occurred while uploading the document.
"""
return True
def update_metadata(self, docorfolder: DocOrFolder) -> True:
"""Send an update of the current metadata of a meta object
Update the meta item.
Args:
docorfolder: A document or folder to update the meta information
from.
"""
req = docorfolder.to_dict()
req["Version"] = self.get_current_version(docorfolder) + 1
req["ModifiedClient"] = datetime.utcnow().strftime(RFC3339Nano)
res = self.request("PUT",
"/document-storage/json/2/upload/update-status",
body=[req])
return self.check_response(res)
def get_current_version(self, docorfolder: DocOrFolder) -> int:
"""Get the latest version info from a Document or Folder
This fetches the latest meta information from the Remarkable Cloud
and returns the version information.
Args:
docorfolder: A Document or Folder instance.
Returns:
the version information.
Raises:
DocumentNotFound: cannot find the requested Document or Folder.
ApiError: An error occurred while processing the request.
"""
try:
d = self.get_doc(docorfolder.ID)
except DocumentNotFound:
return 0
if not d:
return 0
return int(d.Version)
def create_folder(self, folder: Folder) -> True:
"""Create a new folder meta object.
This needs to be done in 3 steps:
1. Create an upload request for a new CollectionType meta object
2. Upload a zipfile with a *.content file containing
an empty object
3. Update the meta object with the new name.
Args:
folder: A folder instance.
Returns:
True if the folder is created.
"""
zipFolder, req = folder.create_request()
res = self.request("PUT", "/document-storage/json/2/upload/request",
body=[req])
if not res.ok:
raise ApiError(
f"upload request failed with status {res.status_code}",
response=res)
response = res.json()
if len(response) > 0:
dest = response[0].get("BlobURLPut", None)
if dest:
res = self.request("PUT", dest, data=zipFolder.read())
else:
raise ApiError(
"Cannot create a folder because BlobURLPut is not set",
response=res)
if res.ok:
self.update_metadata(folder)
return True
def check_response(self, response: requests.Response) -> True:
"""Check the response from an API call
Does some sanity checking on the response.
Args:
response: An API response
Returns:
True if the response looks ok
Raises:
ApiError: When the response contains an error
"""
if response.ok:
if len(response.json()) > 0:
if response.json()[0]["Success"]:
return True
else:
log.error("Got a non-success response")
msg = response.json()[0]["Message"]
log.error(msg)
raise ApiError(f"{msg}",
response=response)
else:
log.error("Got an empty response")
raise ApiError("Got an empty response",
response=response)
else:
log.error(f"Got an invalid HTTP response: {response.status_code}")
raise ApiError(
f"Got an invalid HTTP response: {response.status_code}",
response=response)
return True
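
Usage sketch (illustrative only, not one of the committed files): the Client docstring above describes the two-step authentication flow, device registration followed by user-token renewal. The snippet below exercises that flow using only methods defined in rmapi/api.py; the one-time code is a placeholder value obtained from https://my.remarkable.com/connect/remarkable.

from rmapi.api import Client
from rmapi.document import Document

client = Client()
if not client.is_auth():
    # Placeholder one-time code from https://my.remarkable.com/connect/remarkable
    client.register_device("abcdefgh")
client.renew_token()                         # fetch a fresh user token for this session

collection = client.get_meta_items()         # all Documents and Folders in the cloud
documents = [item for item in collection if isinstance(item, Document)]
if documents:
    zip_doc = client.download(documents[0])  # raw archive of the first document found
    zip_doc.dump("first_document")           # writes first_document.zip locally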

37
rmapi/collections.py Normal file

@@ -0,0 +1,37 @@
from .document import Document
from .folder import Folder
from typing import TypeVar
DocOrFolder = TypeVar('DocOrFolder', Document, Folder)
class Collection(object):
"""
A collection of meta items
"""
def __init__(self, *items):
self.items = []
for i in items:
self.items.append(i)
def add(self, docdict: dict) -> None:
if docdict.get("Type", None) == "DocumentType":
return self.addDocument(docdict)
elif docdict.get("Type", None) == "CollectionType":
return self.addFolder(docdict)
else:
raise TypeError("Unsupported type: {_type}"
.format(_type=docdict.get("Type", None)))
def addDocument(self, docdict: dict) -> None:
self.items.append(Document(**docdict))
def addFolder(self, dirdict: dict) -> None:
self.items.append(Folder(**dirdict))
def __len__(self) -> int:
return len(self.items)
def __getitem__(self, position: int) -> DocOrFolder:
return self.items[position]
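
Usage sketch (illustrative only, not one of the committed files): Collection.add() wraps the raw meta-item dictionaries returned by the cloud into Document or Folder instances based on their Type field; the dictionaries below use hypothetical minimal values.

from rmapi.collections import Collection

collection = Collection()
# Minimal, hypothetical meta items as the document-storage API would return them.
collection.add({"ID": "doc-1", "Type": "DocumentType", "VissibleName": "Notes"})
collection.add({"ID": "dir-1", "Type": "CollectionType", "VissibleName": "Projects"})

print(len(collection))  # 2
print(collection[0])    # <rmapi.document.Document doc-1>
print(collection[1])    # <rmapi.folder.Folder dir-1>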

32
rmapi/config.py Normal file

@@ -0,0 +1,32 @@
from pathlib import Path
from yaml import safe_load as yml_load
from yaml import dump as yml_dump
def load() -> dict:
"""
Load the .rmapi config file
"""
config_file_path = Path.joinpath(Path.home(), ".rmapi")
config = {}
if Path.exists(config_file_path):
with open(config_file_path, 'r') as config_file:
config = dict(yml_load(config_file.read()) or {})
return config
def dump(config: dict) -> True:
"""
Dump config to the .rmapi config file
"""
config_file_path = Path.joinpath(Path.home(), ".rmapi")
with open(config_file_path, 'w') as config_file:
config_file.write(yml_dump(config))
return True
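
Usage sketch (illustrative only, not one of the committed files): load() returns the parsed contents of ~/.rmapi, or an empty dict when the file is missing, and dump() writes a dict back to that same file; Client uses this pair to persist its token_set. Note that calling dump() overwrites ~/.rmapi in your home directory, so it is left commented out here; the token value is an example placeholder.

from rmapi.config import load, dump

config = load()                    # {} until a device has been registered
print(config.get("devicetoken"))   # None on a fresh machine

# dump() serializes the dict back to ~/.rmapi as YAML; uncomment to try it.
# dump({"devicetoken": "example-device-token"})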

10
rmapi/const.py Normal file

@@ -0,0 +1,10 @@
RFC3339Nano = "%Y-%m-%dT%H:%M:%SZ"
USER_AGENT = "rmapipy"
BASE_URL = "https://document-storage-production-dot-remarkable-production.appspot.com" # noqa
DEVICE_TOKEN_URL = "https://my.remarkable.com/token/json/2/device/new"
USER_TOKEN_URL = "https://my.remarkable.com/token/json/2/user/new"
DEVICE = "desktop-windows"

307
rmapi/document.py Normal file

@@ -0,0 +1,307 @@
from io import BytesIO
from zipfile import ZipFile, ZIP_DEFLATED
import shutil
from uuid import uuid4
import json
from requests import Response
class Document(object):
""" Document represents a document meta object as expected in most
calls by the Remarkable API"""
ID = ""
Version = 0
Message = ""
Succes = True
BlobURLGet = ""
BlobURLGetExpires = ""
BlobURLPut = ""
BlobURLPutExpires = ""
ModifiedClient = ""
Type = "DocumentType"
VissibleName = ""
CurrentPage = 1
Bookmarked = False
Parent = ""
def __init__(self, **kwargs):
kkeys = self.to_dict().keys()
for k in kkeys:
setattr(self, k, kwargs.get(k, getattr(self, k)))
def to_dict(self):
return {
"ID": self.ID,
"Version": self.Version,
"Message": self.Message,
"Succes": self.Succes,
"BlobURLGet": self.BlobURLGet,
"BlobURLGetExpires": self.BlobURLGetExpires,
"BlobURLPut": self.BlobURLPut,
"BlobURLPutExpires": self.BlobURLPutExpires,
"ModifiedClient": self.ModifiedClient,
"Type": self.Type,
"VissibleName": self.VissibleName,
"CurrentPage": self.CurrentPage,
"Bookmarked": self.Bookmarked,
"Parent": self.Parent
}
def __str__(self):
return f"<rmapi.document.Document {self.ID}>"
def __repr__(self):
return self.__str__()
class ZipDocument(object):
"""
Here is the content of an archive retrieved from the tablet as an example:
384327f5-133e-49c8-82ff-30aa19f3cfa40.content
384327f5-133e-49c8-82ff-30aa19f3cfa40-metadata.json
384327f5-133e-49c8-82ff-30aa19f3cfa40.rm
384327f5-133e-49c8-82ff-30aa19f3cfa40.pagedata
384327f5-133e-49c8-82ff-30aa19f3cfa40.thumbnails/0.jpg
As the .zip file from Remarkable is simply a normal .zip file
containing files in specific formats, this class is a helper to
read and write zip files in the format expected by
the tablet.
In order to correctly use this package, you will have to understand
the format of a Remarkable zip file, and the format of the files
that it contains.
You can find some help about the format at the following URL:
https://remarkablewiki.com/tech/filesystem
"""
content = {
"ExtraMetadata": {
"LastBrushColor": "Black",
"LastBrushThicknessScale": "2",
"LastColor": "Black",
"LastEraserThicknessScale": "2",
"LastEraserTool": "Eraser",
"LastPen": "Ballpoint",
"LastPenColor": "Black",
"LastPenThicknessScale": "2",
"LastPencil": "SharpPencil",
"LastPencilColor": "Black",
"LastPencilThicknessScale": "2",
"LastTool": "SharpPencil",
"ThicknessScale": "2"
},
"FileType": "",
"FontName": "",
"LastOpenedPage": 0,
"LineHeight": -1,
"Margins": 100,
"Orientation": "portrait",
"PageCount": 0,
"Pages": [],
"TextScale": 1,
"Transform": {
"M11": 1,
"M12": 0,
"M13": 0,
"M21": 0,
"M22": 1,
"M23": 0,
"M31": 0,
"M32": 0,
"M33": 1,
}
}
metadata = {
"deleted": False,
"lastModified": "1568368808000",
"metadatamodified": False,
"modified": False,
"parent": "",
"pinned": False,
"synced": True,
"type": "DocumentType",
"version": 1,
"visibleName": "New Document"
}
pagedata = ""
zipfile = BytesIO()
pdf = None
epub = None
rm = []
ID = None
def __init__(self, ID=None, doc=None, file=None):
if not ID:
ID = str(uuid4())
self.ID = ID
if doc:
ext = doc[-4:]
if ext.endswith("pdf"):
self.content["FileType"] = "pdf"
self.pdf = BytesIO()
with open(doc, 'rb') as fb:
self.pdf.write(fb.read())
self.pdf.seek(0)
elif ext.endswith("epub"):
self.content["FileType"] = "epub"
self.epub = BytesIO()
with open(doc, 'rb') as fb:
self.epub.write(fb.read())
self.epub.seek(0)
elif ext.endswith("rm"):
self.content["FileType"] = "notebook"
with open(doc, 'rb') as fb:
self.rm.append(RmPage(page=BytesIO(fb.read())))
if file:
self.load(file)
def __str__(self):
return f"<rmapi.document.ZipDocument {self.ID}>"
def __repr__(self):
return self.__str__()
def dump(self, file):
"""
Dump the contents of ZipDocument back to a zip file
"""
with ZipFile(f"{file}.zip", "w", ZIP_DEFLATED) as zf:
if self.content:
zf.writestr(f"{self.ID}.content",
json.dumps(self.content))
if self.pagedata:
zf.writestr(f"{self.ID}.pagedata",
self.pagedata.read())
if self.pdf:
zf.writestr(f"{self.ID}.pdf",
self.pdf.read())
if self.epub:
zf.writestr(f"{self.ID}.epub",
self.epub.read())
for page in self.rm:
zf.writestr(f"{self.ID}/{page.order}.rm",
page.page.read())
zf.writestr(f"{self.ID}/{page.order}-metadata.json",
json.dumps(page.metadata))
page.page.seek(0)
zf.writestr(f"{self.ID}.thumbnails/{page.order}.jpg",
page.thumbnail.read())
def load(self, file) -> None:
"""
Fill in the defaults from the given ZIP
"""
self.zipfile = BytesIO()
self.zipfile.seek(0)
if isinstance(file, str):
with open(file, 'rb') as f:
shutil.copyfileobj(f, self.zipfile)
elif isinstance(file, BytesIO):
self.zipfile = file
self.zipfile.seek(0)
else:
raise Exception("Unsupported file type.")
with ZipFile(self.zipfile, 'r') as zf:
with zf.open(f"{self.ID}.content", 'r') as content:
self.content = json.load(content)
try:
with zf.open(f"{self.ID}.metadata", 'r') as metadata:
self.metadata = json.load(metadata)
except KeyError:
pass
try:
with zf.open(f"{self.ID}.pagedata", 'r') as pagedata:
self.pagedata = BytesIO(pagedata.read())
except KeyError:
pass
try:
with zf.open(f"{self.ID}.pdf", 'r') as pdf:
self.pdf = BytesIO(pdf.read())
except KeyError:
pass
try:
with zf.open(f"{self.ID}.epub", 'r') as epub:
self.epub = BytesIO(epub.read())
except KeyError:
pass
# Get the RM pages
content = [x for x in zf.namelist()
if x.startswith(f"{self.ID}/") and x.endswith('.rm')]
for p in content:
pagenumber = p.replace(f"{self.ID}/", "").replace(".rm", "")
pagenumber = int(pagenumber)
page = BytesIO()
thumbnail = BytesIO()
with zf.open(p, 'r') as rm:
page = BytesIO(rm.read())
page.seek(0)
with zf.open(p.replace(".rm", "-metadata.json"), 'r') as md:
metadata = json.load(md)
thumbnail_name = p.replace(".rm", ".jpg")
thumbnail_name = thumbnail_name.replace("/", ".thumbnails/")
with zf.open(thumbnail_name, 'r') as tn:
thumbnail = BytesIO(tn.read())
thumbnail.seek(0)
self.rm.append(RmPage(page, metadata, pagenumber, thumbnail,
self.ID))
self.zipfile.seek(0)
class RmPage(object):
"""A Remarkable Page"""
def __init__(self, page, metadata=None, order=0, thumbnail=None, ID=None):
self.page = page
if metadata:
self.metadata = metadata
else:
self.metadata = {"layers": [{"name": "Layer 1"}]}
self.order = order
if thumbnail:
self.thumbnail = thumbnail
if ID:
self.ID = ID
else:
self.ID = str(uuid4())
def __str__(self):
return f"<rmapi.document.RmPage {self.order} for {self.ID}>"
def __repr__(self):
return self.__str__()
def from_zip(ID: str, file: str) -> ZipDocument:
"""
Return A ZipDocument from a zipfile.
"""
return ZipDocument(ID, file=file)
def from_request_stream(ID: str, stream: Response) -> ZipDocument:
"""
Return a ZipDocument from a request stream containing a zipfile.
"""
tmp = BytesIO()
for chunk in stream.iter_content(chunk_size=8192):
tmp.write(chunk)
zd = ZipDocument(ID=ID)
zd.load(tmp)
return zd
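
Usage sketch (illustrative only, not one of the committed files): ZipDocument wraps a local file in the archive layout described in its docstring; sample.pdf is a placeholder path, and dump() writes the resulting archive to the current directory.

from rmapi.document import ZipDocument, from_zip

zd = ZipDocument(doc="sample.pdf")     # placeholder path to a local PDF
print(zd)                              # <rmapi.document.ZipDocument <uuid>>
print(zd.content["FileType"])          # pdf

zd.dump("sample_archive")              # writes sample_archive.zip

# Re-open the archive that was just written, reusing the same document ID.
again = from_zip(zd.ID, "sample_archive.zip")
print(again.content["FileType"])       # pdf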

18
rmapi/exceptions.py Normal file

@@ -0,0 +1,18 @@
class AuthError(Exception):
"""Authentication error"""
def __init__(self, msg):
super(AuthError, self).__init__(msg)
class DocumentNotFound(Exception):
"""Could not find the requested document"""
def __init__(self, msg):
super(DocumentNotFound, self).__init__(msg)
class ApiError(Exception):
"""An error occurred while communicating with the API"""
def __init__(self, msg, response=None):
self.response = response
super(ApiError, self).__init__(msg)
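
Usage sketch (illustrative only, not one of the committed files): the three exception types surface at different points of a session; the document ID below is a made-up value.

from rmapi.api import Client
from rmapi.exceptions import AuthError, DocumentNotFound, ApiError

client = Client()
try:
    client.renew_token()   # raises AuthError when no device is registered yet
    client.get_doc("00000000-0000-0000-0000-000000000000")  # hypothetical ID
except AuthError:
    print("Register this client first with Client.register_device()")
except DocumentNotFound:
    print("No document with that ID in the cloud")
except ApiError as e:
    # ApiError carries the underlying requests.Response in e.response.
    status = e.response.status_code if e.response else "n/a"
    print(f"API call failed ({status})")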

79
rmapi/folder.py Normal file

@@ -0,0 +1,79 @@
from .document import Document
from datetime import datetime
from uuid import uuid4
from io import BytesIO
from zipfile import ZipFile, ZIP_DEFLATED
from .const import RFC3339Nano
class ZipFolder(object):
"""A dummy zipfile to create a folder
This is needed to create a folder on the Remarkable Cloud
"""
def __init__(self, ID: str):
"""Creates a zipfile in memory
Args:
ID: the ID to create a zipFolder for
"""
super(ZipFolder, self).__init__()
self.ID = ID
self.file = BytesIO()
self.Version = 1
with ZipFile(self.file, 'w', ZIP_DEFLATED) as zf:
zf.writestr(f"{self.ID}.content", "{}")
self.file.seek(0)
class Folder(Document):
"""
A Meta type of object used to represent a folder.
"""
def __init__(self, name=None, **kwargs):
"""Create a Folder instance
Args:
name: An optional name for this folder. A name is ultimately
required, but it can be omitted here and set at a later time.
"""
super(Folder, self).__init__(**kwargs)
self.Type = "CollectionType"
if name:
self.VissibleName = name
if not self.ID:
self.ID = str(uuid4())
def create_request(self) -> (ZipFolder, dict):
"""Prepares the necessary parameters to create this folder.
This creates a ZipFolder & the necessary json body to
create an upload request.
"""
return ZipFolder(self.ID).file, {
"ID": self.ID,
"Type": "CollectionType",
"Version": 1
}
def update_request(self) -> dict:
"""Prepares the necessary parameters to update a folder.
This sets some parameters in the datastructure to submit to the API.
"""
data = self.to_dict()
data["Version"] = data.get("Version", 0) + 1
data["ModifiedClient"] = datetime.utcnow().strftime(RFC3339Nano)
return data
def __str__(self):
return f"<rmapi.folder.Folder {self.ID}>"
def __repr__(self):
return self.__str__()
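
Usage sketch (illustrative only, not one of the committed files): Folder and Client.create_folder together create a new collection in the cloud, assuming the client is already authenticated; the folder name is arbitrary.

from rmapi.api import Client
from rmapi.folder import Folder

client = Client()          # assumes ~/.rmapi already holds valid tokens
client.renew_token()

folder = Folder("Articles")                          # gets a fresh UUID as ID
print(folder.ID, folder.Type, folder.VissibleName)   # <uuid> CollectionType Articles

# Uploads an empty ZipFolder, then pushes the folder metadata.
client.create_folder(folder)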

0
rmapi/types.py Normal file