diff --git a/.gitignore b/.gitignore
index 5e5593a..af30a45 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,245 @@
-*.pyc
-__pycache__
-dist
-doxieapi.egg-info
-README.rst
+
+# Created by https://www.toptal.com/developers/gitignore/api/git,python,PyCharm+all
+# Edit at https://www.toptal.com/developers/gitignore?templates=git,python,PyCharm+all
+
+### Git ###
+# Created by git for backups. To disable backups in Git:
+# $ git config --global mergetool.keepBackup false
+*.orig
+
+# Created by git when using merge tools for conflicts
+*.BACKUP.*
+*.BASE.*
+*.LOCAL.*
+*.REMOTE.*
+*_BACKUP_*.txt
+*_BASE_*.txt
+*_LOCAL_*.txt
+*_REMOTE_*.txt
+
+### PyCharm+all ###
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+### PyCharm+all Patch ###
+# Ignores the whole .idea folder and all .iml files
+# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
+
+.idea/
+
+# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
+
+*.iml
+modules.xml
+.idea/misc.xml
+*.ipr
+
+# Sonarlint plugin
+.idea/sonarlint
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+pytestdebug.log
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+doc/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# End of https://www.toptal.com/developers/gitignore/api/git,python,PyCharm+all
diff --git a/doxieapi/__init__.py b/doxieapi/__init__.py
index b0fbb67..30332c3 100644
--- a/doxieapi/__init__.py
+++ b/doxieapi/__init__.py
@@ -1 +1,13 @@
+# -*- coding: utf-8 -*-
+
+"""
+doxieapi
+~~~~~~~~
+
+A Python library for the developer API of the Doxie Go Wi-Fi document scanner.
+"""
+
+
from .api import DoxieScanner
+
+__all__ = ['DoxieScanner']
diff --git a/doxieapi/__main__.py b/doxieapi/__main__.py
index b7954f1..95cbaed 100644
--- a/doxieapi/__main__.py
+++ b/doxieapi/__main__.py
@@ -1,15 +1,27 @@
+# -*- coding: utf-8 -*-
+
+"""
+doxieapi.__main__
+~~~~~~~~~~~~~~~~~
+
+Entry point that runs when the package is executed with "python -m doxieapi".
+"""
+
import os
from .api import DoxieScanner
+
def main():
"""
- Grab all available scan images and save them to the current working directory
+ Grab all available scan images and save them to the current working
+ directory.
"""
for doxie in DoxieScanner.discover():
print("Discovered {}.".format(doxie))
for scan in doxie.download_scans(os.getcwd()):
print("Saved {}".format(scan))
+
if __name__ == '__main__':
main()
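
Note: the entry point above can also be called directly from Python; a minimal sketch, equivalent to running "python -m doxieapi", assuming the package is installed and a scanner is reachable on the network:

    from doxieapi.__main__ import main

    main()
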
diff --git a/doxieapi/api.py b/doxieapi/api.py
old mode 100755
new mode 100644
index 37ef5e8..cb6f597
--- a/doxieapi/api.py
+++ b/doxieapi/api.py
@@ -1,3 +1,12 @@
+# -*- coding: utf-8 -*-
+
+"""
+doxieapi.api
+~~~~~~~~~~~~
+
+A client implementation for the Doxie scanner's developer API.
+"""
+
import os
import time
import json
@@ -20,9 +29,15 @@
# Scans are downloaded in chunks of this many bytes:
DOWNLOAD_CHUNK_SIZE = 1024*8
+
class DoxieScanner:
+ """A client for the Doxie Scanner."""
+
+ # pylint: disable=too-many-instance-attributes
+ # Nine is reasonable in this case.
+
url = None
- username = "doxie" # This is always the same according to API docs
+ username = "doxie" # This is always the same according to API docs
password = None
# These attributes will be populated by _load_hello_attributes
@@ -43,21 +58,25 @@ def __init__(self, url, load_attributes=True):
def __str__(self):
"""
- >>> doxie = DoxieScanner("http://192.168.100.1:8080/", load_attributes=False)
+ >>> doxie = DoxieScanner("http://192.168.100.1:8080/",
+ ... load_attributes=False)
>>> doxie.name = "Doxie_00AAFF"
>>> doxie.model = "DX250"
>>> str(doxie)
'Doxie model DX250 (Doxie_00AAFF) at http://192.168.100.1:8080/'
"""
- return "Doxie model {} ({}) at {}".format(self.model, self.name, self.url)
+ return "Doxie model {} ({}) at {}".format(
+ self.model, self.name, self.url)
def __repr__(self):
"""
- >>> doxie = DoxieScanner("http://192.168.100.1:8080/", load_attributes=False)
+ >>> doxie = DoxieScanner("http://192.168.100.1:8080/",
+ ... load_attributes=False)
>>> doxie.name = "Doxie_00AAFF"
>>> doxie.model = "DX250"
>>> str(doxie)
-        '<DoxieScanner: Doxie model DX250 (Doxie_00AAFF) at http://192.168.100.1:8080/>'
+        '<DoxieScanner: Doxie model DX250 (Doxie_00AAFF) at http://192.168.100.1:8080/>'
"""
return "".format(str(self))
@@ -76,9 +95,9 @@ def discover(cls):
def _api_url(self, path):
"""
-        >>> DoxieScanner("http://192.168.100.1:8080/", load_attributes=False)._api_url("/scans.json")
+        >>> doxie = DoxieScanner("http://192.168.100.1:8080/",
+        ...                      load_attributes=False)
+        >>> doxie._api_url("/scans.json")
'http://192.168.100.1:8080/scans.json'
- >>> DoxieScanner("http://192.168.100.1:8080/", load_attributes=False)._api_url("/networks/available.json")
+ >>> doxie._api_url("/networks/available.json")
'http://192.168.100.1:8080/networks/available.json'
"""
return urljoin(self.url, path)
@@ -108,10 +127,13 @@ def _get_url(self, url, stream=False):
def _get_auth(self):
"""
- Returns a (username, password) tuple if self.password is set, otherwise None.
+ Returns a (username, password) tuple if self.password is set, otherwise
+ None.
Suitable for passing to requests' 'auth' kwarg.
"""
- return (self.username, self.password) if self.password is not None else None
+ return (
+ self.username, self.password
+ ) if self.password is not None else None
def _load_hello_attributes(self):
"""
@@ -128,7 +150,7 @@ def _load_hello_attributes(self):
self.firmware_wifi = attributes['firmwareWiFi']
if self.mode == "Client":
self.network = attributes['network']
- if attributes['hasPassword'] == True:
+ if attributes['hasPassword']:
self._load_password()
def _load_password(self):
@@ -136,19 +158,24 @@ def _load_password(self):
Load the password for this Doxie's MAC address from ~/.doxieapi.ini,
or another path specified by the DOXIEAPI_CONFIG_PATH env variable
"""
- config_path = os.path.expanduser(os.environ.get("DOXIEAPI_CONFIG_PATH", "~/.doxieapi.ini"))
+ config_path = os.path.expanduser(
+ os.environ.get("DOXIEAPI_CONFIG_PATH", "~/.doxieapi.ini")
+ )
config = ConfigParser()
config.read(config_path)
try:
self.password = config[self.mac]['password']
except KeyError:
- raise Exception("Couldn't find password for Doxie {} in {}".format(self.mac, config_path))
+ raise Exception(
+ "Couldn't find password for Doxie {} in {}".format(
+ self.mac, config_path)
+ )
@property
def firmware(self):
"""
- Fetches and caches the 'firmware' string from the 'hello_extra' API call.
- This call is expensive and the value isn't going to change, so
+ Fetches and caches the 'firmware' string from the 'hello_extra' API
+ call. This call is expensive and the value isn't going to change, so
we're fine to cache it for the lifetime of this DoxieScanner instance.
"""
if self._firmware is None:
@@ -205,9 +232,9 @@ def download_scan(self, path, output_dir):
output_path = os.path.join(output_dir, os.path.basename(path))
if os.path.isfile(output_path):
raise FileExistsError(output_path)
- with open(output_path, 'wb') as f:
+ with open(output_path, 'wb') as output:
for chunk in response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE):
- f.write(chunk)
+ output.write(chunk)
return output_path
def download_scans(self, output_dir):
@@ -225,12 +252,13 @@ def delete_scan(self, path, retries=3, timeout=5):
"""
Deletes a scan from the Doxie.
This method may be slow; from the API docs:
- Deleting takes several seconds because a lock on the internal storage
- must be obtained and released. Deleting may fail if the lock cannot
- be obtained (e.g., the scanner is busy), so consider retrying on
- failure conditions.
+ Deleting takes several seconds because a lock on the internal
+ storage must be obtained and released. Deleting may fail if the lock
+ cannot be obtained (e.g., the scanner is busy), so consider retrying
+ on failure conditions.
This method will attempt the deletion multiple times with a timeout
- between attempts - controlled by the retries and timeout (seconds) params.
+ between attempts - controlled by the retries and timeout (seconds)
+ params.
Returns a boolean indicating whether the deletion was successful.
"""
if not path.startswith("/scans"):
@@ -249,12 +277,13 @@ def delete_scans(self, paths, retries=3, timeout=5):
"""
Deletes multiple scans from the Doxie.
This method may be slow; from the API docs:
- Deleting takes several seconds because a lock on the internal storage
- must be obtained and released. Deleting may fail if the lock cannot
- be obtained (e.g., the scanner is busy), so consider retrying on
- failure conditions.
+ Deleting takes several seconds because a lock on the internal
+ storage must be obtained and released. Deleting may fail if the lock
+ cannot be obtained (e.g., the scanner is busy), so consider retrying
+ on failure conditions.
This method will attempt the deletion multiple times with a timeout
- between attempts - controlled by the retries and timeout (seconds) params.
+ between attempts - controlled by the retries and timeout (seconds)
+ params.
Returns a boolean indicating whether the deletion was successful.
The deletion is considered successful by the Doxie if at least one scan
was deleted, it seems.
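
Note: _load_password above reads the scanner's password from ~/.doxieapi.ini (or the path in DOXIEAPI_CONFIG_PATH), from a section named after the scanner's MAC address, via config[self.mac]['password']. A minimal sketch of writing such a file with configparser; the MAC address and password below are placeholders:

    import os
    from configparser import ConfigParser

    config = ConfigParser()
    # Section name is the scanner's MAC address; both values are placeholders.
    config["00:11:22:33:44:55"] = {"password": "example-password"}
    with open(os.path.expanduser("~/.doxieapi.ini"), "w") as handle:
        config.write(handle)
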
diff --git a/doxieapi/ssdp.py b/doxieapi/ssdp.py
index d86a741..3a8f705 100644
--- a/doxieapi/ssdp.py
+++ b/doxieapi/ssdp.py
@@ -1,16 +1,21 @@
-# Copyright 2014 Dan Krause
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# -*- coding: utf-8 -*-
+
+"""
+Copyright 2014 Dan Krause
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
import socket
try:
@@ -18,28 +23,44 @@
except ImportError:
from httplib import HTTPResponse
+
+# pylint: disable=invalid-name,too-few-public-methods
+
class SSDPResponse(object):
+    """Represents a single SSDP response."""
def __init__(self, sock):
- r = HTTPResponse(sock)
- r.begin()
- self.location = r.getheader("location")
- self.usn = r.getheader("usn")
- self.st = r.getheader("st")
- self.cache = r.getheader("cache-control").split("=")[1]
+ resp = HTTPResponse(sock)
+ resp.begin()
+ self.location = resp.getheader("location")
+ self.usn = resp.getheader("usn")
+ self.st = resp.getheader("st")
+ self.cache = resp.getheader("cache-control").split("=")[1]
+
def __repr__(self):
-        return "<SSDPResponse({location}, {st}, {usn})>".format(**self.__dict__)
+        return "<SSDPResponse({location}, {st}, {usn})>".format(
+            **self.__dict__
+        )
+
def discover(service, timeout=2, retries=1, mx=3):
+ """
+    Discover an SSDP advertisement.
+
+ Example:
+ ssdp.discover("roku:ecp")
+ """
group = ("239.255.255.250", 1900)
message = "\r\n".join([
'M-SEARCH * HTTP/1.1',
'HOST: {0}:{1}',
'MAN: "ssdp:discover"',
- 'ST: {st}','MX: {mx}','',''])
+ 'ST: {st}', 'MX: {mx}', '', ''])
socket.setdefaulttimeout(max(timeout, mx))
responses = {}
for _ in range(retries):
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
+ sock = socket.socket(
+ socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP
+ )
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
sock.sendto(message.format(*group, st=service, mx=mx).encode(), group)
@@ -50,7 +71,3 @@ def discover(service, timeout=2, retries=1, mx=3):
except socket.timeout:
break
return responses.values()
-
-# Example:
-# import ssdp
-# ssdp.discover("roku:ecp")
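
Note: a minimal usage sketch of the module above. discover() sends an M-SEARCH for the given search target and returns SSDPResponse objects exposing the location, st, usn and cache headers; the "ssdp:all" target is illustrative (the docstring's own example uses "roku:ecp"):

    from doxieapi import ssdp

    for response in ssdp.discover("ssdp:all", timeout=2, retries=1, mx=3):
        print(response.location, response.st, response.usn)
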
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..9b4789e
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[pylint.TYPECHECK]
+generated-members=(requests\.)?codes\.(name|continue|CONTINUE|switching_protocols|SWITCHING_PROTOCOLS|processing|PROCESSING|checkpoint|CHECKPOINT|uri_too_long|URI_TOO_LONG|request_uri_too_long|REQUEST_URI_TOO_LONG|ok|OK|okay|OKAY|all_ok|ALL_OK|all_okay|ALL_OKAY|all_good|ALL_GOOD|created|CREATED|accepted|ACCEPTED|non_authoritative_info|NON_AUTHORITATIVE_INFO|non_authoritative_information|NON_AUTHORITATIVE_INFORMATION|no_content|NO_CONTENT|reset_content|RESET_CONTENT|reset|RESET|partial_content|PARTIAL_CONTENT|partial|PARTIAL|multi_status|MULTI_STATUS|multiple_status|MULTIPLE_STATUS|multi_stati|MULTI_STATI|multiple_stati|MULTIPLE_STATI|already_reported|ALREADY_REPORTED|im_used|IM_USED|multiple_choices|MULTIPLE_CHOICES|moved_permanently|MOVED_PERMANENTLY|moved|MOVED|found|FOUND|see_other|SEE_OTHER|other|OTHER|not_modified|NOT_MODIFIED|use_proxy|USE_PROXY|switch_proxy|SWITCH_PROXY|temporary_redirect|TEMPORARY_REDIRECT|temporary_moved|TEMPORARY_MOVED|temporary|TEMPORARY|permanent_redirect|PERMANENT_REDIRECT|resume_incomplete|RESUME_INCOMPLETE|resume|RESUME|bad_request|BAD_REQUEST|bad|BAD|unauthorized|UNAUTHORIZED|payment_required|PAYMENT_REQUIRED|payment|PAYMENT|forbidden|FORBIDDEN|not_found|NOT_FOUND|method_not_allowed|METHOD_NOT_ALLOWED|not_allowed|NOT_ALLOWED|not_acceptable|NOT_ACCEPTABLE|proxy_authentication_required|PROXY_AUTHENTICATION_REQUIRED|proxy_auth|PROXY_AUTH|proxy_authentication|PROXY_AUTHENTICATION|request_timeout|REQUEST_TIMEOUT|timeout|TIMEOUT|conflict|CONFLICT|gone|GONE|length_required|LENGTH_REQUIRED|precondition_failed|PRECONDITION_FAILED|precondition|PRECONDITION|request_entity_too_large|REQUEST_ENTITY_TOO_LARGE|request_uri_too_large|REQUEST_URI_TOO_LARGE|unsupported_media_type|UNSUPPORTED_MEDIA_TYPE|unsupported_media|UNSUPPORTED_MEDIA|media_type|MEDIA_TYPE|requested_range_not_satisfiable|REQUESTED_RANGE_NOT_SATISFIABLE|requested_range|REQUESTED_RANGE|range_not_satisfiable|RANGE_NOT_SATISFIABLE|expectation_failed|EXPECTATION_FAILED|im_a_teapot|IM_A_TEAPOT|teapot|TEAPOT|i_am_a_teapot|I_AM_A_TEAPOT|misdirected_request|MISDIRECTED_REQUEST|unprocessable_entity|UNPROCESSABLE_ENTITY|unprocessable|UNPROCESSABLE|locked|LOCKED|failed_dependency|FAILED_DEPENDENCY|dependency|DEPENDENCY|unordered_collection|UNORDERED_COLLECTION|unordered|UNORDERED|upgrade_required|UPGRADE_REQUIRED|upgrade|UPGRADE|precondition_required|PRECONDITION_REQUIRED|too_many_requests|TOO_MANY_REQUESTS|too_many|TOO_MANY|header_fields_too_large|HEADER_FIELDS_TOO_LARGE|fields_too_large|FIELDS_TOO_LARGE|no_response|NO_RESPONSE|none|NONE|retry_with|RETRY_WITH|retry|RETRY|blocked_by_windows_parental_controls|BLOCKED_BY_WINDOWS_PARENTAL_CONTROLS|parental_controls|PARENTAL_CONTROLS|unavailable_for_legal_reasons|UNAVAILABLE_FOR_LEGAL_REASONS|legal_reasons|LEGAL_REASONS|client_closed_request|CLIENT_CLOSED_REQUEST|internal_server_error|INTERNAL_SERVER_ERROR|server_error|SERVER_ERROR|not_implemented|NOT_IMPLEMENTED|bad_gateway|BAD_GATEWAY|service_unavailable|SERVICE_UNAVAILABLE|unavailable|UNAVAILABLE|gateway_timeout|GATEWAY_TIMEOUT|http_version_not_supported|HTTP_VERSION_NOT_SUPPORTED|http_version|HTTP_VERSION|variant_also_negotiates|VARIANT_ALSO_NEGOTIATES|insufficient_storage|INSUFFICIENT_STORAGE|bandwidth_limit_exceeded|BANDWIDTH_LIMIT_EXCEEDED|bandwidth|BANDWIDTH|not_extended|NOT_EXTENDED|network_authentication_required|NETWORK_AUTHENTICATION_REQUIRED|network_auth|NETWORK_AUTH|network_authentication|NETWORK_AUTHENTICATION),
diff --git a/setup.py b/setup.py
index c4d4d13..dac768f 100644
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,8 @@
-from setuptools import setup, find_packages
+#!/usr/bin/env python
+
from distutils import log
+from setuptools import find_packages
+from setuptools import setup
try:
import pypandoc
@@ -8,16 +11,14 @@
log.warn("warning: Couldn't generate README.rst - is pypandoc installed?")
setup(
- name = "doxieapi",
- version = "0.0.2",
- packages = find_packages(),
-
- install_requires = ['requests'],
-
- author = "Dave Arter",
- author_email = "pypi@davea.me",
- description = "Library for downloading scans from a Doxie Go Wi-Fi document scanner",
- license = "LICENSE.txt",
- keywords = "doxie document scanner",
- url = "https://github.com/davea/doxieapi/",
+ name="doxieapi",
+ version="0.0.2",
+ packages=find_packages(),
+ install_requires=['requests'],
+ author="Dave Arter",
+ author_email="pypi@davea.me",
+ description="Library for downloading scans from a Doxie Go Wi-Fi document scanner",
+ license="LICENSE.txt",
+ keywords="doxie document scanner",
+ url="https://github.com/davea/doxieapi/",
)