diff --git a/.github/workflows/upload-pypi.yml b/.github/workflows/upload-pypi.yml
new file mode 100644
index 0000000..3a43043
--- /dev/null
+++ b/.github/workflows/upload-pypi.yml
@@ -0,0 +1,32 @@
+name: Build PyPI Artifacts
+
+on:
+ create:
+ tags:
+ - v*
+
+jobs:
+
+ deploy:
+ name: Publish to PyPI
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.8'
+
+ - name: Install dependencies
+ run: |
+ python3 -m pip install -U pip
+
+ - name: Make package
+ run: |
+ python3 setup.py sdist
+
+ - name: Publish package to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ user: __token__
+ password: ${{ secrets.pypi_password }}
diff --git a/.gitignore b/.gitignore
index 5672901..dadb771 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,596 @@
-*.pyc
-.DS_Store
+# Created by .ignore support plugin (hsz.mobi)
+### Python template
+# Byte-compiled / optimized / DLL files
+
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
*.egg
-*.egg-info
-dist
-/.idea
-_build
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+### VisualStudio template
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Mono auto generated files
+mono_crash.*
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+[Ll]ogs/
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUnit
+*.VisualState.xml
+TestResult.xml
+nunit-*.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# NuGet Symbol Packages
+*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+*.appxbundle
+*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+*- [Bb]ackup.rdl
+*- [Bb]ackup ([0-9]).rdl
+*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+MigrationBackup/
+
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
+
+### JetBrains template
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+### Gradle template
+.gradle
+/build/
+
+# Ignore Gradle GUI config
+gradle-app.setting
+
+# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
+!gradle-wrapper.jar
+
+# Cache of project
+.gradletasknamecache
+
+# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898
+# gradle/wrapper/gradle-wrapper.properties
+
+### VirtualEnv template
+# Virtualenv
+# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
+[Bb]in
+[Ii]nclude
+[Ll]ib
+[Ll]ib64
+[Ll]ocal
+pyvenv.cfg
+pip-selfcheck.json
+.idea/
+
+# Jekyll
+.jekyll-metadata
+.jekyll-cache
+_site
+
+# TextX
+*.dot
+
+# Visual Studio Code
+.vscode
+
+# Mist
+/grammar.tx
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index ba26c77..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-language: python
-python:
- - "2.7"
-# - "3.2" # 3.2 is broken, for some reason
- - "3.3"
- - "3.4"
- - "3.5"
- - "nightly"
-# command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors
-install: pip install -r requirements.txt
-# command to run tests, e.g. python setup.py test
-script: nosetests --with-coverage --cover-package=flask_s3
-after_success: coveralls
-sudo: false
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
deleted file mode 100644
index f7d889c..0000000
--- a/CONTRIBUTORS
+++ /dev/null
@@ -1,10 +0,0 @@
-Contributors
-============
-
-* Edward Robinson (e-dard)
-* Rehan Dalal (rehandalal)
-* Hannes Ljungberg (hannseman)
-* Erik Taubeneck (eriktaubeneck)
-* Frank Tackitt (kageurufu)
-* Isaac Dickinson (SunDwarf)
-* bool-dev
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..5ec34d9
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,19 @@
+include requirements.txt README.md VERSION
+
+recursive-exclude * test_*
+recursive-exclude * __pycache__
+recursive-exclude * *.pyc
+recursive-exclude * *.pyo
+recursive-exclude * *.orig
+recursive-exclude * .DS_Store
+
+global-exclude test/*
+global-exclude __pycache__/*
+global-exclude .deps/*
+global-exclude *.pyd
+global-exclude *.pyc
+global-exclude .git*
+global-exclude .DS_Store
+global-exclude .mailmap
+
+recursive-include * *.mist
diff --git a/README.md b/README.md
index d361b4d..a25f903 100644
--- a/README.md
+++ b/README.md
@@ -1,33 +1,38 @@
-flask-s3
-========
-
-[![Build Status](https://travis-ci.org/e-dard/flask-s3.svg?branch=master)](https://travis-ci.org/e-dard/flask-s3)
-[![Coverage Status](https://coveralls.io/repos/e-dard/flask-s3/badge.svg?branch=master&service=github)](https://coveralls.io/github/e-dard/flask-s3?branch=master)
-[![Analytics](https://ga-beacon.appspot.com/UA-35880013-3/flask-s3/readme)](https://github.com/igrigorik/ga-beacon)
-[![PyPI Version](https://img.shields.io/pypi/v/Flask-S3.svg)](https://pypi.python.org/pypi/Flask-S3)
+# flask-s3-ng
Seamlessly serve the static assets of your Flask app from Amazon S3.
-Maintainers
------------
+## Project description
-Flask-S3 is maintained by @e-dard, @eriktaubeneck and @SunDwarf.
+This project is based on the Flask-S3 project. That great project has been unmaintained for a long time, and this fork aims to keep it up to date.
+In this fork, Python 2 support was removed.
-Installation
-------------
+## Installation
Install Flask-S3 via pypi:
- pip install flask-s3
-
-Or, install the latest development version:
+ pip install flask-s3-ng
+
+## New features
+
+This fork offers some new features:
+
+- A progress bar while static assets are being uploaded
+- Support for S3 providers other than AWS
+
+### Support for other S3 providers
+
+If you use an S3 provider other than AWS, you can set the configuration parameter `FLASKS3_ENDPOINT_URL`.
+
+For example, if you're using Scaleway as your S3 storage provider, your `FLASKS3_ENDPOINT_URL` is `https://s3.nl-ams.scw.cloud`.
+
+## Documentation
+
+Most of the original documentation is still valid.
- pip install git+https://github.com/e-dard/flask-s3
-
+For additional information or examples, you can refer to the original repo.
-Documentation
--------------
The latest documentation for Flask-S3 can be found [here](https://flask-s3.readthedocs.io/en/latest/).
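The `FLASKS3_ENDPOINT_URL` setting introduced in the README above can be combined with the standard Flask-S3 configuration from the original documentation. A minimal sketch, assuming the usual `FlaskS3` initialization; the bucket name and endpoint below are illustrative placeholders, not values from this diff:

```python
from flask import Flask
import flask_s3
from flask_s3 import FlaskS3

app = Flask(__name__)
# Standard Flask-S3 settings from the original documentation.
# AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY (or provider equivalents)
# must also be available, as described there.
app.config['FLASKS3_BUCKET_NAME'] = 'mybucketname'  # placeholder bucket name
# New in this fork: point Flask-S3 at a non-AWS, S3-compatible endpoint,
# e.g. Scaleway as mentioned in the README.
app.config['FLASKS3_ENDPOINT_URL'] = 'https://s3.nl-ams.scw.cloud'

s3 = FlaskS3(app)

if __name__ == '__main__':
    # One-off upload of the app's static assets to the configured endpoint.
    flask_s3.create_all(app)
```
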
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 7d0b616..0000000
--- a/README.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-flask-s3
-========
-
-|Build Status| |Coverage Status| |Analytics| |PyPI Version|
-
-Seamlessly serve the static assets of your Flask app from Amazon S3.
-
-Maintainers
------------
-
-Flask-S3 is maintained by @e-dard, @eriktaubeneck and @SunDwarf.
-
-Installation
-------------
-
-Install Flask-S3 via pypi:
-
-::
-
- pip install flask-s3
-
-
-Or, install the latest development version:
-
-::
-
- pip install git+https://github.com/e-dard/flask-s3
-
-Documentation
--------------
-
-The latest documentation for Flask-S3 can be found
-`here <https://flask-s3.readthedocs.io/en/latest/>`__.
-
-.. |Build Status| image:: https://travis-ci.org/e-dard/flask-s3.svg?branch=master
- :target: https://travis-ci.org/e-dard/flask-s3
-.. |Coverage Status| image:: https://coveralls.io/repos/e-dard/flask-s3/badge.svg?branch=master&service=github
- :target: https://coveralls.io/github/e-dard/flask-s3?branch=master
-.. |Analytics| image:: https://ga-beacon.appspot.com/UA-35880013-3/flask-s3/readme
- :target: https://github.com/igrigorik/ga-beacon
-.. |PyPI Version| image:: https://img.shields.io/pypi/v/Flask-S3.svg
- :target: https://pypi.python.org/pypi/Flask-S3
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..6d7de6e
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+1.0.2
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 54dbff7..0000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,153 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-PAPER =
-BUILDDIR = _build
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
- @echo "Please use \`make ' where is one of"
- @echo " html to make standalone HTML files"
- @echo " dirhtml to make HTML files named index.html in directories"
- @echo " singlehtml to make a single large HTML file"
- @echo " pickle to make pickle files"
- @echo " json to make JSON files"
- @echo " htmlhelp to make HTML files and a HTML help project"
- @echo " qthelp to make HTML files and a qthelp project"
- @echo " devhelp to make HTML files and a Devhelp project"
- @echo " epub to make an epub"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
- @echo " latexpdf to make LaTeX files and run them through pdflatex"
- @echo " text to make text files"
- @echo " man to make manual pages"
- @echo " texinfo to make Texinfo files"
- @echo " info to make Texinfo files and run them through makeinfo"
- @echo " gettext to make PO message catalogs"
- @echo " changes to make an overview of all changed/added/deprecated items"
- @echo " linkcheck to check all external links for integrity"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
-
-clean:
- -rm -rf $(BUILDDIR)/*
-
-html:
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
- $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
- $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
- @echo
- @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
- $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
- @echo
- @echo "Build finished; now you can process the pickle files."
-
-json:
- $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
- @echo
- @echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
- $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
- @echo
- @echo "Build finished; now you can run HTML Help Workshop with the" \
- ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
- $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
- @echo
- @echo "Build finished; now you can run "qcollectiongenerator" with the" \
- ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
- @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/flask-s3.qhcp"
- @echo "To view the help file:"
- @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/flask-s3.qhc"
-
-devhelp:
- $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
- @echo
- @echo "Build finished."
- @echo "To view the help file:"
- @echo "# mkdir -p $$HOME/.local/share/devhelp/flask-s3"
- @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/flask-s3"
- @echo "# devhelp"
-
-epub:
- $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
- @echo
- @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo
- @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
- @echo "Run \`make' in that directory to run these through (pdf)latex" \
- "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo "Running LaTeX files through pdflatex..."
- $(MAKE) -C $(BUILDDIR)/latex all-pdf
- @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
- $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
- @echo
- @echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
- $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
- @echo
- @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
- $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
- @echo
- @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
- @echo "Run \`make' in that directory to run these through makeinfo" \
- "(use \`make info' here to do that automatically)."
-
-info:
- $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
- @echo "Running Texinfo files through makeinfo..."
- make -C $(BUILDDIR)/texinfo info
- @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
- $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
- @echo
- @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
- $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
- @echo
- @echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
- $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
- @echo
- @echo "Link check complete; look for any errors in the above output " \
- "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
- $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
- @echo "Testing of doctests in the sources finished, look at the " \
- "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/docs/_themes/LICENSE b/docs/_themes/LICENSE
deleted file mode 100755
index 8daab7e..0000000
--- a/docs/_themes/LICENSE
+++ /dev/null
@@ -1,37 +0,0 @@
-Copyright (c) 2010 by Armin Ronacher.
-
-Some rights reserved.
-
-Redistribution and use in source and binary forms of the theme, with or
-without modification, are permitted provided that the following conditions
-are met:
-
-* Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above
- copyright notice, this list of conditions and the following
- disclaimer in the documentation and/or other materials provided
- with the distribution.
-
-* The names of the contributors may not be used to endorse or
- promote products derived from this software without specific
- prior written permission.
-
-We kindly ask you to only use these themes in an unmodified manner just
-for Flask and Flask-related products, not for unrelated projects. If you
-like the visual style and want to use it for your own projects, please
-consider making some larger changes to the themes (such as changing
-font faces, sizes, colors or margins).
-
-THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
diff --git a/docs/_themes/README b/docs/_themes/README
deleted file mode 100755
index b3292bd..0000000
--- a/docs/_themes/README
+++ /dev/null
@@ -1,31 +0,0 @@
-Flask Sphinx Styles
-===================
-
-This repository contains sphinx styles for Flask and Flask related
-projects. To use this style in your Sphinx documentation, follow
-this guide:
-
-1. put this folder as _themes into your docs folder. Alternatively
- you can also use git submodules to check out the contents there.
-2. add this to your conf.py:
-
- sys.path.append(os.path.abspath('_themes'))
- html_theme_path = ['_themes']
- html_theme = 'flask'
-
-The following themes exist:
-
-- 'flask' - the standard flask documentation theme for large
- projects
-- 'flask_small' - small one-page theme. Intended to be used by
- very small addon libraries for flask.
-
-The following options exist for the flask_small theme:
-
- [options]
- index_logo = '' filename of a picture in _static
- to be used as replacement for the
- h1 in the index.rst file.
- index_logo_height = 120px height of the index logo
- github_fork = '' repository name on github for the
- "fork me" badge
diff --git a/docs/_themes/flask/layout.html b/docs/_themes/flask/layout.html
deleted file mode 100755
index 5caa4e2..0000000
--- a/docs/_themes/flask/layout.html
+++ /dev/null
@@ -1,25 +0,0 @@
-{%- extends "basic/layout.html" %}
-{%- block extrahead %}
- {{ super() }}
- {% if theme_touch_icon %}
-
- {% endif %}
-
-{% endblock %}
-{%- block relbar2 %}{% endblock %}
-{% block header %}
- {{ super() }}
- {% if pagename == 'index' %}
-
- {% endif %}
-{% endblock %}
-{# do not display relbars #}
-{% block relbar1 %}{% endblock %}
-{% block relbar2 %}
- {% if theme_github_fork %}
-
- {% endif %}
-{% endblock %}
-{% block sidebar1 %}{% endblock %}
-{% block sidebar2 %}{% endblock %}
diff --git a/docs/_themes/flask_small/static/flasky.css_t b/docs/_themes/flask_small/static/flasky.css_t
deleted file mode 100755
index fe2141c..0000000
--- a/docs/_themes/flask_small/static/flasky.css_t
+++ /dev/null
@@ -1,287 +0,0 @@
-/*
- * flasky.css_t
- * ~~~~~~~~~~~~
- *
- * Sphinx stylesheet -- flasky theme based on nature theme.
- *
- * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
- * :license: BSD, see LICENSE for details.
- *
- */
-
-@import url("basic.css");
-
-/* -- page layout ----------------------------------------------------------- */
-
-body {
- font-family: 'Georgia', serif;
- font-size: 17px;
- color: #000;
- background: white;
- margin: 0;
- padding: 0;
-}
-
-div.documentwrapper {
- float: left;
- width: 100%;
-}
-
-div.bodywrapper {
- margin: 40px auto 0 auto;
- width: 700px;
-}
-
-hr {
- border: 1px solid #B1B4B6;
-}
-
-div.body {
- background-color: #ffffff;
- color: #3E4349;
- padding: 0 30px 30px 30px;
-}
-
-img.floatingflask {
- padding: 0 0 10px 10px;
- float: right;
-}
-
-div.footer {
- text-align: right;
- color: #888;
- padding: 10px;
- font-size: 14px;
- width: 650px;
- margin: 0 auto 40px auto;
-}
-
-div.footer a {
- color: #888;
- text-decoration: underline;
-}
-
-div.related {
- line-height: 32px;
- color: #888;
-}
-
-div.related ul {
- padding: 0 0 0 10px;
-}
-
-div.related a {
- color: #444;
-}
-
-/* -- body styles ----------------------------------------------------------- */
-
-a {
- color: #004B6B;
- text-decoration: underline;
-}
-
-a:hover {
- color: #6D4100;
- text-decoration: underline;
-}
-
-div.body {
- padding-bottom: 40px; /* saved for footer */
-}
-
-div.body h1,
-div.body h2,
-div.body h3,
-div.body h4,
-div.body h5,
-div.body h6 {
- font-family: 'Garamond', 'Georgia', serif;
- font-weight: normal;
- margin: 30px 0px 10px 0px;
- padding: 0;
-}
-
-{% if theme_index_logo %}
-div.indexwrapper h1 {
- text-indent: -999999px;
- background: url({{ theme_index_logo }}) no-repeat center center;
- height: {{ theme_index_logo_height }};
-}
-{% endif %}
-
-div.body h2 { font-size: 180%; }
-div.body h3 { font-size: 150%; }
-div.body h4 { font-size: 130%; }
-div.body h5 { font-size: 100%; }
-div.body h6 { font-size: 100%; }
-
-a.headerlink {
- color: white;
- padding: 0 4px;
- text-decoration: none;
-}
-
-a.headerlink:hover {
- color: #444;
- background: #eaeaea;
-}
-
-div.body p, div.body dd, div.body li {
- line-height: 1.4em;
-}
-
-div.admonition {
- background: #fafafa;
- margin: 20px -30px;
- padding: 10px 30px;
- border-top: 1px solid #ccc;
- border-bottom: 1px solid #ccc;
-}
-
-div.admonition p.admonition-title {
- font-family: 'Garamond', 'Georgia', serif;
- font-weight: normal;
- font-size: 24px;
- margin: 0 0 10px 0;
- padding: 0;
- line-height: 1;
-}
-
-div.admonition p.last {
- margin-bottom: 0;
-}
-
-div.highlight{
- background-color: white;
-}
-
-dt:target, .highlight {
- background: #FAF3E8;
-}
-
-div.note {
- background-color: #eee;
- border: 1px solid #ccc;
-}
-
-div.seealso {
- background-color: #ffc;
- border: 1px solid #ff6;
-}
-
-div.topic {
- background-color: #eee;
-}
-
-div.warning {
- background-color: #ffe4e4;
- border: 1px solid #f66;
-}
-
-p.admonition-title {
- display: inline;
-}
-
-p.admonition-title:after {
- content: ":";
-}
-
-pre, tt {
- font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
- font-size: 0.85em;
-}
-
-img.screenshot {
-}
-
-tt.descname, tt.descclassname {
- font-size: 0.95em;
-}
-
-tt.descname {
- padding-right: 0.08em;
-}
-
-img.screenshot {
- -moz-box-shadow: 2px 2px 4px #eee;
- -webkit-box-shadow: 2px 2px 4px #eee;
- box-shadow: 2px 2px 4px #eee;
-}
-
-table.docutils {
- border: 1px solid #888;
- -moz-box-shadow: 2px 2px 4px #eee;
- -webkit-box-shadow: 2px 2px 4px #eee;
- box-shadow: 2px 2px 4px #eee;
-}
-
-table.docutils td, table.docutils th {
- border: 1px solid #888;
- padding: 0.25em 0.7em;
-}
-
-table.field-list, table.footnote {
- border: none;
- -moz-box-shadow: none;
- -webkit-box-shadow: none;
- box-shadow: none;
-}
-
-table.footnote {
- margin: 15px 0;
- width: 100%;
- border: 1px solid #eee;
-}
-
-table.field-list th {
- padding: 0 0.8em 0 0;
-}
-
-table.field-list td {
- padding: 0;
-}
-
-table.footnote td {
- padding: 0.5em;
-}
-
-dl {
- margin: 0;
- padding: 0;
-}
-
-dl dd {
- margin-left: 30px;
-}
-
-pre {
- padding: 0;
- margin: 15px -30px;
- padding: 8px;
- line-height: 1.3em;
- padding: 7px 30px;
- background: #eee;
- border-radius: 2px;
- -moz-border-radius: 2px;
- -webkit-border-radius: 2px;
-}
-
-dl pre {
- margin-left: -60px;
- padding-left: 60px;
-}
-
-tt {
- background-color: #ecf0f3;
- color: #222;
- /* padding: 1px 2px; */
-}
-
-tt.xref, a tt {
- background-color: #FBFBFB;
-}
-
-a:hover tt {
- background: #EEE;
-}
diff --git a/docs/_themes/flask_small/theme.conf b/docs/_themes/flask_small/theme.conf
deleted file mode 100755
index 542b462..0000000
--- a/docs/_themes/flask_small/theme.conf
+++ /dev/null
@@ -1,10 +0,0 @@
-[theme]
-inherit = basic
-stylesheet = flasky.css
-nosidebar = true
-pygments_style = flask_theme_support.FlaskyStyle
-
-[options]
-index_logo = ''
-index_logo_height = 120px
-github_fork = ''
diff --git a/docs/_themes/flask_theme_support.py b/docs/_themes/flask_theme_support.py
deleted file mode 100755
index 33f4744..0000000
--- a/docs/_themes/flask_theme_support.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# flasky extensions. flasky pygments style based on tango style
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-class FlaskyStyle(Style):
- background_color = "#f8f8f8"
- default_style = ""
-
- styles = {
- # No corresponding class for the following:
- #Text: "", # class: ''
- Whitespace: "underline #f8f8f8", # class: 'w'
- Error: "#a40000 border:#ef2929", # class: 'err'
- Other: "#000000", # class 'x'
-
- Comment: "italic #8f5902", # class: 'c'
- Comment.Preproc: "noitalic", # class: 'cp'
-
- Keyword: "bold #004461", # class: 'k'
- Keyword.Constant: "bold #004461", # class: 'kc'
- Keyword.Declaration: "bold #004461", # class: 'kd'
- Keyword.Namespace: "bold #004461", # class: 'kn'
- Keyword.Pseudo: "bold #004461", # class: 'kp'
- Keyword.Reserved: "bold #004461", # class: 'kr'
- Keyword.Type: "bold #004461", # class: 'kt'
-
- Operator: "#582800", # class: 'o'
- Operator.Word: "bold #004461", # class: 'ow' - like keywords
-
- Punctuation: "bold #000000", # class: 'p'
-
- # because special names such as Name.Class, Name.Function, etc.
- # are not recognized as such later in the parsing, we choose them
- # to look the same as ordinary variables.
- Name: "#000000", # class: 'n'
- Name.Attribute: "#c4a000", # class: 'na' - to be revised
- Name.Builtin: "#004461", # class: 'nb'
- Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
- Name.Class: "#000000", # class: 'nc' - to be revised
- Name.Constant: "#000000", # class: 'no' - to be revised
- Name.Decorator: "#888", # class: 'nd' - to be revised
- Name.Entity: "#ce5c00", # class: 'ni'
- Name.Exception: "bold #cc0000", # class: 'ne'
- Name.Function: "#000000", # class: 'nf'
- Name.Property: "#000000", # class: 'py'
- Name.Label: "#f57900", # class: 'nl'
- Name.Namespace: "#000000", # class: 'nn' - to be revised
- Name.Other: "#000000", # class: 'nx'
- Name.Tag: "bold #004461", # class: 'nt' - like a keyword
- Name.Variable: "#000000", # class: 'nv' - to be revised
- Name.Variable.Class: "#000000", # class: 'vc' - to be revised
- Name.Variable.Global: "#000000", # class: 'vg' - to be revised
- Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
-
- Number: "#990000", # class: 'm'
-
- Literal: "#000000", # class: 'l'
- Literal.Date: "#000000", # class: 'ld'
-
- String: "#4e9a06", # class: 's'
- String.Backtick: "#4e9a06", # class: 'sb'
- String.Char: "#4e9a06", # class: 'sc'
- String.Doc: "italic #8f5902", # class: 'sd' - like a comment
- String.Double: "#4e9a06", # class: 's2'
- String.Escape: "#4e9a06", # class: 'se'
- String.Heredoc: "#4e9a06", # class: 'sh'
- String.Interpol: "#4e9a06", # class: 'si'
- String.Other: "#4e9a06", # class: 'sx'
- String.Regex: "#4e9a06", # class: 'sr'
- String.Single: "#4e9a06", # class: 's1'
- String.Symbol: "#4e9a06", # class: 'ss'
-
- Generic: "#000000", # class: 'g'
- Generic.Deleted: "#a40000", # class: 'gd'
- Generic.Emph: "italic #000000", # class: 'ge'
- Generic.Error: "#ef2929", # class: 'gr'
- Generic.Heading: "bold #000080", # class: 'gh'
- Generic.Inserted: "#00A000", # class: 'gi'
- Generic.Output: "#888", # class: 'go'
- Generic.Prompt: "#745334", # class: 'gp'
- Generic.Strong: "bold #000000", # class: 'gs'
- Generic.Subheading: "bold #800080", # class: 'gu'
- Generic.Traceback: "bold #a40000", # class: 'gt'
- }
diff --git a/docs/conf.py b/docs/conf.py
deleted file mode 100644
index 26ccdb4..0000000
--- a/docs/conf.py
+++ /dev/null
@@ -1,258 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# flask-s3 documentation build configuration file, created by
-# sphinx-quickstart on Sat Sep 8 13:10:46 2012.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys, os
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath('../'))
-from flask_s3 import __version__
-
-# -- General configuration -----------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = 'flask-S3'
-copyright = '2015, Edward Robinson'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = ".".join(map(str, __version__[0:2]))
-# The full version, including alpha/beta/rc tags.
-release = ".".join(map(str, __version__))
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ['_build']
-
-# The reST default role (used for this markup: `text`) to use for all documents.
-default_role = 'obj'
-# affects stuff wrapped like `this`
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-#pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = 'default'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# " v documentation".
-#html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'flask-s3doc'
-
-
-# -- Options for LaTeX output --------------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
- ('index', 'flask-s3.tex', u'flask-s3 Documentation',
- u'Edward Robinson', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output --------------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- ('index', 'flask-s3', u'flask-s3 Documentation',
- [u'Edward Robinson'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output ------------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- ('index', 'flask-s3', u'flask-s3 Documentation',
- u'Edward Robinson', 'flask-s3', 'Flask-S3 allows you to server your static assets from Amazon S3.',
- 'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-sys.path.append(os.path.abspath('_themes'))
-html_theme_path = ['_themes']
-html_theme = 'flask_small'
-html_theme_options = dict(github_fork='e-dard/flask-s3',
- index_logo=False)
-
-
-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/': None,
- 'http://flask.pocoo.org/docs/': None}
-
-
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index cd790a2..0000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,271 +0,0 @@
-Flask-S3
-********
-.. module:: flask_s3
-
-Flask-S3 allows you to easily serve all your `Flask`_ application's
-static assets from `Amazon S3`_, without having to modify your
-templates.
-
-.. _Amazon S3: http://aws.amazon.com/s3
-.. _Flask: http://flask.pocoo.org/
-
-
-How it works
-============
-
-Flask-S3 has two main functions:
-
- 1. Walk through your application's static folders, gather all your
- static assets together, and upload them to a bucket of your choice
- on S3;
-
- 2. Replace the URLs that Flask's :func:`flask.url_for` function would
- insert into your templates, with URLs that point to the static
- assets in your S3 bucket.
-
-The process of gathering and uploading your static assets to S3 need
-only be done once, and your application does not need to be running for
-it to work. The location of the S3 bucket can be inferred from Flask-S3
-`settings`_ specified in your Flask application, therefore when your
-application is running there need not be any communication between the
-Flask application and Amazon S3.
-
-Internally, every time ``url_for`` is called in one of your
-application's templates, `flask_s3.url_for` is instead invoked. If the
-endpoint provided is deemed to refer to static assets, then the S3 URL
-for the asset specified in the `filename` argument is instead returned.
-Otherwise, `flask_s3.url_for` passes the call on to `flask.url_for`.
-
-
-Installation
-============
-
-If you use pip then installation is simply::
-
- $ pip install flask-s3
-
-or, if you want the latest github version::
-
- $ pip install git+git://github.com/e-dard/flask-s3.git
-
-You can also install Flask-S3 via Easy Install::
-
- $ easy_install flask-s3
-
-Dependencies
-------------
-
-Aside from the obvious dependency of Flask itself, Flask-S3 makes use of
-the `boto`_ library for uploading assets to Amazon S3. **Note**:
-Flask-S3 currently only supports applications that use the `jinja2`_
-templating system.
-
-.. _boto: http://docs.pythonboto.org/en/latest/
-.. _jinja2: http://jinja.pocoo.org/docs/
-
-
-Using Flask-S3
-==============
-
-Flask-S3 is incredibly simple to use. In order to start serving your
-Flask application's assets from Amazon S3, the first thing to do is let
-Flask-S3 know about your :class:`flask.Flask` application object.
-
-.. code-block:: python
-
- from flask import Flask
- from flask_s3 import FlaskS3
-
- app = Flask(__name__)
- app.config['FLASKS3_BUCKET_NAME'] = 'mybucketname'
- s3 = FlaskS3(app)
-
-In many cases, however, one cannot expect a Flask instance to be ready
-at import time, and a common pattern is to return a Flask instance from
-within a function only after other configuration details have been taken
-care of. In these cases, Flask-S3 provides a simple function,
-``init_app``, which takes your application as an argument.
-
-.. code-block:: python
-
- from flask import Flask
- from flask_s3 import FlaskS3
-
- s3 = FlaskS3()
-
- def start_app():
- app = Flask(__name__)
- s3.init_app(app)
- return app
-
-In terms of getting your application to use external Amazon S3 URLs when
-referring to your application's static assets, passing your ``Flask``
-object to the ``FlaskS3`` object is all that needs to be done. Once your
-app is running, any templates that contained relative static asset
-locations, will instead contain hosted counterparts on Amazon S3.
-
-Uploading your Static Assets
-----------------------------
-
-You only need to upload your static assets to Amazon S3 once. Of course,
-if you add or modify your existing assets then you will need to repeat
-the uploading process.
-
-Uploading your static assets from a Python console is as simple as
-follows.
-
-.. code-block:: python
-
- >>> import flask_s3
- >>> from my_application import app
- >>> flask_s3.create_all(app)
- >>>
-
-Flask-S3 will proceed to walk through your application's static assets,
-including those belonging to *registered* `blueprints`_, and upload them
-to your Amazon S3 bucket.
-
-.. _blueprints: http://flask.pocoo.org/docs/blueprints/
-
-Static Asset URLs
-~~~~~~~~~~~~~~~~~
-
-Within your bucket on S3, Flask-S3 replicates the static file hierarchy
-defined in your application object and any registered blueprints. URLs
-generated by Flask-S3 will look like the following:
-
-``/static/foo/style.css`` becomes
-``https://mybucketname.s3.amazonaws.com/static/foo/style.css``, assuming
-that ``mybucketname`` is the name of your S3 bucket, and you have chosen
-to have assets served over HTTPS.
-
-Setting Custom HTTP Headers
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To set custom HTTP headers on the files served from S3 specify what
-headers you want to use with the `FLASKS3_HEADERS` option.
-
-.. code-block:: python
-
- FLASKS3_HEADERS = {
- 'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT',
- 'Cache-Control': 'max-age=86400',
- }
-
-See `Yahoo!`_ more information on how to set good values for your headers.
-
-.. _Yahoo!: http://developer.yahoo.com/performance/rules.html#expires
-
-.. _settings:
-.. _configuration:
-
-Flask-S3 Options
-----------------
-
-Within your Flask application's settings you can provide the following
-settings to control the behaviour of Flask-S3. None of the settings are
-required, but if not present, some will need to be provided when
-uploading assets to S3.
-
-=========================== =============================================================
-`AWS_ACCESS_KEY_ID` Your AWS access key. This does not need to be
- stored in your configuration if you choose to pass
- it directly when uploading your assets.
-`AWS_SECRET_ACCESS_KEY` Your AWS secret key. As with the access key, this
- need not be stored in your configuration if passed
- in to `create_all`.
-`FLASKS3_BUCKET_DOMAIN` The domain part of the URI for your S3 bucket. You
- probably won't need to change this.
- **Default:** ``u's3.amazonaws.com'``
-`FLASKS3_CDN_DOMAIN` AWS makes it easy to attach CloudFront to an S3
- bucket. If you want to use this or another CDN,
- set the base domain here. This is distinct from the
- `FLASKS3_BUCKET_DOMAIN` since it will not include the
- bucket name in the base url.
-`FLASKS3_BUCKET_NAME` The desired name for your Amazon S3 bucket. Note:
- the name will be visible in all your assets' URLs.
-`FLASKS3_REGION` The AWS region to host the bucket in; an empty
- string indicates the default region should be used,
- which is the US Standard region. Possible location
- values include: `'DEFAULT'`, `'EU'`, `'USWest'`,
- `'APSoutheast'`
-`FLASKS3_URL_STYLE` Set to `'host'` to use virtual-host-style URLs,
- e.g. ``bucketname.s3.amazonaws.com``. Set to
- `'path'` to use path-style URLs, e.g.
- ``s3.amazonaws.com/bucketname``.
- **Default:** `'host'`
-`FLASKS3_USE_HTTPS` Specifies whether or not to serve your assets
- stored in S3 over HTTPS.
- Can be overriden per url, by using the `_scheme`
- argument as per usual Flask `url_for`.
- **Default:** `True`
-`FLASKS3_ACTIVE` This setting allows you to toggle whether Flask-S3
- is active or not. When set to `False` your
- application's templates will revert to including
- static asset locations determined by
- `flask.url_for`.
- **Default:** `True`
- **Note**: if you run your application in `debug`_
- mode (and `FLASKS3_DEBUG` is `False` - see next
- item), `FLASKS3_ACTIVE` will be changed to `False`.
- This allows the `FLASKS3_ACTIVE` config variable to
- be the definitive check as to whether `flask_s3.url_for`
- is overriding `flask.url_for`.
-`FLASKS3_DEBUG` By default, Flask-S3 will be switched off when
- running your application in `debug`_ mode, so that
- your templates include static asset locations
- specified by `flask.url_for`. If you wish to enable
- Flask-S3 in debug mode, set this value to `True`.
- **Note**: if `FLASKS3_ACTIVE` is set to `False` then
- templates will always include asset locations
- specified by `flask.url_for`.
-`FLASKS3_HEADERS` Sets custom headers to be sent with each file to S3.
- **Default:** `{}`
-`FLASKS3_FILEPATH_HEADERS` Sets custom headers for files whose filepath matches
- certain regular expressions. (Note that this cannot
- be used for CORS, that must be set per S3 bucket
- using an XML config string.) E.g. to add custom
- metadata when serving text files, set this to:
- `{r'\.txt$':`
- ` {'Texted-Up-By': 'Mister Foo'}`
- `}`
- **Default:** `{}`
-`FLASKS3_ONLY_MODIFIED` Only upload files that have been modified since last
- upload to S3. SHA-1 file hashes are used to compute
- file changes. You can delete `.file-hashes` from
- your S3 bucket to force all files to upload again.
- Defaults to `False`.
-`FLASKS3_GZIP` Compress all assets using GZIP and set the
- corresponding Content-Type and Content-Encoding
- headers on the S3 files. Defaults to `False`.
-`FLASKS3_GZIP_ONLY_EXTS` A list of file extensions that should be gzipped.
- ``FLASKS3_GZIP`` should be ``True`` for this to take effect.
- If mentioned and non-empty, then only files with the
- specified extensions are gzipped.
- Defaults to empty list, meaning all files will be
- gzipped.
- Eg:- ``['.js', '.css']`` will gzip only js and css files.
-`FLASKS3_FORCE_MIMETYPE` Always set the Content-Type header on the S3 files
- irrespective of gzipping. Defaults to `False`.
-=========================== =============================================================
-
-.. _debug: http://flask.pocoo.org/docs/config/#configuration-basics
-
-
-API Documentation
-=================
-
-Flask-S3 is a very simple extension. The few exposed objects, methods
-and functions are as follows.
-
-The FlaskS3 Object
-------------------
-.. autoclass:: FlaskS3
-
- .. automethod:: init_app
-
-S3 Interaction
---------------
-.. autofunction:: create_all
-
-.. autofunction:: url_for
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index bca5341..0000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,190 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set BUILDDIR=_build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
-set I18NSPHINXOPTS=%SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
- set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
- set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
- :help
- echo.Please use `make ^<target^>` where ^<target^> is one of
- echo. html to make standalone HTML files
- echo. dirhtml to make HTML files named index.html in directories
- echo. singlehtml to make a single large HTML file
- echo. pickle to make pickle files
- echo. json to make JSON files
- echo. htmlhelp to make HTML files and a HTML help project
- echo. qthelp to make HTML files and a qthelp project
- echo. devhelp to make HTML files and a Devhelp project
- echo. epub to make an epub
- echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
- echo. text to make text files
- echo. man to make manual pages
- echo. texinfo to make Texinfo files
- echo. gettext to make PO message catalogs
- echo. changes to make an overview over all changed/added/deprecated items
- echo. linkcheck to check all external links for integrity
- echo. doctest to run all doctests embedded in the documentation if enabled
- goto end
-)
-
-if "%1" == "clean" (
- for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
- del /q /s %BUILDDIR%\*
- goto end
-)
-
-if "%1" == "html" (
- %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/html.
- goto end
-)
-
-if "%1" == "dirhtml" (
- %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
- goto end
-)
-
-if "%1" == "singlehtml" (
- %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
- goto end
-)
-
-if "%1" == "pickle" (
- %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the pickle files.
- goto end
-)
-
-if "%1" == "json" (
- %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the JSON files.
- goto end
-)
-
-if "%1" == "htmlhelp" (
- %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run HTML Help Workshop with the ^
-.hhp project file in %BUILDDIR%/htmlhelp.
- goto end
-)
-
-if "%1" == "qthelp" (
- %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run "qcollectiongenerator" with the ^
-.qhcp project file in %BUILDDIR%/qthelp, like this:
- echo.^> qcollectiongenerator %BUILDDIR%\qthelp\flask-s3.qhcp
- echo.To view the help file:
- echo.^> assistant -collectionFile %BUILDDIR%\qthelp\flask-s3.ghc
- goto end
-)
-
-if "%1" == "devhelp" (
- %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished.
- goto end
-)
-
-if "%1" == "epub" (
- %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The epub file is in %BUILDDIR%/epub.
- goto end
-)
-
-if "%1" == "latex" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "text" (
- %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The text files are in %BUILDDIR%/text.
- goto end
-)
-
-if "%1" == "man" (
- %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The manual pages are in %BUILDDIR%/man.
- goto end
-)
-
-if "%1" == "texinfo" (
- %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
- goto end
-)
-
-if "%1" == "gettext" (
- %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
- goto end
-)
-
-if "%1" == "changes" (
- %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
- if errorlevel 1 exit /b 1
- echo.
- echo.The overview file is in %BUILDDIR%/changes.
- goto end
-)
-
-if "%1" == "linkcheck" (
- %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
- if errorlevel 1 exit /b 1
- echo.
- echo.Link check complete; look for any errors in the above output ^
-or in %BUILDDIR%/linkcheck/output.txt.
- goto end
-)
-
-if "%1" == "doctest" (
- %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
- if errorlevel 1 exit /b 1
- echo.
- echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
- goto end
-)
-
-:end
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index d5b2388..0000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Flask
-boto3
-
diff --git a/example/example/__init__.py b/example/example/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/example/example/app.py b/example/example/app.py
deleted file mode 100644
index 8abcfeb..0000000
--- a/example/example/app.py
+++ /dev/null
@@ -1,20 +0,0 @@
-
-from flask import Flask, render_template_string
-from flask_s3 import FlaskS3, create_all
-
-app = Flask(__name__)
-app.config['S3_BUCKET_NAME'] = 'mybucketname'
-app.config['USE_S3_DEBUG'] = True
-
-s3 = FlaskS3(app)
-
-@app.route('/')
-def index():
- template_str = """{{ url_for('static', filename="foo.js") }}"""
- return render_template_string(template_str)
-
-def upload_all():
- create_all(app, user='MY_AWS_ID', password='MY_AWS_SECRET')
-
-if __name__ == '__main__':
- app.run(debug=True)
\ No newline at end of file
diff --git a/example/example/static/foo.js b/example/example/static/foo.js
deleted file mode 100644
index 5f7a80a..0000000
--- a/example/example/static/foo.js
+++ /dev/null
@@ -1 +0,0 @@
-exciting ex.js
\ No newline at end of file
diff --git a/flask_s3.py b/flask_s3/__init__.py
similarity index 95%
rename from flask_s3.py
rename to flask_s3/__init__.py
index 9d043e7..256db2d 100644
--- a/flask_s3.py
+++ b/flask_s3/__init__.py
@@ -1,23 +1,22 @@
+import io
+import os
+import re
import gzip
-import hashlib
import json
+import hashlib
import logging
-import os
-import re
-
-try:
- from cStringIO import StringIO
-except ImportError:
- from io import StringIO
import mimetypes
+
from collections import defaultdict
+
import boto3
import boto3.exceptions
from botocore.exceptions import ClientError
+
+from tqdm import tqdm
from flask import current_app
from flask import url_for as flask_url_for
-import six
logger = logging.getLogger('flask_s3')
@@ -49,8 +48,6 @@
'FLASKS3_FORCE_MIMETYPE': False,
'FLASKS3_PREFIX': ''}
-__version__ = (0, 3, 2)
-
def _get_statics_prefix(app):
"""
@@ -165,15 +162,13 @@ def url_for(endpoint, **values):
return flask_url_for(endpoint, **values)
-def _bp_static_url(blueprint):
+def _bp_static_url(blueprint) -> str:
""" builds the absolute url path for a blueprint's static folder """
- u = six.u('%s%s' % (blueprint.url_prefix or '', blueprint.static_url_path or ''))
- return u
-
+ return f"{blueprint.url_prefix or ''}{blueprint.static_url_path or ''}"
def _gather_files(app, hidden, filepath_filter_regex=None):
""" Gets all files in static folders and returns in dict."""
- dirs = [(six.text_type(app.static_folder), app.static_url_path)]
+ dirs = [(app.static_folder, app.static_url_path)]
if hasattr(app, 'blueprints'):
blueprints = app.blueprints.values()
bp_details = lambda x: (x.static_folder, _bp_static_url(x))
@@ -235,7 +230,7 @@ def _write_files(s3, app, static_url_loc, static_folder, files, bucket,
gzip_include_only = app.config.get('FLASKS3_GZIP_ONLY_EXTS')
new_hashes = []
static_folder_rel = _path_to_relative_url(static_folder)
- for file_path in files:
+ for file_path in tqdm(files, desc="Uploading: {}".format(static_folder)):
per_file_should_gzip = should_gzip
asset_loc = _path_to_relative_url(file_path)
full_key_name = _static_folder_path(static_url_loc, static_folder_rel,
@@ -259,9 +254,9 @@ def _write_files(s3, app, static_url_loc, static_folder, files, bucket,
# configured regular expressions.
filepath_headers = app.config.get('FLASKS3_FILEPATH_HEADERS')
if filepath_headers:
- for filepath_regex, headers in six.iteritems(filepath_headers):
+ for filepath_regex, headers in filepath_headers.items():
if re.search(filepath_regex, file_path):
- for header, value in six.iteritems(headers):
+ for header, value in headers.items():
h[header] = value
# check for extension, only if there are extensions provided
@@ -283,12 +278,11 @@ def _write_files(s3, app, static_url_loc, static_folder, files, bucket,
logger.warn("Unable to detect mimetype for %s" %
file_path)
- file_mode = 'rb' if six.PY3 else 'r'
- with open(file_path, file_mode) as fp:
+ with open(file_path, 'rb') as fp:
merged_dicts = merge_two_dicts(get_setting('FLASKS3_HEADERS', app), h)
metadata, params = split_metadata_params(merged_dicts)
if per_file_should_gzip:
- compressed = six.BytesIO()
+ compressed = io.BytesIO()
z = gzip.GzipFile(os.path.basename(file_path), 'wb', 9,
compressed)
z.write(fp.read())
@@ -311,7 +305,7 @@ def _write_files(s3, app, static_url_loc, static_folder, files, bucket,
def _upload_files(s3, app, files_, bucket, hashes=None):
new_hashes = []
prefix = _get_statics_prefix(app)
- for (static_folder, static_url), names in six.iteritems(files_):
+ for (static_folder, static_url), names in files_.items():
static_upload_url = '%s/%s' % (prefix.rstrip('/'), static_url.lstrip('/'))
new_hashes.extend(_write_files(s3, app, static_upload_url, static_folder,
names, bucket, hashes=hashes))
@@ -402,6 +396,13 @@ def create_all(app, user=None, password=None, bucket_name=None,
"""
user = user or app.config.get('AWS_ACCESS_KEY_ID')
password = password or app.config.get('AWS_SECRET_ACCESS_KEY')
+
+ if not user:
+ raise ValueError("you must set 'user' or 'AWS_ACCESS_KEY_ID'")
+
+ if not password:
+ raise ValueError("you must set 'password' or 'AWS_SECRET_ACCESS_KEY'")
+
bucket_name = bucket_name or app.config.get('FLASKS3_BUCKET_NAME')
if not bucket_name:
raise ValueError("No bucket name provided.")
diff --git a/requirements.txt b/requirements.txt
index da4c3eb..86485de 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,3 @@
+tqdm
Flask
boto3
-six
-coverage
-coveralls
-nose
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..600eab2
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,28 @@
+[metadata]
+name = Flask-S3-ng
+description = New-generation version of the Flask-S3 project. Seamlessly serve the static files of your Flask app from Amazon S3
+version = file: VERSION
+long_description = file: README.md
+long_description_content_type = text/markdown
+license = BSD
+classifiers =
+ License :: OSI Approved :: BSD License
+ Programming Language :: Python :: 3.8
+ Environment :: Web Environment
+ Intended Audience :: Developers
+ License :: Other/Proprietary License
+ Operating System :: OS Independent
+ Programming Language :: Python
+ Topic :: Internet :: WWW/HTTP :: Dynamic Content
+ Topic :: Software Development :: Libraries :: Python Modules
+author = Edward Robinson
+author_email = hi@edd.io
+maintainer = cr0hn
+maintainer_email = cr0hn@cr0hn.com
+url = http://github.com/cr0hn/flask-s3
+
+[options]
+zip_safe = True
+include_package_data = True
+packages = find:
+
diff --git a/setup.py b/setup.py
index f84076a..b471432 100644
--- a/setup.py
+++ b/setup.py
@@ -4,55 +4,12 @@
Easily serve your static files from Amazon S3.
"""
+
from setuptools import setup
-# Figure out the version; this could be done by importing the
-# module, though that requires dependencies to be already installed,
-# which may not be the case when processing a pip requirements
-# file, for example.
-def parse_version(asignee):
- import os, re
- here = os.path.dirname(os.path.abspath(__file__))
- version_re = re.compile(
- r'%s = (\(.*?\))' % asignee)
- with open(os.path.join(here, 'flask_s3.py')) as fp:
- for line in fp:
- match = version_re.search(line)
- if match:
- version = eval(match.group(1))
- return ".".join(map(str, version))
- else:
- raise Exception("cannot find version")
-version = parse_version('__version__')
-# above taken from miracle2k/flask-assets
+requirements = open("requirements.txt", "r").read().splitlines()
setup(
- name='Flask-S3',
- version=version,
- url='http://github.com/e-dard/flask-s3',
- license='WTFPL',
- author='Edward Robinson',
- author_email='hi@edd.io',
- description='Seamlessly serve the static files of your Flask app from Amazon S3',
- long_description=__doc__,
- py_modules=['flask_s3'],
- zip_safe=False,
- include_package_data=True,
- platforms='any',
- install_requires=[
- 'Flask',
- 'Boto3>=1.1.1',
- 'six'
- ],
- tests_require=['nose', 'mock'],
- classifiers=[
- 'Environment :: Web Environment',
- 'Intended Audience :: Developers',
- 'License :: Other/Proprietary License',
- 'Operating System :: OS Independent',
- 'Programming Language :: Python',
- 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
- 'Topic :: Software Development :: Libraries :: Python Modules'
- ],
- test_suite = 'nose.collector'
+ install_requires=requirements
)
+
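One caveat with the simplified setup.py: the requirements.txt handle is opened without being closed, and the file has to be available next to setup.py at install time (for example, shipped in the sdist). A hedged alternative sketch with the same behaviour, using a context manager:

    # Sketch of an equivalent setup.py that closes the file handle;
    # requirements.txt must still be included in the source distribution.
    from setuptools import setup

    with open("requirements.txt") as fh:
        requirements = fh.read().splitlines()

    setup(install_requires=requirements)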
diff --git a/test_flask_static.py b/test_flask_static.py
deleted file mode 100644
index 022da88..0000000
--- a/test_flask_static.py
+++ /dev/null
@@ -1,467 +0,0 @@
-import ntpath
-import os
-import sys
-import tempfile
-import unittest
-from itertools import count
-
-try:
- from unittest.mock import Mock, patch, call, mock_open
-except ImportError:
- from mock import Mock, patch, call, mock_open
-from flask import Flask, render_template_string, Blueprint
-import six
-import flask_s3
-from flask_s3 import FlaskS3
-
-
-class FlaskStaticTest(unittest.TestCase):
- def setUp(self):
- self.app = Flask(__name__)
- self.app.testing = True
-
- @self.app.route('/')
- def a(url_for_string):
- return render_template_string(url_for_string)
-
- def test_jinja_url_for(self):
- """ Tests that the jinja global gets assigned correctly. """
- self.assertNotEqual(self.app.jinja_env.globals['url_for'],
- flask_s3.url_for)
- # then we initialise the extension
- FlaskS3(self.app)
- self.assertEquals(self.app.jinja_env.globals['url_for'],
- flask_s3.url_for)
-
- # Temporarily commented out
- """
- def test_config(self):
- "" Tests configuration vars exist. ""
- FlaskS3(self.app)
- defaults = ('S3_USE_HTTP', 'USE_S3', 'USE_S3_DEBUG',
- 'S3_BUCKET_DOMAIN', 'S3_CDN_DOMAIN',
- 'S3_USE_CACHE_CONTROL', 'S3_HEADERS',
- 'S3_URL_STYLE')
- for default in defaults:
- self.assertIn(default, self.app.config)
- """
-
-
-class UrlTests(unittest.TestCase):
- def setUp(self):
- self.app = Flask(__name__)
- self.app.testing = True
- self.app.config['FLASKS3_BUCKET_NAME'] = 'foo'
- self.app.config['FLASKS3_USE_HTTPS'] = True
- self.app.config['FLASKS3_BUCKET_DOMAIN'] = 's3.amazonaws.com'
- self.app.config['FLASKS3_CDN_DOMAIN'] = ''
- self.app.config['FLASKS3_OVERRIDE_TESTING'] = True
-
- @self.app.route('/')
- def a(url_for_string):
- return render_template_string(url_for_string)
-
- @self.app.route('/')
- def b():
- return render_template_string("{{url_for('b')}}")
-
- bp = Blueprint('admin', __name__, static_folder='admin-static')
-
- @bp.route('/')
- def c():
- return render_template_string("{{url_for('b')}}")
-
- self.app.register_blueprint(bp)
-
- def client_get(self, ufs):
- FlaskS3(self.app)
- client = self.app.test_client()
- import six
- if six.PY3:
- return client.get('/%s' % ufs)
- elif six.PY2:
- return client.get('/{}'.format(ufs))
-
- def test_required_config(self):
- """
- Tests that ValueError raised if bucket address not provided.
- """
- raises = False
-
- del self.app.config['FLASKS3_BUCKET_NAME']
-
- try:
- ufs = "{{url_for('static', filename='bah.js')}}"
- self.client_get(ufs)
- except ValueError:
- raises = True
- self.assertTrue(raises)
-
- def test_url_for(self):
- """
- Tests that correct url formed for static asset in self.app.
- """
- # non static endpoint url_for in template
- self.assertEquals(self.client_get('').data, six.b('/'))
- # static endpoint url_for in template
- ufs = "{{url_for('static', filename='bah.js')}}"
- exp = 'https://foo.s3.amazonaws.com/static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_per_url_scheme(self):
- """
- Tests that if _scheme is passed in the url_for arguments, that
- scheme is used instead of configuration scheme.
- """
- # check _scheme overriden per url
- ufs = "{{url_for('static', filename='bah.js', _scheme='http')}}"
- exp = 'http://foo.s3.amazonaws.com/static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_handles_special_args(self):
- """
- Tests that if any special arguments are passed, they are ignored, and
- removed from generated url. As of this writing these are the special
- args: _external, _anchor, _method (from flask's url_for)
- """
- # check _external, _anchor, and _method are ignored, and not added
- # to the url
- ufs = "{{url_for('static', filename='bah.js',\
- _external=True, _anchor='foobar', _method='GET')}}"
- exp = 'https://foo.s3.amazonaws.com/static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_debug(self):
- """Tests Flask-S3 behaviour in debug mode."""
- self.app.debug = True
- # static endpoint url_for in template
- ufs = "{{url_for('static', filename='bah.js')}}"
- exp = '/static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_debug_override(self):
- """Tests Flask-S3 behavior in debug mode with USE_S3_DEBUG turned on."""
- self.app.debug = True
- self.app.config['FLASKS3_DEBUG'] = True
- ufs = "{{url_for('static', filename='bah.js')}}"
- exp = 'https://foo.s3.amazonaws.com/static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_blueprint(self):
- """
- Tests that correct url formed for static asset in blueprint.
- """
- # static endpoint url_for in template
- ufs = "{{url_for('admin.static', filename='bah.js')}}"
- exp = 'https://foo.s3.amazonaws.com/admin-static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_cdn_domain(self):
- self.app.config['FLASKS3_CDN_DOMAIN'] = 'foo.cloudfront.net'
- ufs = "{{url_for('static', filename='bah.js')}}"
- exp = 'https://foo.cloudfront.net/static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_url_style_path(self):
- """Tests that the URL returned uses the path style."""
- self.app.config['FLASKS3_URL_STYLE'] = 'path'
- ufs = "{{url_for('static', filename='bah.js')}}"
- exp = 'https://s3.amazonaws.com/foo/static/bah.js'
- self.assertEquals(self.client_get(ufs).data, six.b(exp))
-
- def test_url_for_url_style_invalid(self):
- """Tests that an exception is raised for invalid URL styles."""
- self.app.config['FLASKS3_URL_STYLE'] = 'balderdash'
- ufs = "{{url_for('static', filename='bah.js')}}"
- self.assertRaises(ValueError, self.client_get, six.b(ufs))
-
-class S3TestsWithCustomEndpoint(unittest.TestCase):
- def setUp(self):
- self.app = Flask(__name__)
- self.app.testing = True
- self.app.config['FLASKS3_BUCKET_NAME'] = 'thebucket'
- self.app.config['FLASKS3_REGION'] = 'theregion'
- self.app.config['AWS_ACCESS_KEY_ID'] = 'thekeyid'
- self.app.config['AWS_SECRET_ACCESS_KEY'] = 'thesecretkey'
- self.app.config['FLASKS3_ENDPOINT_URL'] = 'https://minio.local:9000/'
-
- @patch('flask_s3.boto3')
- def test__custom_endpoint_is_passed_to_boto(self, mock_boto3):
- flask_s3.create_all(self.app)
-
- mock_boto3.client.assert_called_once_with("s3",
- region_name='theregion',
- aws_access_key_id='thekeyid',
- aws_secret_access_key='thesecretkey',
- endpoint_url='https://minio.local:9000/')
-
-class S3Tests(unittest.TestCase):
- def setUp(self):
- self.app = Flask(__name__)
- self.app.testing = True
- self.app.config['FLASKS3_BUCKET_NAME'] = 'foo'
- self.app.config['FLASKS3_USE_CACHE_CONTROL'] = True
- self.app.config['FLASKS3_CACHE_CONTROL'] = 'cache instruction'
- self.app.config['FLASKS3_CACHE_CONTROL'] = '3600'
- self.app.config['FLASKS3_HEADERS'] = {
- 'Expires': 'Thu, 31 Dec 2037 23:59:59 GMT',
- 'Content-Encoding': 'gzip',
- }
- self.app.config['FLASKS3_ONLY_MODIFIED'] = False
-
- def test__bp_static_url(self):
- """ Tests test__bp_static_url """
- bps = [Mock(static_url_path='/foo', url_prefix=None),
- Mock(static_url_path=None, url_prefix='/pref'),
- Mock(static_url_path='/b/bar', url_prefix='/pref'),
- Mock(static_url_path=None, url_prefix=None)]
- expected = [six.u('/foo'), six.u('/pref'), six.u('/pref/b/bar'), six.u('')]
- self.assertEquals(expected, [flask_s3._bp_static_url(x) for x in bps])
-
- def test__cache_config(self):
- """ Test that cache headers are set correctly. """
- new_app = Flask("test_cache_param")
- new_app.config['FLASKS3_USE_CACHE_CONTROL'] = True
- new_app.config['FLASKS3_CACHE_CONTROL'] = '3600'
- flask_s3.FlaskS3(new_app)
- expected = {'Cache-Control': '3600'}
- self.assertEqual(expected, new_app.config['FLASKS3_HEADERS'])
-
- @patch('os.walk')
- @patch('os.path.isdir')
- def test__gather_files(self, path_mock, os_mock):
- """ Tests the _gather_files function """
- self.app.static_folder = '/home'
- self.app.static_url_path = '/static'
-
- bp_a = Mock(static_folder='/home/bar', static_url_path='/a/bar',
- url_prefix=None)
- bp_b = Mock(static_folder='/home/zoo', static_url_path='/b/bar',
- url_prefix=None)
- bp_c = Mock(static_folder=None)
-
- self.app.blueprints = {'a': bp_a, 'b': bp_b, 'c': bp_c}
- dirs = {'/home': [('/home', None, ['.a'])],
- '/home/bar': [('/home/bar', None, ['b'])],
- '/home/zoo': [('/home/zoo', None, ['c']),
- ('/home/zoo/foo', None, ['d', 'e'])]}
- os_mock.side_effect = dirs.get
- path_mock.return_value = True
-
- expected = {('/home/bar', six.u('/a/bar')): ['/home/bar/b'],
- ('/home/zoo', six.u('/b/bar')): ['/home/zoo/c',
- '/home/zoo/foo/d',
- '/home/zoo/foo/e']}
- actual = flask_s3._gather_files(self.app, False)
- self.assertEqual(expected, actual)
-
- expected[('/home', six.u('/static'))] = ['/home/.a']
- actual = flask_s3._gather_files(self.app, True)
- self.assertEqual(expected, actual)
-
- @patch('os.walk')
- @patch('os.path.isdir')
- def test__gather_files_no_blueprints_no_files(self, path_mock, os_mock):
- """
- Tests that _gather_files works when there are no blueprints and
- no files available in the static folder
- """
- self.app.static_folder = '/foo'
- dirs = {'/foo': [('/foo', None, [])]}
- os_mock.side_effect = dirs.get
- path_mock.return_value = True
-
- actual = flask_s3._gather_files(self.app, False)
- self.assertEqual({}, actual)
-
- @patch('os.walk')
- @patch('os.path.isdir')
- def test__gather_files_bad_folder(self, path_mock, os_mock):
- """
- Tests that _gather_files when static folder is not valid folder
- """
- self.app.static_folder = '/bad'
- dirs = {'/bad': []}
- os_mock.side_effect = dirs.get
- path_mock.return_value = False
-
- actual = flask_s3._gather_files(self.app, False)
- self.assertEqual({}, actual)
-
- @patch('os.path.splitdrive', side_effect=ntpath.splitdrive)
- @patch('os.path.join', side_effect=ntpath.join)
- def test__path_to_relative_url_win(self, join_mock, split_mock):
- """ Tests _path_to_relative_url on Windows system """
- input_ = [r'C:\foo\bar\baz.css', r'C:\foo\bar.css',
- r'\foo\bar.css']
- expected = ['/foo/bar/baz.css', '/foo/bar.css', '/foo/bar.css']
- for in_, exp in zip(input_, expected):
- actual = flask_s3._path_to_relative_url(in_)
- self.assertEquals(exp, actual)
-
- @unittest.skipIf(sys.version_info < (3, 0),
- "not supported in this version")
- @patch('flask_s3.boto3')
- @patch("{}.open".format("builtins"), mock_open(read_data='test'))
- def test__write_files(self, key_mock):
- """ Tests _write_files """
- static_url_loc = '/foo/static'
- static_folder = '/home/z'
- assets = ['/home/z/bar.css', '/home/z/foo.css']
- exclude = ['/foo/static/foo.css', '/foo/static/foo/bar.css']
- # we expect foo.css to be excluded and not uploaded
- expected = [call(bucket=None, name=six.u('/foo/static/bar.css')),
- call().set_metadata('Cache-Control', 'cache instruction'),
- call().set_metadata('Expires', 'Thu, 31 Dec 2037 23:59:59 GMT'),
- call().set_metadata('Content-Encoding', 'gzip'),
- call().set_contents_from_filename('/home/z/bar.css')]
- flask_s3._write_files(key_mock, self.app, static_url_loc, static_folder, assets,
- None, exclude)
- self.assertLessEqual(expected, key_mock.mock_calls)
-
- @patch('flask_s3.boto3')
- def test__write_only_modified(self, key_mock):
- """ Test that we only upload files that have changed """
- self.app.config['FLASKS3_ONLY_MODIFIED'] = True
- static_folder = tempfile.mkdtemp()
- static_url_loc = static_folder
- filenames = [os.path.join(static_folder, f) for f in ['foo.css', 'bar.css']]
- expected = []
-
- data_iter = count()
-
- for filename in filenames:
- # Write random data into files
- with open(filename, 'wb') as f:
- if six.PY3:
- data = str(data_iter)
- f.write(data.encode())
- else:
- data = str(data_iter.next())
- f.write(data)
-
- # We expect each file to be uploaded
- expected.append(call.put_object(ACL='public-read',
- Bucket=None,
- Key=filename.lstrip("/"),
- Body=data,
- Metadata={},
- Expires='Thu, 31 Dec 2037 23:59:59 GMT',
- ContentEncoding='gzip'))
-
- files = {(static_url_loc, static_folder): filenames}
-
- hashes = flask_s3._upload_files(key_mock, self.app, files, None)
-
- # All files are uploaded and hashes are returned
- self.assertLessEqual(len(expected), len(key_mock.mock_calls))
- self.assertEquals(len(hashes), len(filenames))
-
- # We now modify the second file
- with open(filenames[1], 'wb') as f:
- data = str(next(data_iter))
- if six.PY2:
- f.write(data)
- else:
- f.write(data.encode())
-
- # We expect only this file to be uploaded
- expected.append(call.put_object(ACL='public-read',
- Bucket=None,
- Key=filenames[1].lstrip("/"),
- Body=data,
- Metadata={},
- Expires='Thu, 31 Dec 2037 23:59:59 GMT',
- ContentEncoding='gzip'))
-
- new_hashes = flask_s3._upload_files(key_mock, self.app, files, None,
- hashes=dict(hashes))
- #import pprint
-
- #pprint.pprint(zip(expected, key_mock.mock_calls))
- self.assertEquals(len(expected), len(key_mock.mock_calls))
-
- @patch('flask_s3.boto3')
- def test_write_binary_file(self, key_mock):
- """ Tests _write_files """
- self.app.config['FLASKS3_ONLY_MODIFIED'] = True
- static_folder = tempfile.mkdtemp()
- static_url_loc = static_folder
- filenames = [os.path.join(static_folder, 'favicon.ico')]
-
- for filename in filenames:
- # Write random data into files
- with open(filename, 'wb') as f:
- f.write(bytearray([120, 3, 255, 0, 100]))
-
- flask_s3._write_files(key_mock, self.app, static_url_loc, static_folder, filenames, None)
-
- expected = {
- 'ACL': 'public-read',
- 'Bucket': None,
- 'Metadata': {},
- 'ContentEncoding': 'gzip',
- 'Body': b'x\x03\xff\x00d',
- 'Key': filenames[0][1:],
- 'Expires': 'Thu, 31 Dec 2037 23:59:59 GMT'}
- name, args, kwargs = key_mock.mock_calls[0]
-
- self.assertEquals(expected, kwargs)
-
- def test_static_folder_path(self):
- """ Tests _static_folder_path """
- inputs = [('/static', '/home/static', '/home/static/foo.css'),
- ('/foo/static', '/home/foo/s', '/home/foo/s/a/b.css'),
- ('/bar/', '/bar/', '/bar/s/a/b.css')]
- expected = [six.u('/static/foo.css'), six.u('/foo/static/a/b.css'),
- six.u('/bar/s/a/b.css')]
- for i, e in zip(inputs, expected):
- self.assertEquals(e, flask_s3._static_folder_path(*i))
-
- @patch('flask_s3.boto3')
- def test__bucket_acl_not_set(self, mock_boto3):
- flask_s3.create_all(self.app, put_bucket_acl=False)
- self.assertFalse(mock_boto3.client().put_bucket_acl.called,
- "put_bucket_acl was called!")
-
- @patch('flask_s3._write_files')
- def test__upload_uses_prefix(self, mock_write_files):
- s3_mock = Mock()
- local_path = '/local_path/static'
- file_paths = ['/local_path/static/file1', '/local_path/static/file2']
- files = {(local_path, '/static'): file_paths}
-
- flask_s3._upload_files(s3_mock, self.app, files, 's3_bucket')
- expected_call = call(
- s3_mock, self.app, '/static', local_path, file_paths, 's3_bucket', hashes=None)
- self.assertEquals(mock_write_files.call_args_list, [expected_call])
-
- for supported_prefix in ['foo', '/foo', 'foo/', '/foo/']:
- mock_write_files.reset_mock()
- self.app.config['FLASKS3_PREFIX'] = supported_prefix
- flask_s3._upload_files(s3_mock, self.app, files, 's3_bucket')
- expected_call = call(s3_mock, self.app, '/foo/static',
- local_path, file_paths, 's3_bucket', hashes=None)
- self.assertEquals(mock_write_files.call_args_list, [expected_call])
-
- @patch('flask_s3.current_app')
- def test__url_for_uses_prefix(self, mock_current_app):
- bucket_path = 'foo.s3.amazonaws.com'
- flask_s3.FlaskS3(self.app)
- mock_current_app.config = self.app.config
- mock_bind = mock_current_app.url_map.bind
-
- flask_s3.url_for('static', **{'filename': 'test_file.txt'})
- self.assertEqual(mock_bind.call_args_list, [call(bucket_path, url_scheme='https')])
-
- for supported_prefix in ['bar', '/bar', 'bar/', '/bar/']:
- mock_bind.reset_mock()
- self.app.config['FLASKS3_PREFIX'] = supported_prefix
- flask_s3.url_for('static', **{'filename': 'test_file.txt'})
- expected_path = '%s/%s' % (bucket_path, 'bar')
- self.assertEqual(mock_bind.call_args_list,
- [call(expected_path, url_scheme='https')])
-
-
-if __name__ == '__main__':
- unittest.main()
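The removed test module depended on six (and the old nose tooling), which are no longer installed. As an illustration of how part of that coverage could be recreated against the new flask_s3 package with only the standard library, a small sketch of the _bp_static_url check; the test names are illustrative:

    # Illustrative replacement for the removed _bp_static_url test,
    # using unittest.mock and plain str literals instead of six.u().
    import unittest
    from unittest.mock import Mock

    import flask_s3


    class BpStaticUrlTest(unittest.TestCase):
        def test_bp_static_url(self):
            bps = [Mock(static_url_path='/foo', url_prefix=None),
                   Mock(static_url_path=None, url_prefix='/pref'),
                   Mock(static_url_path='/b/bar', url_prefix='/pref'),
                   Mock(static_url_path=None, url_prefix=None)]
            expected = ['/foo', '/pref', '/pref/b/bar', '']
            self.assertEqual(expected,
                             [flask_s3._bp_static_url(bp) for bp in bps])


    if __name__ == '__main__':
        unittest.main()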