stable version

master
parent 6dc00f7b2b
commit b203a417ae
  1. BIN
      __pycache__/run.cpython-37.pyc
  2. 3
      backend/requirements.txt
  3. 0
      backend/venv/lib/python3.7/site-packages/Flask_PyMongo-2.3.0.dist-info/INSTALLER
  4. 24
      backend/venv/lib/python3.7/site-packages/Flask_PyMongo-2.3.0.dist-info/LICENSE
  5. 43
      backend/venv/lib/python3.7/site-packages/Flask_PyMongo-2.3.0.dist-info/METADATA
  6. 25
      backend/venv/lib/python3.7/site-packages/Flask_PyMongo-2.3.0.dist-info/RECORD
  7. 2
      backend/venv/lib/python3.7/site-packages/Flask_PyMongo-2.3.0.dist-info/WHEEL
  8. 1
      backend/venv/lib/python3.7/site-packages/Flask_PyMongo-2.3.0.dist-info/pbr.json
  9. 1
      backend/venv/lib/python3.7/site-packages/Flask_PyMongo-2.3.0.dist-info/top_level.txt
  10. 54
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.12.2.dist-info/DESCRIPTION.rst
  11. 29
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.12.2.dist-info/LICENSE.txt
  12. 83
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.12.2.dist-info/METADATA
  13. 95
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.12.2.dist-info/RECORD
  14. 1
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.12.2.dist-info/metadata.json
  15. 1
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.15.4.dist-info/INSTALLER
  16. 28
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.15.4.dist-info/LICENSE.rst
  17. 133
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.15.4.dist-info/METADATA
  18. 119
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.15.4.dist-info/RECORD
  19. 6
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.15.4.dist-info/WHEEL
  20. 0
      backend/venv/lib/python3.7/site-packages/Werkzeug-0.15.4.dist-info/top_level.txt
  21. 1170
      backend/venv/lib/python3.7/site-packages/bson/__init__.py
  22. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/__init__.cpython-37.pyc
  23. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/binary.cpython-37.pyc
  24. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/code.cpython-37.pyc
  25. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/codec_options.cpython-37.pyc
  26. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/dbref.cpython-37.pyc
  27. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/decimal128.cpython-37.pyc
  28. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/errors.cpython-37.pyc
  29. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/int64.cpython-37.pyc
  30. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/json_util.cpython-37.pyc
  31. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/max_key.cpython-37.pyc
  32. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/min_key.cpython-37.pyc
  33. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/objectid.cpython-37.pyc
  34. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/py3compat.cpython-37.pyc
  35. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/raw_bson.cpython-37.pyc
  36. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/regex.cpython-37.pyc
  37. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/son.cpython-37.pyc
  38. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/timestamp.cpython-37.pyc
  39. BIN
      backend/venv/lib/python3.7/site-packages/bson/__pycache__/tz_util.cpython-37.pyc
  40. BIN
      backend/venv/lib/python3.7/site-packages/bson/_cbson.cpython-37m-darwin.so
  41. 242
      backend/venv/lib/python3.7/site-packages/bson/binary.py
  42. 99
      backend/venv/lib/python3.7/site-packages/bson/code.py
  43. 334
      backend/venv/lib/python3.7/site-packages/bson/codec_options.py
  44. 135
      backend/venv/lib/python3.7/site-packages/bson/dbref.py
  45. 335
      backend/venv/lib/python3.7/site-packages/bson/decimal128.py
  46. 40
      backend/venv/lib/python3.7/site-packages/bson/errors.py
  47. 34
      backend/venv/lib/python3.7/site-packages/bson/int64.py
  48. 829
      backend/venv/lib/python3.7/site-packages/bson/json_util.py
  49. 50
      backend/venv/lib/python3.7/site-packages/bson/max_key.py
  50. 50
      backend/venv/lib/python3.7/site-packages/bson/min_key.py
  51. 299
      backend/venv/lib/python3.7/site-packages/bson/objectid.py
  52. 107
      backend/venv/lib/python3.7/site-packages/bson/py3compat.py
  53. 124
      backend/venv/lib/python3.7/site-packages/bson/raw_bson.py
  54. 128
      backend/venv/lib/python3.7/site-packages/bson/regex.py
  55. 200
      backend/venv/lib/python3.7/site-packages/bson/son.py
  56. 120
      backend/venv/lib/python3.7/site-packages/bson/timestamp.py
  57. 52
      backend/venv/lib/python3.7/site-packages/bson/tz_util.py
  58. 241
      backend/venv/lib/python3.7/site-packages/flask_pymongo/__init__.py
  59. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/__pycache__/__init__.cpython-37.pyc
  60. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/__pycache__/_version.cpython-37.pyc
  61. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/__pycache__/wrappers.cpython-37.pyc
  62. 5
      backend/venv/lib/python3.7/site-packages/flask_pymongo/_version.py
  63. 0
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/__init__.py
  64. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/__pycache__/__init__.cpython-37.pyc
  65. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/__pycache__/test_config.cpython-37.pyc
  66. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/__pycache__/test_gridfs.cpython-37.pyc
  67. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/__pycache__/test_url_converter.cpython-37.pyc
  68. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/__pycache__/test_wrappers.cpython-37.pyc
  69. BIN
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/__pycache__/util.cpython-37.pyc
  70. 108
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/test_config.py
  71. 100
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/test_gridfs.py
  72. 17
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/test_url_converter.py
  73. 33
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/test_wrappers.py
  74. 48
      backend/venv/lib/python3.7/site-packages/flask_pymongo/tests/util.py
  75. 117
      backend/venv/lib/python3.7/site-packages/flask_pymongo/wrappers.py
  76. 930
      backend/venv/lib/python3.7/site-packages/gridfs/__init__.py
  77. BIN
      backend/venv/lib/python3.7/site-packages/gridfs/__pycache__/__init__.cpython-37.pyc
  78. BIN
      backend/venv/lib/python3.7/site-packages/gridfs/__pycache__/errors.cpython-37.pyc
  79. BIN
      backend/venv/lib/python3.7/site-packages/gridfs/__pycache__/grid_file.cpython-37.pyc
  80. 33
      backend/venv/lib/python3.7/site-packages/gridfs/errors.py
  81. 840
      backend/venv/lib/python3.7/site-packages/gridfs/grid_file.py
  82. 1
      backend/venv/lib/python3.7/site-packages/pymongo-3.8.0.dist-info/INSTALLER
  83. 243
      backend/venv/lib/python3.7/site-packages/pymongo-3.8.0.dist-info/METADATA
  84. 145
      backend/venv/lib/python3.7/site-packages/pymongo-3.8.0.dist-info/RECORD
  85. 5
      backend/venv/lib/python3.7/site-packages/pymongo-3.8.0.dist-info/WHEEL
  86. 3
      backend/venv/lib/python3.7/site-packages/pymongo-3.8.0.dist-info/top_level.txt
  87. 99
      backend/venv/lib/python3.7/site-packages/pymongo/__init__.py
  88. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/__init__.cpython-37.pyc
  89. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/auth.cpython-37.pyc
  90. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/bulk.cpython-37.pyc
  91. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/change_stream.cpython-37.pyc
  92. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/client_options.cpython-37.pyc
  93. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/client_session.cpython-37.pyc
  94. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/collation.cpython-37.pyc
  95. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/collection.cpython-37.pyc
  96. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/command_cursor.cpython-37.pyc
  97. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/common.cpython-37.pyc
  98. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/compression_support.cpython-37.pyc
  99. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/cursor.cpython-37.pyc
  100. BIN
      backend/venv/lib/python3.7/site-packages/pymongo/__pycache__/cursor_manager.cpython-37.pyc
  101. Some files were not shown because too many files have changed in this diff Show More

Binary file not shown.

@ -5,4 +5,5 @@ itsdangerous==0.24
Jinja2>=2.10.1
MarkupSafe==1.0
six==1.11.0
Werkzeug==0.12.2
Werkzeug==0.12.2
flask_pymongo==2.3.0

@ -0,0 +1,24 @@
Copyright (c) 2011-2017, Dan Crosta
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,43 @@
Metadata-Version: 2.1
Name: Flask-PyMongo
Version: 2.3.0
Summary: PyMongo support for Flask applications
Home-page: http://flask-pymongo.readthedocs.org/
Author: Dan Crosta
Author-email: dcrosta@late.am
License: BSD
Download-URL: https://github.com/dcrosta/flask-pymongo/tags
Platform: any
Classifier: Environment :: Web Environment
Classifier: Framework :: Flask
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Dist: Flask (>=0.11)
Requires-Dist: PyMongo (>=3.3)
Flask-PyMongo
-------------
MongoDB support for Flask applications.
Flask-PyMongo is pip-installable:
$ pip install Flask-PyMongo
Documentation for Flask-PyMongo is available on `ReadTheDocs
<http://flask-pymongo.readthedocs.io/en/latest/>`_.
Source code is hosted on `GitHub <https://github.com/dcrosta/flask-pymongo>`_.
Contributions are welcome!

@ -0,0 +1,25 @@
Flask_PyMongo-2.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Flask_PyMongo-2.3.0.dist-info/LICENSE,sha256=Ap8d3yrzplSqXFfool6NTo87qPxXRHk_ikV97lurKvY,1298
Flask_PyMongo-2.3.0.dist-info/METADATA,sha256=YAaRh_44jnKkfE9PdipsdhwJeAFMSYdvHBcC0_Uz0Ok,1356
Flask_PyMongo-2.3.0.dist-info/RECORD,,
Flask_PyMongo-2.3.0.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110
Flask_PyMongo-2.3.0.dist-info/pbr.json,sha256=Uog-jmFMzzyMUmvHpecWUp8hrfTHUIL8FHdBBkk0P6k,47
Flask_PyMongo-2.3.0.dist-info/top_level.txt,sha256=D0YaRrox4mkWzPhTMnqwIP_A_L1SRd9krRPQCU3dDQU,14
flask_pymongo/__init__.py,sha256=iiavXfDPhz37a1rM1XsyFn0XBdG4ykgdTsYUKLl9lCY,8968
flask_pymongo/__pycache__/__init__.cpython-37.pyc,,
flask_pymongo/__pycache__/_version.cpython-37.pyc,,
flask_pymongo/__pycache__/wrappers.cpython-37.pyc,,
flask_pymongo/_version.py,sha256=wB_e6iDNGYA2lGSf9Do9xoBiacRXeGfjda4PNVbM_jk,122
flask_pymongo/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
flask_pymongo/tests/__pycache__/__init__.cpython-37.pyc,,
flask_pymongo/tests/__pycache__/test_config.cpython-37.pyc,,
flask_pymongo/tests/__pycache__/test_gridfs.cpython-37.pyc,,
flask_pymongo/tests/__pycache__/test_url_converter.cpython-37.pyc,,
flask_pymongo/tests/__pycache__/test_wrappers.cpython-37.pyc,,
flask_pymongo/tests/__pycache__/util.cpython-37.pyc,,
flask_pymongo/tests/test_config.py,sha256=P6Fw10liyMYUz78e9U4I1ir0Wb-ltxYtdTWYr8WPrSM,3363
flask_pymongo/tests/test_gridfs.py,sha256=sc70aukyMW9erW0pZZaZoEBKi3dfp1w-AW8OnCR5EDw,3039
flask_pymongo/tests/test_url_converter.py,sha256=u1Avnps0Cgr6UG0akZnD8mPScJEVfJSvjLVziIouUfY,605
flask_pymongo/tests/test_wrappers.py,sha256=c-NCD3xuuM5hWyCBlbMIEW9bkOVTTITHegf7AO-UOig,1212
flask_pymongo/tests/util.py,sha256=XB7xxpDDPRkxYH4gA6v-FtAOo7IGnE8NubAYoDtlZWA,1087
flask_pymongo/wrappers.py,sha256=A24URUPDchBNiY7qfRO9PKc9UGi6eMzB-FqbHVeChNY,4269

@ -1,5 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.24.0)
Generator: bdist_wheel (0.33.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

@ -0,0 +1 @@
{"is_release": false, "git_version": "775c8c3"}

@ -1,54 +0,0 @@
Werkzeug
========
Werkzeug started as simple collection of various utilities for WSGI
applications and has become one of the most advanced WSGI utility
modules. It includes a powerful debugger, full featured request and
response objects, HTTP utilities to handle entity tags, cache control
headers, HTTP dates, cookie handling, file uploads, a powerful URL
routing system and a bunch of community contributed addon modules.
Werkzeug is unicode aware and doesn't enforce a specific template
engine, database adapter or anything else. It doesn't even enforce
a specific way of handling requests and leaves all that up to the
developer. It's most useful for end user applications which should work
on as many server environments as possible (such as blogs, wikis,
bulletin boards, etc.).
Details and example applications are available on the
`Werkzeug website <http://werkzeug.pocoo.org/>`_.
Features
--------
- unicode awareness
- request and response objects
- various utility functions for dealing with HTTP headers such as
`Accept` and `Cache-Control` headers.
- thread local objects with proper cleanup at request end
- an interactive debugger
- A simple WSGI server with support for threading and forking
with an automatic reloader.
- a flexible URL routing system with REST support.
- fully WSGI compatible
Development Version
-------------------
The Werkzeug development version can be installed by cloning the git
repository from `github`_::
git clone git@github.com:pallets/werkzeug.git
.. _github: http://github.com/pallets/werkzeug

@ -1,29 +0,0 @@
Copyright (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* The names of the contributors may not be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -1,83 +0,0 @@
Metadata-Version: 2.0
Name: Werkzeug
Version: 0.12.2
Summary: The Swiss Army knife of Python web development
Home-page: http://werkzeug.pocoo.org/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
License: BSD
Platform: any
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Provides-Extra: termcolor
Requires-Dist: termcolor; extra == 'termcolor'
Provides-Extra: watchdog
Requires-Dist: watchdog; extra == 'watchdog'
Werkzeug
========
Werkzeug started as simple collection of various utilities for WSGI
applications and has become one of the most advanced WSGI utility
modules. It includes a powerful debugger, full featured request and
response objects, HTTP utilities to handle entity tags, cache control
headers, HTTP dates, cookie handling, file uploads, a powerful URL
routing system and a bunch of community contributed addon modules.
Werkzeug is unicode aware and doesn't enforce a specific template
engine, database adapter or anything else. It doesn't even enforce
a specific way of handling requests and leaves all that up to the
developer. It's most useful for end user applications which should work
on as many server environments as possible (such as blogs, wikis,
bulletin boards, etc.).
Details and example applications are available on the
`Werkzeug website <http://werkzeug.pocoo.org/>`_.
Features
--------
- unicode awareness
- request and response objects
- various utility functions for dealing with HTTP headers such as
`Accept` and `Cache-Control` headers.
- thread local objects with proper cleanup at request end
- an interactive debugger
- A simple WSGI server with support for threading and forking
with an automatic reloader.
- a flexible URL routing system with REST support.
- fully WSGI compatible
Development Version
-------------------
The Werkzeug development version can be installed by cloning the git
repository from `github`_::
git clone git@github.com:pallets/werkzeug.git
.. _github: http://github.com/pallets/werkzeug

@ -1,95 +0,0 @@
Werkzeug-0.12.2.dist-info/DESCRIPTION.rst,sha256=z9r9xqJ0fYSAn1Tz7KRBdFGDerL2y4pHWSW_72pUgTc,1591
Werkzeug-0.12.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Werkzeug-0.12.2.dist-info/LICENSE.txt,sha256=F84h8-PZAuC-Hq-_252D3yhH6mqIc-WUbXUPbfOtjXM,1532
Werkzeug-0.12.2.dist-info/METADATA,sha256=SphYykCCskmOJK7mV1-M2T1PTOrx5K3DJ8n3E5jA298,2738
Werkzeug-0.12.2.dist-info/RECORD,,
Werkzeug-0.12.2.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
Werkzeug-0.12.2.dist-info/metadata.json,sha256=6taKobd3cQ5zOY5MVKlvuCJGaX7VPLaHYuRwzwwkORI,1276
Werkzeug-0.12.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
werkzeug/__init__.py,sha256=NDY8HsYsT3dguTLu4MhuH-GpQE5XS9aKhrdfwHnzOEk,6864
werkzeug/__pycache__/__init__.cpython-37.pyc,,
werkzeug/__pycache__/_compat.cpython-37.pyc,,
werkzeug/__pycache__/_internal.cpython-37.pyc,,
werkzeug/__pycache__/_reloader.cpython-37.pyc,,
werkzeug/__pycache__/datastructures.cpython-37.pyc,,
werkzeug/__pycache__/exceptions.cpython-37.pyc,,
werkzeug/__pycache__/filesystem.cpython-37.pyc,,
werkzeug/__pycache__/formparser.cpython-37.pyc,,
werkzeug/__pycache__/http.cpython-37.pyc,,
werkzeug/__pycache__/local.cpython-37.pyc,,
werkzeug/__pycache__/posixemulation.cpython-37.pyc,,
werkzeug/__pycache__/routing.cpython-37.pyc,,
werkzeug/__pycache__/script.cpython-37.pyc,,
werkzeug/__pycache__/security.cpython-37.pyc,,
werkzeug/__pycache__/serving.cpython-37.pyc,,
werkzeug/__pycache__/test.cpython-37.pyc,,
werkzeug/__pycache__/testapp.cpython-37.pyc,,
werkzeug/__pycache__/urls.cpython-37.pyc,,
werkzeug/__pycache__/useragents.cpython-37.pyc,,
werkzeug/__pycache__/utils.cpython-37.pyc,,
werkzeug/__pycache__/wrappers.cpython-37.pyc,,
werkzeug/__pycache__/wsgi.cpython-37.pyc,,
werkzeug/_compat.py,sha256=8c4U9o6A_TR9nKCcTbpZNxpqCXcXDVIbFawwKM2s92c,6311
werkzeug/_internal.py,sha256=sE2JbLnMzN9mRI1iipTYWrFAGEWaZVECqtHAiNEhqUE,13841
werkzeug/_reloader.py,sha256=NkIXQCTa6b22wWLpXob_jIVUxux8LtAsfWehLkKt0iM,8816
werkzeug/contrib/__init__.py,sha256=f7PfttZhbrImqpr5Ezre8CXgwvcGUJK7zWNpO34WWrw,623
werkzeug/contrib/__pycache__/__init__.cpython-37.pyc,,
werkzeug/contrib/__pycache__/atom.cpython-37.pyc,,
werkzeug/contrib/__pycache__/cache.cpython-37.pyc,,
werkzeug/contrib/__pycache__/fixers.cpython-37.pyc,,
werkzeug/contrib/__pycache__/iterio.cpython-37.pyc,,
werkzeug/contrib/__pycache__/jsrouting.cpython-37.pyc,,
werkzeug/contrib/__pycache__/limiter.cpython-37.pyc,,
werkzeug/contrib/__pycache__/lint.cpython-37.pyc,,
werkzeug/contrib/__pycache__/profiler.cpython-37.pyc,,
werkzeug/contrib/__pycache__/securecookie.cpython-37.pyc,,
werkzeug/contrib/__pycache__/sessions.cpython-37.pyc,,
werkzeug/contrib/__pycache__/testtools.cpython-37.pyc,,
werkzeug/contrib/__pycache__/wrappers.cpython-37.pyc,,
werkzeug/contrib/atom.py,sha256=qqfJcfIn2RYY-3hO3Oz0aLq9YuNubcPQ_KZcNsDwVJo,15575
werkzeug/contrib/cache.py,sha256=nyUUxsS0MTHiFmu-481y9PHd8NvWH5pzCoEX1yA0mHY,30341
werkzeug/contrib/fixers.py,sha256=gR06T-w71ur-tHQ_31kP_4jpOncPJ4Wc1dOqTvYusr8,10179
werkzeug/contrib/iterio.py,sha256=RlqDvGhz0RneTpzE8dVc-yWCUv4nkPl1jEc_EDp2fH0,10814
werkzeug/contrib/jsrouting.py,sha256=QTmgeDoKXvNK02KzXgx9lr3cAH6fAzpwF5bBdPNvJPs,8564
werkzeug/contrib/limiter.py,sha256=iS8-ahPZ-JLRnmfIBzxpm7O_s3lPsiDMVWv7llAIDCI,1334
werkzeug/contrib/lint.py,sha256=qZlmqiWJ5tQJOEzLnPmHWA8eUEpcBIWkAb_V2RKJg4o,12558
werkzeug/contrib/profiler.py,sha256=ISwCWvwVyGpDLRBRpLjo_qUWma6GXYBrTAco4PEQSHY,5151
werkzeug/contrib/securecookie.py,sha256=bDsAJmslkwmXjycnPjEjWtfLBvhz0ud4z3k7tdezUVs,12174
werkzeug/contrib/sessions.py,sha256=39LVNvLbm5JWpbxM79WC2l87MJFbqeISARjwYbkJatw,12577
werkzeug/contrib/testtools.py,sha256=G9xN-qeihJlhExrIZMCahvQOIDxdL9NiX874jiiHFMs,2453
werkzeug/contrib/wrappers.py,sha256=v7OYlz7wQtDlS9fey75UiRZ1IkUWqCpzbhsLy4k14Hw,10398
werkzeug/datastructures.py,sha256=rq0zICISMUetS3xvUVvrhIvyue9oUzrs_NU3b83zwuQ,89066
werkzeug/debug/__init__.py,sha256=GTsOsjE3PqUAlsUVm2Mgc_KWA2kjjSsUz0JsM7Qu41w,17266
werkzeug/debug/__pycache__/__init__.cpython-37.pyc,,
werkzeug/debug/__pycache__/console.cpython-37.pyc,,
werkzeug/debug/__pycache__/repr.cpython-37.pyc,,
werkzeug/debug/__pycache__/tbtools.cpython-37.pyc,,
werkzeug/debug/console.py,sha256=n3-dsKk1TsjnN-u4ZgmuWCU_HO0qw5IA7ttjhyyMM6I,5607
werkzeug/debug/repr.py,sha256=bKqstDYGfECpeLerd48s_hxuqK4b6UWnjMu3d_DHO8I,9340
werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673
werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
werkzeug/debug/shared/debugger.js,sha256=PKPVYuyO4SX1hkqLOwCLvmIEO5154WatFYaXE-zIfKI,6264
werkzeug/debug/shared/jquery.js,sha256=7LkWEzqTdpEfELxcZZlS6wAx5Ff13zZ83lYO2_ujj7g,95957
werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818
werkzeug/debug/shared/style.css,sha256=IEO0PC2pWmh2aEyGCaN--txuWsRCliuhlbEhPDFwh0A,6270
werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220
werkzeug/debug/tbtools.py,sha256=rBudXCmkVdAKIcdhxANxgf09g6kQjJWW9_5bjSpr4OY,18451
werkzeug/exceptions.py,sha256=3wp95Hqj9FqV8MdikV99JRcHse_fSMn27V8tgP5Hw2c,20505
werkzeug/filesystem.py,sha256=hHWeWo_gqLMzTRfYt8-7n2wWcWUNTnDyudQDLOBEICE,2175
werkzeug/formparser.py,sha256=DxN53eOCb6i7PxqtldrF2Kv9Mx00BqW297N4t-RxkWE,21241
werkzeug/http.py,sha256=nrk-ASJzcKOuoBEz274TWA8jKt0CQSOBZuP_A0UASTA,36658
werkzeug/local.py,sha256=QdQhWV5L8p1Y1CJ1CDStwxaUs24SuN5aebHwjVD08C8,14553
werkzeug/posixemulation.py,sha256=xEF2Bxc-vUCPkiu4IbfWVd3LW7DROYAT-ExW6THqyzw,3519
werkzeug/routing.py,sha256=g25wg0GNfff8WcfRlc1ZxTGvz1KbVj09w2S7wxopseQ,66746
werkzeug/script.py,sha256=Jh9OAktqjLNc_IBBUatVM7uP5LDcbxaYA8n2ObnS4bo,11666
werkzeug/security.py,sha256=Z0v0ojdo7T4FNyfIjx86BFQKwasy3ZR9euikIJDQYP8,9191
werkzeug/serving.py,sha256=aAS3EgiD-VjemsYfSf1yqdjaGEfpB4I3M4PKlLotJLo,29069
werkzeug/test.py,sha256=xnabNSpty66ftZiXHcoZaYFP1E4WUNxydw5Oe8Mjhoo,34795
werkzeug/testapp.py,sha256=3HQRW1sHZKXuAjCvFMet4KXtQG3loYTFnvn6LWt-4zI,9396
werkzeug/urls.py,sha256=fSbI4Gb29_p02Zk21VAZQRN1QdOVY9CNTgpb2rbajNQ,36710
werkzeug/useragents.py,sha256=Ck3G977Y0Rzdk9wFcLpL0PyOrONtdK1_d2Zexb78cX4,5640
werkzeug/utils.py,sha256=lkybtv_mq35zV1qhelvEcILTzrMUwZ9yon6E8XwapJE,22972
werkzeug/wrappers.py,sha256=wceh1RhvhIZVzKuok3XMQ5jqjYYCEYv5JqKY3Nc_oRY,82986
werkzeug/wsgi.py,sha256=TjPo5ups3NI1RVVGdMvd3XaceqFtqlMX5X169gWWFrQ,42838

@ -1 +0,0 @@
{"license": "BSD", "name": "Werkzeug", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "The Swiss Army knife of Python web development", "platform": "any", "run_requires": [{"requires": ["watchdog"], "extra": "watchdog"}, {"requires": ["termcolor"], "extra": "termcolor"}], "version": "0.12.2", "extensions": {"python.details": {"project_urls": {"Home": "http://werkzeug.pocoo.org/"}, "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "contacts": [{"role": "author", "email": "armin.ronacher@active-4.com", "name": "Armin Ronacher"}]}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules"], "extras": ["termcolor", "watchdog"]}

@ -0,0 +1,28 @@
Copyright 2007 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,133 @@
Metadata-Version: 2.1
Name: Werkzeug
Version: 0.15.4
Summary: The comprehensive WSGI web application library.
Home-page: https://palletsprojects.com/p/werkzeug/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
Maintainer: The Pallets Team
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Documentation, https://werkzeug.palletsprojects.com/
Project-URL: Code, https://github.com/pallets/werkzeug
Project-URL: Issue tracker, https://github.com/pallets/werkzeug/issues
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Provides-Extra: dev
Requires-Dist: pytest ; extra == 'dev'
Requires-Dist: coverage ; extra == 'dev'
Requires-Dist: tox ; extra == 'dev'
Requires-Dist: sphinx ; extra == 'dev'
Requires-Dist: pallets-sphinx-themes ; extra == 'dev'
Requires-Dist: sphinx-issues ; extra == 'dev'
Provides-Extra: termcolor
Requires-Dist: termcolor ; extra == 'termcolor'
Provides-Extra: watchdog
Requires-Dist: watchdog ; extra == 'watchdog'
Werkzeug
========
*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
Werkzeug is a comprehensive `WSGI`_ web application library. It began as
a simple collection of various utilities for WSGI applications and has
become one of the most advanced WSGI utility libraries.
It includes:
- An interactive debugger that allows inspecting stack traces and
source code in the browser with an interactive interpreter for any
frame in the stack.
- A full-featured request object with objects to interact with
headers, query args, form data, files, and cookies.
- A response object that can wrap other WSGI applications and handle
streaming data.
- A routing system for matching URLs to endpoints and generating URLs
for endpoints, with an extensible system for capturing variables
from URLs.
- HTTP utilities to handle entity tags, cache control, dates, user
agents, cookies, files, and more.
- A threaded WSGI server for use while developing applications
locally.
- A test client for simulating HTTP requests during testing without
requiring running a server.
Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up
to the developer to choose a template engine, database adapter, and even
how to handle requests. It can be used to build all sorts of end user
applications such as blogs, wikis, or bulletin boards.
`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
providing more structure and patterns for defining powerful
applications.
Installing
----------
Install and update using `pip`_:
.. code-block:: text
pip install -U Werkzeug
A Simple Example
----------------
.. code-block:: python
from werkzeug.wrappers import Request, Response
@Request.application
def application(request):
return Response('Hello, World!')
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('localhost', 4000, application)
Links
-----
- Website: https://www.palletsprojects.com/p/werkzeug/
- Documentation: https://werkzeug.palletsprojects.com/
- Releases: https://pypi.org/project/Werkzeug/
- Code: https://github.com/pallets/werkzeug
- Issue tracker: https://github.com/pallets/werkzeug/issues
- Test status:
- Linux, Mac: https://travis-ci.org/pallets/werkzeug
- Windows: https://ci.appveyor.com/project/pallets/werkzeug
- Test coverage: https://codecov.io/gh/pallets/werkzeug
- Official chat: https://discord.gg/t6rrQZH
.. _WSGI: https://wsgi.readthedocs.io/en/latest/
.. _Flask: https://www.palletsprojects.com/p/flask/
.. _pip: https://pip.pypa.io/en/stable/quickstart/

@ -0,0 +1,119 @@
Werkzeug-0.15.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Werkzeug-0.15.4.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
Werkzeug-0.15.4.dist-info/METADATA,sha256=BFCzIm51w-fKsnG13iHz4No7Upu9-y_vJUq_rkY_bqY,4864
Werkzeug-0.15.4.dist-info/RECORD,,
Werkzeug-0.15.4.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110
Werkzeug-0.15.4.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
werkzeug/__init__.py,sha256=LBPc9xBJsifxGUsUY5EkreS0o_D8ixJD3YYSh9ARtxE,6805
werkzeug/__pycache__/__init__.cpython-37.pyc,,
werkzeug/__pycache__/_compat.cpython-37.pyc,,
werkzeug/__pycache__/_internal.cpython-37.pyc,,
werkzeug/__pycache__/_reloader.cpython-37.pyc,,
werkzeug/__pycache__/datastructures.cpython-37.pyc,,
werkzeug/__pycache__/exceptions.cpython-37.pyc,,
werkzeug/__pycache__/filesystem.cpython-37.pyc,,
werkzeug/__pycache__/formparser.cpython-37.pyc,,
werkzeug/__pycache__/http.cpython-37.pyc,,
werkzeug/__pycache__/local.cpython-37.pyc,,
werkzeug/__pycache__/posixemulation.cpython-37.pyc,,
werkzeug/__pycache__/routing.cpython-37.pyc,,
werkzeug/__pycache__/security.cpython-37.pyc,,
werkzeug/__pycache__/serving.cpython-37.pyc,,
werkzeug/__pycache__/test.cpython-37.pyc,,
werkzeug/__pycache__/testapp.cpython-37.pyc,,
werkzeug/__pycache__/urls.cpython-37.pyc,,
werkzeug/__pycache__/useragents.cpython-37.pyc,,
werkzeug/__pycache__/utils.cpython-37.pyc,,
werkzeug/__pycache__/wsgi.cpython-37.pyc,,
werkzeug/_compat.py,sha256=oBEVVrJT4sqYdIZbUWmgV9T9w257RhTSDBlTjh0Zbb0,6431
werkzeug/_internal.py,sha256=Wx7cpTRWqeBd0LAqobo0lCO4pNUW4oav6XKf7Taumgk,14590
werkzeug/_reloader.py,sha256=8B8T1npsQT-96nGeVJjV1KXWK_ong6ZlTXOWgxfRLpg,11241
werkzeug/contrib/__init__.py,sha256=EvNyiiCF49j5P0fZYJ3ZGe82ofXdSBvUNqWFwwBMibQ,553
werkzeug/contrib/__pycache__/__init__.cpython-37.pyc,,
werkzeug/contrib/__pycache__/atom.cpython-37.pyc,,
werkzeug/contrib/__pycache__/cache.cpython-37.pyc,,
werkzeug/contrib/__pycache__/fixers.cpython-37.pyc,,
werkzeug/contrib/__pycache__/iterio.cpython-37.pyc,,
werkzeug/contrib/__pycache__/lint.cpython-37.pyc,,
werkzeug/contrib/__pycache__/profiler.cpython-37.pyc,,
werkzeug/contrib/__pycache__/securecookie.cpython-37.pyc,,
werkzeug/contrib/__pycache__/sessions.cpython-37.pyc,,
werkzeug/contrib/__pycache__/wrappers.cpython-37.pyc,,
werkzeug/contrib/atom.py,sha256=KpPJcTfzNW1J0VNQckCbVtVGBe3V8s451tOUya4qByI,15415
werkzeug/contrib/cache.py,sha256=AEh5UIw-Ui7sHZnlpvrD7ueOKUhCaAD55FXiPtXbbRs,32115
werkzeug/contrib/fixers.py,sha256=peEtAiIWYT5bh00EWEPOGKzGZXivOzVhhzKPvvzk1RM,9193
werkzeug/contrib/iterio.py,sha256=KKHa_8aCF_uhoeQVyPGUwrivuB6y6nNdXYo2D2vzOA8,10928
werkzeug/contrib/lint.py,sha256=NdIxP0E2kVt1xDIxoaIz3Rcl8ZdgmHaFbGTOaybGpN4,296
werkzeug/contrib/profiler.py,sha256=k_oMLU-AtsVvQ9TxNdermY6FuzSTYr-WE-ZmWb_DMyU,1229
werkzeug/contrib/securecookie.py,sha256=xbtElskGmtbiApgOJ5WhGgqGDs_68_PcWzqDIAY_QZY,13076
werkzeug/contrib/sessions.py,sha256=oVXh_7-6_CWOMxDKqcaK05H8RpYoWqAd3al-KzMFPYs,13042
werkzeug/contrib/wrappers.py,sha256=ZmNk0wpzD66yomPnQxapndZQs4c0kNJaRzqI-BVxeQk,13199
werkzeug/datastructures.py,sha256=8HoA4Gu9i7ZWi5OBjx244OLWvDEE4JTQQUUTRoAYKog,91761
werkzeug/debug/__init__.py,sha256=Bo3HvgTNY4NQ_2jROTSk3r1ScZcT_g_4EnuHTjKyrKM,18275
werkzeug/debug/__pycache__/__init__.cpython-37.pyc,,
werkzeug/debug/__pycache__/console.cpython-37.pyc,,
werkzeug/debug/__pycache__/repr.cpython-37.pyc,,
werkzeug/debug/__pycache__/tbtools.cpython-37.pyc,,
werkzeug/debug/console.py,sha256=HoBL21bbcmtiCLqiLDJLZi1LYnWMZxjoXYH5WaZB1XY,5469
werkzeug/debug/repr.py,sha256=lIwuhbyrMwVe3P_cFqNyqzHL7P93TLKod7lw9clydEw,9621
werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673
werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
werkzeug/debug/shared/debugger.js,sha256=rOhqZMRfpZnnu6_XCGn6wMWPhtfwRAcyZKksdIxPJas,6400
werkzeug/debug/shared/jquery.js,sha256=FgpCb_KJQlLNfOu91ta32o_NMZxltwRo8QtmkMRdAu8,86927
werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818
werkzeug/debug/shared/style.css,sha256=_Y98F6dR2CBUZNKylsOdgSHjwVaVy717WqE3-xJVcmE,6581
werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220
werkzeug/debug/tbtools.py,sha256=SkAAA4KKfwsXJinUbf-AEP4GqONTsR4uU7WPUloXcSE,20318
werkzeug/exceptions.py,sha256=SNbLn_vitnNiG_nwirLT0k6Nf6CZ_1R0Fyk_ub2XCbE,23230
werkzeug/filesystem.py,sha256=HzKl-j0Hd8Jl66j778UbPTAYNnY6vUZgYLlBZ0e7uw0,2101
werkzeug/formparser.py,sha256=tN6SO4mn6RUsxRZq4qVBWXbNWNuasn2KaBznTieMaVk,21790
werkzeug/http.py,sha256=t0ET2tySAf9ZWdEelVWJoLaZzFViYpjoUmiYHPz10-E,43304
werkzeug/local.py,sha256=USVEcgIg-oCiUJFPIecFIW9jkIejfw4Fjf1u5yN-Np4,14456
werkzeug/middleware/__init__.py,sha256=f1SFZo67IlW4k1uqKzNHxYQlsakUS-D6KK_j0e3jjwQ,549
werkzeug/middleware/__pycache__/__init__.cpython-37.pyc,,
werkzeug/middleware/__pycache__/dispatcher.cpython-37.pyc,,
werkzeug/middleware/__pycache__/http_proxy.cpython-37.pyc,,
werkzeug/middleware/__pycache__/lint.cpython-37.pyc,,
werkzeug/middleware/__pycache__/profiler.cpython-37.pyc,,
werkzeug/middleware/__pycache__/proxy_fix.cpython-37.pyc,,
werkzeug/middleware/__pycache__/shared_data.cpython-37.pyc,,
werkzeug/middleware/dispatcher.py,sha256=_-KoMzHtcISHS7ouWKAOraqlCLprdh83YOAn_8DjLp8,2240
werkzeug/middleware/http_proxy.py,sha256=lRjTdMmghHiZuZrS7_UJ3gZc-vlFizhBbFZ-XZPLwIA,7117
werkzeug/middleware/lint.py,sha256=ItTwuWJnflF8xMT1uqU_Ty1ryhux-CjeUfskqaUpxsw,12967
werkzeug/middleware/profiler.py,sha256=8B_s23d6BGrU_q54gJsm6kcCbOJbTSqrXCsioHON0Xs,4471
werkzeug/middleware/proxy_fix.py,sha256=Y86VcU2oAQ--x0mi4iFVJyEFMzp3Ao8q0zvr_SsrpNw,8506
werkzeug/middleware/shared_data.py,sha256=6aUzMABeOLul0Krf5S_hs-T7oUc7ZIQ3B8tAO4p8C7E,8541
werkzeug/posixemulation.py,sha256=gSSiv1SCmOyzOM_nq1ZaZCtxP__C5MeDJl_4yXJmi4Q,3541
werkzeug/routing.py,sha256=51zsLuN3qZcpRxpy3K3XoEuL8kyFfuqo28MquJsjZjw,72902
werkzeug/security.py,sha256=mfxfcM-D6U8LhsyDK5W_rnL1oVTZWgyt-E8E4FlSdrI,8026
werkzeug/serving.py,sha256=tUFUMg7Bj9iw3nA8ZgC_czMDJJKN7vFskajEmgEFhzE,36597
werkzeug/test.py,sha256=Cnb5xa3vLDL0hzFCH1fkG_YRpndViGQgCh4D744iSQk,40645
werkzeug/testapp.py,sha256=hcKBzorVlSHC-uGvGXXjCm3FzCwGWq4yjbTG3Pr7MV8,9301
werkzeug/urls.py,sha256=8yHdYI99N__-isoTwvGqvuj9QhOh66dd1Xh1DIp0q0g,39261
werkzeug/useragents.py,sha256=FIonyUF790Ro8OG8cJqG1zixhg5YzXdHmkZbrnK0QRo,5965
werkzeug/utils.py,sha256=O20Y0qWk5O1IWamC_A5gkmzR5cgBd3yDIHviwBTfNB0,27387
werkzeug/wrappers/__init__.py,sha256=S4VioKAmF_av9Ec9zQvG71X1EOkYfPx1TYck9jyDiyY,1384
werkzeug/wrappers/__pycache__/__init__.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/accept.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/auth.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/base_request.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/base_response.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/common_descriptors.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/etag.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/json.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/request.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/response.cpython-37.pyc,,
werkzeug/wrappers/__pycache__/user_agent.cpython-37.pyc,,
werkzeug/wrappers/accept.py,sha256=TIvjUc0g73fhTWX54wg_D9NNzKvpnG1X8u1w26tK1o8,1760
werkzeug/wrappers/auth.py,sha256=Pmn6iaGHBrUyHbJpW0lZhO_q9RVoAa5QalaTqcavdAI,1158
werkzeug/wrappers/base_request.py,sha256=k5mu1UU99X_xrPqmXj44pzJbkPRpgvwMuP2j9vl8QFU,26873
werkzeug/wrappers/base_response.py,sha256=ZA1XlxtsbvG4SpbdOEMT5--z7aZM0w6C5y33W8wOXa4,27906
werkzeug/wrappers/common_descriptors.py,sha256=OJ8jOwMun4L-BxCuFPkK1vaefx_-Y5IndVXvvn_ems4,12089
werkzeug/wrappers/etag.py,sha256=TwMO1fvluXbBqnFTj2DvrCNa3mYhbHYe1UZAVzfXvuU,12533
werkzeug/wrappers/json.py,sha256=HvK_A4NpO0sLqgb10sTJcoZydYOwyNiPCJPV7SVgcgE,4343
werkzeug/wrappers/request.py,sha256=qPo2zmmBv4HxboywtWZb2pJL8OPXo07BUXBKw2j9Fi8,1338
werkzeug/wrappers/response.py,sha256=vDZFEGzDOG0jjmS0uVVjeT3hqRt1hFaf15npnx7RD28,2329
werkzeug/wrappers/user_agent.py,sha256=4bTgQKTLQmGUyxOREYOzbeiFP2VwIOE7E14AhUB5NqM,444
werkzeug/wsgi.py,sha256=h-zyAeInwE6X6ciSnHI14ImA85adV-F861PmR7UGtRk,36681

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

File diff suppressed because it is too large Load Diff

@ -0,0 +1,242 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from uuid import UUID
from bson.py3compat import PY3
"""Tools for representing BSON binary data.
"""
BINARY_SUBTYPE = 0
"""BSON binary subtype for binary data.
This is the default subtype for binary data.
"""
FUNCTION_SUBTYPE = 1
"""BSON binary subtype for functions.
"""
OLD_BINARY_SUBTYPE = 2
"""Old BSON binary subtype for binary data.
This is the old default subtype, the current
default is :data:`BINARY_SUBTYPE`.
"""
OLD_UUID_SUBTYPE = 3
"""Old BSON binary subtype for a UUID.
:class:`uuid.UUID` instances will automatically be encoded
by :mod:`bson` using this subtype.
.. versionadded:: 2.1
"""
UUID_SUBTYPE = 4
"""BSON binary subtype for a UUID.
This is the new BSON binary subtype for UUIDs. The
current default is :data:`OLD_UUID_SUBTYPE`.
.. versionchanged:: 2.1
Changed to subtype 4.
"""
STANDARD = UUID_SUBTYPE
"""The standard UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary, using RFC-4122 byte order with
binary subtype :data:`UUID_SUBTYPE`.
.. versionadded:: 3.0
"""
PYTHON_LEGACY = OLD_UUID_SUBTYPE
"""The Python legacy UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary, using RFC-4122 byte order with
binary subtype :data:`OLD_UUID_SUBTYPE`.
.. versionadded:: 3.0
"""
JAVA_LEGACY = 5
"""The Java legacy UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`,
using the Java driver's legacy byte order.
.. versionchanged:: 3.6
BSON binary subtype 4 is decoded using RFC-4122 byte order.
.. versionadded:: 2.3
"""
CSHARP_LEGACY = 6
"""The C#/.net legacy UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`,
using the C# driver's legacy byte order.
.. versionchanged:: 3.6
BSON binary subtype 4 is decoded using RFC-4122 byte order.
.. versionadded:: 2.3
"""
ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE)
ALL_UUID_REPRESENTATIONS = (STANDARD, PYTHON_LEGACY, JAVA_LEGACY, CSHARP_LEGACY)
UUID_REPRESENTATION_NAMES = {
PYTHON_LEGACY: 'PYTHON_LEGACY',
STANDARD: 'STANDARD',
JAVA_LEGACY: 'JAVA_LEGACY',
CSHARP_LEGACY: 'CSHARP_LEGACY'}
MD5_SUBTYPE = 5
"""BSON binary subtype for an MD5 hash.
"""
USER_DEFINED_SUBTYPE = 128
"""BSON binary subtype for any user defined structure.
"""
class Binary(bytes):
    """Representation of BSON binary data.

    Wrapping raw bytes in this class lets the BSON encoder distinguish
    data that must be stored as the BSON binary type from ordinary
    strings, which are encoded as the BSON string type.

    Raises TypeError if `data` is not an instance of :class:`bytes`
    (:class:`str` in python 2) or `subtype` is not an instance of
    :class:`int`. Raises ValueError if `subtype` is not in [0, 256).

    .. note::
      In python 3 instances of Binary with subtype 0 will be decoded
      directly to :class:`bytes`.

    :Parameters:
      - `data`: the binary data to represent
      - `subtype` (optional): the `binary subtype
        <http://bsonspec.org/#/specification>`_
        to use
    """

    # BSON type number for binary data.
    _type_marker = 5

    def __new__(cls, data, subtype=BINARY_SUBTYPE):
        if not isinstance(data, bytes):
            raise TypeError("data must be an instance of bytes")
        if not isinstance(subtype, int):
            raise TypeError("subtype must be an instance of int")
        if not 0 <= subtype < 256:
            raise ValueError("subtype must be contained in [0, 256)")
        instance = bytes.__new__(cls, data)
        instance.__subtype = subtype
        return instance

    @property
    def subtype(self):
        """Subtype of this binary data."""
        return self.__subtype

    def __getnewargs__(self):
        # Work around http://bugs.python.org/issue7382
        raw = super(Binary, self).__getnewargs__()[0]
        if PY3 and not isinstance(raw, bytes):
            raw = raw.encode('latin-1')
        return raw, self.__subtype

    def __eq__(self, other):
        if not isinstance(other, Binary):
            # Deliberately False rather than NotImplemented: falling back
            # to str/bytes equality would make Binary("foo") == "foo" true.
            return False
        return ((self.__subtype, bytes(self)) ==
                (other.subtype, bytes(other)))

    def __hash__(self):
        # Mix the subtype into the hash so equal bytes with different
        # subtypes (which compare unequal) hash differently.
        return super(Binary, self).__hash__() ^ hash(self.__subtype)

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "Binary(%s, %s)" % (bytes.__repr__(self), self.__subtype)
class UUIDLegacy(Binary):
    """UUID wrapper to support working with UUIDs stored as PYTHON_LEGACY.

    Wraps a :class:`~uuid.UUID` as BSON binary subtype 3
    (:data:`OLD_UUID_SUBTYPE`) so documents holding legacy-encoded UUIDs
    can be matched explicitly, e.g. when migrating to subtype 4.

    .. doctest::
      >>> import uuid
      >>> from bson.binary import Binary, UUIDLegacy, STANDARD
      >>> from bson.codec_options import CodecOptions
      >>> my_uuid = uuid.uuid4()
      >>> coll = db.get_collection('test',
      ...                          CodecOptions(uuid_representation=STANDARD))
      >>> coll.insert_one({'uuid': Binary(my_uuid.bytes, 3)}).inserted_id
      ObjectId('...')
      >>> coll.count_documents({'uuid': my_uuid})
      0
      >>> coll.count_documents({'uuid': UUIDLegacy(my_uuid)})
      1
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)})[0]['uuid']
      UUID('...')
      >>>
      >>> # Convert from subtype 3 to subtype 4
      >>> doc = coll.find_one({'uuid': UUIDLegacy(my_uuid)})
      >>> coll.replace_one({"_id": doc["_id"]}, doc).matched_count
      1
      >>> coll.count_documents({'uuid': UUIDLegacy(my_uuid)})
      0
      >>> coll.count_documents({'uuid': {'$in': [UUIDLegacy(my_uuid), my_uuid]}})
      1
      >>> coll.find_one({'uuid': my_uuid})['uuid']
      UUID('...')

    Raises TypeError if `obj` is not an instance of :class:`~uuid.UUID`.

    :Parameters:
      - `obj`: An instance of :class:`~uuid.UUID`.
    """

    def __new__(cls, obj):
        if not isinstance(obj, UUID):
            raise TypeError("obj must be an instance of uuid.UUID")
        # Store the UUID's RFC-4122 bytes under the legacy subtype.
        instance = Binary.__new__(cls, obj.bytes, OLD_UUID_SUBTYPE)
        instance.__uuid = obj
        return instance

    def __getnewargs__(self):
        # Support copy and deepcopy
        return (self.__uuid,)

    @property
    def uuid(self):
        """UUID instance wrapped by this UUIDLegacy instance."""
        return self.__uuid

    def __repr__(self):
        return "UUIDLegacy('%s')" % self.__uuid

@ -0,0 +1,99 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for representing JavaScript code in BSON.
"""
from bson.py3compat import abc, string_type, PY3, text_type
class Code(str):
    """BSON's JavaScript code type.

    Raises :class:`TypeError` if `code` is not an instance of
    :class:`basestring` (:class:`str` in python 3) or `scope`
    is not ``None`` or an instance of :class:`dict`.

    Scope variables can be set by passing a dictionary as the `scope`
    argument or by using keyword arguments. If a variable is set as a
    keyword argument it will override any setting for that variable in
    the `scope` dictionary.

    :Parameters:
      - `code`: A string containing JavaScript code to be evaluated or another
        instance of Code. In the latter case, the scope of `code` becomes this
        Code's :attr:`scope`.
      - `scope` (optional): dictionary representing the scope in which
        `code` should be evaluated - a mapping from identifiers (as
        strings) to values. Defaults to ``None``. This is applied after any
        scope associated with a given `code` above.
      - `**kwargs` (optional): scope variables can also be passed as
        keyword arguments. These are applied after `scope` and `code`.

    .. versionchanged:: 3.4
       The default value for :attr:`scope` is ``None`` instead of ``{}``.
    """

    # BSON type number for JavaScript code (with scope).
    _type_marker = 13

    def __new__(cls, code, scope=None, **kwargs):
        if not isinstance(code, string_type):
            raise TypeError("code must be an "
                            "instance of %s" % (string_type.__name__))

        # On python 2, store unicode code points as UTF-8 bytes.
        if not PY3 and isinstance(code, text_type):
            self = str.__new__(cls, code.encode('utf8'))
        else:
            self = str.__new__(cls, code)

        # Inherit the scope when `code` is itself a Code instance;
        # plain strings have no `scope` attribute.
        self.__scope = getattr(code, 'scope', None)

        # `scope` overrides the inherited scope, keyword args override both.
        if scope is not None:
            if not isinstance(scope, abc.Mapping):
                raise TypeError("scope must be an instance of dict")
            if self.__scope is None:
                self.__scope = scope
            else:
                self.__scope.update(scope)

        if kwargs:
            if self.__scope is None:
                self.__scope = kwargs
            else:
                self.__scope.update(kwargs)

        return self

    @property
    def scope(self):
        """Scope dictionary for this instance or ``None``."""
        return self.__scope

    def __repr__(self):
        return "Code(%s, %r)" % (str.__repr__(self), self.__scope)

    def __eq__(self, other):
        if isinstance(other, Code):
            return (self.__scope, str(self)) == (other.__scope, str(other))
        return False

    # Unhashable: equality depends on the (mutable) scope dict.
    __hash__ = None

    def __ne__(self, other):
        return not self == other

@ -0,0 +1,334 @@
# Copyright 2014-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for specifying BSON codec options."""
import datetime
from abc import abstractmethod
from collections import namedtuple
from bson.py3compat import ABC, abc, abstractproperty, string_type
from bson.binary import (ALL_UUID_REPRESENTATIONS,
PYTHON_LEGACY,
UUID_REPRESENTATION_NAMES)
_RAW_BSON_DOCUMENT_MARKER = 101
def _raw_document_class(document_class):
"""Determine if a document_class is a RawBSONDocument class."""
marker = getattr(document_class, '_type_marker', None)
return marker == _RAW_BSON_DOCUMENT_MARKER
class TypeEncoder(ABC):
    """Base class for defining type codec classes which describe how a
    custom type can be transformed to one of the types BSON understands.

    Subclasses must provide the ``python_type`` attribute and implement
    the ``transform_python`` method to support encoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """

    @property
    @abstractmethod
    def python_type(self):
        """The Python type to be converted into something serializable."""

    @abstractmethod
    def transform_python(self, value):
        """Convert the given Python object into something serializable."""
class TypeDecoder(ABC):
    """Base class for defining type codec classes which describe how a
    BSON type can be transformed to a custom type.

    Subclasses must provide the ``bson_type`` attribute and implement
    the ``transform_bson`` method to support decoding.

    See :ref:`custom-type-type-codec` documentation for an example.
    """

    @property
    @abstractmethod
    def bson_type(self):
        """The BSON type to be converted into our own type."""

    @abstractmethod
    def transform_bson(self, value):
        """Convert the given BSON value into our own type."""
class TypeCodec(TypeEncoder, TypeDecoder):
    """Base class for defining type codec classes which describe how a
    custom type can be transformed to/from one of the types :mod:`bson`
    can already encode/decode.

    Subclasses implement the ``python_type`` attribute and the
    ``transform_python`` method (encoding) together with the ``bson_type``
    attribute and the ``transform_bson`` method (decoding).

    See :ref:`custom-type-type-codec` documentation for an example.
    """
class TypeRegistry(object):
    """Encapsulates type codecs used in encoding and / or decoding BSON, as
    well as the fallback encoder. Type registries cannot be modified after
    instantiation.

    ``TypeRegistry`` can be initialized with an iterable of type codecs, and
    a callable for the fallback encoder::

      >>> from bson.codec_options import TypeRegistry
      >>> type_registry = TypeRegistry([Codec1, Codec2, Codec3, ...],
      ...                              fallback_encoder)

    See :ref:`custom-type-type-registry` documentation for an example.

    :Parameters:
      - `type_codecs` (optional): iterable of type codec instances. If
        ``type_codecs`` contains multiple codecs that transform a single
        python or BSON type, the transformation specified by the type codec
        occurring last prevails. A TypeError will be raised if one or more
        type codecs modify the encoding behavior of a built-in :mod:`bson`
        type.
      - `fallback_encoder` (optional): callable that accepts a single,
        unencodable python value and transforms it into a type that
        :mod:`bson` can encode. See :ref:`fallback-encoder-callable`
        documentation for an example.
    """

    def __init__(self, type_codecs=None, fallback_encoder=None):
        self.__type_codecs = list(type_codecs or [])
        self._fallback_encoder = fallback_encoder
        self._encoder_map = {}
        self._decoder_map = {}

        if fallback_encoder is not None and not callable(fallback_encoder):
            raise TypeError("fallback_encoder %r is not a callable" % (
                fallback_encoder))

        for codec in self.__type_codecs:
            # A codec may be an encoder, a decoder, or both; later codecs
            # overwrite earlier entries for the same type.
            recognized = False
            if isinstance(codec, TypeEncoder):
                self._validate_type_encoder(codec)
                self._encoder_map[codec.python_type] = codec.transform_python
                recognized = True
            if isinstance(codec, TypeDecoder):
                self._decoder_map[codec.bson_type] = codec.transform_bson
                recognized = True
            if not recognized:
                raise TypeError(
                    "Expected an instance of %s, %s, or %s, got %r instead" % (
                        TypeEncoder.__name__, TypeDecoder.__name__,
                        TypeCodec.__name__, codec))

    def _validate_type_encoder(self, codec):
        """Reject encoders that would change how built-in types encode."""
        # Imported here to avoid a circular import with the bson package.
        from bson import _BUILT_IN_TYPES
        for pytype in _BUILT_IN_TYPES:
            if issubclass(codec.python_type, pytype):
                raise TypeError(
                    "TypeEncoders cannot change how built-in types are "
                    "encoded (encoder %s transforms type %s)" %
                    (codec, pytype))

    def __repr__(self):
        return ('%s(type_codecs=%r, fallback_encoder=%r)' % (
            self.__class__.__name__, self.__type_codecs,
            self._fallback_encoder))

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return NotImplemented
        return ((self._decoder_map == other._decoder_map) and
                (self._encoder_map == other._encoder_map) and
                (self._fallback_encoder == other._fallback_encoder))
# Immutable base for CodecOptions. Field order matters: it must match the
# tuple built in CodecOptions.__new__ and is part of the pickle format.
_options_base = namedtuple(
    'CodecOptions',
    ('document_class', 'tz_aware', 'uuid_representation',
     'unicode_decode_error_handler', 'tzinfo', 'type_registry'))
class CodecOptions(_options_base):
    """Encapsulates options used encoding and / or decoding BSON.

    The `document_class` option is used to define a custom type for use
    decoding BSON documents. Access to the underlying raw BSON bytes for
    a document is available using the :class:`~bson.raw_bson.RawBSONDocument`
    type::

      >>> from bson.raw_bson import RawBSONDocument
      >>> from bson.codec_options import CodecOptions
      >>> codec_options = CodecOptions(document_class=RawBSONDocument)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc.raw
      '\\x16\\x00\\x00\\x00\\x07_id\\x00[0\\x165\\x91\\x10\\xea\\x14\\xe8\\xc5\\x8b\\x93\\x00'

    The document class can be any type that inherits from
    :class:`~collections.MutableMapping`::

      >>> class AttributeDict(dict):
      ...     # A dict that supports attribute access.
      ...     def __getattr__(self, key):
      ...         return self[key]
      ...     def __setattr__(self, key, value):
      ...         self[key] = value
      ...
      >>> codec_options = CodecOptions(document_class=AttributeDict)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc._id
      ObjectId('5b3016359110ea14e8c58b93')

    See :doc:`/examples/datetimes` for examples using the `tz_aware` and
    `tzinfo` options.

    See :class:`~bson.binary.UUIDLegacy` for examples using the
    `uuid_representation` option.

    :Parameters:
      - `document_class`: BSON documents returned in queries will be decoded
        to an instance of this class. Must be a subclass of
        :class:`~collections.MutableMapping`. Defaults to :class:`dict`.
      - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone
        aware instances of :class:`~datetime.datetime`. Otherwise they will be
        naive. Defaults to ``False``.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`~uuid.UUID`. Defaults to
        :data:`~bson.binary.PYTHON_LEGACY`.
      - `unicode_decode_error_handler`: The error handler to apply when
        a Unicode-related error occurs during BSON decoding that would
        otherwise raise :exc:`UnicodeDecodeError`. Valid options include
        'strict', 'replace', and 'ignore'. Defaults to 'strict'.
      - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the
        timezone to/from which :class:`~datetime.datetime` objects should be
        encoded/decoded.
      - `type_registry`: Instance of :class:`TypeRegistry` used to customize
        encoding and decoding behavior.

    .. versionadded:: 3.8
       `type_registry` attribute.

    .. warning:: Care must be taken when changing
       `unicode_decode_error_handler` from its default value ('strict').
       The 'replace' and 'ignore' modes should not be used when documents
       retrieved from the server will be modified in the client application
       and stored back to the server.
    """

    def __new__(cls, document_class=dict,
                tz_aware=False, uuid_representation=PYTHON_LEGACY,
                unicode_decode_error_handler="strict",
                tzinfo=None, type_registry=None):
        if not (issubclass(document_class, abc.MutableMapping) or
                _raw_document_class(document_class)):
            raise TypeError("document_class must be dict, bson.son.SON, "
                            "bson.raw_bson.RawBSONDocument, or a "
                            "subclass of collections.MutableMapping")
        if not isinstance(tz_aware, bool):
            raise TypeError("tz_aware must be True or False")
        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError("uuid_representation must be a value "
                             "from bson.binary.ALL_UUID_REPRESENTATIONS")
        # BUG FIX: the tuple must contain type(None), not None itself --
        # isinstance(x, (str, None)) raises TypeError for any non-string x
        # instead of letting us raise the intended ValueError, and rejected
        # None even though the docstring-documented contract allows it.
        if not isinstance(unicode_decode_error_handler,
                          (string_type, type(None))):
            raise ValueError("unicode_decode_error_handler must be a string "
                             "or None")
        if tzinfo is not None:
            if not isinstance(tzinfo, datetime.tzinfo):
                raise TypeError(
                    "tzinfo must be an instance of datetime.tzinfo")
            if not tz_aware:
                raise ValueError(
                    "cannot specify tzinfo without also setting tz_aware=True")

        # TypeRegistry instances are always truthy, so `or` only replaces None.
        type_registry = type_registry or TypeRegistry()
        if not isinstance(type_registry, TypeRegistry):
            raise TypeError("type_registry must be an instance of "
                            "TypeRegistry")

        return tuple.__new__(
            cls, (document_class, tz_aware, uuid_representation,
                  unicode_decode_error_handler, tzinfo, type_registry))

    def _arguments_repr(self):
        """Representation of the arguments used to create this object."""
        document_class_repr = (
            'dict' if self.document_class is dict
            else repr(self.document_class))

        # Show the symbolic name (e.g. 'PYTHON_LEGACY') when known.
        uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(self.uuid_representation,
                                                      self.uuid_representation)

        return ('document_class=%s, tz_aware=%r, uuid_representation=%s, '
                'unicode_decode_error_handler=%r, tzinfo=%r, '
                'type_registry=%r' %
                (document_class_repr, self.tz_aware, uuid_rep_repr,
                 self.unicode_decode_error_handler, self.tzinfo,
                 self.type_registry))

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._arguments_repr())

    def with_options(self, **kwargs):
        """Make a copy of this CodecOptions, overriding some options::

            >>> from bson.codec_options import DEFAULT_CODEC_OPTIONS
            >>> DEFAULT_CODEC_OPTIONS.tz_aware
            False
            >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True)
            >>> options.tz_aware
            True

        .. versionadded:: 3.5
        """
        return CodecOptions(
            kwargs.get('document_class', self.document_class),
            kwargs.get('tz_aware', self.tz_aware),
            kwargs.get('uuid_representation', self.uuid_representation),
            kwargs.get('unicode_decode_error_handler',
                       self.unicode_decode_error_handler),
            kwargs.get('tzinfo', self.tzinfo),
            kwargs.get('type_registry', self.type_registry)
        )
# Shared default options: dict documents, naive datetimes, legacy UUID
# representation, strict unicode decoding, empty type registry.
DEFAULT_CODEC_OPTIONS = CodecOptions()
def _parse_codec_options(options):
    """Build a CodecOptions from a mapping of options, falling back to the
    module defaults for any option that is absent.
    """
    defaults = DEFAULT_CODEC_OPTIONS
    # NOTE: 'uuidrepresentation' (no underscore) is intentional -- it is the
    # spelling used for the corresponding connection-string/URI option.
    return CodecOptions(
        document_class=options.get('document_class',
                                   defaults.document_class),
        tz_aware=options.get('tz_aware', defaults.tz_aware),
        uuid_representation=options.get('uuidrepresentation',
                                        defaults.uuid_representation),
        unicode_decode_error_handler=options.get(
            'unicode_decode_error_handler',
            defaults.unicode_decode_error_handler),
        tzinfo=options.get('tzinfo', defaults.tzinfo),
        type_registry=options.get('type_registry',
                                  defaults.type_registry))

@ -0,0 +1,135 @@
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for manipulating DBRefs (references to MongoDB documents)."""
from copy import deepcopy
from bson.py3compat import iteritems, string_type
from bson.son import SON
class DBRef(object):
    """A reference to a document stored in MongoDB.
    """

    # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen.
    _type_marker = 100

    def __init__(self, collection, id, database=None, _extra=None, **kwargs):
        """Initialize a new :class:`DBRef`.

        Raises :class:`TypeError` if `collection` or `database` is not
        an instance of :class:`basestring` (:class:`str` in python 3).
        `database` is optional and allows references to documents to work
        across databases. Any additional keyword arguments will create
        additional fields in the resultant embedded document.

        :Parameters:
          - `collection`: name of the collection the document is stored in
          - `id`: the value of the document's ``"_id"`` field
          - `database` (optional): name of the database to reference
          - `_extra` (optional): a mapping of additional fields merged into
            the embedded document; on duplicate keys `_extra` wins over
            `**kwargs`
          - `**kwargs` (optional): additional keyword arguments will
            create additional, custom fields

        .. mongodoc:: dbrefs
        """
        if not isinstance(collection, string_type):
            raise TypeError("collection must be an "
                            "instance of %s" % string_type.__name__)
        if database is not None and not isinstance(database, string_type):
            raise TypeError("database must be an "
                            "instance of %s" % string_type.__name__)

        self.__collection = collection
        self.__id = id
        self.__database = database
        # Default _extra to None instead of a mutable {} literal: a shared
        # mutable default argument is a classic Python pitfall (lint B006).
        kwargs.update(_extra or {})
        self.__kwargs = kwargs

    @property
    def collection(self):
        """Get the name of this DBRef's collection as unicode.
        """
        return self.__collection

    @property
    def id(self):
        """Get this DBRef's _id.
        """
        return self.__id

    @property
    def database(self):
        """Get the name of this DBRef's database.

        Returns None if this DBRef doesn't specify a database.
        """
        return self.__database

    def __getattr__(self, key):
        # Expose the custom fields supplied at construction as attributes.
        try:
            return self.__kwargs[key]
        except KeyError:
            raise AttributeError(key)

    # Have to provide __setstate__ to avoid
    # infinite recursion since we override
    # __getattr__.
    def __setstate__(self, state):
        self.__dict__.update(state)

    def as_doc(self):
        """Get the SON document representation of this DBRef.

        Generally not needed by application developers
        """
        doc = SON([("$ref", self.collection),
                   ("$id", self.id)])
        if self.database is not None:
            doc["$db"] = self.database
        doc.update(self.__kwargs)
        return doc

    def __repr__(self):
        extra = "".join([", %s=%r" % (k, v)
                         for k, v in iteritems(self.__kwargs)])
        if self.database is None:
            return "DBRef(%r, %r%s)" % (self.collection, self.id, extra)
        return "DBRef(%r, %r, %r%s)" % (self.collection, self.id,
                                        self.database, extra)

    def __eq__(self, other):
        if isinstance(other, DBRef):
            us = (self.__database, self.__collection,
                  self.__id, self.__kwargs)
            them = (other.__database, other.__collection,
                    other.__id, other.__kwargs)
            return us == them
        return NotImplemented

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        """Get a hash value for this :class:`DBRef`."""
        # Custom fields are folded in as a sorted tuple so equal refs hash
        # equally regardless of kwargs insertion order.
        return hash((self.__collection, self.__id, self.__database,
                     tuple(sorted(self.__kwargs.items()))))

    def __deepcopy__(self, memo):
        """Support function for `copy.deepcopy()`."""
        return DBRef(deepcopy(self.__collection, memo),
                     deepcopy(self.__id, memo),
                     deepcopy(self.__database, memo),
                     deepcopy(self.__kwargs, memo))

@ -0,0 +1,335 @@
# Copyright 2016-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for working with the BSON decimal128 type.
.. versionadded:: 3.4
.. note:: The Decimal128 BSON type requires MongoDB 3.4+.
"""
import decimal
import struct
import sys
from bson.py3compat import (PY3 as _PY3,
string_type as _string_type)
# int.from_bytes only exists on Python 3; on Python 2 fall back to parsing
# the hex representation of the byte string.  Only big-endian input is ever
# produced by the callers here, so the byteorder argument is ignored.
if _PY3:
    _from_bytes = int.from_bytes  # pylint: disable=no-member, invalid-name
else:
    import binascii
    def _from_bytes(value, dummy, _int=int, _hexlify=binascii.hexlify):
        # `dummy` mirrors the byteorder parameter of int.from_bytes but is
        # unused: hexlify always interprets the bytes big-endian.
        "An implementation of int.from_bytes for python 2.x."
        return _int(_hexlify(value), 16)
# Pack/unpack helpers for the two little-endian 64-bit halves of a
# decimal128 value.
_PACK_64 = struct.Struct("<Q").pack
_UNPACK_64 = struct.Struct("<Q").unpack
# IEEE 754-2008 decimal128 encoding parameters: exponent-continuation mask
# for the high word, exponent bias/range, and maximum coefficient digits.
_EXPONENT_MASK = 3 << 61
_EXPONENT_BIAS = 6176
_EXPONENT_MAX = 6144
_EXPONENT_MIN = -6143
_MAX_DIGITS = 34
# High-word bit patterns for the special values.
_INF = 0x7800000000000000
_NAN = 0x7c00000000000000
_SNAN = 0x7e00000000000000
_SIGN = 0x8000000000000000
# Precomputed (high, low) pairs for +/- infinity and the NaN variants.
_NINF = (_INF + _SIGN, 0)
_PINF = (_INF, 0)
_NNAN = (_NAN + _SIGN, 0)
_PNAN = (_NAN, 0)
_NSNAN = (_SNAN + _SIGN, 0)
_PSNAN = (_SNAN, 0)
# decimal.Context options matching decimal128; InvalidOperation, Overflow
# and Inexact are trapped so unrepresentable values raise instead of
# silently rounding.
_CTX_OPTIONS = {
    'prec': _MAX_DIGITS,
    'rounding': decimal.ROUND_HALF_EVEN,
    'Emin': _EXPONENT_MIN,
    'Emax': _EXPONENT_MAX,
    'capitals': 1,
    'flags': [],
    'traps': [decimal.InvalidOperation,
              decimal.Overflow,
              decimal.Inexact]
}
# The exponent-clamping keyword is spelled differently across decimal
# implementations; probe which spelling this runtime accepts.
try:
    # Python >= 3.3, cdecimal
    decimal.Context(clamp=1)  # pylint: disable=unexpected-keyword-arg
    _CTX_OPTIONS['clamp'] = 1
except TypeError:
    # Python < 3.3
    _CTX_OPTIONS['_clamp'] = 1
# Strict (trapping) context used internally for all conversions.
_DEC128_CTX = decimal.Context(**_CTX_OPTIONS.copy())
def create_decimal128_context():
    """Returns an instance of :class:`decimal.Context` appropriate
    for working with IEEE-754 128-bit decimal floating point values.
    """
    # Same options as the strict internal context, but with every signal
    # untrapped so operations round/clamp instead of raising.
    ctx_kwargs = dict(_CTX_OPTIONS, traps=[])
    return decimal.Context(**ctx_kwargs)
def _decimal_to_128(value):
    """Converts a decimal.Decimal to BID (high bits, low bits).

    :Parameters:
      - `value`: An instance of decimal.Decimal

    Returns a ``(high, low)`` pair of 64-bit integers holding the IEEE
    754-2008 decimal128 Binary Integer Decimal encoding of `value`.

    Raises :class:`ValueError` for NaNs that carry a diagnostic payload,
    and lets the strict internal context's trapped signals
    (:class:`decimal.InvalidOperation`, :class:`decimal.Overflow`,
    :class:`decimal.Inexact`) propagate for unrepresentable values.
    """
    with decimal.localcontext(_DEC128_CTX) as ctx:
        value = ctx.create_decimal(value)

    if value.is_infinite():
        return _NINF if value.is_signed() else _PINF

    sign, digits, exponent = value.as_tuple()

    if value.is_nan():
        if digits:
            raise ValueError("NaN with debug payload is not supported")
        if value.is_snan():
            return _NSNAN if value.is_signed() else _PSNAN
        return _NNAN if value.is_signed() else _PNAN

    significand = int("".join([str(digit) for digit in digits]))
    # The context caps the coefficient at 34 decimal digits (< 2**113), so
    # it can be split directly into the low and high 64-bit words instead
    # of copying it bit by bit.
    low = significand & 0xffffffffffffffff
    high = significand >> 64

    biased_exponent = exponent + _EXPONENT_BIAS

    if high >> 49 == 1:
        # Coefficient needs the implicit high bits: use the second encoding
        # form, which steals two high-word bits for the "11" combination
        # prefix and shifts the exponent field down to bit 47.
        high = high & 0x7fffffffffff
        high |= _EXPONENT_MASK
        high |= (biased_exponent & 0x3fff) << 47
    else:
        high |= biased_exponent << 49

    if sign:
        high |= _SIGN

    return high, low
class Decimal128(object):
    """BSON Decimal128 type::

      >>> Decimal128(Decimal("0.0005"))
      Decimal128('0.0005')
      >>> Decimal128("0.0005")
      Decimal128('0.0005')
      >>> Decimal128((3474527112516337664, 5))
      Decimal128('0.0005')

    :Parameters:
      - `value`: An instance of :class:`decimal.Decimal`, string, or tuple of
        (high bits, low bits) from Binary Integer Decimal (BID) format.

    .. note:: :class:`~Decimal128` uses an instance of :class:`decimal.Context`
      configured for IEEE-754 Decimal128 when validating parameters.
      Signals like :class:`decimal.InvalidOperation`, :class:`decimal.Inexact`,
      and :class:`decimal.Overflow` are trapped and raised as exceptions::

        >>> Decimal128(".13.1")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        ...
        decimal.InvalidOperation: [<class 'decimal.ConversionSyntax'>]
        >>>
        >>> Decimal128("1E-6177")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        ...
        decimal.Inexact: [<class 'decimal.Inexact'>]
        >>>
        >>> Decimal128("1E6145")
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
        ...
        decimal.Overflow: [<class 'decimal.Overflow'>, <class 'decimal.Rounded'>]

      To ensure the result of a calculation can always be stored as BSON
      Decimal128 use the context returned by
      :func:`create_decimal128_context`::

        >>> import decimal
        >>> decimal128_ctx = create_decimal128_context()
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal(".13.3"))
        ...
        Decimal128('NaN')
        >>>
        >>> with decimal.localcontext(decimal128_ctx) as ctx:
        ...     Decimal128(ctx.create_decimal("1E-6177"))
        ...
        Decimal128('0E-6176')
        >>>
        >>> with decimal.localcontext(DECIMAL128_CTX) as ctx:
        ...     Decimal128(ctx.create_decimal("1E6145"))
        ...
        Decimal128('Infinity')

      To match the behavior of MongoDB's Decimal128 implementation
      str(Decimal(value)) may not match str(Decimal128(value)) for NaN values::

        >>> Decimal128(Decimal('NaN'))
        Decimal128('NaN')
        >>> Decimal128(Decimal('-NaN'))
        Decimal128('NaN')
        >>> Decimal128(Decimal('sNaN'))
        Decimal128('NaN')
        >>> Decimal128(Decimal('-sNaN'))
        Decimal128('NaN')

      However, :meth:`~Decimal128.to_decimal` will return the exact value::

        >>> Decimal128(Decimal('NaN')).to_decimal()
        Decimal('NaN')
        >>> Decimal128(Decimal('-NaN')).to_decimal()
        Decimal('-NaN')
        >>> Decimal128(Decimal('sNaN')).to_decimal()
        Decimal('sNaN')
        >>> Decimal128(Decimal('-sNaN')).to_decimal()
        Decimal('-sNaN')

      Two instances of :class:`Decimal128` compare equal if their Binary
      Integer Decimal encodings are equal::

        >>> Decimal128('NaN') == Decimal128('NaN')
        True
        >>> Decimal128('NaN').bid == Decimal128('NaN').bid
        True

      This differs from :class:`decimal.Decimal` comparisons for NaN::

        >>> Decimal('NaN') == Decimal('NaN')
        False
    """
    # Only the two 64-bit halves of the BID encoding are stored.
    __slots__ = ('__high', '__low')
    _type_marker = 19
    def __init__(self, value):
        if isinstance(value, (_string_type, decimal.Decimal)):
            # Strings and Decimals are encoded via the strict context.
            self.__high, self.__low = _decimal_to_128(value)
        elif isinstance(value, (list, tuple)):
            # Accept a pre-encoded (high, low) pair.
            if len(value) != 2:
                raise ValueError('Invalid size for creation of Decimal128 '
                                 'from list or tuple. Must have exactly 2 '
                                 'elements.')
            self.__high, self.__low = value
        else:
            raise TypeError("Cannot convert %r to Decimal128" % (value,))
    def to_decimal(self):
        """Returns an instance of :class:`decimal.Decimal` for this
        :class:`Decimal128`.
        """
        high = self.__high
        low = self.__low
        sign = 1 if (high & _SIGN) else 0
        # Special values are detected from the high-word prefix bits.
        if (high & _SNAN) == _SNAN:
            return decimal.Decimal((sign, (), 'N'))
        elif (high & _NAN) == _NAN:
            return decimal.Decimal((sign, (), 'n'))
        elif (high & _INF) == _INF:
            return decimal.Decimal((sign, (), 'F'))
        if (high & _EXPONENT_MASK) == _EXPONENT_MASK:
            # Second encoding form ("11" combination prefix): exponent
            # field starts at bit 47; coefficient would be non-canonical,
            # so it decodes as zero.
            exponent = ((high & 0x1fffe00000000000) >> 47) - _EXPONENT_BIAS
            return decimal.Decimal((sign, (0,), exponent))
        else:
            # First encoding form: exponent field starts at bit 49.
            exponent = ((high & 0x7fff800000000000) >> 49) - _EXPONENT_BIAS
        # Assemble the 113-bit coefficient into a big-endian byte array:
        # bytes 7..14 from the low word, 1..6 from the high word, and the
        # single top bit 112 into byte 0.
        arr = bytearray(15)
        mask = 0x00000000000000ff
        for i in range(14, 6, -1):
            arr[i] = (low & mask) >> ((14 - i) << 3)
            mask = mask << 8
        mask = 0x00000000000000ff
        for i in range(6, 0, -1):
            arr[i] = (high & mask) >> ((6 - i) << 3)
            mask = mask << 8
        mask = 0x0001000000000000
        arr[0] = (high & mask) >> 48
        # cdecimal only accepts a tuple for digits.
        digits = tuple(
            int(digit) for digit in str(_from_bytes(arr, 'big')))
        with decimal.localcontext(_DEC128_CTX) as ctx:
            return ctx.create_decimal((sign, digits, exponent))
    @classmethod
    def from_bid(cls, value):
        """Create an instance of :class:`Decimal128` from Binary Integer
        Decimal string.

        :Parameters:
          - `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating
            point in Binary Integer Decimal (BID) format).
        """
        if not isinstance(value, bytes):
            raise TypeError("value must be an instance of bytes")
        if len(value) != 16:
            raise ValueError("value must be exactly 16 bytes")
        # The low 64-bit half is stored first (little-endian 128-bit value).
        return cls((_UNPACK_64(value[8:])[0], _UNPACK_64(value[:8])[0]))
    @property
    def bid(self):
        """The Binary Integer Decimal (BID) encoding of this instance."""
        # Inverse of from_bid: low half first, then high half.
        return _PACK_64(self.__low) + _PACK_64(self.__high)
    def __str__(self):
        dec = self.to_decimal()
        if dec.is_nan():
            # Required by the drivers spec to match MongoDB behavior.
            return "NaN"
        return str(dec)
    def __repr__(self):
        return "Decimal128('%s')" % (str(self),)
    # __setstate__/__getstate__ are needed for pickling because __slots__
    # removes the instance __dict__.
    def __setstate__(self, value):
        self.__high, self.__low = value
    def __getstate__(self):
        return self.__high, self.__low
    def __eq__(self, other):
        # Equality is defined on the raw BID encoding, so NaN == NaN here.
        if isinstance(other, Decimal128):
            return self.bid == other.bid
        return NotImplemented
    def __ne__(self, other):
        return not self == other

@ -0,0 +1,40 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions raised by the BSON package."""
class BSONError(Exception):
    """Root of the BSON exception hierarchy; catch this to handle any
    error raised by the :mod:`bson` package.
    """
class InvalidBSON(BSONError):
    """Raised when a byte sequence cannot be decoded as valid BSON."""
class InvalidStringData(BSONError):
    """Raised when asked to encode a string whose data is not valid UTF-8."""
class InvalidDocument(BSONError):
    """Raised when a document cannot be encoded as a BSON object."""
class InvalidId(BSONError):
    """Raised when an ObjectId cannot be constructed from the given data."""

@ -0,0 +1,34 @@
# Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A BSON wrapper for long (int in python3)"""
from bson.py3compat import PY3
# The builtin ``long`` does not exist on Python 3; alias it to ``int`` so
# the Int64 class definition below works on both major versions.
if PY3:
    long = int
class Int64(long):
    """Representation of the BSON int64 type.

    Wrapping an integral value in :class:`Int64` forces it to be encoded
    as a BSON int64.  Without the wrapper, small integral numbers are
    encoded to BSON int32 by default, since every integral number is an
    :class:`int` in Python 3.

    :Parameters:
      - `value`: the numeric value to represent
    """

    _type_marker = 18

@ -0,0 +1,829 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for using Python's :mod:`json` module with BSON documents.
This module provides two helper methods `dumps` and `loads` that wrap the
native :mod:`json` methods and provide explicit BSON conversion to and from
JSON. :class:`~bson.json_util.JSONOptions` provides a way to control how JSON
is emitted and parsed, with the default being the legacy PyMongo format.
:mod:`~bson.json_util` can also generate Canonical or Relaxed `Extended JSON`_
when :const:`CANONICAL_JSON_OPTIONS` or :const:`RELAXED_JSON_OPTIONS` is
provided, respectively.
.. _Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json.rst
Example usage (deserialization):
.. doctest::
>>> from bson.json_util import loads
>>> loads('[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$scope": {}, "$code": "function x() { return 1; }"}}, {"bin": {"$type": "80", "$binary": "AQIDBA=="}}]')
[{u'foo': [1, 2]}, {u'bar': {u'hello': u'world'}}, {u'code': Code('function x() { return 1; }', {})}, {u'bin': Binary('...', 128)}]
Example usage (serialization):
.. doctest::
>>> from bson import Binary, Code
>>> from bson.json_util import dumps
>>> dumps([{'foo': [1, 2]},
... {'bar': {'hello': 'world'}},
... {'code': Code("function x() { return 1; }", {})},
... {'bin': Binary(b"\x01\x02\x03\x04")}])
'[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }", "$scope": {}}}, {"bin": {"$binary": "AQIDBA==", "$type": "00"}}]'
Example usage (with :const:`CANONICAL_JSON_OPTIONS`):
.. doctest::
>>> from bson import Binary, Code
>>> from bson.json_util import dumps, CANONICAL_JSON_OPTIONS
>>> dumps([{'foo': [1, 2]},
... {'bar': {'hello': 'world'}},
... {'code': Code("function x() { return 1; }")},
... {'bin': Binary(b"\x01\x02\x03\x04")}],
... json_options=CANONICAL_JSON_OPTIONS)
'[{"foo": [{"$numberInt": "1"}, {"$numberInt": "2"}]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'
Example usage (with :const:`RELAXED_JSON_OPTIONS`):
.. doctest::
>>> from bson import Binary, Code
>>> from bson.json_util import dumps, RELAXED_JSON_OPTIONS
>>> dumps([{'foo': [1, 2]},
... {'bar': {'hello': 'world'}},
... {'code': Code("function x() { return 1; }")},
... {'bin': Binary(b"\x01\x02\x03\x04")}],
... json_options=RELAXED_JSON_OPTIONS)
'[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]'
Alternatively, you can manually pass the `default` to :func:`json.dumps`.
It won't handle :class:`~bson.binary.Binary` and :class:`~bson.code.Code`
instances (as they are extended strings you can't provide custom defaults),
but it will be faster as there is less recursion.
.. note::
If your application does not need the flexibility offered by
:class:`JSONOptions` and spends a large amount of time in the `json_util`
module, look to
`python-bsonjs <https://pypi.python.org/pypi/python-bsonjs>`_ for a nice
performance improvement. `python-bsonjs` is a fast BSON to MongoDB
Extended JSON converter for Python built on top of
`libbson <https://github.com/mongodb/libbson>`_. `python-bsonjs` works best
with PyMongo when using :class:`~bson.raw_bson.RawBSONDocument`.
.. versionchanged:: 2.8
The output format for :class:`~bson.timestamp.Timestamp` has changed from
'{"t": <int>, "i": <int>}' to '{"$timestamp": {"t": <int>, "i": <int>}}'.
This new format will be decoded to an instance of
:class:`~bson.timestamp.Timestamp`. The old format will continue to be
decoded to a python dict as before. Encoding to the old format is no longer
supported as it was never correct and loses type information.
Added support for $numberLong and $undefined - new in MongoDB 2.6 - and
parsing $date in ISO-8601 format.
.. versionchanged:: 2.7
Preserves order when rendering SON, Timestamp, Code, Binary, and DBRef
instances.
.. versionchanged:: 2.3
Added dumps and loads helpers to automatically handle conversion to and
from json and supports :class:`~bson.binary.Binary` and
:class:`~bson.code.Code`
"""
import base64
import datetime
import json
import math
import re
import sys
import uuid
from pymongo.errors import ConfigurationError
import bson
from bson import EPOCH_AWARE, EPOCH_NAIVE, RE_TYPE, SON
from bson.binary import (Binary, JAVA_LEGACY, CSHARP_LEGACY, OLD_UUID_SUBTYPE,
UUID_SUBTYPE)
from bson.code import Code
from bson.codec_options import CodecOptions
from bson.dbref import DBRef
from bson.decimal128 import Decimal128
from bson.int64 import Int64
from bson.max_key import MaxKey
from bson.min_key import MinKey
from bson.objectid import ObjectId
from bson.py3compat import (PY3, iteritems, integer_types, string_type,
text_type)
from bson.regex import Regex
from bson.timestamp import Timestamp
from bson.tz_util import utc
# Maps legacy "$options" regex flag characters to Python re module flags.
_RE_OPT_TABLE = {
    "i": re.I,
    "l": re.L,
    "m": re.M,
    "s": re.S,
    "u": re.U,
    "x": re.X,
}
# Dollar-prefixed keys which may appear in DBRefs.
_DBREF_KEYS = frozenset(['$id', '$ref', '$db'])
class DatetimeRepresentation:
    """Enumerates the supported JSON encodings for
    :class:`datetime.datetime` values.
    """

    # Legacy MongoDB Extended JSON datetime representation:
    # {"$date": <dateAsMilliseconds>}, a 64-bit signed integer count of
    # milliseconds since the Unix epoch UTC.  This was the default
    # encoding before PyMongo 3.4.
    # .. versionadded:: 3.4
    LEGACY = 0

    # NumberLong datetime representation:
    # {"$date": {"$numberLong": "<dateAsMilliseconds>"}}, where the count
    # of milliseconds since the Unix epoch UTC is encoded as a string.
    # .. versionadded:: 3.4
    NUMBERLONG = 1

    # ISO-8601 datetime representation: {"$date": "<ISO-8601>"} for
    # datetimes at or after the Unix epoch UTC; earlier datetimes are
    # encoded as if the representation were NUMBERLONG.
    # .. versionadded:: 3.4
    ISO8601 = 2
class JSONMode:
    """Enumerates the Extended JSON dialects :func:`dumps` can emit."""

    # Legacy Extended JSON representation: PyMongo's historical,
    # non-standard JSON output.  Prefer RELAXED or CANONICAL instead.
    # .. versionadded:: 3.5
    LEGACY = 0

    # Relaxed Extended JSON representation: a mostly JSON-like format
    # where int, Int64 and float use the native JSON number form.  The
    # most human-readable output; suited to web APIs, debugging and
    # documentation.
    # .. seealso:: The specification for Relaxed `Extended JSON`_.
    # .. versionadded:: 3.5
    RELAXED = 1

    # Canonical Extended JSON representation: a type-preserving format in
    # which int, Int64 and float are encoded with type wrappers.  Suited
    # to testing, where expected types must be specified precisely.
    # .. seealso:: The specification for Canonical `Extended JSON`_.
    # .. versionadded:: 3.5
    CANONICAL = 2
class JSONOptions(CodecOptions):
    """Encapsulates JSON options for :func:`dumps` and :func:`loads`.

    :Parameters:
      - `strict_number_long`: If ``True``, :class:`~bson.int64.Int64` objects
        are encoded to MongoDB Extended JSON's *Strict mode* type
        `NumberLong`, ie ``'{"$numberLong": "<number>" }'``. Otherwise they
        will be encoded as an `int`. Defaults to ``False``.
      - `datetime_representation`: The representation to use when encoding
        instances of :class:`datetime.datetime`. Defaults to
        :const:`~DatetimeRepresentation.LEGACY`.
      - `strict_uuid`: If ``True``, :class:`uuid.UUID` object are encoded to
        MongoDB Extended JSON's *Strict mode* type `Binary`. Otherwise it
        will be encoded as ``'{"$uuid": "<hex>" }'``. Defaults to ``False``.
      - `json_mode`: The :class:`JSONMode` to use when encoding BSON types to
        Extended JSON. Defaults to :const:`~JSONMode.LEGACY`.
      - `document_class`: BSON documents returned by :func:`loads` will be
        decoded to an instance of this class. Must be a subclass of
        :class:`collections.MutableMapping`. Defaults to :class:`dict`.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`uuid.UUID`. Defaults to
        :const:`~bson.binary.PYTHON_LEGACY`.
      - `tz_aware`: If ``True``, MongoDB Extended JSON's *Strict mode* type
        `Date` will be decoded to timezone aware instances of
        :class:`datetime.datetime`. Otherwise they will be naive. Defaults
        to ``True``.
      - `tzinfo`: A :class:`datetime.tzinfo` subclass that specifies the
        timezone from which :class:`~datetime.datetime` objects should be
        decoded. Defaults to :const:`~bson.tz_util.utc`.
      - `args`: arguments to :class:`~bson.codec_options.CodecOptions`
      - `kwargs`: arguments to :class:`~bson.codec_options.CodecOptions`

    .. seealso:: The specification for Relaxed and Canonical `Extended JSON`_.

    .. versionadded:: 3.4

    .. versionchanged:: 3.5
       Accepts the optional parameter `json_mode`.
    """
    def __new__(cls, strict_number_long=False,
                datetime_representation=DatetimeRepresentation.LEGACY,
                strict_uuid=False, json_mode=JSONMode.LEGACY,
                *args, **kwargs):
        # Unlike plain CodecOptions, JSON decoding defaults to
        # timezone-aware datetimes in UTC.
        kwargs["tz_aware"] = kwargs.get("tz_aware", True)
        if kwargs["tz_aware"]:
            kwargs["tzinfo"] = kwargs.get("tzinfo", utc)
        if datetime_representation not in (DatetimeRepresentation.LEGACY,
                                           DatetimeRepresentation.NUMBERLONG,
                                           DatetimeRepresentation.ISO8601):
            raise ConfigurationError(
                "JSONOptions.datetime_representation must be one of LEGACY, "
                "NUMBERLONG, or ISO8601 from DatetimeRepresentation.")
        # CodecOptions constructs instances in __new__, so build the base
        # object first and attach the JSON-specific attributes afterwards.
        self = super(JSONOptions, cls).__new__(cls, *args, **kwargs)
        if json_mode not in (JSONMode.LEGACY,
                             JSONMode.RELAXED,
                             JSONMode.CANONICAL):
            raise ConfigurationError(
                "JSONOptions.json_mode must be one of LEGACY, RELAXED, "
                "or CANONICAL from JSONMode.")
        self.json_mode = json_mode
        # RELAXED and CANONICAL modes dictate the remaining encoding
        # options; only LEGACY mode honors the caller-supplied values.
        if self.json_mode == JSONMode.RELAXED:
            self.strict_number_long = False
            self.datetime_representation = DatetimeRepresentation.ISO8601
            self.strict_uuid = True
        elif self.json_mode == JSONMode.CANONICAL:
            self.strict_number_long = True
            self.datetime_representation = DatetimeRepresentation.NUMBERLONG
            self.strict_uuid = True
        else:
            self.strict_number_long = strict_number_long
            self.datetime_representation = datetime_representation
            self.strict_uuid = strict_uuid
        return self
    def _arguments_repr(self):
        # Renders this class's constructor arguments, appended with the
        # base CodecOptions arguments, for use in __repr__.
        return ('strict_number_long=%r, '
                'datetime_representation=%r, '
                'strict_uuid=%r, json_mode=%r, %s' % (
                    self.strict_number_long,
                    self.datetime_representation,
                    self.strict_uuid,
                    self.json_mode,
                    super(JSONOptions, self)._arguments_repr()))
LEGACY_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.LEGACY)
""":class:`JSONOptions` for encoding to PyMongo's legacy JSON format.
.. seealso:: The documentation for :const:`bson.json_util.JSONMode.LEGACY`.
.. versionadded:: 3.5
"""
DEFAULT_JSON_OPTIONS = LEGACY_JSON_OPTIONS
"""The default :class:`JSONOptions` for JSON encoding/decoding.
The same as :const:`LEGACY_JSON_OPTIONS`. This will change to
:const:`RELAXED_JSON_OPTIONS` in a future release.
.. versionadded:: 3.4
"""
CANONICAL_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.CANONICAL)
""":class:`JSONOptions` for Canonical Extended JSON.
.. seealso:: The documentation for :const:`bson.json_util.JSONMode.CANONICAL`.
.. versionadded:: 3.5
"""
RELAXED_JSON_OPTIONS = JSONOptions(json_mode=JSONMode.RELAXED)
""":class:`JSONOptions` for Relaxed Extended JSON.
.. seealso:: The documentation for :const:`bson.json_util.JSONMode.RELAXED`.
.. versionadded:: 3.5
"""
STRICT_JSON_OPTIONS = JSONOptions(
strict_number_long=True,
datetime_representation=DatetimeRepresentation.ISO8601,
strict_uuid=True)
"""**DEPRECATED** - :class:`JSONOptions` for MongoDB Extended JSON's *Strict
mode* encoding.
.. versionadded:: 3.4
.. versionchanged:: 3.5
Deprecated. Use :const:`RELAXED_JSON_OPTIONS` or
:const:`CANONICAL_JSON_OPTIONS` instead.
"""
def dumps(obj, *args, **kwargs):
    """Helper function that wraps :func:`json.dumps`.

    Recursively converts all BSON types (including
    :class:`~bson.binary.Binary` and :class:`~bson.code.Code`) to
    JSON-serializable form before delegating to :func:`json.dumps`.

    :Parameters:
      - `json_options`: A :class:`JSONOptions` instance used to modify the
        encoding of MongoDB Extended JSON types. Defaults to
        :const:`DEFAULT_JSON_OPTIONS`.

    .. versionchanged:: 3.4
       Accepts optional parameter `json_options`. See :class:`JSONOptions`.

    .. versionchanged:: 2.7
       Preserves order when rendering SON, Timestamp, Code, Binary, and DBRef
       instances.
    """
    opts = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)
    converted = _json_convert(obj, opts)
    return json.dumps(converted, *args, **kwargs)
def loads(s, *args, **kwargs):
    """Helper function that wraps :func:`json.loads`.

    Automatically passes the object_hook for BSON type conversion.

    Raises ``TypeError``, ``ValueError``, ``KeyError``, or
    :exc:`~bson.errors.InvalidId` on invalid MongoDB Extended JSON.

    :Parameters:
      - `json_options`: A :class:`JSONOptions` instance used to modify the
        decoding of MongoDB Extended JSON types. Defaults to
        :const:`DEFAULT_JSON_OPTIONS`.

    .. versionchanged:: 3.5
       Parses Relaxed and Canonical Extended JSON as well as PyMongo's legacy
       format. Now raises ``TypeError`` or ``ValueError`` when parsing JSON
       type wrappers with values of the wrong type or any extra keys.

    .. versionchanged:: 3.4
       Accepts optional parameter `json_options`. See :class:`JSONOptions`.
    """
    opts = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS)

    def _pairs_hook(pairs):
        # Bind the resolved options into the decoder callback.
        return object_pairs_hook(pairs, opts)

    kwargs["object_pairs_hook"] = _pairs_hook
    return json.loads(s, *args, **kwargs)
def _json_convert(obj, json_options=DEFAULT_JSON_OPTIONS):
    """Recursively replace BSON types in `obj` with values the standard
    :mod:`json` encoder can serialize.
    """
    # Mappings (detected duck-typed for PY2/PY3 support) keep their key
    # order by converting into a SON.
    if hasattr(obj, 'iteritems') or hasattr(obj, 'items'):  # PY3 support
        return SON((key, _json_convert(val, json_options))
                   for key, val in iteritems(obj))
    if hasattr(obj, '__iter__') and not isinstance(obj, (text_type, bytes)):
        return [_json_convert(item, json_options) for item in obj]
    try:
        return default(obj, json_options)
    except TypeError:
        # Not a BSON type: leave it for json.dumps to handle.
        return obj
def object_pairs_hook(pairs, json_options=DEFAULT_JSON_OPTIONS):
    """Build the configured document class from decoded key/value pairs,
    then apply Extended JSON type conversion via :func:`object_hook`.
    """
    document = json_options.document_class(pairs)
    return object_hook(document, json_options)
def object_hook(dct, json_options=DEFAULT_JSON_OPTIONS):
    """Convert one decoded JSON document to its BSON/Python equivalent.

    Inspects `dct` for Extended JSON type-wrapper keys ("$oid", "$date",
    "$binary", ...) and dispatches to the matching parser; documents with
    no wrapper key are returned unchanged.  The order of the checks is
    significant for documents carrying several dollar keys (e.g. legacy
    "$binary" documents also contain "$type").
    """
    if "$oid" in dct:
        return _parse_canonical_oid(dct)
    if "$ref" in dct:
        return _parse_canonical_dbref(dct)
    if "$date" in dct:
        return _parse_canonical_datetime(dct, json_options)
    if "$regex" in dct:
        return _parse_legacy_regex(dct)
    if "$minKey" in dct:
        return _parse_canonical_minkey(dct)
    if "$maxKey" in dct:
        return _parse_canonical_maxkey(dct)
    if "$binary" in dct:
        # "$type" alongside "$binary" marks the legacy format; the
        # canonical format nests base64/subType under "$binary".
        if "$type" in dct:
            return _parse_legacy_binary(dct, json_options)
        else:
            return _parse_canonical_binary(dct, json_options)
    if "$code" in dct:
        return _parse_canonical_code(dct)
    if "$uuid" in dct:
        return _parse_legacy_uuid(dct)
    if "$undefined" in dct:
        # The deprecated BSON undefined type maps to None.
        return None
    if "$numberLong" in dct:
        return _parse_canonical_int64(dct)
    if "$timestamp" in dct:
        tsp = dct["$timestamp"]
        return Timestamp(tsp["t"], tsp["i"])
    if "$numberDecimal" in dct:
        return _parse_canonical_decimal128(dct)
    if "$dbPointer" in dct:
        return _parse_canonical_dbpointer(dct)
    if "$regularExpression" in dct:
        return _parse_canonical_regex(dct)
    if "$symbol" in dct:
        return _parse_canonical_symbol(dct)
    if "$numberInt" in dct:
        return _parse_canonical_int32(dct)
    if "$numberDouble" in dct:
        return _parse_canonical_double(dct)
    return dct
def _parse_legacy_regex(doc):
    """Decode a legacy ``{"$regex": ..., "$options": ...}`` document to
    :class:`~bson.regex.Regex`, or return `doc` unchanged when it is the
    $regex query operator.
    """
    pattern = doc["$regex"]
    if isinstance(pattern, Regex):
        # Already a Regex: this is the $regex query operator form.
        return doc
    # PyMongo always adds $options but some other tools may not.
    flags = 0
    for flag_char in doc.get("$options", ""):
        flags |= _RE_OPT_TABLE.get(flag_char, 0)
    return Regex(pattern, flags)
def _parse_legacy_uuid(doc):
"""Decode a JSON legacy $uuid to Python UUID."""
if len(doc) != 1:
raise TypeError('Bad $uuid, extra field(s): %s' % (doc,))
return uuid.UUID(doc["$uuid"])
def _binary_or_uuid(data, subtype, json_options):
    """Wrap decoded binary `data` as a UUID, plain bytes (PY3, subtype 0),
    or :class:`~bson.binary.Binary`, depending on `subtype`.
    """
    # Special handling for the two UUID subtypes.
    if subtype == OLD_UUID_SUBTYPE:
        representation = json_options.uuid_representation
        if representation == CSHARP_LEGACY:
            return uuid.UUID(bytes_le=data)
        if representation == JAVA_LEGACY:
            # Java legacy stores each 8-byte half in reversed byte order.
            data = data[7::-1] + data[:7:-1]
        return uuid.UUID(bytes=data)
    if subtype == UUID_SUBTYPE:
        return uuid.UUID(bytes=data)
    if PY3 and subtype == 0:
        # Subtype 0 maps to the native bytes type on Python 3.
        return data
    return Binary(data, subtype)
def _parse_legacy_binary(doc, json_options):
    """Decode a legacy ``{"$binary": "<b64>", "$type": "<hex>"}`` document."""
    type_field = doc["$type"]
    if isinstance(type_field, int):
        # Some producers emit $type as a number; normalize to hex text.
        doc["$type"] = "%02x" % type_field
    subtype = int(doc["$type"], 16)
    if subtype >= 0xffffff80:  # Handle mongoexport values
        subtype = int(doc["$type"][6:], 16)
    data = base64.b64decode(doc["$binary"].encode())
    return _binary_or_uuid(data, subtype, json_options)
def _parse_canonical_binary(doc, json_options):
    """Decode a canonical ``{"$binary": {"base64": ..., "subType": ...}}``
    document.
    """
    payload = doc["$binary"]
    encoded = payload["base64"]
    subtype_hex = payload["subType"]
    # Validate in the same order the errors are documented.
    if not isinstance(encoded, string_type):
        raise TypeError('$binary base64 must be a string: %s' % (doc,))
    if not isinstance(subtype_hex, string_type) or len(subtype_hex) > 2:
        raise TypeError('$binary subType must be a string at most 2 '
                        'characters: %s' % (doc,))
    if len(payload) != 2:
        raise TypeError('$binary must include only "base64" and "subType" '
                        'components: %s' % (doc,))
    raw = base64.b64decode(encoded.encode())
    return _binary_or_uuid(raw, int(subtype_hex, 16), json_options)
def _parse_canonical_datetime(doc, json_options):
    """Decode a JSON datetime to python datetime.datetime.

    Accepts either an ISO-8601 string (mongoexport 2.6+) or a numeric
    milliseconds-since-epoch value in ``$date``.
    """
    dtm = doc["$date"]
    if len(doc) != 1:
        raise TypeError('Bad $date, extra field(s): %s' % (doc,))
    # mongoexport 2.6 and newer
    if isinstance(dtm, string_type):
        # Parse offset: distinguish Z, (+|-)HH:MM, (+|-)HHMM, (+|-)HH,
        # or no offset at all, by inspecting the string's tail.
        if dtm[-1] == 'Z':
            dt = dtm[:-1]
            offset = 'Z'
        elif dtm[-3] == ':':
            # (+|-)HH:MM
            dt = dtm[:-6]
            offset = dtm[-6:]
        elif dtm[-5] in ('+', '-'):
            # (+|-)HHMM
            dt = dtm[:-5]
            offset = dtm[-5:]
        elif dtm[-3] in ('+', '-'):
            # (+|-)HH
            dt = dtm[:-3]
            offset = dtm[-3:]
        else:
            dt = dtm
            offset = ''
        # Parse the optional fractional seconds portion.
        dot_index = dt.rfind('.')
        microsecond = 0
        if dot_index != -1:
            microsecond = int(float(dt[dot_index:]) * 1000000)
            dt = dt[:dot_index]
        aware = datetime.datetime.strptime(
            dt, "%Y-%m-%dT%H:%M:%S").replace(microsecond=microsecond,
                                             tzinfo=utc)
        # Normalize to UTC by subtracting the parsed offset.
        if offset and offset != 'Z':
            if len(offset) == 6:
                hours, minutes = offset[1:].split(':')
                secs = (int(hours) * 3600 + int(minutes) * 60)
            elif len(offset) == 5:
                secs = (int(offset[1:3]) * 3600 + int(offset[3:]) * 60)
            elif len(offset) == 3:
                secs = int(offset[1:3]) * 3600
            if offset[0] == "-":
                secs *= -1
            aware = aware - datetime.timedelta(seconds=secs)
        if json_options.tz_aware:
            if json_options.tzinfo:
                aware = aware.astimezone(json_options.tzinfo)
            return aware
        else:
            return aware.replace(tzinfo=None)
    # Numeric form: milliseconds since the Unix epoch.
    return bson._millis_to_datetime(int(dtm), json_options)
def _parse_canonical_oid(doc):
    """Decode a canonical JSON ``$oid`` document into an ObjectId."""
    # Exactly one key ($oid) is permitted.
    if len(doc) != 1:
        raise TypeError('Bad $oid, extra field(s): %s' % (doc,))
    return ObjectId(doc['$oid'])
def _parse_canonical_symbol(doc):
    """Decode a canonical JSON ``$symbol`` document into a native string."""
    value = doc['$symbol']
    # No keys beyond $symbol are allowed.
    if len(doc) != 1:
        raise TypeError('Bad $symbol, extra field(s): %s' % (doc,))
    return text_type(value)
def _parse_canonical_code(doc):
    """Decode a canonical JSON ``$code`` document into :class:`Code`."""
    # Only $code and an optional $scope may appear.
    for key in doc:
        if key in ('$code', '$scope'):
            continue
        raise TypeError('Bad $code, extra field(s): %s' % (doc,))
    return Code(doc['$code'], scope=doc.get('$scope'))
def _parse_canonical_regex(doc):
    """Decode a JSON regex to bson.regex.Regex.

    Raises :class:`TypeError` if the document carries extra fields or if
    ``$regularExpression`` holds anything besides "pattern" and "options".
    """
    regex = doc['$regularExpression']
    if len(doc) != 1:
        raise TypeError('Bad $regularExpression, extra field(s): %s' % (doc,))
    if len(regex) != 2:
        # Fixed: the two adjacent string literals previously concatenated
        # without a separating space ('..."pattern"and "options"...').
        raise TypeError('Bad $regularExpression must include only "pattern" '
                        'and "options" components: %s' % (doc,))
    return Regex(regex['pattern'], regex['options'])
def _parse_canonical_dbref(doc):
    """Decode a JSON DBRef to bson.dbref.DBRef."""
    # Any unexpected $-prefixed key means this cannot be a DBRef; return
    # the document unchanged in that case.
    if any(key.startswith('$') and key not in _DBREF_KEYS for key in doc):
        return doc
    # Remove the DBRef components; whatever remains becomes extra kwargs.
    ref = doc.pop('$ref')
    ref_id = doc.pop('$id')
    database = doc.pop('$db', None)
    return DBRef(ref, ref_id, database=database, **doc)
def _parse_canonical_dbpointer(doc):
    """Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef.

    The inner value must already have been decoded to a :class:`DBRef`
    with an :class:`ObjectId` id and no ``$db`` component.
    """
    dbref = doc['$dbPointer']
    if len(doc) != 1:
        raise TypeError('Bad $dbPointer, extra field(s): %s' % (doc,))
    if isinstance(dbref, DBRef):
        dbref_doc = dbref.as_doc()
        # DBPointer must not contain $db in its value.
        if dbref.database is not None:
            raise TypeError(
                'Bad $dbPointer, extra field $db: %s' % (dbref_doc,))
        if not isinstance(dbref.id, ObjectId):
            raise TypeError(
                'Bad $dbPointer, $id must be an ObjectId: %s' % (dbref_doc,))
        # Only $ref and $id are allowed inside the pointer.
        if len(dbref_doc) != 2:
            raise TypeError(
                'Bad $dbPointer, extra field(s) in DBRef: %s' % (dbref_doc,))
        return dbref
    else:
        raise TypeError('Bad $dbPointer, expected a DBRef: %s' % (doc,))
def _parse_canonical_int32(doc):
    """Decode a canonical JSON ``$numberInt`` into a python int."""
    value = doc['$numberInt']
    if len(doc) != 1:
        raise TypeError('Bad $numberInt, extra field(s): %s' % (doc,))
    # Canonical extended JSON always encodes the number as a string.
    if not isinstance(value, string_type):
        raise TypeError('$numberInt must be string: %s' % (doc,))
    return int(value)
def _parse_canonical_int64(doc):
    """Decode a canonical JSON ``$numberLong`` into :class:`Int64`."""
    value = doc['$numberLong']
    if len(doc) != 1:
        raise TypeError('Bad $numberLong, extra field(s): %s' % (doc,))
    return Int64(value)
def _parse_canonical_double(doc):
    """Decode a canonical JSON ``$numberDouble`` into a python float."""
    value = doc['$numberDouble']
    if len(doc) != 1:
        raise TypeError('Bad $numberDouble, extra field(s): %s' % (doc,))
    # Canonical extended JSON always encodes the number as a string.
    if not isinstance(value, string_type):
        raise TypeError('$numberDouble must be string: %s' % (doc,))
    return float(value)
def _parse_canonical_decimal128(doc):
    """Decode a canonical JSON ``$numberDecimal`` into :class:`Decimal128`."""
    value = doc['$numberDecimal']
    if len(doc) != 1:
        raise TypeError('Bad $numberDecimal, extra field(s): %s' % (doc,))
    # Canonical extended JSON always encodes the number as a string.
    if not isinstance(value, string_type):
        raise TypeError('$numberDecimal must be string: %s' % (doc,))
    return Decimal128(value)
def _parse_canonical_minkey(doc):
    """Decode a JSON MinKey to bson.min_key.MinKey."""
    # Fixed: the original used `doc['$minKey'] is not 1`. Identity
    # comparison with an int literal relies on CPython's small-int cache
    # and raises a SyntaxWarning on modern Pythons. The type check keeps
    # bools (True == 1) rejected, matching the old behavior.
    if type(doc['$minKey']) is not int or doc['$minKey'] != 1:
        raise TypeError('$minKey value must be 1: %s' % (doc,))
    if len(doc) != 1:
        raise TypeError('Bad $minKey, extra field(s): %s' % (doc,))
    return MinKey()
def _parse_canonical_maxkey(doc):
    """Decode a JSON MaxKey to bson.max_key.MaxKey."""
    # Fixed: `is not 1` identity comparison replaced with a type + value
    # check (see _parse_canonical_minkey for rationale).
    if type(doc['$maxKey']) is not int or doc['$maxKey'] != 1:
        # Fixed: the message previously passed (doc,) as a second TypeError
        # argument ('...: %s', (doc,)) instead of %-formatting it in.
        raise TypeError('$maxKey value must be 1: %s' % (doc,))
    if len(doc) != 1:
        # Fixed: this message previously said "$minKey".
        raise TypeError('Bad $maxKey, extra field(s): %s' % (doc,))
    return MaxKey()
def _encode_binary(data, subtype, json_options):
    """Render binary payload bytes as a legacy or canonical JSON document."""
    b64 = base64.b64encode(data).decode()
    hex_subtype = "%02x" % subtype
    if json_options.json_mode == JSONMode.LEGACY:
        # Legacy mode: flat document with $binary/$type keys, in order.
        return SON([('$binary', b64), ('$type', hex_subtype)])
    # Canonical mode: nested document with base64/subType keys, in order.
    return {'$binary': SON([('base64', b64), ('subType', hex_subtype)])}
def default(obj, json_options=DEFAULT_JSON_OPTIONS):
    """Encode a BSON-specific type as a JSON-serializable value.

    Dispatches on the type of *obj*; raises :class:`TypeError` for types
    with no JSON representation. Branch order is significant (see the
    bool check below).
    """
    # We preserve key order when rendering SON, DBRef, etc. as JSON by
    # returning a SON for those types instead of a dict.
    if isinstance(obj, ObjectId):
        return {"$oid": str(obj)}
    if isinstance(obj, DBRef):
        return _json_convert(obj.as_doc(), json_options=json_options)
    if isinstance(obj, datetime.datetime):
        if (json_options.datetime_representation ==
                DatetimeRepresentation.ISO8601):
            # Naive datetimes are treated as UTC.
            if not obj.tzinfo:
                obj = obj.replace(tzinfo=utc)
            if obj >= EPOCH_AWARE:
                off = obj.tzinfo.utcoffset(obj)
                if (off.days, off.seconds, off.microseconds) == (0, 0, 0):
                    tz_string = 'Z'
                else:
                    tz_string = obj.strftime('%z')
                # Millisecond precision only; omit fraction when zero.
                millis = int(obj.microsecond / 1000)
                fracsecs = ".%03d" % (millis,) if millis else ""
                return {"$date": "%s%s%s" % (
                    obj.strftime("%Y-%m-%dT%H:%M:%S"), fracsecs, tz_string)}
        # Pre-epoch ISO8601 dates and the numeric representations fall
        # through to milliseconds-since-epoch.
        millis = bson._datetime_to_millis(obj)
        if (json_options.datetime_representation ==
                DatetimeRepresentation.LEGACY):
            return {"$date": millis}
        return {"$date": {"$numberLong": str(millis)}}
    if json_options.strict_number_long and isinstance(obj, Int64):
        return {"$numberLong": str(obj)}
    if isinstance(obj, (RE_TYPE, Regex)):
        flags = ""
        if obj.flags & re.IGNORECASE:
            flags += "i"
        if obj.flags & re.LOCALE:
            flags += "l"
        if obj.flags & re.MULTILINE:
            flags += "m"
        if obj.flags & re.DOTALL:
            flags += "s"
        if obj.flags & re.UNICODE:
            flags += "u"
        if obj.flags & re.VERBOSE:
            flags += "x"
        if isinstance(obj.pattern, text_type):
            pattern = obj.pattern
        else:
            pattern = obj.pattern.decode('utf-8')
        if json_options.json_mode == JSONMode.LEGACY:
            return SON([("$regex", pattern), ("$options", flags)])
        return {'$regularExpression': SON([("pattern", pattern),
                                           ("options", flags)])}
    if isinstance(obj, MinKey):
        return {"$minKey": 1}
    if isinstance(obj, MaxKey):
        return {"$maxKey": 1}
    if isinstance(obj, Timestamp):
        return {"$timestamp": SON([("t", obj.time), ("i", obj.inc)])}
    if isinstance(obj, Code):
        if obj.scope is None:
            return {'$code': str(obj)}
        return SON([
            ('$code', str(obj)),
            ('$scope', _json_convert(obj.scope, json_options))])
    if isinstance(obj, Binary):
        return _encode_binary(obj, obj.subtype, json_options)
    if PY3 and isinstance(obj, bytes):
        return _encode_binary(obj, 0, json_options)
    if isinstance(obj, uuid.UUID):
        if json_options.strict_uuid:
            data = obj.bytes
            subtype = OLD_UUID_SUBTYPE
            if json_options.uuid_representation == CSHARP_LEGACY:
                data = obj.bytes_le
            elif json_options.uuid_representation == JAVA_LEGACY:
                data = data[7::-1] + data[:7:-1]
            # NOTE(review): compares uuid_representation against the
            # UUID_SUBTYPE constant — looks like it relies on that constant
            # doubling as the "standard" representation value; confirm.
            elif json_options.uuid_representation == UUID_SUBTYPE:
                subtype = UUID_SUBTYPE
            return _encode_binary(data, subtype, json_options)
        else:
            return {"$uuid": obj.hex}
    if isinstance(obj, Decimal128):
        return {"$numberDecimal": str(obj)}
    # bool must be handled before the integer branch below, since
    # isinstance(True, int) is True.
    if isinstance(obj, bool):
        return obj
    if (json_options.json_mode == JSONMode.CANONICAL and
            isinstance(obj, integer_types)):
        if -2 ** 31 <= obj < 2 ** 31:
            return {'$numberInt': text_type(obj)}
        return {'$numberLong': text_type(obj)}
    if json_options.json_mode != JSONMode.LEGACY and isinstance(obj, float):
        if math.isnan(obj):
            return {'$numberDouble': 'NaN'}
        elif math.isinf(obj):
            representation = 'Infinity' if obj > 0 else '-Infinity'
            return {'$numberDouble': representation}
        elif json_options.json_mode == JSONMode.CANONICAL:
            # repr() will return the shortest string guaranteed to produce the
            # original value, when float() is called on it. str produces a
            # shorter string in Python 2.
            return {'$numberDouble': text_type(repr(obj))}
    raise TypeError("%r is not JSON serializable" % obj)

@ -0,0 +1,50 @@
# Copyright 2010-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Representation for the MongoDB internal MaxKey type.
"""
class MaxKey(object):
    """MongoDB internal MaxKey type.

    Sorts after every other BSON value; all instances compare equal.

    .. versionchanged:: 2.7
       ``MaxKey`` now implements comparison operators.
    """
    _type_marker = 127
    def __eq__(self, other):
        # Every MaxKey instance is interchangeable with every other.
        return isinstance(other, MaxKey)
    def __ne__(self, other):
        return not self == other
    def __hash__(self):
        return hash(self._type_marker)
    def __lt__(self, dummy):
        # Nothing sorts after MaxKey.
        return False
    def __le__(self, other):
        # Only another MaxKey is >= this one.
        return isinstance(other, MaxKey)
    def __gt__(self, other):
        # Greater than everything except another MaxKey.
        return not isinstance(other, MaxKey)
    def __ge__(self, dummy):
        return True
    def __repr__(self):
        return "MaxKey()"

@ -0,0 +1,50 @@
# Copyright 2010-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Representation for the MongoDB internal MinKey type.
"""
class MinKey(object):
    """MongoDB internal MinKey type.

    Sorts before every other BSON value; all instances compare equal.

    .. versionchanged:: 2.7
       ``MinKey`` now implements comparison operators.
    """
    _type_marker = 255
    def __eq__(self, other):
        # Every MinKey instance is interchangeable with every other.
        return isinstance(other, MinKey)
    def __ne__(self, other):
        return not self == other
    def __hash__(self):
        return hash(self._type_marker)
    def __le__(self, dummy):
        return True
    def __lt__(self, other):
        # Strictly less than everything except another MinKey.
        return not isinstance(other, MinKey)
    def __ge__(self, other):
        # Only another MinKey is <= this one.
        return isinstance(other, MinKey)
    def __gt__(self, dummy):
        # Nothing sorts before MinKey.
        return False
    def __repr__(self):
        return "MinKey()"

@ -0,0 +1,299 @@
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for working with MongoDB `ObjectIds
<http://dochub.mongodb.org/core/objectids>`_.
"""
import binascii
import calendar
import datetime
import os
import struct
import threading
import time
from random import SystemRandom
from bson.errors import InvalidId
from bson.py3compat import PY3, bytes_from_hex, string_type, text_type
from bson.tz_util import utc
_MAX_COUNTER_VALUE = 0xFFFFFF  # largest value of the 3-byte inc counter (2**24 - 1)
def _raise_invalid_id(oid):
    """Raise :class:`InvalidId` describing the accepted ObjectId inputs."""
    message = ("%r is not a valid ObjectId, it must be a 12-byte input"
               " or a 24-character hex string" % oid)
    raise InvalidId(message)
def _random_bytes():
"""Get the 5-byte random field of an ObjectId."""
return struct.pack(">Q", SystemRandom().randint(0, 0xFFFFFFFFFF))[3:]
class ObjectId(object):
    """A MongoDB ObjectId.

    12 bytes: 4-byte big-endian timestamp, 5-byte per-process random
    value, 3-byte counter (see :meth:`__generate`).
    """
    # Process-wide state; _random() regenerates __random after a fork.
    _pid = os.getpid()
    _inc = SystemRandom().randint(0, _MAX_COUNTER_VALUE)
    _inc_lock = threading.Lock()
    __random = _random_bytes()
    # NOTE: __slots__ name '__id' mangles to '_ObjectId__id', which
    # __setstate__ relies on for pickle backwards compatibility.
    __slots__ = ('__id',)
    _type_marker = 7
    def __init__(self, oid=None):
        """Initialize a new ObjectId.

        An ObjectId is a 12-byte unique identifier consisting of:
          - a 4-byte value representing the seconds since the Unix epoch,
          - a 5-byte random value,
          - a 3-byte counter, starting with a random value.

        By default, ``ObjectId()`` creates a new unique identifier. The
        optional parameter `oid` can be an :class:`ObjectId`, or any 12
        :class:`bytes` or, in Python 2, any 12-character :class:`str`.

        For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId
        specification but they are acceptable input::

          >>> ObjectId(b'foo-bar-quux')
          ObjectId('666f6f2d6261722d71757578')

        `oid` can also be a :class:`unicode` or :class:`str` of 24 hex digits::

          >>> ObjectId('0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')
          >>>
          >>> # A u-prefixed unicode literal:
          >>> ObjectId(u'0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')

        Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor
        24 hex digits, or :class:`TypeError` if `oid` is not an accepted type.

        :Parameters:
          - `oid` (optional): a valid ObjectId.

        .. mongodoc:: objectids

        .. versionchanged:: 3.8
           :class:`~bson.objectid.ObjectId` now implements the `ObjectID
           specification version 0.2
           <https://github.com/mongodb/specifications/blob/master/source/
           objectid.rst>`_.
        """
        if oid is None:
            self.__generate()
        elif isinstance(oid, bytes) and len(oid) == 12:
            # Raw 12-byte id is accepted verbatim, no validation.
            self.__id = oid
        else:
            self.__validate(oid)
    @classmethod
    def from_datetime(cls, generation_time):
        """Create a dummy ObjectId instance with a specific generation time.

        This method is useful for doing range queries on a field
        containing :class:`ObjectId` instances.

        .. warning::
           It is not safe to insert a document containing an ObjectId
           generated using this method. This method deliberately
           eliminates the uniqueness guarantee that ObjectIds
           generally provide. ObjectIds generated with this method
           should be used exclusively in queries.

        `generation_time` will be converted to UTC. Naive datetime
        instances will be treated as though they already contain UTC.

        An example using this helper to get documents where ``"_id"``
        was generated before January 1, 2010 would be:

        >>> gen_time = datetime.datetime(2010, 1, 1)
        >>> dummy_id = ObjectId.from_datetime(gen_time)
        >>> result = collection.find({"_id": {"$lt": dummy_id}})

        :Parameters:
          - `generation_time`: :class:`~datetime.datetime` to be used
            as the generation time for the resulting ObjectId.
        """
        # Normalize aware datetimes to UTC before taking the timestamp.
        if generation_time.utcoffset() is not None:
            generation_time = generation_time - generation_time.utcoffset()
        timestamp = calendar.timegm(generation_time.timetuple())
        # Timestamp plus eight zero bytes for the random/counter fields.
        oid = struct.pack(
            ">I", int(timestamp)) + b"\x00\x00\x00\x00\x00\x00\x00\x00"
        return cls(oid)
    @classmethod
    def is_valid(cls, oid):
        """Checks if a `oid` string is valid or not.

        :Parameters:
          - `oid`: the object id to validate

        .. versionadded:: 2.3
        """
        if not oid:
            return False
        try:
            ObjectId(oid)
            return True
        except (InvalidId, TypeError):
            return False
    @classmethod
    def _random(cls):
        """Generate a 5-byte random number once per process.
        """
        # Re-seed after fork so children do not share the parent's value.
        pid = os.getpid()
        if pid != cls._pid:
            cls._pid = pid
            cls.__random = _random_bytes()
        return cls.__random
    def __generate(self):
        """Generate a new value for this ObjectId.
        """
        # 4 bytes current time
        oid = struct.pack(">I", int(time.time()))
        # 5 bytes random
        oid += ObjectId._random()
        # 3 bytes inc
        with ObjectId._inc_lock:
            oid += struct.pack(">I", ObjectId._inc)[1:4]
            ObjectId._inc = (ObjectId._inc + 1) % (_MAX_COUNTER_VALUE + 1)
        self.__id = oid
    def __validate(self, oid):
        """Validate and use the given id for this ObjectId.

        Raises TypeError if id is not an instance of
        (:class:`basestring` (:class:`str` or :class:`bytes`
        in python 3), ObjectId) and InvalidId if it is not a
        valid ObjectId.

        :Parameters:
          - `oid`: a valid ObjectId
        """
        if isinstance(oid, ObjectId):
            self.__id = oid.binary
        # bytes or unicode in python 2, str in python 3
        elif isinstance(oid, string_type):
            if len(oid) == 24:
                try:
                    self.__id = bytes_from_hex(oid)
                except (TypeError, ValueError):
                    _raise_invalid_id(oid)
            else:
                _raise_invalid_id(oid)
        else:
            raise TypeError("id must be an instance of (bytes, %s, ObjectId), "
                            "not %s" % (text_type.__name__, type(oid)))
    @property
    def binary(self):
        """12-byte binary representation of this ObjectId.
        """
        return self.__id
    @property
    def generation_time(self):
        """A :class:`datetime.datetime` instance representing the time of
        generation for this :class:`ObjectId`.

        The :class:`datetime.datetime` is timezone aware, and
        represents the generation time in UTC. It is precise to the
        second.
        """
        # First four bytes are the big-endian creation timestamp.
        timestamp = struct.unpack(">I", self.__id[0:4])[0]
        return datetime.datetime.fromtimestamp(timestamp, utc)
    def __getstate__(self):
        """return value of object for pickling.
        needed explicitly because __slots__() defined.
        """
        return self.__id
    def __setstate__(self, value):
        """explicit state set from pickling
        """
        # Provide backwards compatability with OIDs
        # pickled with pymongo-1.9 or older.
        if isinstance(value, dict):
            oid = value["_ObjectId__id"]
        else:
            oid = value
        # ObjectIds pickled in python 2.x used `str` for __id.
        # In python 3.x this has to be converted to `bytes`
        # by encoding latin-1.
        if PY3 and isinstance(oid, text_type):
            self.__id = oid.encode('latin-1')
        else:
            self.__id = oid
    def __str__(self):
        if PY3:
            return binascii.hexlify(self.__id).decode()
        return binascii.hexlify(self.__id)
    def __repr__(self):
        return "ObjectId('%s')" % (str(self),)
    def __eq__(self, other):
        if isinstance(other, ObjectId):
            return self.__id == other.binary
        return NotImplemented
    def __ne__(self, other):
        if isinstance(other, ObjectId):
            return self.__id != other.binary
        return NotImplemented
    def __lt__(self, other):
        if isinstance(other, ObjectId):
            return self.__id < other.binary
        return NotImplemented
    def __le__(self, other):
        if isinstance(other, ObjectId):
            return self.__id <= other.binary
        return NotImplemented
    def __gt__(self, other):
        if isinstance(other, ObjectId):
            return self.__id > other.binary
        return NotImplemented
    def __ge__(self, other):
        if isinstance(other, ObjectId):
            return self.__id >= other.binary
        return NotImplemented
    def __hash__(self):
        """Get a hash value for this :class:`ObjectId`."""
        return hash(self.__id)

@ -0,0 +1,107 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Utility functions and definitions for python3 compatibility."""
import sys
PY3 = sys.version_info[0] == 3
# Define a single compatibility surface (b, bytes_from_hex, iteritems,
# itervalues, reraise, text_type, string_type, integer_types, ...) with
# one implementation per major Python version.
if PY3:
    import codecs
    import collections.abc as abc
    import _thread as thread
    from abc import ABC, abstractmethod
    from io import BytesIO as StringIO
    def abstractproperty(func):
        # abc.abstractproperty was deprecated; compose the decorators.
        return property(abstractmethod(func))
    MAXSIZE = sys.maxsize
    imap = map
    def b(s):
        # BSON and socket operations deal in binary data. In
        # python 3 that means instances of `bytes`. In python
        # 2.7 you can create an alias for `bytes` using
        # the b prefix (e.g. b'foo').
        # See http://python3porting.com/problems.html#nicer-solutions
        return codecs.latin_1_encode(s)[0]
    def bytes_from_hex(h):
        return bytes.fromhex(h)
    def iteritems(d):
        return iter(d.items())
    def itervalues(d):
        return iter(d.values())
    def reraise(exctype, value, trace=None):
        raise exctype(str(value)).with_traceback(trace)
    def reraise_instance(exc_instance, trace=None):
        raise exc_instance.with_traceback(trace)
    def _unicode(s):
        return s
    text_type = str
    string_type = str
    integer_types = int
else:
    import collections as abc
    import thread
    from abc import ABCMeta, abstractproperty
    from itertools import imap
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    ABC = ABCMeta('ABC', (object,), {})
    MAXSIZE = sys.maxint
    def b(s):
        # See comments above. In python 2.x b('foo') is just 'foo'.
        return s
    def bytes_from_hex(h):
        return h.decode('hex')
    def iteritems(d):
        return d.iteritems()
    def itervalues(d):
        return d.itervalues()
    def reraise(exctype, value, trace=None):
        _reraise(exctype, str(value), trace)
    def reraise_instance(exc_instance, trace=None):
        _reraise(exc_instance, None, trace)
    # "raise x, y, z" raises SyntaxError in Python 3
    exec("""def _reraise(exc, value, trace):
    raise exc, value, trace
""")
    _unicode = unicode
    string_type = basestring
    text_type = unicode
    integer_types = (int, long)

@ -0,0 +1,124 @@
# Copyright 2015-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for representing raw BSON documents.
"""
from bson import _elements_to_dict, _get_object_size
from bson.py3compat import abc, iteritems
from bson.codec_options import (
DEFAULT_CODEC_OPTIONS as DEFAULT, _RAW_BSON_DOCUMENT_MARKER)
from bson.son import SON
class RawBSONDocument(abc.Mapping):
    """Representation for a MongoDB document that provides access to the raw
    BSON bytes that compose it.

    Only when a field is accessed or modified within the document does
    RawBSONDocument decode its bytes.
    """
    # __inflated_doc caches the lazily-decoded SON; None until first access.
    __slots__ = ('__raw', '__inflated_doc', '__codec_options')
    _type_marker = _RAW_BSON_DOCUMENT_MARKER
    def __init__(self, bson_bytes, codec_options=None):
        """Create a new :class:`RawBSONDocument`

        :class:`RawBSONDocument` is a representation of a BSON document that
        provides access to the underlying raw BSON bytes. Only when a field is
        accessed or modified within the document does RawBSONDocument decode
        its bytes.

        :class:`RawBSONDocument` implements the ``Mapping`` abstract base
        class from the standard library so it can be used like a read-only
        ``dict``::

            >>> raw_doc = RawBSONDocument(BSON.encode({'_id': 'my_doc'}))
            >>> raw_doc.raw
            b'...'
            >>> raw_doc['_id']
            'my_doc'

        :Parameters:
          - `bson_bytes`: the BSON bytes that compose this document
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions` whose ``document_class``
            must be :class:`RawBSONDocument`. The default is
            :attr:`DEFAULT_RAW_BSON_OPTIONS`.

        :Raises:
          - :class:`TypeError` if `codec_options` uses any other
            ``document_class``.

        .. versionchanged:: 3.8
          :class:`RawBSONDocument` now validates that the ``bson_bytes``
          passed in represent a single bson document.

        .. versionchanged:: 3.5
          If a :class:`~bson.codec_options.CodecOptions` is passed in, its
          `document_class` must be :class:`RawBSONDocument`.
        """
        self.__raw = bson_bytes
        self.__inflated_doc = None
        # Can't default codec_options to DEFAULT_RAW_BSON_OPTIONS in signature,
        # it refers to this class RawBSONDocument.
        if codec_options is None:
            codec_options = DEFAULT_RAW_BSON_OPTIONS
        elif codec_options.document_class is not RawBSONDocument:
            raise TypeError(
                "RawBSONDocument cannot use CodecOptions with document "
                "class %s" % (codec_options.document_class, ))
        self.__codec_options = codec_options
        # Validate the bson object size.
        _get_object_size(bson_bytes, 0, len(bson_bytes))
    @property
    def raw(self):
        """The raw BSON bytes composing this document."""
        return self.__raw
    def items(self):
        """Lazily decode and iterate elements in this document."""
        return iteritems(self.__inflated)
    @property
    def __inflated(self):
        if self.__inflated_doc is None:
            # We already validated the object's size when this document was
            # created, so no need to do that again.
            # Use SON to preserve ordering of elements.
            # Offsets 4 and len-1 skip the 4-byte length prefix and the
            # trailing byte of the BSON document.
            self.__inflated_doc = _elements_to_dict(
                self.__raw, 4, len(self.__raw)-1, self.__codec_options, SON())
        return self.__inflated_doc
    def __getitem__(self, item):
        return self.__inflated[item]
    def __iter__(self):
        return iter(self.__inflated)
    def __len__(self):
        return len(self.__inflated)
    def __eq__(self, other):
        # Equality is byte-level, not logical: two documents with the same
        # fields in a different order compare unequal.
        if isinstance(other, RawBSONDocument):
            return self.__raw == other.raw
        return NotImplemented
    def __repr__(self):
        return ("RawBSONDocument(%r, codec_options=%r)"
                % (self.raw, self.__codec_options))
# Defined after the class because with_options needs RawBSONDocument itself.
DEFAULT_RAW_BSON_OPTIONS = DEFAULT.with_options(document_class=RawBSONDocument)
"""The default :class:`~bson.codec_options.CodecOptions` for
:class:`RawBSONDocument`.
"""

@ -0,0 +1,128 @@
# Copyright 2013-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for representing MongoDB regular expressions.
"""
import re
from bson.son import RE_TYPE
from bson.py3compat import string_type, text_type
def str_flags_to_int(str_flags):
    """Translate a MongoDB regex options string into ``re`` flag bits.

    Characters not in the mapping are ignored.
    """
    flag_map = (
        ("i", re.IGNORECASE),
        ("l", re.LOCALE),
        ("m", re.MULTILINE),
        ("s", re.DOTALL),
        ("u", re.UNICODE),
        ("x", re.VERBOSE),
    )
    flags = 0
    for char, flag in flag_map:
        if char in str_flags:
            flags |= flag
    return flags
class Regex(object):
    """BSON regular expression data.

    Stores a pattern and integer flags without compiling them, so regexes
    that are invalid in Python's dialect can still round-trip.
    """
    _type_marker = 11
    @classmethod
    def from_native(cls, regex):
        """Convert a Python regular expression into a ``Regex`` instance.

        Note that in Python 3, a regular expression compiled from a
        :class:`str` has the ``re.UNICODE`` flag set. If it is undesirable
        to store this flag in a BSON regular expression, unset it first::

          >>> pattern = re.compile('.*')
          >>> regex = Regex.from_native(pattern)
          >>> regex.flags ^= re.UNICODE
          >>> db.collection.insert({'pattern': regex})

        :Parameters:
          - `regex`: A regular expression object from ``re.compile()``.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query.

        .. _PCRE: http://www.pcre.org/
        """
        if not isinstance(regex, RE_TYPE):
            raise TypeError(
                "regex must be a compiled regular expression, not %s"
                % type(regex))
        return Regex(regex.pattern, regex.flags)
    def __init__(self, pattern, flags=0):
        """BSON regular expression data.

        This class is useful to store and retrieve regular expressions that are
        incompatible with Python's regular expression dialect.

        :Parameters:
          - `pattern`: string
          - `flags`: (optional) an integer bitmask, or a string of flag
            characters like "im" for IGNORECASE and MULTILINE
        """
        if not isinstance(pattern, (text_type, bytes)):
            raise TypeError("pattern must be a string, not %s" % type(pattern))
        self.pattern = pattern
        if isinstance(flags, string_type):
            # Flag letters like "im" are translated to re module bits.
            self.flags = str_flags_to_int(flags)
        elif isinstance(flags, int):
            self.flags = flags
        else:
            raise TypeError(
                "flags must be a string or int, not %s" % type(flags))
    def __eq__(self, other):
        if isinstance(other, Regex):
            return self.pattern == other.pattern and self.flags == other.flags
        else:
            return NotImplemented
    # pattern/flags are mutable attributes, so instances are deliberately
    # unhashable.
    __hash__ = None
    def __ne__(self, other):
        return not self == other
    def __repr__(self):
        return "Regex(%r, %r)" % (self.pattern, self.flags)
    def try_compile(self):
        """Compile this :class:`Regex` as a Python regular expression.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query. :meth:`try_compile()` may raise
           :exc:`re.error`.

        .. _PCRE: http://www.pcre.org/
        """
        return re.compile(self.pattern, self.flags)

@ -0,0 +1,200 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for creating and manipulating SON, the Serialized Ocument Notation.
Regular dictionaries can be used instead of SON objects, but not when the order
of keys is important. A SON object can be used just like a normal Python
dictionary."""
import copy
import re
from bson.py3compat import abc, iteritems
# This sort of sucks, but seems to be as good as it gets...
# This is essentially the same as re._pattern_type
RE_TYPE = type(re.compile(""))  # the concrete type of compiled patterns
class SON(dict):
"""SON data.
A subclass of dict that maintains ordering of keys and provides a
few extra niceties for dealing with SON. SON provides an API
similar to collections.OrderedDict from Python 2.7+.
"""
def __init__(self, data=None, **kwargs):
self.__keys = []
dict.__init__(self)
self.update(data)
self.update(kwargs)
def __new__(cls, *args, **kwargs):
instance = super(SON, cls).__new__(cls, *args, **kwargs)
instance.__keys = []
return instance
def __repr__(self):
result = []
for key in self.__keys:
result.append("(%r, %r)" % (key, self[key]))
return "SON([%s])" % ", ".join(result)
def __setitem__(self, key, value):
if key not in self.__keys:
self.__keys.append(key)
dict.__setitem__(self, key, value)
def __delitem__(self, key):
self.__keys.remove(key)
dict.__delitem__(self, key)
def keys(self):
return list(self.__keys)
def copy(self):
other = SON()
other.update(self)
return other
# TODO this is all from UserDict.DictMixin. it could probably be made more
# efficient.
# second level definitions support higher levels
def __iter__(self):
for k in self.__keys:
yield k
def has_key(self, key):
return key in self.__keys
# third level takes advantage of second level definitions
def iteritems(self):
for k in self:
yield (k, self[k])
def iterkeys(self):
return self.__iter__()
# fourth level uses definitions from lower levels
def itervalues(self):
for _, v in self.iteritems():
yield v
def values(self):
return [v for _, v in self.iteritems()]
def items(self):
return [(key, self[key]) for key in self]
def clear(self):
self.__keys = []
super(SON, self).clear()
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
def pop(self, key, *args):
if len(args) > 1:
raise TypeError("pop expected at most 2 arguments, got "\
+ repr(1 + len(args)))
try:
value = self[key]
except KeyError:
if args:
return args[0]
raise
del self[key]
return value
def popitem(self):
try:
k, v = next(self.iteritems())
except StopIteration:
raise KeyError('container is empty')
del self[k]
return (k, v)
def update(self, other=None, **kwargs):
    """Merge *other* (a mapping, keys-container, or iterable of
    pairs) and then *kwargs* into this document via ``self[k] = v``."""
    # Make progressively weaker assumptions about "other".
    if other is not None:
        if hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self[k] = other[k]
        else:
            for k, v in other:
                self[k] = v
    if kwargs:
        self.update(kwargs)
def get(self, key, default=None):
    # EAFP lookup: mirrors dict.get but routes through __getitem__.
    try:
        return self[key]
    except KeyError:
        return default
def __eq__(self, other):
    """Comparison to another SON is order-sensitive while comparison to a
    regular dictionary is order-insensitive.
    """
    # items() lists pairs in insertion order, so list equality also
    # checks ordering; plain dicts are compared via to_dict().
    if isinstance(other, SON):
        return len(self) == len(other) and self.items() == other.items()
    return self.to_dict() == other
def __ne__(self, other):
    # Delegates to __eq__ through the == operator.
    return not self == other
def __len__(self):
    # The order list is kept in sync with the dict storage, so its
    # length is the document length.
    return len(self.__keys)
def to_dict(self):
    """Convert a SON document to a normal Python dictionary instance.

    This is trickier than just *dict(...)* because it needs to be
    recursive.
    """
    def transform_value(value):
        # Recurse into lists and mappings; leaf values pass through.
        if isinstance(value, list):
            return [transform_value(v) for v in value]
        elif isinstance(value, abc.Mapping):
            return dict([
                (k, transform_value(v))
                for k, v in iteritems(value)])
        else:
            return value
    return transform_value(dict(self))
def __deepcopy__(self, memo):
    out = SON()
    val_id = id(self)
    # Honor the memo dict so shared / self-referencing documents are
    # copied once and cycles terminate.
    if val_id in memo:
        return memo.get(val_id)
    memo[val_id] = out
    for k, v in self.iteritems():
        # Compiled regex objects (RE_TYPE) are shared, not deep-copied.
        if not isinstance(v, RE_TYPE):
            v = copy.deepcopy(v, memo)
        out[k] = v
    return out

@ -0,0 +1,120 @@
# Copyright 2010-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for representing MongoDB internal Timestamps.
"""
import calendar
import datetime
from bson.py3compat import integer_types
from bson.tz_util import utc
UPPERBOUND = 4294967296
class Timestamp(object):
    """MongoDB internal timestamps used in the opLog."""

    _type_marker = 17

    def __init__(self, time, inc):
        """Create a new :class:`Timestamp`.

        This class is only for use with the MongoDB opLog. If you need
        to store a regular timestamp, please use a
        :class:`~datetime.datetime`.

        Raises :class:`TypeError` if `time` is not an instance of
        :class: `int` or :class:`~datetime.datetime`, or `inc` is not
        an instance of :class:`int`. Raises :class:`ValueError` if
        `time` or `inc` is not in [0, 2**32).

        :Parameters:
          - `time`: time in seconds since epoch UTC, or a naive UTC
            :class:`~datetime.datetime`, or an aware
            :class:`~datetime.datetime`
          - `inc`: the incrementing counter
        """
        if isinstance(time, datetime.datetime):
            # Normalize an aware datetime to UTC, then collapse it to
            # integer epoch seconds.
            offset = time.utcoffset()
            if offset is not None:
                time = time - offset
            time = int(calendar.timegm(time.timetuple()))
        if not isinstance(time, integer_types):
            raise TypeError("time must be an instance of int")
        if not isinstance(inc, integer_types):
            raise TypeError("inc must be an instance of int")
        if not 0 <= time < UPPERBOUND:
            raise ValueError("time must be contained in [0, 2**32)")
        if not 0 <= inc < UPPERBOUND:
            raise ValueError("inc must be contained in [0, 2**32)")
        self.__time = time
        self.__inc = inc

    @property
    def time(self):
        """Get the time portion of this :class:`Timestamp`."""
        return self.__time

    @property
    def inc(self):
        """Get the inc portion of this :class:`Timestamp`."""
        return self.__inc

    def __eq__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return self.__time == other.time and self.__inc == other.inc

    def __hash__(self):
        return hash(self.time) ^ hash(self.inc)

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) < (other.time, other.inc)

    def __le__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) <= (other.time, other.inc)

    def __gt__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) > (other.time, other.inc)

    def __ge__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.time, self.inc) >= (other.time, other.inc)

    def __repr__(self):
        return "Timestamp(%s, %s)" % (self.__time, self.__inc)

    def as_datetime(self):
        """Return a :class:`~datetime.datetime` instance corresponding
        to the time portion of this :class:`Timestamp`.

        The returned datetime's timezone is UTC.
        """
        return datetime.datetime.fromtimestamp(self.__time, utc)

@ -0,0 +1,52 @@
# Copyright 2010-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Timezone related utilities for BSON."""
from datetime import (timedelta,
tzinfo)
ZERO = timedelta(0)


class FixedOffset(tzinfo):
    """Fixed offset timezone, in minutes east from UTC.

    Implementation based from the Python `standard library documentation
    <http://docs.python.org/library/datetime.html#tzinfo-objects>`_.
    Defining __getinitargs__ enables pickling / copying.
    """

    def __init__(self, offset, name):
        # Accept either a ready-made timedelta or a count of minutes
        # east of UTC.
        self.__offset = (offset if isinstance(offset, timedelta)
                         else timedelta(minutes=offset))
        self.__name = name

    def __getinitargs__(self):
        # Replay the constructor arguments for pickle / copy support.
        return self.__offset, self.__name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        # A fixed-offset zone never observes daylight saving time.
        return ZERO


utc = FixedOffset(0, "UTC")
"""Fixed offset timezone representing UTC."""

@ -0,0 +1,241 @@
# Copyright (c) 2011-2017, Dan Crosta
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__all__ = ("PyMongo", "ASCENDING", "DESCENDING")
from mimetypes import guess_type
import sys
from bson.errors import InvalidId
from bson.objectid import ObjectId
from flask import abort, current_app, request
from gridfs import GridFS, NoFile
from pymongo import uri_parser
from werkzeug.routing import BaseConverter
from werkzeug.wsgi import wrap_file
import pymongo
from flask_pymongo.wrappers import MongoClient
PY2 = sys.version_info[0] == 2

# Python 3 compatibility: on Python 2 both byte and unicode strings
# (and both integer widths) are accepted; Python 3 has one type each.
if PY2:
    text_type = (str, unicode)
    num_type = (int, long)
else:
    text_type = str
    num_type = int

DESCENDING = pymongo.DESCENDING
"""Descending sort order."""

ASCENDING = pymongo.ASCENDING
"""Ascending sort order."""
class BSONObjectIdConverter(BaseConverter):
    """A simple converter for the RESTful URL routing system of Flask.

    .. code-block:: python

        @app.route("/<ObjectId:task_id>")
        def show_task(task_id):
            task = mongo.db.tasks.find_one_or_404(task_id)
            return render_template("task.html", task=task)

    Valid object ID strings are converted into
    :class:`~bson.objectid.ObjectId` objects; invalid strings result
    in a 404 error. The converter is automatically registered by the
    initialization of :class:`~flask_pymongo.PyMongo` with keyword
    :attr:`ObjectId`.
    """

    def to_python(self, value):
        # Malformed ObjectId strings become a 404 response; ``abort``
        # raises, so the ``raise`` merely makes the flow explicit.
        try:
            return ObjectId(value)
        except InvalidId:
            raise abort(404)

    def to_url(self, value):
        # ObjectId's str() is its 24-char hex representation.
        return str(value)
class PyMongo(object):
    """Manages MongoDB connections for your Flask app.

    PyMongo objects provide access to the MongoDB server via the :attr:`db`
    and :attr:`cx` attributes. You must either pass the :class:`~flask.Flask`
    app to the constructor, or call :meth:`init_app`.

    PyMongo accepts a MongoDB URI via the ``MONGO_URI`` Flask configuration
    variable, or as an argument to the constructor or ``init_app``. See
    :meth:`init_app` for more detail.
    """

    def __init__(self, app=None, uri=None, *args, **kwargs):
        # cx: the MongoClient; db: the default Database. Both stay None
        # until init_app runs; db also stays None when the URI names no
        # database.
        self.cx = None
        self.db = None
        if app is not None:
            self.init_app(app, uri, *args, **kwargs)

    def init_app(self, app, uri=None, *args, **kwargs):
        """Initialize this :class:`PyMongo` for use.

        Configure a :class:`~pymongo.mongo_client.MongoClient`
        in the following scenarios:

        1. If ``uri`` is not ``None``, pass the ``uri`` and any positional
           or keyword arguments to :class:`~pymongo.mongo_client.MongoClient`
        2. If ``uri`` is ``None``, and a Flask config variable named
           ``MONGO_URI`` exists, use that as the ``uri`` as above.

        The caller is responsible for ensuring that additional positional
        and keyword arguments result in a valid call.

        :raises ValueError: if no URI is given directly or via config.

        .. versionchanged:: 2.2
           The ``uri`` is no longer required to contain a database name. If it
           does not, then the :attr:`db` attribute will be ``None``.

        .. versionchanged:: 2.0
           Flask-PyMongo no longer accepts many of the configuration variables
           it did in previous versions. You must now use a MongoDB URI to
           configure Flask-PyMongo.
        """
        if uri is None:
            uri = app.config.get("MONGO_URI", None)
        if uri is not None:
            # Prepend the URI to the positional args for MongoClient.
            args = tuple([uri] + list(args))
        else:
            raise ValueError(
                "You must specify a URI or set the MONGO_URI Flask config variable",
            )
        parsed_uri = uri_parser.parse_uri(uri)
        database_name = parsed_uri["database"]
        # Try to delay connecting, in case the app is loaded before forking, per
        # http://api.mongodb.com/python/current/faq.html#is-pymongo-fork-safe
        kwargs.setdefault("connect", False)
        self.cx = MongoClient(*args, **kwargs)
        if database_name:
            self.db = self.cx[database_name]
        # Enable <ObjectId:...> in URL rules for this app.
        app.url_map.converters["ObjectId"] = BSONObjectIdConverter

    # view helpers

    def send_file(self, filename, base="fs", version=-1, cache_for=31536000):
        """Respond with a file from GridFS.

        Returns an instance of the :attr:`~flask.Flask.response_class`
        containing the named file, and implement conditional GET semantics
        (using :meth:`~werkzeug.wrappers.ETagResponseMixin.make_conditional`).

        .. code-block:: python

            @app.route("/uploads/<path:filename>")
            def get_upload(filename):
                return mongo.send_file(filename)

        :param str filename: the filename of the file to return
        :param str base: the base name of the GridFS collections to use
        :param int version: if positive, return the Nth revision of the file
           identified by filename; if negative, return the Nth most recent
           revision. If no such version exists, return with HTTP status 404.
        :param int cache_for: number of seconds that browsers should be
           instructed to cache responses
        """
        if not isinstance(base, text_type):
            raise TypeError("'base' must be string or unicode")
        if not isinstance(version, num_type):
            raise TypeError("'version' must be an integer")
        if not isinstance(cache_for, num_type):
            raise TypeError("'cache_for' must be an integer")
        storage = GridFS(self.db, base)
        try:
            fileobj = storage.get_version(filename=filename, version=version)
        except NoFile:
            abort(404)
        # mostly copied from flask/helpers.py, with
        # modifications for GridFS
        data = wrap_file(request.environ, fileobj, buffer_size=1024 * 255)
        response = current_app.response_class(
            data,
            mimetype=fileobj.content_type,
            direct_passthrough=True,
        )
        response.content_length = fileobj.length
        response.last_modified = fileobj.upload_date
        # ETag from the GridFS-stored MD5 enables conditional GETs.
        response.set_etag(fileobj.md5)
        response.cache_control.max_age = cache_for
        response.cache_control.public = True
        response.make_conditional(request)
        return response

    def save_file(self, filename, fileobj, base="fs", content_type=None, **kwargs):
        """Save a file-like object to GridFS using the given filename.

        .. code-block:: python

            @app.route("/uploads/<path:filename>", methods=["POST"])
            def save_upload(filename):
                mongo.save_file(filename, request.files["file"])
                return redirect(url_for("get_upload", filename=filename))

        :param str filename: the filename of the file to return
        :param file fileobj: the file-like object to save
        :param str base: base the base name of the GridFS collections to use
        :param str content_type: the MIME content-type of the file. If
           ``None``, the content-type is guessed from the filename using
           :func:`~mimetypes.guess_type`
        :param kwargs: extra attributes to be stored in the file's document,
           passed directly to :meth:`gridfs.GridFS.put`
        :returns: the ``_id`` of the stored file document
        """
        if not isinstance(base, text_type):
            raise TypeError("'base' must be string or unicode")
        if not (hasattr(fileobj, "read") and callable(fileobj.read)):
            raise TypeError("'fileobj' must have read() method")
        if content_type is None:
            content_type, _ = guess_type(filename)
        storage = GridFS(self.db, base)
        id = storage.put(fileobj, filename=filename, content_type=content_type, **kwargs)
        return id

@ -0,0 +1,5 @@
# This file is automatically generated by setup.py.
# Do not edit by hand; values come from the release tooling.
__version__ = '2.3.0'
__sha__ = 'g91c4c19'
__revision__ = 'g91c4c19'

@ -0,0 +1,108 @@
from contextlib import contextmanager
import time
import pymongo
import pytest
from flask_pymongo.tests.util import FlaskRequestTest
import flask_pymongo
class CouldNotConnect(Exception):
    """Raised by _wait_until_connected when no connected node is seen
    within the timeout."""
    pass
@contextmanager
def doesnt_raise(exc=BaseException):
    # Context manager asserting that the wrapped body does NOT raise
    # `exc`; any such exception becomes an explicit pytest failure.
    try:
        yield
    except exc:
        pytest.fail("{} was raised but should not have been".format(exc))
class FlaskPyMongoConfigTest(FlaskRequestTest):
    """Integration tests for PyMongo configuration; requires a live
    mongod reachable on self.port."""

    def setUp(self):
        super(FlaskPyMongoConfigTest, self).setUp()
        conn = pymongo.MongoClient(port=self.port)
        conn.test.command("ping")  # wait for server

    def tearDown(self):
        super(FlaskPyMongoConfigTest, self).tearDown()
        # Drop both databases that the tests below may have created.
        conn = pymongo.MongoClient(port=self.port)
        conn.drop_database(self.dbname)
        conn.drop_database(self.dbname + "2")

    def test_config_with_uri_in_flask_conf_var(self):
        uri = "mongodb://localhost:{}/{}".format(self.port, self.dbname)
        self.app.config["MONGO_URI"] = uri
        mongo = flask_pymongo.PyMongo(self.app, connect=True)
        _wait_until_connected(mongo)
        assert mongo.db.name == self.dbname
        assert ("localhost", self.port) == mongo.cx.address

    def test_config_with_uri_passed_directly(self):
        uri = "mongodb://localhost:{}/{}".format(self.port, self.dbname)
        mongo = flask_pymongo.PyMongo(self.app, uri, connect=True)
        _wait_until_connected(mongo)
        assert mongo.db.name == self.dbname
        assert ("localhost", self.port) == mongo.cx.address

    def test_it_fails_with_no_uri(self):
        self.app.config.pop("MONGO_URI", None)
        with pytest.raises(ValueError):
            flask_pymongo.PyMongo(self.app)

    def test_multiple_pymongos(self):
        uri1 = "mongodb://localhost:{}/{}".format(self.port, self.dbname)
        uri2 = "mongodb://localhost:{}/{}".format(self.port, self.dbname + "2")
        mongo1 = flask_pymongo.PyMongo(self.app, uri1)  # noqa: F841 unused variable
        mongo2 = flask_pymongo.PyMongo(self.app, uri2)  # noqa: F841 unused variable
        # this test passes if it raises no exceptions

    def test_custom_document_class(self):
        # document_class should be forwarded through to MongoClient.
        class CustomDict(dict):
            pass
        uri = "mongodb://localhost:{}/{}".format(self.port, self.dbname)
        mongo = flask_pymongo.PyMongo(self.app, uri, document_class=CustomDict)
        assert mongo.db.things.find_one() is None, "precondition failed"
        mongo.db.things.insert_one({"_id": "thing", "val": "foo"})
        assert type(mongo.db.things.find_one()) == CustomDict

    def test_it_doesnt_connect_by_default(self):
        uri = "mongodb://localhost:{}/{}".format(self.port, self.dbname)
        mongo = flask_pymongo.PyMongo(self.app, uri)
        with pytest.raises(CouldNotConnect):
            _wait_until_connected(mongo, timeout=0.2)

    def test_it_doesnt_require_db_name_in_uri(self):
        uri = "mongodb://localhost:{}".format(self.port)
        with doesnt_raise(Exception):
            mongo = flask_pymongo.PyMongo(self.app, uri)
        assert mongo.db is None
def _wait_until_connected(mongo, timeout=1.0):
start = time.time()
while time.time() < (start + timeout):
if mongo.cx.nodes:
return
time.sleep(0.05)
raise CouldNotConnect("could not prove mongodb connected in %r seconds" % timeout)

@ -0,0 +1,100 @@
from hashlib import md5
from io import BytesIO
from bson.objectid import ObjectId
from gridfs import GridFS
from werkzeug.exceptions import NotFound
import pytest
from flask_pymongo.tests.util import FlaskPyMongoTest
class GridFSCleanupMixin(object):
    """Mixin that deletes every GridFS file in the test db on teardown."""

    def tearDown(self):
        gridfs = GridFS(self.mongo.db)
        # Materialize the cursor before deleting from the collection it
        # reads from.
        files = list(gridfs.find())
        for gridfile in files:
            gridfs.delete(gridfile._id)
        super(GridFSCleanupMixin, self).tearDown()
class TestSaveFile(GridFSCleanupMixin, FlaskPyMongoTest):
    """Tests for PyMongo.save_file; requires a live mongod."""

    def test_it_saves_files(self):
        fileobj = BytesIO(b"these are the bytes")
        self.mongo.save_file("my-file", fileobj)
        gridfs = GridFS(self.mongo.db)
        assert gridfs.exists({"filename": "my-file"})

    def test_it_guesses_type_from_filename(self):
        # content_type=None triggers mimetypes.guess_type on the name.
        fileobj = BytesIO(b"these are the bytes")
        self.mongo.save_file("my-file.txt", fileobj)
        gridfs = GridFS(self.mongo.db)
        gridfile = gridfs.find_one({"filename": "my-file.txt"})
        assert gridfile.content_type == "text/plain"

    def test_it_saves_files_with_props(self):
        # Extra kwargs become attributes of the stored file document.
        fileobj = BytesIO(b"these are the bytes")
        self.mongo.save_file("my-file", fileobj, foo="bar")
        gridfs = GridFS(self.mongo.db)
        gridfile = gridfs.find_one({"filename": "my-file"})
        assert gridfile.foo == "bar"

    def test_it_returns_id(self):
        fileobj = BytesIO(b"these are the bytes")
        _id = self.mongo.save_file("my-file", fileobj, foo="bar")
        assert type(_id) is ObjectId
class TestSendFile(GridFSCleanupMixin, FlaskPyMongoTest):
    """Tests for PyMongo.send_file; requires a live mongod."""

    def setUp(self):
        super(TestSendFile, self).setUp()
        # make it bigger than 1 gridfs chunk
        self.myfile = BytesIO(b"a" * 500 * 1024)
        self.mongo.save_file("myfile.txt", self.myfile)

    def test_it_404s_for_missing_files(self):
        with pytest.raises(NotFound):
            self.mongo.send_file("no-such-file.txt")

    def test_it_sets_content_type(self):
        resp = self.mongo.send_file("myfile.txt")
        assert resp.content_type.startswith("text/plain")

    def test_it_sets_content_length(self):
        resp = self.mongo.send_file("myfile.txt")
        assert resp.content_length == len(self.myfile.getvalue())

    def test_it_sets_supports_conditional_gets(self):
        # a basic conditional GET: the ETag is the file's MD5, so a
        # matching If-None-Match must yield 304 Not Modified.
        environ_args = {
            "method": "GET",
            "headers": {
                "If-None-Match": md5(self.myfile.getvalue()).hexdigest(),
            },
        }
        with self.app.test_request_context(**environ_args):
            resp = self.mongo.send_file("myfile.txt")
            assert resp.status_code == 304

    def test_it_sets_cache_headers(self):
        resp = self.mongo.send_file("myfile.txt", cache_for=60)
        assert resp.cache_control.max_age == 60
        assert resp.cache_control.public is True

    def test_it_streams_results(self):
        resp = self.mongo.send_file("myfile.txt")
        assert resp.is_streamed

@ -0,0 +1,17 @@
from bson import ObjectId
from werkzeug.exceptions import NotFound
from flask_pymongo import BSONObjectIdConverter
from flask_pymongo.tests.util import FlaskPyMongoTest
class UrlConverterTest(FlaskPyMongoTest):
    """Tests for the <ObjectId:...> URL rule converter."""

    def test_bson_object_id_converter(self):
        converter = BSONObjectIdConverter("/")
        # NOTE(review): ("132") is just a parenthesized string, not a
        # tuple -- the converter receives the string "132".
        self.assertRaises(NotFound, converter.to_python, ("132"))
        assert converter.to_python("4e4ac5cfffc84958fa1f45fb") == \
            ObjectId("4e4ac5cfffc84958fa1f45fb")
        assert converter.to_url(ObjectId("4e4ac5cfffc84958fa1f45fb")) == \
            "4e4ac5cfffc84958fa1f45fb"

@ -0,0 +1,33 @@
from werkzeug.exceptions import HTTPException
from flask_pymongo.tests.util import FlaskPyMongoTest
class CollectionTest(FlaskPyMongoTest):
    """Tests for Collection.find_one_or_404; requires a live mongod."""

    def test_find_one_or_404(self):
        """A missing document aborts with 404; a present one is returned."""
        self.mongo.db.things.delete_many({})
        try:
            self.mongo.db.things.find_one_or_404({"_id": "thing"})
        except HTTPException as notfound:
            assert notfound.code == 404, "raised wrong exception"
        else:
            # Bug fix: the original try/except passed silently when no
            # exception was raised, so a broken find_one_or_404 that
            # returned None would go undetected.
            assert False, "expected HTTPException (404) for missing document"
        self.mongo.db.things.insert_one({"_id": "thing", "val": "foo"})
        # now it should not raise
        thing = self.mongo.db.things.find_one_or_404({"_id": "thing"})
        assert thing["val"] == "foo", "got wrong thing"
        # also test with dotted-named collections
        self.mongo.db.things.morethings.delete_many({})
        try:
            self.mongo.db.things.morethings.find_one_or_404({"_id": "thing"})
        except HTTPException as notfound:
            assert notfound.code == 404, "raised wrong exception"
        else:
            assert False, "expected HTTPException (404) for missing document"
        self.mongo.db.things.morethings.insert_one({"_id": "thing", "val": "foo"})
        # now it should not raise
        thing = self.mongo.db.things.morethings.find_one_or_404({"_id": "thing"})
        assert thing["val"] == "foo", "got wrong thing"

@ -0,0 +1,48 @@
import os
import unittest
import flask
import flask_pymongo
class ToxDockerMixin(object):
    """
    Sets :attr:`port` based on the env var from tox-docker, if present.
    """

    def setUp(self):
        super(ToxDockerMixin, self).setUp()
        # Fall back to the default mongod port when not running under
        # tox-docker.
        self.port = int(os.environ.get("MONGO_27017_TCP", 27017))
class FlaskRequestTest(ToxDockerMixin, unittest.TestCase):
    """Base test case providing a Flask app and a pushed request context."""

    def setUp(self):
        super(FlaskRequestTest, self).setUp()
        # Use the test class name as the database name so parallel test
        # classes do not collide.
        self.dbname = self.__class__.__name__
        self.app = flask.Flask("test")
        self.context = self.app.test_request_context("/")
        self.context.push()

    def tearDown(self):
        super(FlaskRequestTest, self).tearDown()
        self.context.pop()
class FlaskPyMongoTest(FlaskRequestTest):
    """Base test case that also wires a PyMongo instance to the app."""

    def setUp(self):
        super(FlaskPyMongoTest, self).setUp()
        uri = "mongodb://localhost:{}/{}".format(self.port, self.dbname)
        self.mongo = flask_pymongo.PyMongo(self.app, uri)

    def tearDown(self):
        # Drop the per-class database before tearing down the context.
        self.mongo.cx.drop_database(self.dbname)
        super(FlaskPyMongoTest, self).tearDown()

@ -0,0 +1,117 @@
# Copyright (c) 2011-2017, Dan Crosta
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from flask import abort
from pymongo import collection
from pymongo import database
from pymongo import mongo_client
class MongoClient(mongo_client.MongoClient):
    """Wrapper for :class:`~pymongo.mongo_client.MongoClient`.

    Returns instances of Flask-PyMongo
    :class:`~flask_pymongo.wrappers.Database` instead of native PyMongo
    :class:`~pymongo.database.Database` when accessed with dot notation.
    """

    def __getattr__(self, name):  # noqa: D105
        # Only called for names not found normally; wrap Database
        # results so chained attribute access stays in wrapper types.
        attr = super(MongoClient, self).__getattr__(name)
        if isinstance(attr, database.Database):
            return Database(self, name)
        return attr

    def __getitem__(self, item):  # noqa: D105
        attr = super(MongoClient, self).__getitem__(item)
        if isinstance(attr, database.Database):
            return Database(self, item)
        return attr
class Database(database.Database):
    """Wrapper for :class:`~pymongo.database.Database`.

    Returns instances of Flask-PyMongo
    :class:`~flask_pymongo.wrappers.Collection` instead of native PyMongo
    :class:`~pymongo.collection.Collection` when accessed with dot notation.
    """

    def __getattr__(self, name):  # noqa: D105
        # Wrap Collection results so find_one_or_404 etc. are available.
        attr = super(Database, self).__getattr__(name)
        if isinstance(attr, collection.Collection):
            return Collection(self, name)
        return attr

    def __getitem__(self, item):  # noqa: D105
        item_ = super(Database, self).__getitem__(item)
        if isinstance(item_, collection.Collection):
            return Collection(self, item)
        return item_
class Collection(collection.Collection):
    """Sub-class of PyMongo :class:`~pymongo.collection.Collection` with helpers.
    """

    def __getattr__(self, name):  # noqa: D105
        # Dotted sub-collections (db.things.morethings) must also be
        # wrapped; the parent database comes from the mangled attribute
        # set by pymongo's Collection.__init__.
        attr = super(Collection, self).__getattr__(name)
        if isinstance(attr, collection.Collection):
            db = self._Collection__database
            return Collection(db, attr.name)
        return attr

    def __getitem__(self, item):  # noqa: D105
        item_ = super(Collection, self).__getitem__(item)
        if isinstance(item_, collection.Collection):
            db = self._Collection__database
            return Collection(db, item_.name)
        return item_

    def find_one_or_404(self, *args, **kwargs):
        """Find a single document or raise a 404.

        This is like :meth:`~pymongo.collection.Collection.find_one`, but
        rather than returning ``None``, cause a 404 Not Found HTTP status
        on the request.

        .. code-block:: python

            @app.route("/user/<username>")
            def user_profile(username):
                user = mongo.db.users.find_one_or_404({"_id": username})
                return render_template("user.html",
                    user=user)
        """
        found = self.find_one(*args, **kwargs)
        if found is None:
            abort(404)
        return found

@ -0,0 +1,930 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GridFS is a specification for storing large objects in Mongo.
The :mod:`gridfs` package is an implementation of GridFS on top of
:mod:`pymongo`, exposing a file-like interface.
.. mongodoc:: gridfs
"""
from bson.py3compat import abc
from gridfs.errors import NoFile
from gridfs.grid_file import (GridIn,
GridOut,
GridOutCursor,
DEFAULT_CHUNK_SIZE,
_clear_entity_type_registry)
from pymongo import (ASCENDING,
DESCENDING)
from pymongo.common import UNAUTHORIZED_CODES, validate_string
from pymongo.database import Database
from pymongo.errors import ConfigurationError, OperationFailure
class GridFS(object):
"""An instance of GridFS on top of a single Database.
"""
def __init__(self, database, collection="fs", disable_md5=False):
    """Create a new instance of :class:`GridFS`.

    Raises :class:`TypeError` if `database` is not an instance of
    :class:`~pymongo.database.Database`.

    :Parameters:
      - `database`: database to use
      - `collection` (optional): root collection to use
      - `disable_md5` (optional): When True, MD5 checksums will not be
        computed for uploaded files. Useful in environments where MD5
        cannot be used for regulatory or other reasons. Defaults to False.

    .. versionchanged:: 3.1
       Indexes are only ensured on the first write to the DB.

    .. versionchanged:: 3.0
       `database` must use an acknowledged
       :attr:`~pymongo.database.Database.write_concern`

    .. mongodoc:: gridfs
    """
    if not isinstance(database, Database):
        raise TypeError("database must be an instance of Database")
    database = _clear_entity_type_registry(database)
    # GridFS semantics require the server to confirm writes.
    if not database.write_concern.acknowledged:
        raise ConfigurationError('database must use '
                                 'acknowledged write_concern')
    # Cache the <root>.files and <root>.chunks collections.
    self.__database = database
    self.__collection = database[collection]
    self.__files = self.__collection.files
    self.__chunks = self.__collection.chunks
    self.__disable_md5 = disable_md5
def new_file(self, **kwargs):
    """Create a new file in GridFS.

    Returns a new :class:`~gridfs.grid_file.GridIn` instance to
    which data can be written. Any keyword arguments will be
    passed through to :meth:`~gridfs.grid_file.GridIn`.

    If the ``"_id"`` of the file is manually specified, it must
    not already exist in GridFS. Otherwise
    :class:`~gridfs.errors.FileExists` is raised.

    :Parameters:
      - `**kwargs` (optional): keyword arguments for file creation
    """
    # No need for __ensure_index_files_id() here; GridIn ensures
    # the (files_id, n) index when needed.
    return GridIn(
        self.__collection, disable_md5=self.__disable_md5, **kwargs)
def put(self, data, **kwargs):
    """Put data in GridFS as a new file.

    Equivalent to doing::

      try:
          f = new_file(**kwargs)
          f.write(data)
      finally:
          f.close()

    `data` can be either an instance of :class:`str` (:class:`bytes`
    in python 3) or a file-like object providing a :meth:`read` method.
    If an `encoding` keyword argument is passed, `data` can also be a
    :class:`unicode` (:class:`str` in python 3) instance, which will
    be encoded as `encoding` before being written. Any keyword arguments
    will be passed through to the created file - see
    :meth:`~gridfs.grid_file.GridIn` for possible arguments. Returns the
    ``"_id"`` of the created file.

    If the ``"_id"`` of the file is manually specified, it must
    not already exist in GridFS. Otherwise
    :class:`~gridfs.errors.FileExists` is raised.

    :Parameters:
      - `data`: data to be written as a file.
      - `**kwargs` (optional): keyword arguments for file creation

    .. versionchanged:: 3.0
       w=0 writes to GridFS are now prohibited.
    """
    grid_file = GridIn(
        self.__collection, disable_md5=self.__disable_md5, **kwargs)
    # close() flushes remaining chunks even when write() raises.
    try:
        grid_file.write(data)
    finally:
        grid_file.close()
    return grid_file._id
def get(self, file_id, session=None):
    """Get a file from GridFS by ``"_id"``.

    Returns an instance of :class:`~gridfs.grid_file.GridOut`,
    which provides a file-like interface for reading.

    :Parameters:
      - `file_id`: ``"_id"`` of the file to get
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession`

    .. versionchanged:: 3.6
       Added ``session`` parameter.
    """
    gout = GridOut(self.__collection, file_id, session=session)
    # Raise NoFile now, instead of on first attribute access.
    gout._ensure_file()
    return gout
def get_version(self, filename=None, version=-1, session=None, **kwargs):
    """Get a file from GridFS by ``"filename"`` or metadata fields.

    Returns a version of the file in GridFS whose filename matches
    `filename` and whose metadata fields match the supplied keyword
    arguments, as an instance of :class:`~gridfs.grid_file.GridOut`.

    Version numbering is a convenience atop the GridFS API provided
    by MongoDB. If more than one file matches the query (either by
    `filename` alone, by metadata fields, or by a combination of
    both), then version ``-1`` will be the most recently uploaded
    matching file, ``-2`` the second most recently
    uploaded, etc. Version ``0`` will be the first version
    uploaded, ``1`` the second version, etc. So if three versions
    have been uploaded, then version ``0`` is the same as version
    ``-3``, version ``1`` is the same as version ``-2``, and
    version ``2`` is the same as version ``-1``.

    Raises :class:`~gridfs.errors.NoFile` if no such version of
    that file exists.

    :Parameters:
      - `filename`: ``"filename"`` of the file to get, or `None`
      - `version` (optional): version of the file to get (defaults
        to -1, the most recent version uploaded)
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession`
      - `**kwargs` (optional): find files by custom metadata.

    .. versionchanged:: 3.6
       Added ``session`` parameter.

    .. versionchanged:: 3.1
       ``get_version`` no longer ensures indexes.
    """
    query = kwargs
    if filename is not None:
        query["filename"] = filename
    cursor = self.__files.find(query, session=session)
    # Negative versions count back from the newest upload; positive
    # versions count forward from the oldest. limit(-1) fetches a
    # single document.
    if version < 0:
        skip = abs(version) - 1
        cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING)
    else:
        cursor.limit(-1).skip(version).sort("uploadDate", ASCENDING)
    try:
        doc = next(cursor)
        return GridOut(
            self.__collection, file_document=doc, session=session)
    except StopIteration:
        raise NoFile("no version %d for filename %r" % (version, filename))
def get_last_version(self, filename=None, session=None, **kwargs):
    """Return the newest GridFS file matching `filename`/metadata.

    Shorthand for calling :meth:`get_version` with the default
    `version` (``-1``).

    :Parameters:
      - `filename`: ``"filename"`` of the file to get, or `None`
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession`
      - `**kwargs` (optional): find files by custom metadata.

    .. versionchanged:: 3.6
       Added ``session`` parameter.
    """
    return self.get_version(filename, -1, session, **kwargs)
# TODO add optional safe mode for chunk removal?
def delete(self, file_id, session=None):
    """Remove the file document and all chunks for ``"_id"`` `file_id`.

    Deleting a file that does not exist is treated as successful,
    since the end result is the same: no file with that _id remains.

    .. warning:: Any processes/threads reading from the file while
       this method is executing will likely see an invalid/corrupt
       file. Care should be taken to avoid concurrent reads to a file
       while it is being deleted.

    :Parameters:
      - `file_id`: ``"_id"`` of the file to delete
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession`

    .. versionchanged:: 3.6
       Added ``session`` parameter.

    .. versionchanged:: 3.1
       ``delete`` no longer ensures indexes.
    """
    # Drop the files document first, then its data chunks.
    file_spec = {"_id": file_id}
    self.__files.delete_one(file_spec, session=session)
    self.__chunks.delete_many({"files_id": file_id}, session=session)
def list(self, session=None):
    """Return the names of all files stored in this instance of
    :class:`GridFS`.

    :Parameters:
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession`

    .. versionchanged:: 3.6
       Added ``session`` parameter.

    .. versionchanged:: 3.1
       ``list`` no longer ensures indexes.
    """
    # With an index, distinct reports documents that lack a filename
    # as None -- drop those entries.
    names = self.__files.distinct("filename", session=session)
    return [name for name in names if name is not None]
def find_one(self, filter=None, session=None, *args, **kwargs):
    """Get a single file from gridfs.

    All arguments to :meth:`find` are also valid arguments for
    :meth:`find_one`, although any `limit` argument will be
    ignored. Returns a single :class:`~gridfs.grid_file.GridOut`,
    or ``None`` if no matching file is found. For example::

        file = fs.find_one({"filename": "lisa.txt"})

    :Parameters:
      - `filter` (optional): a dictionary specifying
        the query to be performing OR any other type to be used as
        the value for a query for ``"_id"`` in the file collection.
      - `*args` (optional): any additional positional arguments are
        the same as the arguments to :meth:`find`.
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession`
      - `**kwargs` (optional): any additional keyword arguments
        are the same as the arguments to :meth:`find`.

    .. versionchanged:: 3.6
       Added ``session`` parameter.
    """
    # A non-mapping filter is shorthand for an ``_id`` lookup.
    if filter is not None and not isinstance(filter, abc.Mapping):
        filter = {"_id": filter}

    cursor = self.find(filter, *args, session=session, **kwargs)
    # First match, or None when the cursor is empty.
    return next(iter(cursor), None)
def find(self, *args, **kwargs):
    """Query GridFS for files.

    Returns a cursor that iterates across files matching
    arbitrary queries on the files collection. Can be combined
    with other modifiers for additional control. For example::

      for grid_out in fs.find({"filename": "lisa.txt"},
                              no_cursor_timeout=True):
          data = grid_out.read()

    would iterate through all versions of "lisa.txt" stored in GridFS.
    Note that setting no_cursor_timeout to True may be important to
    prevent the cursor from timing out during long multi-file processing
    work.

    As another example, the call::

      most_recent_three = fs.find().sort("uploadDate", -1).limit(3)

    would return a cursor to the three most recently uploaded files
    in GridFS.

    Follows a similar interface to
    :meth:`~pymongo.collection.Collection.find`
    in :class:`~pymongo.collection.Collection`.

    If a :class:`~pymongo.client_session.ClientSession` is passed to
    :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances
    are associated with that session.

    :Parameters:
      - `filter` (optional): a SON object specifying elements which
        must be present for a document to be included in the
        result set
      - `skip` (optional): the number of files to omit (from
        the start of the result set) when returning the results
      - `limit` (optional): the maximum number of results to
        return
      - `no_cursor_timeout` (optional): if False (the default), any
        returned cursor is closed by the server after 10 minutes of
        inactivity. If set to True, the returned cursor will never
        time out on the server. Care should be taken to ensure that
        cursors with no_cursor_timeout turned on are properly closed.
      - `sort` (optional): a list of (key, direction) pairs
        specifying the sort order for this query. See
        :meth:`~pymongo.cursor.Cursor.sort` for details.

    Raises :class:`TypeError` if any of the arguments are of
    improper type. Returns an instance of
    :class:`~gridfs.grid_file.GridOutCursor`
    corresponding to this query.

    .. versionchanged:: 3.0
       Removed the read_preference, tag_sets, and
       secondary_acceptable_latency_ms options.

    .. versionadded:: 2.7

    .. mongodoc:: find
    """
    # GridOutCursor yields GridOut instances instead of raw documents.
    return GridOutCursor(self.__collection, *args, **kwargs)
def exists(self, document_or_id=None, session=None, **kwargs):
    """Check if a file exists in this instance of :class:`GridFS`.

    The file to check for can be specified by the value of its
    ``_id`` key, or by passing in a query document. A query
    document can be passed in as dictionary, or by using keyword
    arguments. Thus, the following three calls are equivalent:

    >>> fs.exists(file_id)
    >>> fs.exists({"_id": file_id})
    >>> fs.exists(_id=file_id)

    As are the following two calls:

    >>> fs.exists({"filename": "mike.txt"})
    >>> fs.exists(filename="mike.txt")

    And the following two:

    >>> fs.exists({"foo": {"$gt": 12}})
    >>> fs.exists(foo={"$gt": 12})

    Returns ``True`` if a matching file exists, ``False``
    otherwise. Calls to :meth:`exists` will not automatically
    create appropriate indexes; application developers should be
    sure to create indexes if needed and as appropriate.

    :Parameters:
      - `document_or_id` (optional): query document, or _id of the
        document to check for
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession`
      - `**kwargs` (optional): keyword arguments are used as a
        query document, if they're present.

    .. versionchanged:: 3.6
       Added ``session`` parameter.
    """
    # Keyword arguments, when given, take precedence as the query;
    # only the _id is projected since existence is all that matters.
    criteria = kwargs if kwargs else document_or_id
    doc = self.__files.find_one(criteria, ["_id"], session=session)
    return doc is not None
class GridFSBucket(object):
    """An instance of GridFS on top of a single Database."""

    def __init__(self, db, bucket_name="fs",
                 chunk_size_bytes=DEFAULT_CHUNK_SIZE, write_concern=None,
                 read_preference=None, disable_md5=False):
        """Create a new instance of :class:`GridFSBucket`.

        Raises :exc:`TypeError` if `database` is not an instance of
        :class:`~pymongo.database.Database`.

        Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern`
        is not acknowledged.

        :Parameters:
          - `database`: database to use.
          - `bucket_name` (optional): The name of the bucket. Defaults to 'fs'.
          - `chunk_size_bytes` (optional): The chunk size in bytes. Defaults
            to 255KB.
          - `write_concern` (optional): The
            :class:`~pymongo.write_concern.WriteConcern` to use. If ``None``
            (the default) db.write_concern is used.
          - `read_preference` (optional): The read preference to use. If
            ``None`` (the default) db.read_preference is used.
          - `disable_md5` (optional): When True, MD5 checksums will not be
            computed for uploaded files. Useful in environments where MD5
            cannot be used for regulatory or other reasons. Defaults to False.

        .. versionadded:: 3.1

        .. mongodoc:: gridfs
        """
        if not isinstance(db, Database):
            raise TypeError("database must be an instance of Database")

        db = _clear_entity_type_registry(db)

        # GridFS requires acknowledged writes so upload failures are
        # reported to the application rather than silently dropped.
        wtc = write_concern if write_concern is not None else db.write_concern
        if not wtc.acknowledged:
            raise ConfigurationError('write concern must be acknowledged')

        self._db = db
        self._bucket_name = bucket_name
        self._collection = db[bucket_name]
        self._disable_md5 = disable_md5

        self._chunks = self._collection.chunks.with_options(
            write_concern=write_concern,
            read_preference=read_preference)

        self._files = self._collection.files.with_options(
            write_concern=write_concern,
            read_preference=read_preference)

        self._chunk_size_bytes = chunk_size_bytes

    def open_upload_stream(self, filename, chunk_size_bytes=None,
                           metadata=None, session=None):
        """Opens a Stream that the application can write the contents of the
        file to.

        The user must specify the filename, and can choose to add any
        additional information in the metadata field of the file document or
        modify the chunk size.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          grid_in = fs.open_upload_stream(
                "test_file", chunk_size_bytes=4,
                metadata={"contentType": "text/plain"})
          grid_in.write("data I want to store!")
          grid_in.close()  # uploaded on close

        Returns an instance of :class:`~gridfs.grid_file.GridIn`.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `filename`: The name of the file to upload.
          - `chunk_size_bytes` (options): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        validate_string("filename", filename)

        opts = {"filename": filename,
                "chunk_size": (chunk_size_bytes if chunk_size_bytes
                               is not None else self._chunk_size_bytes)}
        if metadata is not None:
            opts["metadata"] = metadata

        return GridIn(
            self._collection,
            session=session,
            disable_md5=self._disable_md5,
            **opts)

    def open_upload_stream_with_id(
            self, file_id, filename, chunk_size_bytes=None, metadata=None,
            session=None):
        """Opens a Stream that the application can write the contents of the
        file to.

        The user must specify the file id and filename, and can choose to add
        any additional information in the metadata field of the file document
        or modify the chunk size.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          grid_in = fs.open_upload_stream_with_id(
                ObjectId(),
                "test_file",
                chunk_size_bytes=4,
                metadata={"contentType": "text/plain"})
          grid_in.write("data I want to store!")
          grid_in.close()  # uploaded on close

        Returns an instance of :class:`~gridfs.grid_file.GridIn`.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `file_id`: The id to use for this file. The id must not have
            already been used for another file.
          - `filename`: The name of the file to upload.
          - `chunk_size_bytes` (options): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        validate_string("filename", filename)

        opts = {"_id": file_id,
                "filename": filename,
                "chunk_size": (chunk_size_bytes if chunk_size_bytes
                               is not None else self._chunk_size_bytes)}
        if metadata is not None:
            opts["metadata"] = metadata

        return GridIn(
            self._collection,
            session=session,
            disable_md5=self._disable_md5,
            **opts)

    def upload_from_stream(self, filename, source, chunk_size_bytes=None,
                           metadata=None, session=None):
        """Uploads a user file to a GridFS bucket.

        Reads the contents of the user file from `source` and uploads
        it to the file `filename`. Source can be a string or file-like object.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          file_id = fs.upload_from_stream(
              "test_file",
              "data I want to store!",
              chunk_size_bytes=4,
              metadata={"contentType": "text/plain"})

        Returns the _id of the uploaded file.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `filename`: The name of the file to upload.
          - `source`: The source stream of the content to be uploaded. Must be
            a file-like object that implements :meth:`read` or a string.
          - `chunk_size_bytes` (options): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        # GridIn uploads the file document when closed; the context
        # manager guarantees close() (or abort() on error).
        with self.open_upload_stream(
                filename, chunk_size_bytes, metadata, session=session) as gin:
            gin.write(source)

        return gin._id

    def upload_from_stream_with_id(self, file_id, filename, source,
                                   chunk_size_bytes=None, metadata=None,
                                   session=None):
        """Uploads a user file to a GridFS bucket with a custom file id.

        Reads the contents of the user file from `source` and uploads
        it to the file `filename`. Source can be a string or file-like object.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          fs.upload_from_stream_with_id(
              ObjectId(),
              "test_file",
              "data I want to store!",
              chunk_size_bytes=4,
              metadata={"contentType": "text/plain"})

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.
        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `file_id`: The id to use for this file. The id must not have
            already been used for another file.
          - `filename`: The name of the file to upload.
          - `source`: The source stream of the content to be uploaded. Must be
            a file-like object that implements :meth:`read` or a string.
          - `chunk_size_bytes` (options): The number of bytes per chunk of this
            file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`.
          - `metadata` (optional): User data for the 'metadata' field of the
            files collection document. If not provided the metadata field will
            be omitted from the files collection document.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        with self.open_upload_stream_with_id(
                file_id, filename, chunk_size_bytes, metadata,
                session=session) as gin:
            gin.write(source)

    def open_download_stream(self, file_id, session=None):
        """Opens a Stream from which the application can read the contents of
        the stored file specified by file_id.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # get _id of file to read.
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          grid_out = fs.open_download_stream(file_id)
          contents = grid_out.read()

        Returns an instance of :class:`~gridfs.grid_file.GridOut`.

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be downloaded.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        gout = GridOut(self._collection, file_id, session=session)

        # Raise NoFile now, instead of on first attribute access.
        gout._ensure_file()
        return gout

    def download_to_stream(self, file_id, destination, session=None):
        """Downloads the contents of the stored file specified by file_id and
        writes the contents to `destination`.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get _id of file to read
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          # Get file to write to
          file = open('myfile','wb+')
          fs.download_to_stream(file_id, file)
          file.seek(0)
          contents = file.read()

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be downloaded.
          - `destination`: a file-like object implementing :meth:`write`.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        with self.open_download_stream(file_id, session=session) as gout:
            for chunk in gout:
                destination.write(chunk)

    def delete(self, file_id, session=None):
        """Given an file_id, delete this stored file's files collection document
        and associated chunks from a GridFS bucket.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get _id of file to delete
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          fs.delete(file_id)

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be deleted.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        res = self._files.delete_one({"_id": file_id}, session=session)
        # Remove chunks even if the files document was missing, so a
        # partially-written upload is fully cleaned up.
        self._chunks.delete_many({"files_id": file_id}, session=session)
        if not res.deleted_count:
            raise NoFile(
                "no file could be deleted because none matched %s" % file_id)

    def find(self, *args, **kwargs):
        """Find and return the files collection documents that match ``filter``

        Returns a cursor that iterates across files matching
        arbitrary queries on the files collection. Can be combined
        with other modifiers for additional control.

        For example::

          for grid_data in fs.find({"filename": "lisa.txt"},
                                  no_cursor_timeout=True):
              data = grid_data.read()

        would iterate through all versions of "lisa.txt" stored in GridFS.
        Note that setting no_cursor_timeout to True may be important to
        prevent the cursor from timing out during long multi-file processing
        work.

        As another example, the call::

          most_recent_three = fs.find().sort("uploadDate", -1).limit(3)

        would return a cursor to the three most recently uploaded files
        in GridFS.

        Follows a similar interface to
        :meth:`~pymongo.collection.Collection.find`
        in :class:`~pymongo.collection.Collection`.

        If a :class:`~pymongo.client_session.ClientSession` is passed to
        :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances
        are associated with that session.

        :Parameters:
          - `filter`: Search query.
          - `batch_size` (optional): The number of documents to return per
            batch.
          - `limit` (optional): The maximum number of documents to return.
          - `no_cursor_timeout` (optional): The server normally times out idle
            cursors after an inactivity period (10 minutes) to prevent excess
            memory use. Set this option to True prevent that.
          - `skip` (optional): The number of documents to skip before
            returning.
          - `sort` (optional): The order by which to sort results. Defaults to
            None.
        """
        return GridOutCursor(self._collection, *args, **kwargs)

    def open_download_stream_by_name(self, filename, revision=-1, session=None):
        """Opens a Stream from which the application can read the contents of
        `filename` and optional `revision`.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          grid_out = fs.open_download_stream_by_name("test_file")
          contents = grid_out.read()

        Returns an instance of :class:`~gridfs.grid_file.GridOut`.

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.

        Raises :exc:`~ValueError` filename is not a string.

        :Parameters:
          - `filename`: The name of the file to read from.
          - `revision` (optional): Which revision (documents with the same
            filename and different uploadDate) of the file to retrieve.
            Defaults to -1 (the most recent revision).
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        :Note: Revision numbers are defined as follows:

          - 0 = the original stored file
          - 1 = the first revision
          - 2 = the second revision
          - etc...
          - -2 = the second most recent revision
          - -1 = the most recent revision

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        validate_string("filename", filename)

        query = {"filename": filename}

        cursor = self._files.find(query, session=session)
        # Negative revisions walk backwards from the newest upload;
        # non-negative revisions walk forwards from the oldest.
        if revision < 0:
            skip = abs(revision) - 1
            cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING)
        else:
            cursor.limit(-1).skip(revision).sort("uploadDate", ASCENDING)
        try:
            grid_file = next(cursor)
            return GridOut(
                self._collection, file_document=grid_file, session=session)
        except StopIteration:
            raise NoFile(
                "no version %d for filename %r" % (revision, filename))

    def download_to_stream_by_name(self, filename, destination, revision=-1,
                                   session=None):
        """Write the contents of `filename` (with optional `revision`) to
        `destination`.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get file to write to
          file = open('myfile','wb')
          fs.download_to_stream_by_name("test_file", file)

        Raises :exc:`~gridfs.errors.NoFile` if no such version of
        that file exists.

        Raises :exc:`~ValueError` if `filename` is not a string.

        :Parameters:
          - `filename`: The name of the file to read from.
          - `destination`: A file-like object that implements :meth:`write`.
          - `revision` (optional): Which revision (documents with the same
            filename and different uploadDate) of the file to retrieve.
            Defaults to -1 (the most recent revision).
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        :Note: Revision numbers are defined as follows:

          - 0 = the original stored file
          - 1 = the first revision
          - 2 = the second revision
          - etc...
          - -2 = the second most recent revision
          - -1 = the most recent revision

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        with self.open_download_stream_by_name(
                filename, revision, session=session) as gout:
            for chunk in gout:
                destination.write(chunk)

    def rename(self, file_id, new_filename, session=None):
        """Renames the stored file with the specified file_id.

        For example::

          my_db = MongoClient().test
          fs = GridFSBucket(my_db)
          # Get _id of file to rename
          file_id = fs.upload_from_stream("test_file", "data I want to store!")
          fs.rename(file_id, "new_test_name")

        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

        :Parameters:
          - `file_id`: The _id of the file to be renamed.
          - `new_filename`: The new name of the file.
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession`

        .. versionchanged:: 3.6
           Added ``session`` parameter.
        """
        result = self._files.update_one({"_id": file_id},
                                        {"$set": {"filename": new_filename}},
                                        session=session)
        if not result.matched_count:
            # %s, not %i: file_id is typically an ObjectId, which cannot
            # be formatted with an integer conversion specifier.
            raise NoFile("no files could be renamed %r because none "
                         "matched file_id %s" % (new_filename, file_id))

@ -0,0 +1,33 @@
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions raised by the :mod:`gridfs` package"""
from pymongo.errors import PyMongoError
class GridFSError(PyMongoError):
    """Base class for all GridFS exceptions.

    Catching this catches every error raised by the :mod:`gridfs` package.
    """
class CorruptGridFile(GridFSError):
    """Raised when a file in :class:`~gridfs.GridFS` is malformed
    (e.g. a chunk is missing or has unexpected data).
    """
class NoFile(GridFSError):
    """Raised when trying to read from a non-existent file."""
class FileExists(GridFSError):
    """Raised when trying to create a file that already exists."""

@ -0,0 +1,840 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for representing files stored in GridFS."""
import datetime
import hashlib
import io
import math
import os
from bson.son import SON
from bson.binary import Binary
from bson.objectid import ObjectId
from bson.py3compat import text_type, StringIO
from gridfs.errors import CorruptGridFile, FileExists, NoFile
from pymongo import ASCENDING
from pymongo.collection import Collection
from pymongo.cursor import Cursor
from pymongo.errors import (ConfigurationError,
CursorNotFound,
DuplicateKeyError,
OperationFailure)
from pymongo.read_preferences import ReadPreference
# os.SEEK_* appeared in Python 2.5; fall back to the documented
# integer values on older interpreters.
_SEEK_SET = getattr(os, "SEEK_SET", 0)
_SEEK_CUR = getattr(os, "SEEK_CUR", 1)
_SEEK_END = getattr(os, "SEEK_END", 2)
# Byte-string sentinels used when reading file data.
EMPTY = b""
NEWLN = b"\n"

"""Default chunk size, in bytes."""
# Slightly under a power of 2, to work well with server's record allocations.
DEFAULT_CHUNK_SIZE = 255 * 1024

# Index specs ensured lazily by GridIn: chunks are addressed by
# (files_id, n); files are looked up by (filename, uploadDate).
_C_INDEX = SON([("files_id", ASCENDING), ("n", ASCENDING)])
_F_INDEX = SON([("filename", ASCENDING), ("uploadDate", ASCENDING)])
def _grid_in_property(field_name, docstring, read_only=False,
                      closed_only=False):
    """Build a property that exposes `field_name` of a GridIn's file document.

    `read_only` suppresses the setter; `closed_only` additionally makes
    the getter usable only after the file has been closed.
    """
    def getter(self):
        if closed_only and not self._closed:
            raise AttributeError("can only get %r on a closed file" %
                                 field_name)
        # Protect against PHP-237
        default = 0 if field_name == 'length' else None
        return self._file.get(field_name, default)

    def setter(self, value):
        # Once closed, the files document already exists on the server,
        # so the change must be pushed immediately.
        if self._closed:
            self._coll.files.update_one({"_id": self._file["_id"]},
                                        {"$set": {field_name: value}})
        self._file[field_name] = value

    if read_only:
        docstring += "\n\nThis attribute is read-only."
    elif closed_only:
        docstring = "%s\n\n%s" % (docstring, "This attribute is read-only and "
                                  "can only be read after :meth:`close` "
                                  "has been called.")

    writable = not read_only and not closed_only
    if writable:
        return property(getter, setter, doc=docstring)
    return property(getter, doc=docstring)
def _grid_out_property(field_name, docstring):
    """Build a read-only property exposing `field_name` of a GridOut's
    file document, fetching the document on first access."""
    def getter(self):
        self._ensure_file()

        # Protect against PHP-237
        default = 0 if field_name == 'length' else None
        return self._file.get(field_name, default)

    return property(getter,
                    doc=docstring + "\n\nThis attribute is read-only.")
def _clear_entity_type_registry(entity, **kwargs):
    """Return a clone of `entity` (database/collection) whose codec
    options carry no type registry."""
    opts = entity.codec_options.with_options(type_registry=None)
    return entity.with_options(codec_options=opts, **kwargs)
class GridIn(object):
"""Class to write data to GridFS.
"""
def __init__(
        self, root_collection, session=None, disable_md5=False, **kwargs):
    """Write a file to GridFS

    Application developers should generally not need to
    instantiate this class directly - instead see the methods
    provided by :class:`~gridfs.GridFS`.

    Raises :class:`TypeError` if `root_collection` is not an
    instance of :class:`~pymongo.collection.Collection`.

    Any of the file level options specified in the `GridFS Spec
    <http://dochub.mongodb.org/core/gridfsspec>`_ may be passed as
    keyword arguments. Any additional keyword arguments will be
    set as additional fields on the file document. Valid keyword
    arguments include:

      - ``"_id"``: unique ID for this file (default:
        :class:`~bson.objectid.ObjectId`) - this ``"_id"`` must
        not have already been used for another file

      - ``"filename"``: human name for the file

      - ``"contentType"`` or ``"content_type"``: valid mime-type
        for the file

      - ``"chunkSize"`` or ``"chunk_size"``: size of each of the
        chunks, in bytes (default: 255 kb)

      - ``"encoding"``: encoding used for this file. In Python 2,
        any :class:`unicode` that is written to the file will be
        converted to a :class:`str`. In Python 3, any :class:`str`
        that is written to the file will be converted to
        :class:`bytes`.

    :Parameters:
      - `root_collection`: root collection to write to
      - `session` (optional): a
        :class:`~pymongo.client_session.ClientSession` to use for all
        commands
      - `disable_md5` (optional): When True, an MD5 checksum will not be
        computed for the uploaded file. Useful in environments where
        MD5 cannot be used for regulatory or other reasons. Defaults to
        False.
      - `**kwargs` (optional): file level options (see above)

    .. versionchanged:: 3.6
       Added ``session`` parameter.

    .. versionchanged:: 3.0
       `root_collection` must use an acknowledged
       :attr:`~pymongo.collection.Collection.write_concern`
    """
    if not isinstance(root_collection, Collection):
        raise TypeError("root_collection must be an "
                        "instance of Collection")

    # Unacknowledged writes would hide upload errors from the caller.
    if not root_collection.write_concern.acknowledged:
        raise ConfigurationError('root_collection must use '
                                 'acknowledged write_concern')

    # Handle alternative naming
    if "content_type" in kwargs:
        kwargs["contentType"] = kwargs.pop("content_type")
    if "chunk_size" in kwargs:
        kwargs["chunkSize"] = kwargs.pop("chunk_size")

    coll = _clear_entity_type_registry(
        root_collection, read_preference=ReadPreference.PRIMARY)

    # The running hash is stored in the file document itself and is
    # replaced by its hex digest when the file is flushed.
    if not disable_md5:
        kwargs["md5"] = hashlib.md5()

    # Defaults
    kwargs["_id"] = kwargs.get("_id", ObjectId())
    kwargs["chunkSize"] = kwargs.get("chunkSize", DEFAULT_CHUNK_SIZE)

    # object.__setattr__ must be used here because this class overrides
    # __setattr__ to route ordinary attribute writes into the file
    # document.
    object.__setattr__(self, "_session", session)
    object.__setattr__(self, "_coll", coll)
    object.__setattr__(self, "_chunks", coll.chunks)
    object.__setattr__(self, "_file", kwargs)
    object.__setattr__(self, "_buffer", StringIO())
    object.__setattr__(self, "_position", 0)
    object.__setattr__(self, "_chunk_number", 0)
    object.__setattr__(self, "_closed", False)
    object.__setattr__(self, "_ensured_index", False)
def __create_index(self, collection, index_key, unique):
    # Create `index_key` on `collection`, but only when the collection
    # is empty and no index with the same key pattern already exists.
    doc = collection.find_one(projection={"_id": 1}, session=self._session)
    if doc is None:
        try:
            index_keys = [index_spec['key'] for index_spec in
                          collection.list_indexes(session=self._session)]
        except OperationFailure:
            # listIndexes can fail (e.g. collection does not exist yet);
            # treat that as "no indexes".
            index_keys = []
        if index_key not in index_keys:
            collection.create_index(
                index_key.items(), unique=unique, session=self._session)
def __ensure_indexes(self):
    # Lazily create the standard GridFS indexes, at most once per
    # GridIn instance.
    if not object.__getattribute__(self, "_ensured_index"):
        self.__create_index(self._coll.files, _F_INDEX, False)
        self.__create_index(self._coll.chunks, _C_INDEX, True)
        object.__setattr__(self, "_ensured_index", True)
def abort(self):
    """Remove all chunks/files that may have been uploaded and close.
    """
    self._coll.chunks.delete_many(
        {"files_id": self._file['_id']}, session=self._session)
    self._coll.files.delete_one(
        {"_id": self._file['_id']}, session=self._session)
    # Mark the stream closed so further writes fail.
    object.__setattr__(self, "_closed", True)
@property
def closed(self):
    """Is this file closed?
    """
    return self._closed
# File-document fields exposed as properties; see _grid_in_property for
# the read_only/closed_only semantics.
_id = _grid_in_property("_id", "The ``'_id'`` value for this file.",
                        read_only=True)
filename = _grid_in_property("filename", "Name of this file.")
name = _grid_in_property("filename", "Alias for `filename`.")
content_type = _grid_in_property("contentType", "Mime-type for this file.")
length = _grid_in_property("length", "Length (in bytes) of this file.",
                           closed_only=True)
chunk_size = _grid_in_property("chunkSize", "Chunk size for this file.",
                               read_only=True)
upload_date = _grid_in_property("uploadDate",
                                "Date that this file was uploaded.",
                                closed_only=True)
md5 = _grid_in_property("md5", "MD5 of the contents of this file "
                        "if an md5 sum was created.",
                        closed_only=True)
def __getattr__(self, name):
    """Expose unknown attributes from the backing file document."""
    file_doc = self._file
    if name not in file_doc:
        raise AttributeError("GridIn object has no attribute '%s'" % name)
    return file_doc[name]
def __setattr__(self, name, value):
    # For properties of this instance like _buffer, or descriptors set on
    # the class like filename, use regular __setattr__
    if name in self.__dict__ or name in self.__class__.__dict__:
        object.__setattr__(self, name, value)
    else:
        # All other attributes are part of the document in db.fs.files.
        # Store them to be sent to server on close() or if closed, send
        # them now.
        self._file[name] = value
        if self._closed:
            self._coll.files.update_one({"_id": self._file["_id"]},
                                        {"$set": {name: value}})
def __flush_data(self, data):
"""Flush `data` to a chunk.
"""
self.__ensure_indexes()
if 'md5' in self._file:
self._file['md5'].update(data)
if not data:
return
assert(len(data) <= self.chunk_size)
chunk = {"files_id": self._file["_id"],
"n": self._chunk_number,
"data": Binary(data)}
try:
self._chunks.insert_one(chunk, session=self._session)
except DuplicateKeyError:
self._raise_file_exists(self._file['_id'])
self._chunk_number += 1
self._position += len(data)
def __flush_buffer(self):
"""Flush the buffer contents out to a chunk.
"""
self.__flush_data(self._buffer.getvalue())
self._buffer.close()
self._buffer = StringIO()
def __flush(self):
"""Flush the file to the database.
"""
try:
self.__flush_buffer()
if "md5" in self._file:
self._file["md5"] = self._file["md5"].hexdigest()
self._file["length"] = self._position
self._file["uploadDate"] = datetime.datetime.utcnow()
return self._coll.files.insert_one(
self._file, session=self._session)
except DuplicateKeyError:
self._raise_file_exists(self._id)
def _raise_file_exists(self, file_id):
"""Raise a FileExists exception for the given file_id."""
raise FileExists("file with _id %r already exists" % file_id)
def close(self):
"""Flush the file and close it.
A closed file cannot be written any more. Calling
:meth:`close` more than once is allowed.
"""
if not self._closed:
self.__flush()
object.__setattr__(self, "_closed", True)
def read(self, size=-1):
raise io.UnsupportedOperation('read')
def readable(self):
return False
def seekable(self):
return False
    def write(self, data):
        """Write data to the file. There is no return value.
        `data` can be either a string of bytes or a file-like object
        (implementing :meth:`read`). If the file has an
        :attr:`encoding` attribute, `data` can also be a
        :class:`unicode` (:class:`str` in python 3) instance, which
        will be encoded as :attr:`encoding` before being written.
        Due to buffering, the data may not actually be written to the
        database until the :meth:`close` method is called. Raises
        :class:`ValueError` if this file is already closed. Raises
        :class:`TypeError` if `data` is not an instance of
        :class:`str` (:class:`bytes` in python 3), a file-like object,
        or an instance of :class:`unicode` (:class:`str` in python 3).
        Unicode data is only allowed if the file has an :attr:`encoding`
        attribute.
        :Parameters:
          - `data`: string of bytes or file-like object to be written
            to the file
        """
        if self._closed:
            raise ValueError("cannot write to a closed file")
        try:
            # file-like
            read = data.read
        except AttributeError:
            # string
            if not isinstance(data, (text_type, bytes)):
                raise TypeError("can only write strings or file-like objects")
            if isinstance(data, text_type):
                # Text must be encoded; self.encoding comes from the file
                # document and raises AttributeError when absent.
                try:
                    data = data.encode(self.encoding)
                except AttributeError:
                    raise TypeError("must specify an encoding for file in "
                                    "order to write %s" % (text_type.__name__,))
            # Normalize to the file-like interface so one code path follows.
            read = StringIO(data).read
        if self._buffer.tell() > 0:
            # Make sure to flush only when _buffer is complete
            space = self.chunk_size - self._buffer.tell()
            if space:
                try:
                    to_write = read(space)
                except:
                    # NOTE(review): bare except is deliberate here — any
                    # failure while reading aborts the upload, then re-raises.
                    self.abort()
                    raise
                self._buffer.write(to_write)
                if len(to_write) < space:
                    return  # EOF or incomplete
            self.__flush_buffer()
        # Stream full-size chunks straight to the database; a short read
        # signals EOF and the remainder is buffered for the next write/close.
        to_write = read(self.chunk_size)
        while to_write and len(to_write) == self.chunk_size:
            self.__flush_data(to_write)
            to_write = read(self.chunk_size)
        self._buffer.write(to_write)
def writelines(self, sequence):
"""Write a sequence of strings to the file.
Does not add seperators.
"""
for line in sequence:
self.write(line)
def writeable(self):
return True
def __enter__(self):
"""Support for the context manager protocol.
"""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Support for the context manager protocol.
Close the file and allow exceptions to propagate.
"""
self.close()
# propagate exceptions
return False
class GridOut(object):
    """Class to read data out of GridFS.
    """
    def __init__(self, root_collection, file_id=None, file_document=None,
                 session=None):
        """Read a file from GridFS
        Application developers should generally not need to
        instantiate this class directly - instead see the methods
        provided by :class:`~gridfs.GridFS`.
        Either `file_id` or `file_document` must be specified,
        `file_document` will be given priority if present. Raises
        :class:`TypeError` if `root_collection` is not an instance of
        :class:`~pymongo.collection.Collection`.
        :Parameters:
          - `root_collection`: root collection to read from
          - `file_id` (optional): value of ``"_id"`` for the file to read
          - `file_document` (optional): file document from
            `root_collection.files`
          - `session` (optional): a
            :class:`~pymongo.client_session.ClientSession` to use for all
            commands
        .. versionchanged:: 3.8
           For better performance and to better follow the GridFS spec,
           :class:`GridOut` now uses a single cursor to read all the chunks in
           the file.
        .. versionchanged:: 3.6
           Added ``session`` parameter.
        .. versionchanged:: 3.0
           Creating a GridOut does not immediately retrieve the file metadata
           from the server. Metadata is fetched when first needed.
        """
        if not isinstance(root_collection, Collection):
            raise TypeError("root_collection must be an "
                            "instance of Collection")
        root_collection = _clear_entity_type_registry(root_collection)
        self.__chunks = root_collection.chunks
        self.__files = root_collection.files
        self.__file_id = file_id
        # Bytes already pulled from a chunk but not yet returned to a caller.
        self.__buffer = EMPTY
        # Lazily-created _GridOutChunkIterator over this file's chunks.
        self.__chunk_iter = None
        # Current logical read offset within the file.
        self.__position = 0
        # Files-collection document; None until fetched by _ensure_file().
        self._file = file_document
        self._session = session
    _id = _grid_out_property("_id", "The ``'_id'`` value for this file.")
    filename = _grid_out_property("filename", "Name of this file.")
    name = _grid_out_property("filename", "Alias for `filename`.")
    content_type = _grid_out_property("contentType", "Mime-type for this file.")
    length = _grid_out_property("length", "Length (in bytes) of this file.")
    chunk_size = _grid_out_property("chunkSize", "Chunk size for this file.")
    upload_date = _grid_out_property("uploadDate",
                                     "Date that this file was first uploaded.")
    aliases = _grid_out_property("aliases", "List of aliases for this file.")
    metadata = _grid_out_property("metadata", "Metadata attached to this file.")
    md5 = _grid_out_property("md5", "MD5 of the contents of this file "
                             "if an md5 sum was created.")
    def _ensure_file(self):
        # Lazily fetch the file metadata document on first use.
        if not self._file:
            self._file = self.__files.find_one({"_id": self.__file_id},
                                               session=self._session)
            if not self._file:
                raise NoFile("no file in gridfs collection %r with _id %r" %
                             (self.__files, self.__file_id))
    def __getattr__(self, name):
        # Unknown attributes resolve to fields of the file document.
        self._ensure_file()
        if name in self._file:
            return self._file[name]
        raise AttributeError("GridOut object has no attribute '%s'" % name)
    def readable(self):
        return True
    def readchunk(self):
        """Reads a chunk at a time. If the current position is within a
        chunk the remainder of the chunk is returned.
        """
        received = len(self.__buffer)
        chunk_data = EMPTY
        chunk_size = int(self.chunk_size)
        if received > 0:
            # Serve previously buffered bytes before touching the cursor.
            chunk_data = self.__buffer
        elif self.__position < int(self.length):
            chunk_number = int((received + self.__position) / chunk_size)
            if self.__chunk_iter is None:
                self.__chunk_iter = _GridOutChunkIterator(
                    self, self.__chunks, self._session, chunk_number)
            chunk = self.__chunk_iter.next()
            # Drop the part of the chunk that lies before the current
            # position (relevant after a seek into the middle of a chunk).
            chunk_data = chunk["data"][self.__position % chunk_size:]
            if not chunk_data:
                raise CorruptGridFile("truncated chunk")
        self.__position += len(chunk_data)
        self.__buffer = EMPTY
        return chunk_data
    def read(self, size=-1):
        """Read at most `size` bytes from the file (less if there
        isn't enough data).
        The bytes are returned as an instance of :class:`str` (:class:`bytes`
        in python 3). If `size` is negative or omitted all data is read.
        :Parameters:
          - `size` (optional): the number of bytes to read
        .. versionchanged:: 3.8
           This method now only checks for extra chunks after reading the
           entire file. Previously, this method would check for extra chunks
           on every call.
        """
        self._ensure_file()
        remainder = int(self.length) - self.__position
        if size < 0 or size > remainder:
            size = remainder
        if size == 0:
            return EMPTY
        received = 0
        data = StringIO()
        while received < size:
            chunk_data = self.readchunk()
            received += len(chunk_data)
            data.write(chunk_data)
        # Detect extra chunks after reading the entire file.
        if size == remainder and self.__chunk_iter:
            try:
                self.__chunk_iter.next()
            except StopIteration:
                pass
        # The loop may have read past `size`; rewind the logical position.
        self.__position -= received - size
        # Return 'size' bytes and store the rest.
        data.seek(size)
        self.__buffer = data.read()
        data.seek(0)
        return data.read(size)
    def readline(self, size=-1):
        """Read one line or up to `size` bytes from the file.
        :Parameters:
         - `size` (optional): the maximum number of bytes to read
        """
        remainder = int(self.length) - self.__position
        if size < 0 or size > remainder:
            size = remainder
        if size == 0:
            return EMPTY
        received = 0
        data = StringIO()
        while received < size:
            chunk_data = self.readchunk()
            pos = chunk_data.find(NEWLN, 0, size)
            if pos != -1:
                # Found a newline: stop right after it.
                size = received + pos + 1
            received += len(chunk_data)
            data.write(chunk_data)
            if pos != -1:
                break
        # The loop may have read past `size`; rewind the logical position.
        self.__position -= received - size
        # Return 'size' bytes and store the rest.
        data.seek(size)
        self.__buffer = data.read()
        data.seek(0)
        return data.read(size)
    def tell(self):
        """Return the current position of this file.
        """
        return self.__position
    def seek(self, pos, whence=_SEEK_SET):
        """Set the current position of this file.
        :Parameters:
          - `pos`: the position (or offset if using relative
            positioning) to seek to
          - `whence` (optional): where to seek
            from. :attr:`os.SEEK_SET` (``0``) for absolute file
            positioning, :attr:`os.SEEK_CUR` (``1``) to seek relative
            to the current position, :attr:`os.SEEK_END` (``2``) to
            seek relative to the file's end.
        """
        if whence == _SEEK_SET:
            new_pos = pos
        elif whence == _SEEK_CUR:
            new_pos = self.__position + pos
        elif whence == _SEEK_END:
            new_pos = int(self.length) + pos
        else:
            raise IOError(22, "Invalid value for `whence`")
        if new_pos < 0:
            raise IOError(22, "Invalid value for `pos` - must be positive")
        # Optimization, continue using the same buffer and chunk iterator.
        if new_pos == self.__position:
            return
        self.__position = new_pos
        self.__buffer = EMPTY
        if self.__chunk_iter:
            # The open cursor no longer matches the new position; discard it.
            self.__chunk_iter.close()
            self.__chunk_iter = None
    def seekable(self):
        return True
    def __iter__(self):
        """Return an iterator over all of this file's data.
        The iterator will return chunk-sized instances of
        :class:`str` (:class:`bytes` in python 3). This can be
        useful when serving files using a webserver that handles
        such an iterator efficiently.
        .. note::
           This is different from :py:class:`io.IOBase` which iterates over
           *lines* in the file. Use :meth:`GridOut.readline` to read line by
           line instead of chunk by chunk.
        .. versionchanged:: 3.8
           The iterator now raises :class:`CorruptGridFile` when encountering
           any truncated, missing, or extra chunk in a file. The previous
           behavior was to only raise :class:`CorruptGridFile` on a missing
           chunk.
        """
        return GridOutIterator(self, self.__chunks, self._session)
    def close(self):
        """Make GridOut more generically file-like."""
        if self.__chunk_iter:
            self.__chunk_iter.close()
            self.__chunk_iter = None
    def write(self, value):
        # GridOut is read-only.
        raise io.UnsupportedOperation('write')
    def __enter__(self):
        """Makes it possible to use :class:`GridOut` files
        with the context manager protocol.
        """
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Makes it possible to use :class:`GridOut` files
        with the context manager protocol.
        """
        self.close()
        return False
class _GridOutChunkIterator(object):
    """Iterates over a file's chunks using a single cursor.
    Raises CorruptGridFile when encountering any truncated, missing, or extra
    chunk in a file.
    """
    def __init__(self, grid_out, chunks, session, next_chunk):
        self._id = grid_out._id
        self._chunk_size = int(grid_out.chunk_size)
        self._length = int(grid_out.length)
        self._chunks = chunks
        self._session = session
        # Chunk number we expect the cursor to yield next.
        self._next_chunk = next_chunk
        # Number of chunks this file should contain, given its length.
        self._num_chunks = math.ceil(float(self._length) / self._chunk_size)
        self._cursor = None
    def expected_chunk_length(self, chunk_n):
        """Return the expected byte length of chunk number `chunk_n`."""
        # Every chunk is full-size except (possibly) the last one.
        if chunk_n < self._num_chunks - 1:
            return self._chunk_size
        return self._length - (self._chunk_size * (self._num_chunks - 1))
    def __iter__(self):
        return self
    def _create_cursor(self):
        # Query the remaining chunks of this file, ordered by chunk number.
        filter = {"files_id": self._id}
        if self._next_chunk > 0:
            filter["n"] = {"$gte": self._next_chunk}
        self._cursor = self._chunks.find(filter, sort=[("n", 1)],
                                         session=self._session)
    def _next_with_retry(self):
        """Return the next chunk and retry once on CursorNotFound.
        We retry on CursorNotFound to maintain backwards compatibility in
        cases where two calls to read occur more than 10 minutes apart (the
        server's default cursor timeout).
        """
        if self._cursor is None:
            self._create_cursor()
        try:
            return self._cursor.next()
        except CursorNotFound:
            # Server expired the cursor; recreate it from where we left off.
            self._cursor.close()
            self._create_cursor()
            return self._cursor.next()
    def next(self):
        """Return the next chunk document, validating its `n` and length."""
        try:
            chunk = self._next_with_retry()
        except StopIteration:
            if self._next_chunk >= self._num_chunks:
                # Normal end of file.
                raise
            raise CorruptGridFile("no chunk #%d" % self._next_chunk)
        if chunk["n"] != self._next_chunk:
            self.close()
            raise CorruptGridFile(
                "Missing chunk: expected chunk #%d but found "
                "chunk with n=%d" % (self._next_chunk, chunk["n"]))
        if chunk["n"] >= self._num_chunks:
            # According to spec, ignore extra chunks if they are empty.
            if len(chunk["data"]):
                self.close()
                raise CorruptGridFile(
                    "Extra chunk found: expected %d chunks but found "
                    "chunk with n=%d" % (self._num_chunks, chunk["n"]))
        expected_length = self.expected_chunk_length(chunk["n"])
        if len(chunk["data"]) != expected_length:
            self.close()
            raise CorruptGridFile(
                "truncated chunk #%d: expected chunk length to be %d but "
                "found chunk with length %d" % (
                    chunk["n"], expected_length, len(chunk["data"])))
        self._next_chunk += 1
        return chunk
    __next__ = next
    def close(self):
        # Idempotent: safe to call when no cursor was ever created.
        if self._cursor:
            self._cursor.close()
            self._cursor = None
class GridOutIterator(object):
    """Iterate over a GridOut's chunks from the start, yielding raw bytes."""
    def __init__(self, grid_out, chunks, session):
        # Delegate chunk ordering/validation to _GridOutChunkIterator,
        # starting at chunk 0.
        self.__chunk_iter = _GridOutChunkIterator(grid_out, chunks, session, 0)
    def __iter__(self):
        return self
    def next(self):
        """Return the next chunk's payload as bytes."""
        return bytes(self.__chunk_iter.next()["data"])
    __next__ = next
class GridOutCursor(Cursor):
    """A cursor / iterator for returning GridOut objects as the result
    of an arbitrary query against the GridFS files collection.
    """
    def __init__(self, collection, filter=None, skip=0, limit=0,
                 no_cursor_timeout=False, sort=None, batch_size=0,
                 session=None):
        """Create a new cursor, similar to the normal
        :class:`~pymongo.cursor.Cursor`.
        Should not be called directly by application developers - see
        the :class:`~gridfs.GridFS` method :meth:`~gridfs.GridFS.find` instead.
        .. versionadded 2.7
        .. mongodoc:: cursors
        """
        collection = _clear_entity_type_registry(collection)
        # Hold on to the base "fs" collection to create GridOut objects later.
        self.__root_collection = collection
        # The base Cursor iterates over the files-collection documents.
        super(GridOutCursor, self).__init__(
            collection.files, filter, skip=skip, limit=limit,
            no_cursor_timeout=no_cursor_timeout, sort=sort,
            batch_size=batch_size, session=session)
    def next(self):
        """Get next GridOut object from cursor.
        """
        # Work around "super is not iterable" issue in Python 3.x
        next_file = super(GridOutCursor, self).next()
        # Wrap each files document in a lazily-reading GridOut.
        return GridOut(self.__root_collection, file_document=next_file,
                       session=self.session)
    __next__ = next
    def add_option(self, *args, **kwargs):
        # Cursor option manipulation is not supported on GridOut cursors.
        raise NotImplementedError("Method does not exist for GridOutCursor")
    def remove_option(self, *args, **kwargs):
        raise NotImplementedError("Method does not exist for GridOutCursor")
    def _clone_base(self, session):
        """Creates an empty GridOutCursor for information to be copied into.
        """
        return GridOutCursor(self.__root_collection, session=session)

@ -0,0 +1,243 @@
Metadata-Version: 2.1
Name: pymongo
Version: 3.8.0
Summary: Python driver for MongoDB <http://www.mongodb.org>
Home-page: http://github.com/mongodb/mongo-python-driver
Author: Mike Dirolf
Author-email: mongodb-user@googlegroups.com
Maintainer: Bernie Hackett
Maintainer-email: bernie@mongodb.com
License: Apache License, Version 2.0
Keywords: mongo,mongodb,pymongo,gridfs,bson
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
Provides-Extra: gssapi
Provides-Extra: tls
Provides-Extra: srv
Provides-Extra: snappy
Provides-Extra: gssapi
Requires-Dist: pykerberos; extra == 'gssapi'
Provides-Extra: snappy
Requires-Dist: python-snappy; extra == 'snappy'
Provides-Extra: srv
Requires-Dist: dnspython (<2.0.0,>=1.13.0); extra == 'srv'
Provides-Extra: tls
=======
PyMongo
=======
:Info: See `the mongo site <http://www.mongodb.org>`_ for more information. See `GitHub <http://github.com/mongodb/mongo-python-driver>`_ for the latest source.
:Author: Mike Dirolf
:Maintainer: Bernie Hackett <bernie@mongodb.com>
About
=====
The PyMongo distribution contains tools for interacting with the MongoDB
database from Python. The ``bson`` package is an implementation of
the `BSON format <http://bsonspec.org>`_ for Python. The ``pymongo``
package is a native Python driver for MongoDB. The ``gridfs`` package
is a `gridfs
<http://www.mongodb.org/display/DOCS/GridFS+Specification>`_
implementation on top of ``pymongo``.
PyMongo supports MongoDB 2.6, 3.0, 3.2, 3.4, 3.6 and 4.0.
Support / Feedback
==================
For issues with, questions about, or feedback for PyMongo, please look into
our `support channels <http://www.mongodb.org/about/support>`_. Please
do not email any of the PyMongo developers directly with issues or
questions - you're more likely to get an answer on the `mongodb-user
<http://groups.google.com/group/mongodb-user>`_ list on Google Groups.
Bugs / Feature Requests
=======================
Think you’ve found a bug? Want to see a new feature in PyMongo? Please open a
case in our issue management tool, JIRA:
- `Create an account and login <https://jira.mongodb.org>`_.
- Navigate to `the PYTHON project <https://jira.mongodb.org/browse/PYTHON>`_.
- Click **Create Issue** - Please provide as much information as possible about the issue type and how to reproduce it.
Bug reports in JIRA for all driver projects (i.e. PYTHON, CSHARP, JAVA) and the
Core Server (i.e. SERVER) project are **public**.
How To Ask For Help
-------------------
Please include all of the following information when opening an issue:
- Detailed steps to reproduce the problem, including full traceback, if possible.
- The exact python version used, with patch level::
$ python -c "import sys; print(sys.version)"
- The exact version of PyMongo used, with patch level::
$ python -c "import pymongo; print(pymongo.version); print(pymongo.has_c())"
- The operating system and version (e.g. Windows 7, OSX 10.8, ...)
- Web framework or asynchronous network library used, if any, with version (e.g.
Django 1.7, mod_wsgi 4.3.0, gevent 1.0.1, Tornado 4.0.2, ...)
Security Vulnerabilities
------------------------
If you’ve identified a security vulnerability in a driver or any other
MongoDB project, please report it according to the `instructions here
<http://docs.mongodb.org/manual/tutorial/create-a-vulnerability-report>`_.
Installation
============
PyMongo can be installed with `pip <http://pypi.python.org/pypi/pip>`_::
$ python -m pip install pymongo
Or ``easy_install`` from
`setuptools <http://pypi.python.org/pypi/setuptools>`_::
$ python -m easy_install pymongo
You can also download the project source and do::
$ python setup.py install
Do **not** install the "bson" package from pypi. PyMongo comes with its own
bson package; doing "easy_install bson" installs a third-party package that
is incompatible with PyMongo.
Dependencies
============
PyMongo supports CPython 2.7, 3.4+, PyPy, and PyPy3.5+.
Optional dependencies:
GSSAPI authentication requires `pykerberos
<https://pypi.python.org/pypi/pykerberos>`_ on Unix or `WinKerberos
<https://pypi.python.org/pypi/winkerberos>`_ on Windows. The correct
dependency can be installed automatically along with PyMongo::
$ python -m pip install pymongo[gssapi]
Support for mongodb+srv:// URIs requires `dnspython
<https://pypi.python.org/pypi/dnspython>`_::
$ python -m pip install pymongo[srv]
TLS / SSL support may require `ipaddress
<https://pypi.python.org/pypi/ipaddress>`_ and `certifi
<https://pypi.python.org/pypi/certifi>`_ or `wincertstore
<https://pypi.python.org/pypi/wincertstore>`_ depending on the Python
version in use. The necessary dependencies can be installed along with
PyMongo::
$ python -m pip install pymongo[tls]
Wire protocol compression with snappy requires `python-snappy
<https://pypi.org/project/python-snappy>`_::
$ python -m pip install pymongo[snappy]
You can install all dependencies automatically with the following
command::
$ python -m pip install pymongo[snappy,gssapi,srv,tls]
Other optional packages:
- `backports.pbkdf2 <https://pypi.python.org/pypi/backports.pbkdf2/>`_,
improves authentication performance with SCRAM-SHA-1 and SCRAM-SHA-256.
It especially improves performance on Python versions older than 2.7.8.
- `monotonic <https://pypi.python.org/pypi/monotonic>`_ adds support for
a monotonic clock, which improves reliability in environments
where clock adjustments are frequent. Not needed in Python 3.
Additional dependencies are:
- (to generate documentation) sphinx_
Examples
========
Here's a basic example (for more see the *examples* section of the docs):
.. code-block:: python
>>> import pymongo
>>> client = pymongo.MongoClient("localhost", 27017)
>>> db = client.test
>>> db.name
u'test'
>>> db.my_collection
Collection(Database(MongoClient('localhost', 27017), u'test'), u'my_collection')
>>> db.my_collection.insert_one({"x": 10}).inserted_id
ObjectId('4aba15ebe23f6b53b0000000')
>>> db.my_collection.insert_one({"x": 8}).inserted_id
ObjectId('4aba160ee23f6b543e000000')
>>> db.my_collection.insert_one({"x": 11}).inserted_id
ObjectId('4aba160ee23f6b543e000002')
>>> db.my_collection.find_one()
{u'x': 10, u'_id': ObjectId('4aba15ebe23f6b53b0000000')}
>>> for item in db.my_collection.find():
... print(item["x"])
...
10
8
11
>>> db.my_collection.create_index("x")
u'x_1'
>>> for item in db.my_collection.find().sort("x", pymongo.ASCENDING):
... print(item["x"])
...
8
10
11
>>> [item["x"] for item in db.my_collection.find().limit(2).skip(1)]
[8, 11]
Documentation
=============
You will need sphinx_ installed to generate the
documentation. Documentation can be generated by running **python
setup.py doc**. Generated documentation can be found in the
*doc/build/html/* directory.
Testing
=======
The easiest way to run the tests is to run **python setup.py test** in
the root of the distribution.
To verify that PyMongo works with Gevent's monkey-patching::
$ python green_framework_test.py gevent
Or with Eventlet's::
$ python green_framework_test.py eventlet
.. _sphinx: http://sphinx.pocoo.org/

@ -0,0 +1,145 @@
bson/__init__.py,sha256=fA5HyVYXaQiiGTPImc6lU1DZmPwpEOdolj4H8wi6GWo,41831
bson/__pycache__/__init__.cpython-37.pyc,,
bson/__pycache__/binary.cpython-37.pyc,,
bson/__pycache__/code.cpython-37.pyc,,
bson/__pycache__/codec_options.cpython-37.pyc,,
bson/__pycache__/dbref.cpython-37.pyc,,
bson/__pycache__/decimal128.cpython-37.pyc,,
bson/__pycache__/errors.cpython-37.pyc,,
bson/__pycache__/int64.cpython-37.pyc,,
bson/__pycache__/json_util.cpython-37.pyc,,
bson/__pycache__/max_key.cpython-37.pyc,,
bson/__pycache__/min_key.cpython-37.pyc,,
bson/__pycache__/objectid.cpython-37.pyc,,
bson/__pycache__/py3compat.cpython-37.pyc,,
bson/__pycache__/raw_bson.cpython-37.pyc,,
bson/__pycache__/regex.cpython-37.pyc,,
bson/__pycache__/son.cpython-37.pyc,,
bson/__pycache__/timestamp.cpython-37.pyc,,
bson/__pycache__/tz_util.cpython-37.pyc,,
bson/_cbson.cpython-37m-darwin.so,sha256=h9ZveLbtwlMV0HQh0y19k_9cWn8WFfLhoGlqa3fCNLo,55768
bson/binary.py,sha256=Og_jHotkpGCrUbOcyP8J3jNP2l6Q51NVj8GZ-3h-bXE,7223
bson/code.py,sha256=Bj9q2xc3hJ-IuNwzUTSi1r0qshBU1J1pCjVJIJExquk,3360
bson/codec_options.py,sha256=q-9JB_wMZeRFIl-N3iHE7HHnl1SorntWMROew64Uqws,13752
bson/dbref.py,sha256=pMBnQj36MsJHr-OeTOnJ0gQBF239Mff5E3ioXp_x2vs,4733
bson/decimal128.py,sha256=RA9r0OcH_XzxAW0Bdi8oD7axD6yIgRBSq69zBu-iDbI,10425
bson/errors.py,sha256=AkDIISytky_6NFP-U2ecdXooIr53yt0ZiAT42DmuoI8,1159
bson/int64.py,sha256=NNAMdrdFUMfrhmTfd9cGo2qLSpnS4xjSyVvDnJKmagc,1056
bson/json_util.py,sha256=yqRnD6yYW0H47TXmknCwOGMnirkafQ7u5uSPSdVXSAk,31707
bson/max_key.py,sha256=21OvVcOVm6sb7bd4oRFiapZMgmG1dqnTNOjEm1QaGZQ,1315
bson/min_key.py,sha256=AIejvYyTgDFTJna81weTarOb5zBhZGWTW8M2fU1GZJQ,1315
bson/objectid.py,sha256=5lIAMfIEI6GH0PGQaCO43ySRBAIu-TAJtqIAWMvwyT0,9426
bson/py3compat.py,sha256=nC6q-RwR7iCHN3NFVoiwO3s3Y6GeKe_qQAcIL4Gc9J4,2815
bson/raw_bson.py,sha256=nQkLPwICqaK6_cKMn1dWbg6G-ZDqAxBbK-FKLCqDwyc,4588
bson/regex.py,sha256=44nO3645IcX3gRQ9X9ChUDVkQErHSLzX67BLYsyyuII,4291
bson/son.py,sha256=vv0ervx8YNBTIJpd1vao621OAZ5mss7MhWr031rUajQ,5788
bson/timestamp.py,sha256=KmPD75UR8zE95sTOQxjvutIJ6A65UOcqfTRFL2MAE-k,3932
bson/tz_util.py,sha256=Zy_bA8x2YWrTbIx08HQYYJVghKKwChcCkO4VSBwaNVU,1518
gridfs/__init__.py,sha256=16Mn5rm1dVHCCA3Ur93Az6tMUdyQ9XnkeWCcDemQVe4,36320
gridfs/__pycache__/__init__.cpython-37.pyc,,
gridfs/__pycache__/errors.cpython-37.pyc,,
gridfs/__pycache__/grid_file.cpython-37.pyc,,
gridfs/errors.py,sha256=Z7E-XkxtrWNfob3cTBSgeRlTvHcG02DCPv4X_EmvBkQ,1056
gridfs/grid_file.py,sha256=o7jeNNH_OEx-tN3w9uMFSZXKPKNgmMImaVsAWt9LUU4,30383
pymongo-3.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pymongo-3.8.0.dist-info/METADATA,sha256=fOw5E48QwzpbCzgOvXB6wSjCCm4lIqlAc6lzQU6cXiQ,8020
pymongo-3.8.0.dist-info/RECORD,,
pymongo-3.8.0.dist-info/WHEEL,sha256=_k7-jlcomYi2KWQV8j4dnQP2hhlJPv6RZQa_A7vSCGc,110
pymongo-3.8.0.dist-info/top_level.txt,sha256=OinVojDdOfo1Dsp-NRfrZdp6gcJJ4bPRq61vSg5vyAs,20
pymongo/__init__.py,sha256=81DOJzt4jqEltYecEUMnW_Cxd83L5PiWph1PL4-yCW0,2821
pymongo/__pycache__/__init__.cpython-37.pyc,,
pymongo/__pycache__/auth.cpython-37.pyc,,
pymongo/__pycache__/bulk.cpython-37.pyc,,
pymongo/__pycache__/change_stream.cpython-37.pyc,,
pymongo/__pycache__/client_options.cpython-37.pyc,,
pymongo/__pycache__/client_session.cpython-37.pyc,,
pymongo/__pycache__/collation.cpython-37.pyc,,
pymongo/__pycache__/collection.cpython-37.pyc,,
pymongo/__pycache__/command_cursor.cpython-37.pyc,,
pymongo/__pycache__/common.cpython-37.pyc,,
pymongo/__pycache__/compression_support.cpython-37.pyc,,
pymongo/__pycache__/cursor.cpython-37.pyc,,
pymongo/__pycache__/cursor_manager.cpython-37.pyc,,
pymongo/__pycache__/database.cpython-37.pyc,,
pymongo/__pycache__/driver_info.cpython-37.pyc,,
pymongo/__pycache__/errors.cpython-37.pyc,,
pymongo/__pycache__/helpers.cpython-37.pyc,,
pymongo/__pycache__/ismaster.cpython-37.pyc,,
pymongo/__pycache__/max_staleness_selectors.cpython-37.pyc,,
pymongo/__pycache__/message.cpython-37.pyc,,
pymongo/__pycache__/mongo_client.cpython-37.pyc,,
pymongo/__pycache__/mongo_replica_set_client.cpython-37.pyc,,
pymongo/__pycache__/monitor.cpython-37.pyc,,
pymongo/__pycache__/monitoring.cpython-37.pyc,,
pymongo/__pycache__/monotonic.cpython-37.pyc,,
pymongo/__pycache__/network.cpython-37.pyc,,
pymongo/__pycache__/operations.cpython-37.pyc,,
pymongo/__pycache__/periodic_executor.cpython-37.pyc,,
pymongo/__pycache__/pool.cpython-37.pyc,,
pymongo/__pycache__/read_concern.cpython-37.pyc,,
pymongo/__pycache__/read_preferences.cpython-37.pyc,,
pymongo/__pycache__/response.cpython-37.pyc,,
pymongo/__pycache__/results.cpython-37.pyc,,
pymongo/__pycache__/saslprep.cpython-37.pyc,,
pymongo/__pycache__/server.cpython-37.pyc,,
pymongo/__pycache__/server_description.cpython-37.pyc,,
pymongo/__pycache__/server_selectors.cpython-37.pyc,,
pymongo/__pycache__/server_type.cpython-37.pyc,,
pymongo/__pycache__/settings.cpython-37.pyc,,
pymongo/__pycache__/son_manipulator.cpython-37.pyc,,
pymongo/__pycache__/ssl_context.cpython-37.pyc,,
pymongo/__pycache__/ssl_match_hostname.cpython-37.pyc,,
pymongo/__pycache__/ssl_support.cpython-37.pyc,,
pymongo/__pycache__/thread_util.cpython-37.pyc,,
pymongo/__pycache__/topology.cpython-37.pyc,,
pymongo/__pycache__/topology_description.cpython-37.pyc,,
pymongo/__pycache__/uri_parser.cpython-37.pyc,,
pymongo/__pycache__/write_concern.cpython-37.pyc,,
pymongo/_cmessage.cpython-37m-darwin.so,sha256=ICPjtSBB9FHIzDa2RwhmCpRjXc5UBAbbGBAlWttP5hc,31272
pymongo/auth.py,sha256=Us_2nWCfUAMo8YUFHCesVNl36QzdFcp71zOUH6trgcE,20621
pymongo/bulk.py,sha256=EX0Ywp_iNGI-t8lD0PP3S9I4ees5Vl5XjhqF9Eccj7Y,27237
pymongo/change_stream.py,sha256=5dh1Fnia-F39woBq-jvwE7U2iJrXmlDhUQDwAwy0qKo,11924
pymongo/client_options.py,sha256=C8HZx2FNdnKN_5u1lypzjENiJf1lf9N8us2Il7sEd9A,8868
pymongo/client_session.py,sha256=WkFxjmKporUVdaZT2rCWq5ejqFMeb-8kDo-lNPCIet0,24237
pymongo/collation.py,sha256=-dQ4Aoclig9lx-nZTo92Jw5NsV8Q6QVWzzpxTb9FKvs,7808
pymongo/collection.py,sha256=eAJvZ0_wRnnT-ZJWyanTi8lBMmj8EwOkGlULm-pJqE0,144875
pymongo/command_cursor.py,sha256=k3GAgo4T54UrJjtU3i5PygJWANTAlfPrQvtbGBX19pY,12393
pymongo/common.py,sha256=saIGQW5XhiwUDoZz0jUaOu-lNJL9PcSDj6WvlAoA-4Y,24440
pymongo/compression_support.py,sha256=9HkL52o0UJdNRGaiJOT7DVbF_K_E3R2kkhFbGVo_6MI,4147
pymongo/cursor.py,sha256=CzgUSMHBzuEJQvDWiZhheuCQ1Vxic8LqeZr-UkU-ETw,49943
pymongo/cursor_manager.py,sha256=lJerbsskemaaB5bUy_Rv0Ola4BJqEEezSo3zOAoAGak,2088
pymongo/database.py,sha256=cyyIRYIBkyyERkhLf1Oxz2rvUNZgbDnplCWror_h_34,60249
pymongo/driver_info.py,sha256=fk9u7Ni4Ln-Rx1DkqZmvsJeI49l72o3kiRJvfHpcmlI,1703
pymongo/errors.py,sha256=tE3rNsYdBrtJuG1S98x9b4Nen0Ko5loevwygSECpEd4,7756
pymongo/helpers.py,sha256=rlLgk-0L-P1GiBNjk72sjsTSAstDULxcYqQh79NoyXA,9922
pymongo/ismaster.py,sha256=FOLjQm86XxBPiQ2sCRWHc1i_g-ap721tTBLX5X1WywQ,4527
pymongo/max_staleness_selectors.py,sha256=rXA_frTXGvAwAWY_pBbu4GvnLsOLlaG2xnIbYdCizgI,4326
pymongo/message.py,sha256=AvCYQPCHplrGsyohjiJshWz5CmrVJ3BFWy_jxRCNXJQ,54961
pymongo/mongo_client.py,sha256=U584fea4EAWOUvtp9rgfqRVMaKe55ZTaibaJkWlvk04,85049
pymongo/mongo_replica_set_client.py,sha256=sjv9GYkF-m_sTa9OeqALj0vnMT1s9C-Vo8yAbL6u_4M,1955
pymongo/monitor.py,sha256=avigyQxsYwWEfH6LR-qFgCI9QvJ_CB7cgsTHIcBht1U,6706
pymongo/monitoring.py,sha256=zJ1RATkTNlPIU5GMI2Dz6s3-bTETLzzm8Kq9WJdUvFw,32460
pymongo/monotonic.py,sha256=NOgK1fiHf-yqoF5vZ2xP0QhMLB0DYRvAc5U8q4utxSE,1100
pymongo/network.py,sha256=MsM63PCn9mbRIWJGo2_G71kcI98_89aHzLkZGbeRl9Q,11738
pymongo/operations.py,sha256=acY2LxLYx8ZwgQelduw2inu_8SliR5O17CAcv5pTtS8,13510
pymongo/periodic_executor.py,sha256=7YuG_Bh9NVkSXSfAYHJtL1csYQXI3kb1jvP17NA0khk,5535
pymongo/pool.py,sha256=Ljc0jB7N0TOm0MRwNkp269hqVd3IeFLz9ZYrc-dHG5I,43433
pymongo/read_concern.py,sha256=s-lPeqjpraky4TabOul_stU6ps9UELs1tHGGAPvaoIk,2337
pymongo/read_preferences.py,sha256=3FAArDFnwp4grk9Emv7lg2LECQ_Dmg8OILVnnK5Wp0U,16494
pymongo/response.py,sha256=8kwNkEwCAaVtQ2AFaFD8wZFtANniAS6f9bPVG8bizgY,3474
pymongo/results.py,sha256=FE8M0suvJW50LMMEEE1j6546dg6B_fRWkt2-uQkJq5Y,7818
pymongo/saslprep.py,sha256=Ybzpu66Rm4KRFIfbqooRocDNjFGJ3Xkpzu1uI0f7pc8,4275
pymongo/server.py,sha256=SXmorrZJrE7rvoH7dzBBbxpT_CIrTVQGMomZgwC4Nok,5874
pymongo/server_description.py,sha256=uErJUi1UF9guWb13NI0IEiaUD5m3EkwV3DbAjnwtgkY,6081
pymongo/server_selectors.py,sha256=KoItAlFYGYujfq_nrNaHjWVW_IDSCpKP7f-2YLl_k5I,5307
pymongo/server_type.py,sha256=AScVY81CujqY93-0kfnB1LG4OowXqO3Ts-CeXyMPa9g,882
pymongo/settings.py,sha256=cNo29xuxjv6qy5KpAqB52fuL5X04rqSIErLJfexaVj8,4095
pymongo/son_manipulator.py,sha256=aINKIHNhAE_f6j1Do8ejNVYD0_u-Pa1e6oerKsMxe7s,6699
pymongo/ssl_context.py,sha256=_iBrSiqyD7I0MQMIwOUOc1q_CxTODeALfepfZ3353rE,3670
pymongo/ssl_match_hostname.py,sha256=u5QdUf8wogzS0SzsfsKcOYcAwWXo97e8COGWJbbl7dY,4677
pymongo/ssl_support.py,sha256=5Yuim-Bp50vZUCKEn4e8qp3YkglnaptG1_L5lTg8Eus,7763
pymongo/thread_util.py,sha256=YF-_hCRZEH931lCfhBBIZBXQE39WPamHwG7BU_QHlzo,3959
pymongo/topology.py,sha256=C4Yd1pYMzAYVwnczkVjhPQR6Tze5ZyCUHRQgjlgAC_E,25389
pymongo/topology_description.py,sha256=z9KHcuCrsIwSvndj49a95750fVqBMF_nrqA6fgH6JkQ,20631
pymongo/uri_parser.py,sha256=Od1L3n1wb7yIOxV__l5u0Nk4ramkvIjThWuXsw01jDQ,14553
pymongo/write_concern.py,sha256=PQmoV6RqaTlMgvoKU-KoQ_oowJsH9Tabi8KF-kSCZTg,5000

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.31.1)
Root-Is-Purelib: false
Tag: cp37-cp37m-macosx_10_9_x86_64

@@ -0,0 +1,99 @@
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python driver for MongoDB."""
# Sort directions, used when specifying index keys and sort orders.
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
# Index kind specifiers (string values passed as the "direction" of an
# index key to select a special index type instead of a sort order).
GEO2D = "2d"
"""Index specifier for a 2-dimensional `geospatial index`_.
.. _geospatial index: http://docs.mongodb.org/manual/core/2d/
"""
GEOHAYSTACK = "geoHaystack"
"""Index specifier for a 2-dimensional `haystack index`_.
.. versionadded:: 2.1
.. _haystack index: http://docs.mongodb.org/manual/core/geohaystack/
"""
GEOSPHERE = "2dsphere"
"""Index specifier for a `spherical geospatial index`_.
.. versionadded:: 2.5
.. _spherical geospatial index: http://docs.mongodb.org/manual/core/2dsphere/
"""
HASHED = "hashed"
"""Index specifier for a `hashed index`_.
.. versionadded:: 2.5
.. _hashed index: http://docs.mongodb.org/manual/core/index-hashed/
"""
TEXT = "text"
"""Index specifier for a `text index`_.
.. versionadded:: 2.7.1
.. _text index: http://docs.mongodb.org/manual/core/index-text/
"""
# Database profiling levels (values accepted by the server's profile setting).
OFF = 0
"""No database profiling."""
SLOW_ONLY = 1
"""Only profile slow operations."""
ALL = 2
"""Profile all operations."""
version_tuple = (3, 8, 0)
def get_version_string():
    """Render ``version_tuple`` as a dotted version string.

    If the final component is a string (a pre-release tag such as
    ``'rc0'``), it is appended directly to the numeric prefix without
    a separating dot; otherwise every component is dot-joined.
    """
    *numeric_parts, final = version_tuple
    if isinstance(final, str):
        # e.g. (3, 8, 'rc0') -> "3.8rc0"
        return '.'.join(str(part) for part in numeric_parts) + final
    return '.'.join(str(part) for part in version_tuple)
__version__ = version = get_version_string()
"""Current version of PyMongo."""
from pymongo.collection import ReturnDocument
from pymongo.common import (MIN_SUPPORTED_WIRE_VERSION,
MAX_SUPPORTED_WIRE_VERSION)
from pymongo.cursor import CursorType
from pymongo.mongo_client import MongoClient
from pymongo.mongo_replica_set_client import MongoReplicaSetClient
from pymongo.operations import (IndexModel,
InsertOne,
DeleteOne,
DeleteMany,
UpdateOne,
UpdateMany,
ReplaceOne)
from pymongo.read_preferences import ReadPreference
from pymongo.write_concern import WriteConcern
def has_c():
    """Is the C extension installed?"""
    try:
        # Probe import only -- we never use the module here, just check
        # that the compiled extension can be loaded.
        from pymongo import _cmessage
    except ImportError:
        return False
    return True

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save