diff --git a/doc/api-asyncio/asyncio_gridfs.rst b/doc/api-asyncio/asyncio_gridfs.rst index c384de1e..28ca5f1f 100644 --- a/doc/api-asyncio/asyncio_gridfs.rst +++ b/doc/api-asyncio/asyncio_gridfs.rst @@ -8,366 +8,8 @@ Store blobs of data in `GridFS `_. .. seealso:: :ref:`Differences between PyMongo's and Motor's GridFS APIs `. - -.. class:: AsyncIOMotorGridFSBucket - - Create a new instance of :class:`AsyncIOMotorGridFSBucket`. - - Raises :exc:`TypeError` if `database` is not an instance of - :class:`AsyncIOMotorDatabase`. - - Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern` - is not acknowledged. - - :Parameters: - - `database`: database to use. - - `bucket_name` (optional): The name of the bucket. Defaults to 'fs'. - - `chunk_size_bytes` (optional): The chunk size in bytes. Defaults - to 255KB. - - `write_concern` (optional): The - :class:`~pymongo.write_concern.WriteConcern` to use. If ``None`` - (the default) db.write_concern is used. - - `read_preference` (optional): The read preference to use. If - ``None`` (the default) db.read_preference is used. - - .. mongodoc:: gridfs - - .. coroutinemethod:: delete(self, file_id) - - Delete a file's metadata and data chunks from a GridFS bucket:: - - async def delete(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - # Get _id of file to delete - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - await fs.delete(file_id) - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :Parameters: - - `file_id`: The _id of the file to be deleted. - - .. coroutinemethod:: download_to_stream(self, file_id, destination) - - Downloads the contents of the stored file specified by file_id and - writes the contents to `destination`:: - - async def download(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - # Get _id of file to read - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - # Get file to write to - file = open('myfile','wb+') - await fs.download_to_stream(file_id, file) - file.seek(0) - contents = file.read() - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :Parameters: - - `file_id`: The _id of the file to be downloaded. - - `destination`: a file-like object implementing :meth:`write`. - - .. coroutinemethod:: download_to_stream_by_name(self, filename, destination, revision=-1) - - Write the contents of `filename` (with optional `revision`) to - `destination`. - - For example:: - - async def download_by_name(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - # Get file to write to - file = open('myfile','wb') - await fs.download_to_stream_by_name("test_file", file) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `filename`: The name of the file to read from. - - `destination`: A file-like object that implements :meth:`write`. - - `revision` (optional): Which revision (documents with the same - filename and different uploadDate) of the file to retrieve. - Defaults to -1 (the most recent revision). - - :Note: Revision numbers are defined as follows: - - - 0 = the original stored file - - 1 = the first revision - - 2 = the second revision - - etc... - - -2 = the second most recent revision - - -1 = the most recent revision - - .. 
method:: find(self, *args, **kwargs) - - Find and return the files collection documents that match ``filter``. - - Returns a cursor that iterates across files matching - arbitrary queries on the files collection. Can be combined - with other modifiers for additional control. - - For example:: - - async def find(): - cursor = fs.find({"filename": "lisa.txt"}, - no_cursor_timeout=True) - - async for grid_data in cursor: - data = grid_data.read() - - iterates through all versions of "lisa.txt" stored in GridFS. - Setting no_cursor_timeout may be important to - prevent the cursor from timing out during long multi-file processing - work. - - As another example, the call:: - - most_recent_three = fs.find().sort("uploadDate", -1).limit(3) - - returns a cursor to the three most recently uploaded files in GridFS. - - Follows a similar interface to :meth:`~AsyncIOMotorCollection.find` - in :class:`AsyncIOMotorCollection`. - - :Parameters: - - `filter`: Search query. - - `batch_size` (optional): The number of documents to return per - batch. - - `limit` (optional): The maximum number of documents to return. - - `no_cursor_timeout` (optional): The server normally times out idle - cursors after an inactivity period (10 minutes) to prevent excess - memory use. Set this option to True prevent that. - - `skip` (optional): The number of documents to skip before - returning. - - `sort` (optional): The order by which to sort results. Defaults to - None. - - .. coroutinemethod:: open_download_stream(self, file_id) - - Opens a stream to read the contents of the stored file specified by file_id:: - - async def download_stream(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - # get _id of file to read. - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - grid_out = await fs.open_download_stream(file_id) - contents = await grid_out.read() - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :Parameters: - - `file_id`: The _id of the file to be downloaded. - - Returns a :class:`AsyncIOMotorGridOut`. - - .. coroutinemethod:: open_download_stream_by_name(self, filename, revision=-1) - - Opens a stream to read the contents of `filename` and optional `revision`:: - - async def download_by_name(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - # get _id of file to read. - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - grid_out = await fs.open_download_stream_by_name(file_id) - contents = await grid_out.read() - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - - Raises :exc:`~ValueError` filename is not a string. - - :Parameters: - - `filename`: The name of the file to read from. - - `revision` (optional): Which revision (documents with the same - filename and different uploadDate) of the file to retrieve. - Defaults to -1 (the most recent revision). - - Returns a :class:`AsyncIOMotorGridOut`. - - :Note: Revision numbers are defined as follows: - - - 0 = the original stored file - - 1 = the first revision - - 2 = the second revision - - etc... - - -2 = the second most recent revision - - -1 = the most recent revision - - .. method:: open_upload_stream(self, filename, chunk_size_bytes=None, metadata=None) - - Opens a stream for writing. 
- - Specify the filename, and add any additional information in the metadata - field of the file document or modify the chunk size:: - - async def upload(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - grid_in = fs.open_upload_stream( - "test_file", metadata={"contentType": "text/plain"}) - - await grid_in.write(b"data I want to store!") - await grid_in.close() # uploaded on close - - Returns an instance of :class:`AsyncIOMotorGridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - In a Python 3.5 native coroutine, the "async with" statement calls - :meth:`~AsyncIOMotorGridIn.close` automatically:: - - async def upload(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - async with await fs.open_upload_stream( - "test_file", metadata={"contentType": "text/plain"}) as gridin: - await gridin.write(b'First part\n') - await gridin.write(b'Second part') - - # gridin is now closed automatically. - - :Parameters: - - `filename`: The name of the file to upload. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes in :class:`AsyncIOMotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - - .. method:: open_upload_stream_with_id(self, file_id, filename, chunk_size_bytes=None, metadata=None) - - Opens a stream for writing. - - Specify the filed_id and filename, and add any additional information in - the metadata field of the file document, or modify the chunk size:: - - async def upload(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - grid_in = fs.open_upload_stream_with_id( - ObjectId(), "test_file", - metadata={"contentType": "text/plain"}) - - await grid_in.write(b"data I want to store!") - await grid_in.close() # uploaded on close - - Returns an instance of :class:`AsyncIOMotorGridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `file_id`: The id to use for this file. The id must not have - already been used for another file. - - `filename`: The name of the file to upload. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes in :class:`AsyncIOMotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - - .. coroutinemethod:: rename(self, file_id, new_filename) - - Renames the stored file with the specified file_id. - - For example:: - - - async def rename(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - # get _id of file to read. - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - - await fs.rename(file_id, "new_test_name") - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :Parameters: - - `file_id`: The _id of the file to be renamed. - - `new_filename`: The new name of the file. - - .. coroutinemethod:: upload_from_stream(self, filename, source, chunk_size_bytes=None, metadata=None) - - Uploads a user file to a GridFS bucket. 
- - Reads the contents of the user file from `source` and uploads - it to the file `filename`. Source can be a string or file-like object. - For example:: - - async def upload_from_stream(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - file_id = await fs.upload_from_stream( - "test_file", - b"data I want to store!", - metadata={"contentType": "text/plain"}) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `filename`: The name of the file to upload. - - `source`: The source stream of the content to be uploaded. Must be - a file-like object that implements :meth:`read` or a string. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes of :class:`AsyncIOMotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - - Returns the _id of the uploaded file. - - .. coroutinemethod:: upload_from_stream_with_id(self, file_id, filename, source, chunk_size_bytes=None, metadata=None) - - Uploads a user file to a GridFS bucket with a custom file id. - - Reads the contents of the user file from `source` and uploads - it to the file `filename`. Source can be a string or file-like object. - For example:: - - async def upload_from_stream_with_id(): - my_db = AsyncIOMotorClient().test - fs = AsyncIOMotorGridFSBucket(my_db) - file_id = await fs.upload_from_stream_with_id( - ObjectId(), - "test_file", - b"data I want to store!", - metadata={"contentType": "text/plain"}) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `file_id`: The id to use for this file. The id must not have - already been used for another file. - - `filename`: The name of the file to upload. - - `source`: The source stream of the content to be uploaded. Must be - a file-like object that implements :meth:`read` or a string. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes of :class:`AsyncIOMotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. +.. autoclass:: AsyncIOMotorGridFSBucket + :members: .. autoclass:: AsyncIOMotorGridIn :members: diff --git a/doc/api-asyncio/asyncio_motor_collection.rst b/doc/api-asyncio/asyncio_motor_collection.rst index 4ae6926f..cddde0e2 100644 --- a/doc/api-asyncio/asyncio_motor_collection.rst +++ b/doc/api-asyncio/asyncio_motor_collection.rst @@ -5,7 +5,6 @@ .. autoclass:: AsyncIOMotorCollection :members: - :exclude-members: create_index, inline_map_reduce .. describe:: c[name] || c.name @@ -18,108 +17,3 @@ The :class:`AsyncIOMotorDatabase` that this :class:`AsyncIOMotorCollection` is a part of. - - .. coroutinemethod:: create_index(self, keys, **kwargs) - - Creates an index on this collection. - - Takes either a single key or a list of (key, direction) pairs. 
- The key(s) must be an instance of :class:`basestring` - (:class:`str` in python 3), and the direction(s) must be one of - (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`, - :data:`~pymongo.GEO2D`, :data:`~pymongo.GEOHAYSTACK`, - :data:`~pymongo.GEOSPHERE`, :data:`~pymongo.HASHED`, - :data:`~pymongo.TEXT`). - - To create a single key ascending index on the key ``'mike'`` we just - use a string argument:: - - await my_collection.create_index("mike") - - For a compound index on ``'mike'`` descending and ``'eliot'`` - ascending we need to use a list of tuples:: - - await my_collection.create_index([("mike", pymongo.DESCENDING), - ("eliot", pymongo.ASCENDING)]) - - All optional index creation parameters should be passed as - keyword arguments to this method. For example:: - - await my_collection.create_index([("mike", pymongo.DESCENDING)], - background=True) - - Valid options include, but are not limited to: - - - `name`: custom name to use for this index - if none is - given, a name will be generated. - - `unique`: if ``True`` creates a uniqueness constraint on the index. - - `background`: if ``True`` this index should be created in the - background. - - `sparse`: if ``True``, omit from the index any documents that lack - the indexed field. - - `bucketSize`: for use with geoHaystack indexes. - Number of documents to group together within a certain proximity - to a given longitude and latitude. - - `min`: minimum value for keys in a :data:`~pymongo.GEO2D` - index. - - `max`: maximum value for keys in a :data:`~pymongo.GEO2D` - index. - - `expireAfterSeconds`: Used to create an expiring (TTL) - collection. MongoDB will automatically delete documents from - this collection after seconds. The indexed field must - be a UTC datetime or the data will not expire. - - `partialFilterExpression`: A document that specifies a filter for - a partial index. - - `collation` (optional): An instance of - :class:`~pymongo.collation.Collation`. - - See the MongoDB documentation for a full list of supported options by - server version. - - .. warning:: `dropDups` is not supported by MongoDB 3.0 or newer. The - option is silently ignored by the server and unique index builds - using the option will fail if a duplicate value is detected. - - .. note:: `partialFilterExpression` requires server version **>= 3.2** - - .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of - this collection is automatically applied to this operation. - - :Parameters: - - `keys`: a single key or a list of (key, direction) - pairs specifying the index to create - - `**kwargs` (optional): any additional index creation - options (see the above list) should be passed as keyword - arguments - - .. mongodoc:: indexes - - .. coroutinemethod:: inline_map_reduce(self, map, reduce, full_response=False, **kwargs) - - Perform an inline map/reduce operation on this collection. - - Perform the map/reduce operation on the server in RAM. A result - collection is not created. The result set is returned as a list - of documents. - - If `full_response` is ``False`` (default) returns the - result documents in a list. Otherwise, returns the full - response from the server to the `map reduce command`_. - - The :meth:`inline_map_reduce` method obeys the :attr:`read_preference` - of this :class:`Collection`. 
- - :Parameters: - - `map`: map function (as a JavaScript string) - - `reduce`: reduce function (as a JavaScript string) - - `full_response` (optional): if ``True``, return full response to - this command - otherwise just return the result collection - - `**kwargs` (optional): additional arguments to the - `map reduce command`_ may be passed as keyword arguments to this - helper method, e.g.:: - - await db.test.inline_map_reduce(map, reduce, limit=2) - - .. _map reduce command: https://mongodb.com/docs/manual/reference/command/mapReduce/ - - .. mongodoc:: mapreduce diff --git a/doc/api-tornado/gridfs.rst b/doc/api-tornado/gridfs.rst index 1737a7b1..a83bcac0 100644 --- a/doc/api-tornado/gridfs.rst +++ b/doc/api-tornado/gridfs.rst @@ -10,380 +10,8 @@ Store blobs of data in `GridFS `_. .. seealso:: :doc:`web` -.. class:: MotorGridFSBucket - - Create a new instance of :class:`MotorGridFSBucket`. - - Raises :exc:`TypeError` if `database` is not an instance of - :class:`MotorDatabase`. - - Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern` - is not acknowledged. - - :Parameters: - - `database`: database to use. - - `bucket_name` (optional): The name of the bucket. Defaults to 'fs'. - - `chunk_size_bytes` (optional): The chunk size in bytes. Defaults - to 255KB. - - `write_concern` (optional): The - :class:`~pymongo.write_concern.WriteConcern` to use. If ``None`` - (the default) db.write_concern is used. - - `read_preference` (optional): The read preference to use. If - ``None`` (the default) db.read_preference is used. - - .. mongodoc:: gridfs - - .. coroutinemethod:: delete(self, file_id)) - - Delete a file's metadata and data chunks from a GridFS bucket:: - - async def delete(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - # Get _id of file to delete - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - await fs.delete(file_id) - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :Parameters: - - `file_id`: The _id of the file to be deleted. - - Returns a Future. - - .. coroutinemethod:: download_to_stream(self, file_id, destination)) - - Downloads the contents of the stored file specified by file_id and - writes the contents to `destination`:: - - async def download(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - # Get _id of file to read - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - # Get file to write to - file = open('myfile','wb+') - await fs.download_to_stream(file_id, file) - file.seek(0) - contents = file.read() - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :Parameters: - - `file_id`: The _id of the file to be downloaded. - - `destination`: a file-like object implementing :meth:`write`. - - Returns a Future. - - .. coroutinemethod:: download_to_stream_by_name(self, filename, destination, revision=-1) - - Write the contents of `filename` (with optional `revision`) to - `destination`. - - For example:: - - async def download_by_name(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - # Get file to write to - file = open('myfile','wb') - await fs.download_to_stream_by_name("test_file", file) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `filename`: The name of the file to read from. - - `destination`: A file-like object that implements :meth:`write`. 
- - `revision` (optional): Which revision (documents with the same - filename and different uploadDate) of the file to retrieve. - Defaults to -1 (the most recent revision). - - :Note: Revision numbers are defined as follows: - - - 0 = the original stored file - - 1 = the first revision - - 2 = the second revision - - etc... - - -2 = the second most recent revision - - -1 = the most recent revision - - .. method:: find(self, *args, **kwargs) - - Find and return the files collection documents that match ``filter``. - - Returns a cursor that iterates across files matching - arbitrary queries on the files collection. Can be combined - with other modifiers for additional control. - - For example:: - - async def find(): - cursor = fs.find({"filename": "lisa.txt"}, - no_cursor_timeout=True) - - async for grid_data in cursor: - data = grid_data.read() - - iterates through all versions of "lisa.txt" stored in GridFS. - Setting no_cursor_timeout may be important to - prevent the cursor from timing out during long multi-file processing - work. - - As another example, the call:: - - most_recent_three = fs.find().sort("uploadDate", -1).limit(3) - - returns a cursor to the three most recently uploaded files in GridFS. - - Follows a similar interface to :meth:`~MotorCollection.find` - in :class:`MotorCollection`. - - :Parameters: - - `filter`: Search query. - - `batch_size` (optional): The number of documents to return per - batch. - - `limit` (optional): The maximum number of documents to return. - - `no_cursor_timeout` (optional): The server normally times out idle - cursors after an inactivity period (10 minutes) to prevent excess - memory use. Set this option to True prevent that. - - `skip` (optional): The number of documents to skip before - returning. - - `sort` (optional): The order by which to sort results. Defaults to - None. - - Returns a Future. - - .. coroutinemethod:: open_download_stream(self, file_id) - - Opens a stream to read the contents of the stored file specified by file_id:: - - async def download_stream(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - # get _id of file to read. - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - grid_out = await fs.open_download_stream(file_id) - contents = await grid_out.read() - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :Parameters: - - `file_id`: The _id of the file to be downloaded. - - Returns a Future that resolves to a :class:`MotorGridOut`. - - .. coroutinemethod:: open_download_stream_by_name(self, filename, revision=-1) - - Opens a stream to read the contents of `filename` and optional `revision`:: - - async def download_by_name(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - # get _id of file to read. - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - grid_out = await fs.open_download_stream_by_name(file_id) - contents = await grid_out.read() - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - - Raises :exc:`~ValueError` filename is not a string. - - :Parameters: - - `filename`: The name of the file to read from. - - `revision` (optional): Which revision (documents with the same - filename and different uploadDate) of the file to retrieve. - Defaults to -1 (the most recent revision). - - Returns a Future that resolves to a :class:`MotorGridOut`. 
- - :Note: Revision numbers are defined as follows: - - - 0 = the original stored file - - 1 = the first revision - - 2 = the second revision - - etc... - - -2 = the second most recent revision - - -1 = the most recent revision - - .. method:: open_upload_stream(self, filename, chunk_size_bytes=None, metadata=None) - - Opens a stream for writing. - - Specify the filename, and add any additional information in the metadata - field of the file document or modify the chunk size:: - - async def upload(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - grid_in = fs.open_upload_stream( - "test_file", chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) - - await grid_in.write(b"data I want to store!") - await grid_in.close() # uploaded on close - - Returns an instance of :class:`MotorGridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - Using the "async with" statement calls :meth:`~MotorGridIn.close` - automatically:: - - async def upload(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - async with await fs.open_upload_stream( - "test_file", metadata={"contentType": "text/plain"}) as gridin: - await gridin.write(b'First part\n') - await gridin.write(b'Second part') - - # gridin is now closed automatically. - - :Parameters: - - `filename`: The name of the file to upload. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes in :class:`MotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - - .. method:: open_upload_stream_with_id(self, file_id, filename, chunk_size_bytes=None, metadata=None) - - Opens a stream for writing. - - Specify the filed_id and filename, and add any additional information in - the metadata field of the file document, or modify the chunk size:: - - async def upload(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - grid_in = fs.open_upload_stream_with_id( - ObjectId(), - "test_file", - chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) - - await grid_in.write(b"data I want to store!") - await grid_in.close() # uploaded on close - - Returns an instance of :class:`MotorGridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `file_id`: The id to use for this file. The id must not have - already been used for another file. - - `filename`: The name of the file to upload. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes in :class:`MotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - - .. coroutinemethod:: rename(self, file_id, new_filename)) - - Renames the stored file with the specified file_id. - - For example:: - - - async def rename(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - # get _id of file to read. - file_id = await fs.upload_from_stream("test_file", - b"data I want to store!") - - await fs.rename(file_id, "new_test_name") - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. 
- - :Parameters: - - `file_id`: The _id of the file to be renamed. - - `new_filename`: The new name of the file. - - Returns a Future. - - .. coroutinemethod:: upload_from_stream(self, filename, source, chunk_size_bytes=None, metadata=None)) - - Uploads a user file to a GridFS bucket. - - Reads the contents of the user file from `source` and uploads - it to the file `filename`. Source can be a string or file-like object. - For example:: - - async def upload_from_stream(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - file_id = await fs.upload_from_stream( - "test_file", - b"data I want to store!", - chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `filename`: The name of the file to upload. - - `source`: The source stream of the content to be uploaded. Must be - a file-like object that implements :meth:`read` or a string. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes of :class:`MotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - - Returns a Future that resolves to the _id of the uploaded file. - - .. coroutinemethod:: upload_from_stream_with_id(self, file_id, filename, source, chunk_size_bytes=None, metadata=None)) - - Uploads a user file to a GridFS bucket with a custom file id. - - Reads the contents of the user file from `source` and uploads - it to the file `filename`. Source can be a string or file-like object. - For example:: - - async def upload_from_stream_with_id(): - my_db = MotorClient().test - fs = MotorGridFSBucket(my_db) - file_id = await fs.upload_from_stream_with_id( - ObjectId(), - "test_file", - b"data I want to store!", - chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :Parameters: - - `file_id`: The id to use for this file. The id must not have - already been used for another file. - - `filename`: The name of the file to upload. - - `source`: The source stream of the content to be uploaded. Must be - a file-like object that implements :meth:`read` or a string. - - `chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes of :class:`MotorGridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - - Returns a Future. +.. autoclass:: MotorGridFSBucket + :members: .. autoclass:: MotorGridIn :members: diff --git a/doc/api-tornado/motor_collection.rst b/doc/api-tornado/motor_collection.rst index bee77f19..d077c488 100644 --- a/doc/api-tornado/motor_collection.rst +++ b/doc/api-tornado/motor_collection.rst @@ -5,7 +5,6 @@ .. autoclass:: MotorCollection :members: - :exclude-members: create_index, inline_map_reduce .. describe:: c[name] || c.name @@ -18,112 +17,3 @@ The :class:`MotorDatabase` that this :class:`MotorCollection` is a part of. - - .. coroutinemethod:: create_index(self, keys, **kwargs) - - Creates an index on this collection. - - Takes either a single key or a list of (key, direction) pairs. 
- The key(s) must be an instance of :class:`basestring` - (:class:`str` in python 3), and the direction(s) must be one of - (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`, - :data:`~pymongo.GEO2D`, :data:`~pymongo.GEOHAYSTACK`, - :data:`~pymongo.GEOSPHERE`, :data:`~pymongo.HASHED`, - :data:`~pymongo.TEXT`). - - To create a single key ascending index on the key ``'mike'`` we just - use a string argument:: - - await my_collection.create_index("mike") - - For a compound index on ``'mike'`` descending and ``'eliot'`` - ascending we need to use a list of tuples:: - - await my_collection.create_index([("mike", pymongo.DESCENDING), - ("eliot", pymongo.ASCENDING)]) - - All optional index creation parameters should be passed as - keyword arguments to this method. For example:: - - await my_collection.create_index([("mike", pymongo.DESCENDING)], - background=True) - - Valid options include, but are not limited to: - - - `name`: custom name to use for this index - if none is - given, a name will be generated. - - `unique`: if ``True`` creates a uniqueness constraint on the index. - - `background`: if ``True`` this index should be created in the - background. - - `sparse`: if ``True``, omit from the index any documents that lack - the indexed field. - - `bucketSize`: for use with geoHaystack indexes. - Number of documents to group together within a certain proximity - to a given longitude and latitude. - - `min`: minimum value for keys in a :data:`~pymongo.GEO2D` - index. - - `max`: maximum value for keys in a :data:`~pymongo.GEO2D` - index. - - `expireAfterSeconds`: Used to create an expiring (TTL) - collection. MongoDB will automatically delete documents from - this collection after seconds. The indexed field must - be a UTC datetime or the data will not expire. - - `partialFilterExpression`: A document that specifies a filter for - a partial index. - - `collation` (optional): An instance of - :class:`~pymongo.collation.Collation`. - - See the MongoDB documentation for a full list of supported options by - server version. - - .. warning:: `dropDups` is not supported by MongoDB 3.0 or newer. The - option is silently ignored by the server and unique index builds - using the option will fail if a duplicate value is detected. - - .. note:: `partialFilterExpression` requires server version **>= 3.2** - - .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of - this collection is automatically applied to this operation. - - :Parameters: - - `keys`: a single key or a list of (key, direction) - pairs specifying the index to create - - `**kwargs` (optional): any additional index creation - options (see the above list) should be passed as keyword - arguments - - Returns a Future. - - .. mongodoc:: indexes - - .. coroutinemethod:: inline_map_reduce(self, map, reduce, full_response=False, **kwargs) - - Perform an inline map/reduce operation on this collection. - - Perform the map/reduce operation on the server in RAM. A result - collection is not created. The result set is returned as a list - of documents. - - If `full_response` is ``False`` (default) returns the - result documents in a list. Otherwise, returns the full - response from the server to the `map reduce command`_. - - The :meth:`inline_map_reduce` method obeys the :attr:`read_preference` - of this :class:`Collection`. 
- - :Parameters: - - `map`: map function (as a JavaScript string) - - `reduce`: reduce function (as a JavaScript string) - - `full_response` (optional): if ``True``, return full response to - this command - otherwise just return the result collection - - `**kwargs` (optional): additional arguments to the - `map reduce command`_ may be passed as keyword arguments to this - helper method, e.g.:: - - await db.test.inline_map_reduce(map, reduce, limit=2) - - Returns a Future. - - .. _map reduce command: https://mongodb.com/docs/manual/reference/command/mapReduce/ - - .. mongodoc:: mapreduce diff --git a/motor/core.py b/motor/core.py index a50dcff2..1249af87 100644 --- a/motor/core.py +++ b/motor/core.py @@ -756,7 +756,7 @@ class AgnosticCollection(AgnosticBaseProperties): __bool__ = DelegateMethod() bulk_write = AsyncCommand(doc=docstrings.bulk_write_doc) count_documents = AsyncRead() - create_index = AsyncCommand() + create_index = AsyncCommand(doc=docstrings.create_index_doc) create_indexes = AsyncCommand(doc=docstrings.create_indexes_doc) delete_many = AsyncCommand(doc=docstrings.delete_many_doc) delete_one = AsyncCommand(doc=docstrings.delete_one_doc) diff --git a/motor/docstrings.py b/motor/docstrings.py index cadcfd9b..7a126b9d 100644 --- a/motor/docstrings.py +++ b/motor/docstrings.py @@ -201,6 +201,86 @@ async def modify_data(): Added session parameter. """ +create_index_doc = """Creates an index on this collection. + + Takes either a single key or a list of (key, direction) pairs. + The key(s) must be an instance of :class:`basestring` + (:class:`str` in python 3), and the direction(s) must be one of + (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`, + :data:`~pymongo.GEO2D`, :data:`~pymongo.GEOHAYSTACK`, + :data:`~pymongo.GEOSPHERE`, :data:`~pymongo.HASHED`, + :data:`~pymongo.TEXT`). + + To create a single key ascending index on the key ``'mike'`` we just + use a string argument:: + + await my_collection.create_index("mike") + + For a compound index on ``'mike'`` descending and ``'eliot'`` + ascending we need to use a list of tuples:: + + await my_collection.create_index([("mike", pymongo.DESCENDING), + ("eliot", pymongo.ASCENDING)]) + + All optional index creation parameters should be passed as + keyword arguments to this method. For example:: + + await my_collection.create_index([("mike", pymongo.DESCENDING)], + background=True) + + Valid options include, but are not limited to: + + - `name`: custom name to use for this index - if none is + given, a name will be generated. + - `unique`: if ``True`` creates a uniqueness constraint on the index. + - `background`: if ``True`` this index should be created in the + background. + - `sparse`: if ``True``, omit from the index any documents that lack + the indexed field. + - `bucketSize`: for use with geoHaystack indexes. + Number of documents to group together within a certain proximity + to a given longitude and latitude. + - `min`: minimum value for keys in a :data:`~pymongo.GEO2D` + index. + - `max`: maximum value for keys in a :data:`~pymongo.GEO2D` + index. + - `expireAfterSeconds`: Used to create an expiring (TTL) + collection. MongoDB will automatically delete documents from + this collection after seconds. The indexed field must + be a UTC datetime or the data will not expire. + - `partialFilterExpression`: A document that specifies a filter for + a partial index. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. 
+ + See the MongoDB documentation for a full list of supported options by + server version. + + .. warning:: `dropDups` is not supported by MongoDB 3.0 or newer. The + option is silently ignored by the server and unique index builds + using the option will fail if a duplicate value is detected. + + .. note:: `partialFilterExpression` requires server version **>= 3.2** + + .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of + this collection is automatically applied to this operation. + + :Parameters: + - `keys`: a single key or a list of (key, direction) + pairs specifying the index to create. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`, created with + :meth:`~MotorClient.start_session`. + - `comment` (optional): A user-provided comment to attach to this command. + - `**kwargs` (optional): any additional index creation + options (see the above list) should be passed as keyword + arguments + + Returns a Future. + + .. mongodoc:: indexes +""" + create_indexes_doc = """Create one or more indexes on this collection:: from pymongo import IndexModel, ASCENDING, DESCENDING @@ -1251,3 +1331,312 @@ async def coro(): encrypted = await client_encryption.encrypt(value, ...) decrypted = await client_encryption.decrypt(encrypted) """ + +gridfs_delete_doc = """Delete a file's metadata and data chunks from a GridFS bucket:: + + async def delete(): + my_db = AsyncIOMotorClient().test + fs = AsyncIOMotorGridFSBucket(my_db) + # Get _id of file to delete + file_id = await fs.upload_from_stream("test_file", + b"data I want to store!") + await fs.delete(file_id) + + Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. + + :Parameters: + - `file_id`: The _id of the file to be deleted. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`, created with + :meth:`~MotorClient.start_session`. +""" + +gridfs_download_to_stream_doc = """Downloads the contents of the stored file specified by file_id and + writes the contents to `destination`:: + + async def download(): + my_db = AsyncIOMotorClient().test + fs = AsyncIOMotorGridFSBucket(my_db) + # Get _id of file to read + file_id = await fs.upload_from_stream("test_file", + b"data I want to store!") + # Get file to write to + file = open('myfile','wb+') + await fs.download_to_stream(file_id, file) + file.seek(0) + contents = file.read() + + Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. + + :Parameters: + - `file_id`: The _id of the file to be downloaded. + - `destination`: a file-like object implementing :meth:`write`. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`, created with + :meth:`~MotorClient.start_session`. +""" + +gridfs_download_to_stream_by_name_doc = """ Write the contents of `filename` (with optional `revision`) to + `destination`. + + For example:: + + async def download_by_name(): + my_db = AsyncIOMotorClient().test + fs = AsyncIOMotorGridFSBucket(my_db) + # Get file to write to + file = open('myfile','wb') + await fs.download_to_stream_by_name("test_file", file) + + Raises :exc:`~gridfs.errors.NoFile` if no such version of + that file exists. + + Raises :exc:`~ValueError` if `filename` is not a string. + + :Parameters: + - `filename`: The name of the file to read from. + - `destination`: A file-like object that implements :meth:`write`. + - `revision` (optional): Which revision (documents with the same + filename and different uploadDate) of the file to retrieve. + Defaults to -1 (the most recent revision). 
+      - `session` (optional): a
+        :class:`~pymongo.client_session.ClientSession`, created with
+        :meth:`~MotorClient.start_session`.
+
+    :Note: Revision numbers are defined as follows:
+
+      - 0 = the original stored file
+      - 1 = the first revision
+      - 2 = the second revision
+      - etc...
+      - -2 = the second most recent revision
+      - -1 = the most recent revision
+"""
+
+gridfs_open_download_stream_doc = """Opens a stream to read the contents of the stored file specified by file_id::
+
+        async def download_stream():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            # Get _id of file to read.
+            file_id = await fs.upload_from_stream("test_file",
+                                                  b"data I want to store!")
+            grid_out = await fs.open_download_stream(file_id)
+            contents = await grid_out.read()
+
+    Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.
+
+    :Parameters:
+      - `file_id`: The _id of the file to be downloaded.
+      - `session` (optional): a
+        :class:`~pymongo.client_session.ClientSession`, created with
+        :meth:`~MotorClient.start_session`.
+
+    Returns an :class:`AsyncIOMotorGridOut`.
+"""
+
+gridfs_open_download_stream_by_name_doc = """Opens a stream to read the contents of `filename` and optional `revision`::
+
+        async def download_by_name():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            # Store a file so it can be read back by name.
+            file_id = await fs.upload_from_stream("test_file",
+                                                  b"data I want to store!")
+            grid_out = await fs.open_download_stream_by_name("test_file")
+            contents = await grid_out.read()
+
+    Raises :exc:`~gridfs.errors.NoFile` if no such version of
+    that file exists.
+
+    Raises :exc:`~ValueError` if `filename` is not a string.
+
+    :Parameters:
+      - `filename`: The name of the file to read from.
+      - `revision` (optional): Which revision (documents with the same
+        filename and different uploadDate) of the file to retrieve.
+        Defaults to -1 (the most recent revision).
+      - `session` (optional): a
+        :class:`~pymongo.client_session.ClientSession`, created with
+        :meth:`~MotorClient.start_session`.
+
+    Returns an :class:`AsyncIOMotorGridOut`.
+
+    :Note: Revision numbers are defined as follows:
+
+      - 0 = the original stored file
+      - 1 = the first revision
+      - 2 = the second revision
+      - etc...
+      - -2 = the second most recent revision
+      - -1 = the most recent revision
+"""
+
+
+gridfs_open_upload_stream_doc = """Opens a stream for writing.
+
+    Specify the filename, and add any additional information in the metadata
+    field of the file document or modify the chunk size::
+
+        async def upload():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            grid_in = fs.open_upload_stream(
+                "test_file", metadata={"contentType": "text/plain"})
+
+            await grid_in.write(b"data I want to store!")
+            await grid_in.close()  # uploaded on close
+
+    Returns an instance of :class:`AsyncIOMotorGridIn`.
+
+    Raises :exc:`~gridfs.errors.NoFile` if no such version of
+    that file exists.
+    Raises :exc:`~ValueError` if `filename` is not a string.
+
+    In a native coroutine, the "async with" statement calls
+    :meth:`~AsyncIOMotorGridIn.close` automatically::
+
+        async def upload():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            async with await fs.open_upload_stream(
+                    "test_file", metadata={"contentType": "text/plain"}) as gridin:
+                await gridin.write(b'First part\\n')
+                await gridin.write(b'Second part')
+
+    :Parameters:
+      - `filename`: The name of the file to upload.
+      - `chunk_size_bytes` (optional): The number of bytes per chunk of this
+        file. Defaults to the chunk_size_bytes in :class:`AsyncIOMotorGridFSBucket`.
+      - `metadata` (optional): User data for the 'metadata' field of the
+        files collection document. If not provided the metadata field will
+        be omitted from the files collection document.
+      - `session` (optional): a
+        :class:`~pymongo.client_session.ClientSession`, created with
+        :meth:`~MotorClient.start_session`.
+"""
+
+gridfs_open_upload_stream_with_id_doc = """Opens a stream for writing.
+
+    Specify the file_id and filename, and add any additional information in
+    the metadata field of the file document, or modify the chunk size::
+
+        async def upload():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            grid_in = fs.open_upload_stream_with_id(
+                ObjectId(), "test_file",
+                metadata={"contentType": "text/plain"})
+
+            await grid_in.write(b"data I want to store!")
+            await grid_in.close()  # uploaded on close
+
+    Returns an instance of :class:`AsyncIOMotorGridIn`.
+
+    Raises :exc:`~gridfs.errors.NoFile` if no such version of
+    that file exists.
+    Raises :exc:`~ValueError` if `filename` is not a string.
+
+    :Parameters:
+      - `file_id`: The id to use for this file. The id must not have
+        already been used for another file.
+      - `filename`: The name of the file to upload.
+      - `chunk_size_bytes` (optional): The number of bytes per chunk of this
+        file. Defaults to the chunk_size_bytes in :class:`AsyncIOMotorGridFSBucket`.
+      - `metadata` (optional): User data for the 'metadata' field of the
+        files collection document. If not provided the metadata field will
+        be omitted from the files collection document.
+      - `session` (optional): a
+        :class:`~pymongo.client_session.ClientSession`, created with
+        :meth:`~MotorClient.start_session`.
+"""
+
+gridfs_rename_doc = """Renames the stored file with the specified file_id.
+
+    For example::
+
+
+        async def rename():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            # Get _id of file to rename.
+            file_id = await fs.upload_from_stream("test_file",
+                                                  b"data I want to store!")
+
+            await fs.rename(file_id, "new_test_name")
+
+    Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.
+
+    :Parameters:
+      - `file_id`: The _id of the file to be renamed.
+      - `new_filename`: The new name of the file.
+"""
+
+gridfs_upload_from_stream_doc = """Uploads a user file to a GridFS bucket.
+
+    Reads the contents of the user file from `source` and uploads
+    it to the file `filename`. Source can be a string or file-like object.
+    For example::
+
+        async def upload_from_stream():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            file_id = await fs.upload_from_stream(
+                "test_file",
+                b"data I want to store!",
+                metadata={"contentType": "text/plain"})
+
+    Raises :exc:`~gridfs.errors.NoFile` if no such version of
+    that file exists.
+    Raises :exc:`~ValueError` if `filename` is not a string.
+
+    :Parameters:
+      - `filename`: The name of the file to upload.
+      - `source`: The source stream of the content to be uploaded. Must be
+        a file-like object that implements :meth:`read` or a string.
+      - `chunk_size_bytes` (optional): The number of bytes per chunk of this
+        file. Defaults to the chunk_size_bytes of :class:`AsyncIOMotorGridFSBucket`.
+      - `metadata` (optional): User data for the 'metadata' field of the
+        files collection document. If not provided the metadata field will
+        be omitted from the files collection document.
+      - `session` (optional): a
+        :class:`~pymongo.client_session.ClientSession`, created with
+        :meth:`~MotorClient.start_session`.
+
+    Returns the _id of the uploaded file.
+"""
+
+gridfs_upload_from_stream_with_id_doc = """Uploads a user file to a GridFS bucket with a custom file id.
+
+    Reads the contents of the user file from `source` and uploads
+    it to the file `filename`. Source can be a string or file-like object.
+    For example::
+
+        async def upload_from_stream_with_id():
+            my_db = AsyncIOMotorClient().test
+            fs = AsyncIOMotorGridFSBucket(my_db)
+            file_id = await fs.upload_from_stream_with_id(
+                ObjectId(),
+                "test_file",
+                b"data I want to store!",
+                metadata={"contentType": "text/plain"})
+
+    Raises :exc:`~gridfs.errors.NoFile` if no such version of
+    that file exists.
+    Raises :exc:`~ValueError` if `filename` is not a string.
+
+    :Parameters:
+      - `file_id`: The id to use for this file. The id must not have
+        already been used for another file.
+      - `filename`: The name of the file to upload.
+      - `source`: The source stream of the content to be uploaded. Must be
+        a file-like object that implements :meth:`read` or a string.
+      - `chunk_size_bytes` (optional): The number of bytes per chunk of this
+        file. Defaults to the chunk_size_bytes of :class:`AsyncIOMotorGridFSBucket`.
+      - `metadata` (optional): User data for the 'metadata' field of the
+        files collection document. If not provided the metadata field will
+        be omitted from the files collection document.
+      - `session` (optional): a
+        :class:`~pymongo.client_session.ClientSession`, created with
+        :meth:`~MotorClient.start_session`.
+"""
diff --git a/motor/motor_gridfs.py b/motor/motor_gridfs.py
index 76078d70..c474a03d 100644
--- a/motor/motor_gridfs.py
+++ b/motor/motor_gridfs.py
@@ -22,6 +22,7 @@
 import pymongo.errors
 from gridfs import DEFAULT_CHUNK_SIZE, grid_file
 
+from motor import docstrings
 from motor.core import AgnosticCollection, AgnosticCursor, AgnosticDatabase
 from motor.metaprogramming import (
     AsyncCommand,
@@ -316,16 +317,24 @@ class AgnosticGridFSBucket(object):
     __motor_class_name__ = "MotorGridFSBucket"
     __delegate_class__ = gridfs.GridFSBucket
 
-    delete = AsyncCommand()
-    download_to_stream = AsyncCommand()
-    download_to_stream_by_name = AsyncCommand()
-    open_download_stream = AsyncCommand().wrap(gridfs.GridOut)
-    open_download_stream_by_name = AsyncCommand().wrap(gridfs.GridOut)
-    open_upload_stream = DelegateMethod().wrap(gridfs.GridIn)
-    open_upload_stream_with_id = DelegateMethod().wrap(gridfs.GridIn)
-    rename = AsyncCommand()
-    upload_from_stream = AsyncCommand()
-    upload_from_stream_with_id = AsyncCommand()
+    delete = AsyncCommand(doc=docstrings.gridfs_delete_doc)
+    download_to_stream = AsyncCommand(doc=docstrings.gridfs_download_to_stream_doc)
+    download_to_stream_by_name = AsyncCommand(doc=docstrings.gridfs_download_to_stream_by_name_doc)
+    open_download_stream = AsyncCommand(doc=docstrings.gridfs_open_download_stream_doc).wrap(
+        gridfs.GridOut
+    )
+    open_download_stream_by_name = AsyncCommand(
+        doc=docstrings.gridfs_open_download_stream_by_name_doc
+    ).wrap(gridfs.GridOut)
+    open_upload_stream = DelegateMethod(doc=docstrings.gridfs_open_upload_stream_doc).wrap(
+        gridfs.GridIn
+    )
+    open_upload_stream_with_id = DelegateMethod(
+        doc=docstrings.gridfs_open_upload_stream_with_id_doc
+    ).wrap(gridfs.GridIn)
+    rename = AsyncCommand(doc=docstrings.gridfs_rename_doc)
+    upload_from_stream = AsyncCommand(doc=docstrings.gridfs_upload_from_stream_doc)
+    upload_from_stream_with_id = AsyncCommand(doc=docstrings.gridfs_upload_from_stream_with_id_doc)
 
     def __init__(
         self,
diff --git a/tox.ini b/tox.ini
index d1079dfe..458bbd7a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -80,6 +80,7 @@ extras = encryption
 
 [testenv:py3-sphinx-docs]
+setenv = PYTHONWARNINGS=
 changedir = doc
 commands =
     sphinx-build -q -E -W -b html . {envtmpdir}/html {posargs}
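
Editor's note on the mechanism this patch relies on: the prose is moved out of the two gridfs.rst
pages into module-level constants in ``motor/docstrings.py``, and each constant is handed to the
method factory (``AsyncCommand`` or ``DelegateMethod``) through its ``doc=`` argument, so the
generated async methods carry the text and ``.. autoclass:: ... :members:`` can render it. The
sketch below is not Motor's actual metaprogramming (the real ``motor.metaprogramming`` machinery is
considerably more involved); it is only a minimal, hypothetical illustration of the ``doc=``
pattern, with invented names (``AsyncCommandSketch``, ``FakeGridFSBucket``,
``FakeMotorGridFSBucket``)::

    import asyncio
    import functools

    delete_doc = """Delete a file (stand-in for docstrings.gridfs_delete_doc)."""


    class AsyncCommandSketch:
        """Toy factory: wrap a sync delegate method in a coroutine and
        attach the docstring passed via doc=."""

        def __init__(self, doc=None):
            self.doc = doc

        def create_method(self, name, delegate_class):
            sync_method = getattr(delegate_class, name)

            @functools.wraps(sync_method)
            async def method(self, *args, **kwargs):
                # Run the blocking delegate call in a worker thread.
                return await asyncio.to_thread(sync_method, self.delegate, *args, **kwargs)

            if self.doc is not None:
                method.__doc__ = self.doc  # the text ".. autoclass::" will render
            return method


    class FakeGridFSBucket:
        """Stand-in for gridfs.GridFSBucket."""

        def delete(self, file_id):
            print("deleted", file_id)


    class FakeMotorGridFSBucket:
        def __init__(self, delegate):
            self.delegate = delegate


    FakeMotorGridFSBucket.delete = AsyncCommandSketch(doc=delete_doc).create_method(
        "delete", FakeGridFSBucket
    )

    asyncio.run(FakeMotorGridFSBucket(FakeGridFSBucket()).delete(42))
    print(FakeMotorGridFSBucket.delete.__doc__)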
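
Because both gridfs.rst pages now contain only ``.. autoclass::`` directives, the relocated text is
visible to Sphinx only if the wrapped methods expose it at import time. A quick smoke test along
these lines could confirm that, assuming Motor with this change installed and that the wrappers
publish the ``doc=`` text via ``__doc__`` (which is what autodoc reads)::

    from motor.motor_asyncio import AsyncIOMotorGridFSBucket

    for name in ("delete", "download_to_stream", "open_upload_stream",
                 "rename", "upload_from_stream"):
        doc = getattr(AsyncIOMotorGridFSBucket, name).__doc__ or ""
        first_line = doc.strip().splitlines()[0] if doc.strip() else "<missing docstring>"
        print(f"{name}: {first_line}")

The same check applies to the Tornado class, ``motor.motor_tornado.MotorGridFSBucket``.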
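
The relocated ``create_index_doc`` lists the TTL and partial-index options
(``expireAfterSeconds``, ``partialFilterExpression``) without showing them in use. A small
illustration, with invented database, collection, and field names::

    import pymongo
    from motor.motor_asyncio import AsyncIOMotorClient


    async def build_indexes():
        coll = AsyncIOMotorClient().test.events  # hypothetical collection

        # TTL index: documents are removed 3600 seconds after the UTC
        # datetime stored in "created_at".
        await coll.create_index("created_at", expireAfterSeconds=3600)

        # Partial index: only documents whose "status" is "active" are indexed.
        await coll.create_index(
            [("user_id", pymongo.ASCENDING)],
            partialFilterExpression={"status": "active"},
        )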