desc stringlengths 3 26.7k | decl stringlengths 11 7.89k | bodies stringlengths 8 553k |
|---|---|---|
def __getattr__(self, name):
    """Get a collection of this database by name.

    Raises InvalidName if an invalid collection name is used.

    :Parameters:
      - `name`: the name of the collection to get
    """
    if not name.startswith('_'):
        return self.__getitem__(name)
    # Leading-underscore attributes are almost always typos or attempts to
    # reach private API, so fail loudly with a pointer to item access.
    raise AttributeError(
        ('Database has no attribute %r. To access the %s collection, use database[%r].' % (name, name, name)))
|
def __getitem__(self, name):
    """Get a collection of this database by name.

    Raises InvalidName if an invalid collection name is used.

    :Parameters:
      - `name`: the name of the collection to get
    """
    # Item access is the canonical way to reach collections whose names
    # are not valid Python attributes.
    collection = Collection(self, name)
    return collection
|
def get_collection(self, name, codec_options=None, read_preference=None,
                   write_concern=None, read_concern=None):
    """Get a :class:`~pymongo.collection.Collection` with the given name
    and options.

    Useful for creating a :class:`~pymongo.collection.Collection` with
    different codec options, read preference, and/or write concern from
    this :class:`Database`.

    :Parameters:
      - `name`: the name of the collection
      - `codec_options` (optional): codec options to use for this collection
      - `read_preference` (optional): read preference to use
      - `write_concern` (optional): write concern to use
      - `read_concern` (optional): read concern to use
    """
    # create=False: never issue a server-side create from this accessor.
    return Collection(
        self, name, False,
        codec_options, read_preference, write_concern, read_concern)
|
def _collection_default_options(self, name, **kargs):
    """Get a Collection instance with the default settings.

    Used internally for collections (e.g. system collections) that must
    be accessed with default codec options, primary reads and an
    acknowledged write concern regardless of this database's settings.
    NOTE(review): ``**kargs`` is accepted but unused — kept for
    backward-compatible call sites.
    """
    if self.write_concern.acknowledged:
        wc = self.write_concern
    else:
        # Force an acknowledged write concern for internal writes.
        wc = WriteConcern()
    return self.get_collection(
        name,
        codec_options=DEFAULT_CODEC_OPTIONS,
        read_preference=ReadPreference.PRIMARY,
        write_concern=wc)
|
def create_collection(self, name, codec_options=None, read_preference=None,
                      write_concern=None, read_concern=None, **kwargs):
    """Create a new :class:`~pymongo.collection.Collection` in this
    database.

    Normally collection creation is automatic. This method should
    only be used to specify options on creation.
    :class:`~pymongo.errors.CollectionInvalid` will be raised if the
    collection already exists.

    Options should be passed as keyword arguments to this method.

    :Parameters:
      - `name`: the name of the collection to create
      - `codec_options` (optional): codec options to use
      - `read_preference` (optional): read preference to use
      - `write_concern` (optional): write concern to use
      - `read_concern` (optional): read concern to use
      - `**kwargs` (optional): additional options passed to the create command
    """
    existing = self.collection_names()
    if name in existing:
        raise CollectionInvalid(('collection %s already exists' % name))
    # create=True makes the Collection constructor send the create command.
    return Collection(
        self, name, True,
        codec_options, read_preference, write_concern, read_concern,
        **kwargs)
|
def _apply_incoming_manipulators(self, son, collection):
    """Apply incoming manipulators to `son`.

    Each registered manipulator transforms the document in registration
    order; the (possibly replaced) document is returned.
    """
    transformed = son
    for manip in self.__incoming_manipulators:
        transformed = manip.transform_incoming(transformed, collection)
    return transformed
|
def _apply_incoming_copying_manipulators(self, son, collection):
    """Apply incoming copying manipulators to `son`.

    Copying manipulators run after the in-place ones; each transforms the
    document in registration order.
    """
    transformed = son
    for manip in self.__incoming_copying_manipulators:
        transformed = manip.transform_incoming(transformed, collection)
    return transformed
|
def _fix_incoming(self, son, collection):
    """Apply manipulators to an incoming SON object before it gets stored.

    :Parameters:
      - `son`: the son object going into the database
      - `collection`: the collection the son object is being saved in
    """
    # In-place manipulators first, then the copying ones.
    fixed = self._apply_incoming_manipulators(son, collection)
    fixed = self._apply_incoming_copying_manipulators(fixed, collection)
    return fixed
|
def _fix_outgoing(self, son, collection):
    """Apply manipulators to a SON object as it comes out of the database.

    :Parameters:
      - `son`: the son object coming out of the database
      - `collection`: the collection the son object was saved in
    """
    # Outgoing transforms undo the incoming ones, so run in reverse
    # registration order: plain manipulators first, then copying ones.
    doc = son
    for manip in reversed(self.__outgoing_manipulators):
        doc = manip.transform_outgoing(doc, collection)
    for manip in reversed(self.__outgoing_copying_manipulators):
        doc = manip.transform_outgoing(doc, collection)
    return doc
|
'Internal command helper.'
| def _command(self, sock_info, command, slave_ok=False, value=1, check=True, allowable_errors=None, read_preference=ReadPreference.PRIMARY, codec_options=DEFAULT_CODEC_OPTIONS, write_concern=None, parse_write_concern_error=False, **kwargs):
| if isinstance(command, string_type):
command = SON([(command, value)])
if ((sock_info.max_wire_version >= 5) and write_concern):
command['writeConcern'] = write_concern.document
command.update(kwargs)
return sock_info.command(self.__name, command, slave_ok, read_preference, codec_options... |
def command(self, command, value=1, check=True, allowable_errors=None,
            read_preference=ReadPreference.PRIMARY,
            codec_options=DEFAULT_CODEC_OPTIONS, **kwargs):
    """Issue a MongoDB command.

    Send command `command` to the database and return the response. If
    `command` is an instance of :class:`basestring` (:class:`str` in
    python 3) then the command {`command`: `value`} will be sent.
    Otherwise, `command` must be an instance of :class:`dict` and will
    be sent as is.

    :Parameters:
      - `command`: the command to send
      - `value` (optional): value for a string command, defaults to 1
      - `check` (optional): raise OperationFailure on failure
      - `allowable_errors` (optional): errors to ignore when `check` is True
      - `read_preference` (optional): read preference for this command
      - `codec_options` (optional): codec options for this command
      - `**kwargs` (optional): additional command keyword arguments
    """
    # Check out a socket honoring the requested read preference and
    # delegate to the internal helper.
    with self.__client._socket_for_reads(read_preference) as (sock_info,
                                                              slave_ok):
        return self._command(
            sock_info, command, slave_ok, value, check, allowable_errors,
            read_preference, codec_options, **kwargs)
|
'Internal listCollections helper.'
| def _list_collections(self, sock_info, slave_okay, criteria=None):
| criteria = (criteria or {})
cmd = SON([('listCollections', 1), ('cursor', {})])
if criteria:
cmd['filter'] = criteria
if (sock_info.max_wire_version > 2):
coll = self['$cmd']
cursor = self._command(sock_info, cmd, slave_okay)['cursor']
return CommandCursor(coll, cursor, s... |
'Get a list of all the collection names in this database.
:Parameters:
- `include_system_collections` (optional): if ``False`` list
will not include system collections (e.g ``system.indexes``)'
| def collection_names(self, include_system_collections=True):
| with self.__client._socket_for_reads(ReadPreference.PRIMARY) as (sock_info, slave_okay):
wire_version = sock_info.max_wire_version
results = self._list_collections(sock_info, slave_okay)
names = [result['name'] for result in results]
if (wire_version <= 2):
names = [n[(len(self.__nam... |
'Drop a collection.
:Parameters:
- `name_or_collection`: the name of a collection to drop or the
collection object itself
.. note:: The :attr:`~pymongo.database.Database.write_concern` of
this database is automatically applied to this operation when using
MongoDB >= 3.4.
.. versionchanged:: 3.4
Apply this database\'s w... | def drop_collection(self, name_or_collection):
| name = name_or_collection
if isinstance(name, Collection):
name = name.name
if (not isinstance(name, string_type)):
raise TypeError(('name_or_collection must be an instance of %s' % (string_type.__name__,)))
self.__client._purge_index(self.__name, name)
with self.__... |
'Validate a collection.
Returns a dict of validation info. Raises CollectionInvalid if
validation fails.
:Parameters:
- `name_or_collection`: A Collection object or the name of a
collection to validate.
- `scandata`: Do extra checks beyond checking the overall
structure of the collection.
- `full`: Have the server do a... | def validate_collection(self, name_or_collection, scandata=False, full=False):
| name = name_or_collection
if isinstance(name, Collection):
name = name.name
if (not isinstance(name, string_type)):
raise TypeError(('name_or_collection must be an instance of %s or Collection' % (string_type.__name__,)))
result = self.command('validate', _unicode... |
'Get information on operations currently running.
:Parameters:
- `include_all` (optional): if ``True`` also list currently
idle operations in the result'
| def current_op(self, include_all=False):
| cmd = SON([('currentOp', 1), ('$all', include_all)])
with self.__client._socket_for_writes() as sock_info:
if (sock_info.max_wire_version >= 4):
return sock_info.command('admin', cmd)
else:
spec = ({'$all': True} if include_all else {})
x = helpers._first_batc... |
def profiling_level(self):
    """Get the database's current profiling level.

    Returns one of (:data:`~pymongo.OFF`, :data:`~pymongo.SLOW_ONLY`,
    :data:`~pymongo.ALL`).

    .. mongodoc:: profiling
    """
    # The server reports the previous level in the 'was' field when the
    # profile command is sent with -1.
    level = self.command('profile', (-1))['was']
    assert 0 <= level <= 2
    return level
|
'Set the database\'s profiling level.
:Parameters:
- `level`: Specifies a profiling level, see list of possible values
below.
- `slow_ms`: Optionally modify the threshold for the profile to
consider a query or operation. Even if the profiler is off queries
slower than the `slow_ms` level will get written to the logs.
... | def set_profiling_level(self, level, slow_ms=None):
| if ((not isinstance(level, int)) or (level < 0) or (level > 2)):
raise ValueError('level must be one of (OFF, SLOW_ONLY, ALL)')
if ((slow_ms is not None) and (not isinstance(slow_ms, int))):
raise TypeError('slow_ms must be an integer')
if (slow_ms is not Non... |
def profiling_info(self):
    """Returns a list containing current profiling information.

    .. mongodoc:: profiling
    """
    # Profiling data lives in the special system.profile collection.
    profile_collection = self['system.profile']
    return list(profile_collection.find())
|
'**DEPRECATED**: Get the error if one occurred on the last operation.
This method is obsolete: all MongoDB write operations (insert, update,
remove, and so on) use the write concern ``w=1`` and report their
errors by default.
.. versionchanged:: 2.8
Deprecated.'
| def error(self):
| warnings.warn('Database.error() is deprecated', DeprecationWarning, stacklevel=2)
error = self.command('getlasterror')
error_msg = error.get('err', '')
if (error_msg is None):
return None
if error_msg.startswith('not master'):
primary = self.__client.primary
if prima... |
def last_status(self):
    """**DEPRECATED**: Get status information from the last operation.

    This method is obsolete: all MongoDB write operations (insert, update,
    remove, and so on) use the write concern ``w=1`` and report their
    errors by default.

    Returns a SON object with status information.

    .. versionchanged:: 2.8
       Deprecated.
    """
    warnings.warn('last_status() is deprecated',
                  DeprecationWarning, stacklevel=2)
    status = self.command('getlasterror')
    return status
|
def previous_error(self):
    """**DEPRECATED**: Get the most recent error on this database.

    This method is obsolete: all MongoDB write operations (insert, update,
    remove, and so on) use the write concern ``w=1`` and report their
    errors by default.

    Only returns errors that have occurred since the last call to
    :meth:`reset_error_history`. Returns None if no such errors occurred.
    """
    warnings.warn('previous_error() is deprecated',
                  DeprecationWarning, stacklevel=2)
    error = self.command('getpreverror')
    # A null 'err' field means there was no previous error.
    return None if error.get('err', 0) is None else error
|
def reset_error_history(self):
    """**DEPRECATED**: Reset the error history of this database.

    This method is obsolete: all MongoDB write operations (insert, update,
    remove, and so on) use the write concern ``w=1`` and report their
    errors by default.

    Calls to :meth:`previous_error` will only return errors that have
    occurred since the most recent call to this method.
    """
    warnings.warn('reset_error_history() is deprecated',
                  DeprecationWarning, stacklevel=2)
    # Result is intentionally discarded; this command only clears state.
    self.command('reseterror')
|
def _default_role(self, read_only):
    """Return the default user role for this database.

    The admin database grants cluster-wide roles; any other database
    grants database-scoped roles.
    """
    admin = (self.name == 'admin')
    if read_only:
        return 'readAnyDatabase' if admin else 'read'
    return 'root' if admin else 'dbOwner'
|
'Use a command to create (if create=True) or modify a user.'
| def _create_or_update_user(self, create, name, password, read_only, **kwargs):
| opts = {}
if (read_only or (create and ('roles' not in kwargs))):
warnings.warn('Creating a user with the read_only option or without roles is deprecated in MongoDB >= 2.6', DeprecationWarning)
opts['roles'] = [self._default_role(read_only)]
elif ... |
'Uses v1 system to add users, i.e. saving to system.users.'
| def _legacy_add_user(self, name, password, read_only, **kwargs):
| system_users = self._collection_default_options('system.users')
user = (system_users.find_one({'user': name}) or {'user': name})
if (password is not None):
user['pwd'] = auth._password_digest(name, password)
if (read_only is not None):
user['readOnly'] = read_only
user.update(kwargs)... |
'Create user `name` with password `password`.
Add a new user with permissions for this :class:`Database`.
.. note:: Will change the password if user `name` already exists.
:Parameters:
- `name`: the name of the user to create
- `password` (optional): the password of the user to create. Can not
be used with the ``userSo... | def add_user(self, name, password=None, read_only=None, **kwargs):
| if (not isinstance(name, string_type)):
raise TypeError(('name must be an instance of %s' % (string_type.__name__,)))
if (password is not None):
if (not isinstance(password, string_type)):
raise TypeError(('password must be an instance of %s' % (st... |
'Remove user `name` from this :class:`Database`.
User `name` will no longer have permissions to access this
:class:`Database`.
:Parameters:
- `name`: the name of the user to remove'
| def remove_user(self, name):
| try:
cmd = SON([('dropUser', name)])
if (self.write_concern.acknowledged and self.write_concern.document):
cmd['writeConcern'] = self.write_concern.document
self.command(cmd)
except OperationFailure as exc:
if (exc.code in common.COMMAND_NOT_FOUND_CODES):
... |
'**DEPRECATED**: Authenticate to use this database.
Authentication lasts for the life of the underlying client
instance, or until :meth:`logout` is called.
Raises :class:`TypeError` if (required) `name`, (optional) `password`,
or (optional) `source` is not an instance of :class:`basestring`
(:class:`str` in python 3).
... | def authenticate(self, name=None, password=None, source=None, mechanism='DEFAULT', **kwargs):
| if ((name is not None) and (not isinstance(name, string_type))):
raise TypeError(('name must be an instance of %s' % (string_type.__name__,)))
if ((password is not None) and (not isinstance(password, string_type))):
raise TypeError(('password must be an instance ... |
def logout(self):
    """**DEPRECATED**: Deauthorize use of this database."""
    warnings.warn('Database.logout() is deprecated',
                  DeprecationWarning, stacklevel=2)
    # Dropping the stored credentials ends authentication for this db.
    self.client._purge_credentials(self.name)
|
'Dereference a :class:`~bson.dbref.DBRef`, getting the
document it points to.
Raises :class:`TypeError` if `dbref` is not an instance of
:class:`~bson.dbref.DBRef`. Returns a document, or ``None`` if
the reference does not point to a valid document. Raises
:class:`ValueError` if `dbref` has a database specified that
i... | def dereference(self, dbref, **kwargs):
| if (not isinstance(dbref, DBRef)):
raise TypeError(('cannot dereference a %s' % type(dbref)))
if ((dbref.database is not None) and (dbref.database != self.__name)):
raise ValueError(('trying to dereference a DBRef that points to another database (%r not ... |
def eval(self, code, *args):
    """**DEPRECATED**: Evaluate a JavaScript expression in MongoDB.

    :Parameters:
      - `code`: string representation of JavaScript code to be evaluated
      - `args` (optional): additional positional arguments are passed to
        the `code` being evaluated

    .. warning:: the eval command is deprecated in MongoDB 3.0 and
       will be removed in a future server version.
    """
    warnings.warn('Database.eval() is deprecated',
                  DeprecationWarning, stacklevel=2)
    # The server expects a Code instance; coerce plain strings.
    js = code if isinstance(code, Code) else Code(code)
    result = self.command('$eval', js, args=args)
    return result.get('retval', None)
|
def __call__(self, *args, **kwargs):
    """This is only here so that some API misusages are easier to debug."""
    # A Database is commonly "called" when a user typos a client method
    # name; point them at the method they probably wanted.
    message = ("'Database' object is not callable. If you meant to call the '%s' method on a '%s' object it is failing because no such method exists." % (self.__name, self.__client.__class__.__name__))
    raise TypeError(message)
|
def __init__(self, database):
    """**DEPRECATED**: Get a system js helper for the database `database`.

    SystemJS will be removed in PyMongo 4.0.
    """
    warnings.warn('SystemJS is deprecated',
                  DeprecationWarning, stacklevel=2)
    if database.write_concern.acknowledged:
        target = database
    else:
        # system.js writes must be acknowledged, so swap in a handle to
        # the same database with the default write concern.
        target = database.client.get_database(
            database.name, write_concern=WriteConcern())
    # Stored via object.__setattr__ — presumably this class customizes
    # attribute access elsewhere; confirm against the full class.
    object.__setattr__(self, '_db', target)
|
def list(self):
    """Get a list of the names of the functions stored in this database."""
    # Stored functions live in system.js; only their _id (the name) is
    # projected.
    cursor = self._db.system.js.find(projection=['_id'])
    return [doc['_id'] for doc in cursor]
|
@property
def codec_options(self):
    """Read only access to the :class:`~bson.codec_options.CodecOptions`
    of this instance.
    """
    options = self.__codec_options
    return options
|
@property
def write_concern(self):
    """Read only access to the :class:`~pymongo.write_concern.WriteConcern`
    of this instance.

    .. versionchanged:: 3.0
       The :attr:`write_concern` attribute is now read only.
    """
    concern = self.__write_concern
    return concern
|
@property
def read_preference(self):
    """Read only access to the read preference of this instance.

    .. versionchanged:: 3.0
       The :attr:`read_preference` attribute is now read only.
    """
    pref = self.__read_preference
    return pref
|
@property
def read_concern(self):
    """Read only access to the read concern of this instance.

    .. versionadded:: 3.2
    """
    concern = self.__read_concern
    return concern
|
def __init__(self, doc):
    """Parse an ismaster response from the server.

    :Parameters:
      - `doc`: the ismaster command response document
    """
    self._server_type = _get_server_type(doc)
    self._doc = doc
    # Writable server types: replica-set primary, standalone, or mongos.
    self._is_writable = self._server_type in (
        SERVER_TYPE.RSPrimary,
        SERVER_TYPE.Standalone,
        SERVER_TYPE.Mongos)
    # Readable: secondaries plus everything writable.
    # Consistency fix: use the freshly assigned self._server_type directly
    # (the original mixed self._server_type and the self.server_type
    # property within the same method).
    self._is_readable = (
        self._server_type == SERVER_TYPE.RSSecondary or self._is_writable)
|
@property
def document(self):
    """The complete ismaster command response document.

    .. versionadded:: 3.4
    """
    # Return a copy so callers cannot mutate the cached response.
    snapshot = self._doc.copy()
    return snapshot
|
@property
def all_hosts(self):
    """List of hosts, passives, and arbiters known to this server."""
    doc = self._doc
    members = itertools.chain(
        doc.get('hosts', []),
        doc.get('passives', []),
        doc.get('arbiters', []))
    # Normalize each address before deduplicating into a set.
    return set(imap(common.clean_node, members))
|
@property
def tags(self):
    """Replica set member tags or empty dict."""
    member_tags = self._doc.get('tags', {})
    return member_tags
|
@property
def primary(self):
    """This server's opinion about who the primary is, or None."""
    primary_host = self._doc.get('primary')
    if not primary_host:
        return None
    # Split "host:port" into the (host, port) tuple form used internally.
    return common.partition_node(primary_host)
|
@property
def replica_set_name(self):
    """Replica set name or None."""
    set_name = self._doc.get('setName')
    return set_name
|
'Get / create a Mongo collection.
Raises :class:`TypeError` if `name` is not an instance of
:class:`basestring` (:class:`str` in python 3). Raises
:class:`~pymongo.errors.InvalidName` if `name` is not a valid
collection name. Any additional keyword arguments will be used
as options passed to the create command. See
:me... | def __init__(self, database, name, create=False, codec_options=None, read_preference=None, write_concern=None, read_concern=None, **kwargs):
| super(Collection, self).__init__((codec_options or database.codec_options), (read_preference or database.read_preference), (write_concern or database.write_concern), (read_concern or database.read_concern))
if (not isinstance(name, string_type)):
raise TypeError(('name must be an instance ... |
def _command(self, sock_info, command, slave_ok=False, read_preference=None,
             codec_options=None, check=True, allowable_errors=None,
             read_concern=DEFAULT_READ_CONCERN, write_concern=None,
             parse_write_concern_error=False, collation=None):
    """Internal command helper.

    :Parameters:
      - `sock_info` - A SocketInfo instance.
      - `command` - The command itself, as a SON instance.
      - `slave_ok`: whether to set the SlaveOkay wire protocol bit.
      - `read_preference` (optional) - read preference; defaults to this
        collection's read preference.
      - `codec_options` (optional) - An instance of
        :class:`~bson.codec_options.CodecOptions`; defaults to this
        collection's codec options.
      - `check`: raise OperationFailure if the command failed.
      - `allowable_errors`: errors to ignore when `check` is True.
      - `read_concern`, `write_concern`, `parse_write_concern_error`,
        `collation`: forwarded to the socket-level command call.
    """
    # Fall back to this collection's own settings when not overridden.
    pref = read_preference or self.read_preference
    opts = codec_options or self.codec_options
    return sock_info.command(
        self.__database.name, command, slave_ok, pref, opts, check,
        allowable_errors, read_concern=read_concern,
        write_concern=write_concern,
        parse_write_concern_error=parse_write_concern_error,
        collation=collation)
|
'Sends a create command with the given options.'
| def __create(self, options, collation):
| cmd = SON([('create', self.__name)])
if options:
if ('size' in options):
options['size'] = float(options['size'])
cmd.update(options)
with self._socket_for_writes() as sock_info:
self._command(sock_info, cmd, read_preference=ReadPreference.PRIMARY, write_concern=self.writ... |
def __getattr__(self, name):
    """Get a sub-collection of this collection by name.

    Raises InvalidName if an invalid collection name is used.

    :Parameters:
      - `name`: the name of the collection to get
    """
    if not name.startswith('_'):
        return self.__getitem__(name)
    # Leading-underscore names are almost certainly mistakes; show the
    # full dotted name and how to reach it via item access.
    full_name = (_UJOIN % (self.__name, name))
    raise AttributeError(
        ("Collection has no attribute %r. To access the %s collection, use database['%s']." % (name, full_name, full_name)))
|
@property
def full_name(self):
    """The full name of this :class:`Collection`.

    The full name is of the form `database_name.collection_name`.
    """
    dotted = self.__full_name
    return dotted
|
@property
def name(self):
    """The name of this :class:`Collection`."""
    collection_name = self.__name
    return collection_name
|
@property
def database(self):
    """The :class:`~pymongo.database.Database` that this
    :class:`Collection` is a part of.
    """
    parent = self.__database
    return parent
|
def with_options(self, codec_options=None, read_preference=None,
                 write_concern=None, read_concern=None):
    """Get a clone of this collection changing the specified settings.

    Any option left as ``None`` keeps this collection's current value.

    :Parameters:
      - `codec_options` (optional): codec options for the clone
      - `read_preference` (optional): read preference for the clone
      - `write_concern` (optional): write concern for the clone
      - `read_concern` (optional): read concern for the clone
    """
    return Collection(
        self.__database,
        self.__name,
        False,
        codec_options or self.codec_options,
        read_preference or self.read_preference,
        write_concern or self.write_concern,
        read_concern or self.read_concern)
|
def initialize_unordered_bulk_op(self, bypass_document_validation=False):
    """**DEPRECATED** - Initialize an unordered batch of write operations.

    Operations will be performed on the server in arbitrary order,
    possibly in parallel. All operations will be attempted.

    :Parameters:
      - `bypass_document_validation`: (optional) If ``True``, allows the
        write to opt-out of document level validation. Default is ``False``.
    """
    warnings.warn('initialize_unordered_bulk_op is deprecated',
                  DeprecationWarning, stacklevel=2)
    # ordered=False -> unordered bulk semantics.
    builder = BulkOperationBuilder(self, False, bypass_document_validation)
    return builder
|
def initialize_ordered_bulk_op(self, bypass_document_validation=False):
    """**DEPRECATED** - Initialize an ordered batch of write operations.

    Operations will be performed on the server serially, in the order
    provided. If an error occurs all remaining operations are aborted.

    :Parameters:
      - `bypass_document_validation`: (optional) If ``True``, allows the
        write to opt-out of document level validation. Default is ``False``.
    """
    warnings.warn('initialize_ordered_bulk_op is deprecated',
                  DeprecationWarning, stacklevel=2)
    # ordered=True -> stop at the first error.
    builder = BulkOperationBuilder(self, True, bypass_document_validation)
    return builder
|
'Send a batch of write operations to the server.
Requests are passed as a list of write operation instances (
:class:`~pymongo.operations.InsertOne`,
:class:`~pymongo.operations.UpdateOne`,
:class:`~pymongo.operations.UpdateMany`,
:class:`~pymongo.operations.ReplaceOne`,
:class:`~pymongo.operations.DeleteOne`, or
:clas... | def bulk_write(self, requests, ordered=True, bypass_document_validation=False):
| if (not isinstance(requests, list)):
raise TypeError('requests must be a list')
blk = _Bulk(self, ordered, bypass_document_validation)
for request in requests:
try:
request._add_to_bulk(blk)
except AttributeError:
raise TypeError(('%r is not ... |
'Internal legacy write helper.'
| def _legacy_write(self, sock_info, name, cmd, acknowledged, op_id, bypass_doc_val, func, *args):
| if (bypass_doc_val and (not acknowledged) and (sock_info.max_wire_version >= 4)):
raise OperationFailure('Cannot set bypass_document_validation with unacknowledged write concern')
listeners = self.database.client._event_listeners
publish = listeners.enabled_for_commands
if publ... |
'Internal helper for inserting a single document.'
| def _insert_one(self, sock_info, doc, ordered, check_keys, manipulate, write_concern, op_id, bypass_doc_val):
| if manipulate:
doc = self.__database._apply_incoming_manipulators(doc, self)
if ((not isinstance(doc, RawBSONDocument)) and ('_id' not in doc)):
doc['_id'] = ObjectId()
doc = self.__database._apply_incoming_copying_manipulators(doc, self)
concern = (write_concern or self.writ... |
'Internal insert helper.'
| def _insert(self, sock_info, docs, ordered=True, check_keys=True, manipulate=False, write_concern=None, op_id=None, bypass_doc_val=False):
| if isinstance(docs, collections.Mapping):
return self._insert_one(sock_info, docs, ordered, check_keys, manipulate, write_concern, op_id, bypass_doc_val)
ids = []
if manipulate:
def gen():
'Generator that applies SON manipulators to each document\n ... |
'Insert a single document.
>>> db.test.count({\'x\': 1})
0
>>> result = db.test.insert_one({\'x\': 1})
>>> result.inserted_id
ObjectId(\'54f112defba522406c9cc208\')
>>> db.test.find_one({\'x\': 1})
{u\'x\': 1, u\'_id\': ObjectId(\'54f112defba522406c9cc208\')}
:Parameters:
- `document`: The document to insert. Must be a... | def insert_one(self, document, bypass_document_validation=False):
| common.validate_is_document_type('document', document)
if (not (isinstance(document, RawBSONDocument) or ('_id' in document))):
document['_id'] = ObjectId()
with self._socket_for_writes() as sock_info:
return InsertOneResult(self._insert(sock_info, document, bypass_doc_val=bypass_document_va... |
'Insert an iterable of documents.
>>> db.test.count()
0
>>> result = db.test.insert_many([{\'x\': i} for i in range(2)])
>>> result.inserted_ids
[ObjectId(\'54f113fffba522406c9cc20e\'), ObjectId(\'54f113fffba522406c9cc20f\')]
>>> db.test.count()
2
:Parameters:
- `documents`: A iterable of documents to insert.
- `ordere... | def insert_many(self, documents, ordered=True, bypass_document_validation=False):
| if ((not isinstance(documents, collections.Iterable)) or (not documents)):
raise TypeError('documents must be a non-empty list')
inserted_ids = []
def gen():
'A generator that validates documents and handles _ids.'
for document in documents:
... |
'Internal update / replace helper.'
| def _update(self, sock_info, criteria, document, upsert=False, check_keys=True, multi=False, manipulate=False, write_concern=None, op_id=None, ordered=True, bypass_doc_val=False, collation=None):
| common.validate_boolean('upsert', upsert)
if manipulate:
document = self.__database._fix_incoming(document, self)
collation = validate_collation_or_none(collation)
concern = (write_concern or self.write_concern).document
acknowledged = (concern.get('w') != 0)
update_doc = SON([('q', crit... |
'Replace a single document matching the filter.
>>> for doc in db.test.find({}):
... print(doc)
{u\'x\': 1, u\'_id\': ObjectId(\'54f4c5befba5220aa4d6dee7\')}
>>> result = db.test.replace_one({\'x\': 1}, {\'y\': 1})
>>> result.matched_count
1
>>> result.modified_count
1
>>> for doc in db.test.find({}):
... print... | def replace_one(self, filter, replacement, upsert=False, bypass_document_validation=False, collation=None):
| common.validate_is_mapping('filter', filter)
common.validate_ok_for_replace(replacement)
with self._socket_for_writes() as sock_info:
result = self._update(sock_info, filter, replacement, upsert, bypass_doc_val=bypass_document_validation, collation=collation)
return UpdateResult(result, self.wri... |
'Update a single document matching the filter.
>>> for doc in db.test.find():
... print(doc)
{u\'x\': 1, u\'_id\': 0}
{u\'x\': 1, u\'_id\': 1}
{u\'x\': 1, u\'_id\': 2}
>>> result = db.test.update_one({\'x\': 1}, {\'$inc\': {\'x\': 3}})
>>> result.matched_count
1
>>> result.modified_count
1
>>> for doc in db.test.fi... | def update_one(self, filter, update, upsert=False, bypass_document_validation=False, collation=None):
| common.validate_is_mapping('filter', filter)
common.validate_ok_for_update(update)
with self._socket_for_writes() as sock_info:
result = self._update(sock_info, filter, update, upsert, check_keys=False, bypass_doc_val=bypass_document_validation, collation=collation)
return UpdateResult(result, s... |
'Update one or more documents that match the filter.
>>> for doc in db.test.find():
... print(doc)
{u\'x\': 1, u\'_id\': 0}
{u\'x\': 1, u\'_id\': 1}
{u\'x\': 1, u\'_id\': 2}
>>> result = db.test.update_many({\'x\': 1}, {\'$inc\': {\'x\': 3}})
>>> result.matched_count
3
>>> result.modified_count
3
>>> for doc in db.... | def update_many(self, filter, update, upsert=False, bypass_document_validation=False, collation=None):
| common.validate_is_mapping('filter', filter)
common.validate_ok_for_update(update)
with self._socket_for_writes() as sock_info:
result = self._update(sock_info, filter, update, upsert, check_keys=False, multi=True, bypass_doc_val=bypass_document_validation, collation=collation)
return UpdateResu... |
def drop(self):
    """Alias for :meth:`~pymongo.database.Database.drop_collection`.

    The following two calls are equivalent:

      >>> db.foo.drop()
      >>> db.drop_collection("foo")
    """
    parent_db = self.__database
    parent_db.drop_collection(self.__name)
|
'Internal delete helper.'
| def _delete(self, sock_info, criteria, multi, write_concern=None, op_id=None, ordered=True, collation=None):
| common.validate_is_mapping('filter', criteria)
concern = (write_concern or self.write_concern).document
acknowledged = (concern.get('w') != 0)
delete_doc = SON([('q', criteria), ('limit', int((not multi)))])
collation = validate_collation_or_none(collation)
if (collation is not None):
if... |
def delete_one(self, filter, collation=None):
    """Delete a single document matching the filter.

    :Parameters:
      - `filter`: A query that matches the document to delete.
      - `collation` (optional): An instance of
        :class:`~pymongo.collation.Collation`.

    :Returns:
      An instance of :class:`~pymongo.results.DeleteResult`.
    """
    with self._socket_for_writes() as sock_info:
        # multi=False: remove at most one matching document.
        raw_result = self._delete(sock_info, filter, False,
                                  collation=collation)
        return DeleteResult(raw_result, self.write_concern.acknowledged)
|
def delete_many(self, filter, collation=None):
    """Delete one or more documents matching the filter.

    :Parameters:
      - `filter`: A query that matches the documents to delete.
      - `collation` (optional): An instance of
        :class:`~pymongo.collation.Collation`.

    :Returns:
      An instance of :class:`~pymongo.results.DeleteResult`.
    """
    with self._socket_for_writes() as sock_info:
        # multi=True: remove every matching document.
        raw_result = self._delete(sock_info, filter, True,
                                  collation=collation)
        return DeleteResult(raw_result, self.write_concern.acknowledged)
|
def find_one(self, filter=None, *args, **kwargs):
    """Get a single document from the database.

    All arguments to :meth:`find` are also valid arguments for
    :meth:`find_one`, although any `limit` argument will be ignored.
    Returns a single document, or ``None`` if no matching document is
    found.

    The :meth:`find_one` method obeys the :attr:`read_preference` of
    this :class:`Collection`.

    :Parameters:
      - `filter` (optional): a dictionary specifying the query to be
        performed OR any other type, which is treated as an ``_id`` query.
      - `*args`, `**kwargs` (optional): any additional positional or
        keyword arguments are passed through to :meth:`find`.
    """
    # Fix: `collections.Mapping` was removed from the `collections`
    # namespace in Python 3.10; resolve the ABC compatibly.
    try:
        from collections.abc import Mapping
    except ImportError:  # Python 2
        from collections import Mapping
    if filter is not None and not isinstance(filter, Mapping):
        # Any non-mapping filter is shorthand for an _id lookup.
        filter = {'_id': filter}
    cursor = self.find(filter, *args, **kwargs)
    # limit(-1) asks the server for at most one document.
    for result in cursor.limit(-1):
        return result
    return None
|
def find(self, *args, **kwargs):
    """Query the database.

    The `filter` argument is a prototype document that all results must
    match. Matches can have other keys *in addition* to those in the
    filter. The `projection` argument is used to specify a subset of
    fields to return. All arguments are forwarded to
    :class:`~pymongo.cursor.Cursor`.
    """
    # The Cursor is lazy: no server round trip happens until iteration.
    cursor = Cursor(self, *args, **kwargs)
    return cursor
|
'Scan this entire collection in parallel.
Returns a list of up to ``num_cursors`` cursors that can be iterated
concurrently. As long as the collection is not modified during
scanning, each document appears once in one of the cursors result
sets.
For example, to process each document in a collection using some
thread-sa... | def parallel_scan(self, num_cursors, **kwargs):
| cmd = SON([('parallelCollectionScan', self.__name), ('numCursors', num_cursors)])
cmd.update(kwargs)
with self._socket_for_reads() as (sock_info, slave_ok):
result = self._command(sock_info, cmd, slave_ok, read_concern=self.read_concern)
return [CommandCursor(self, cursor['cursor'], sock_info.ad... |
'Internal count helper.'
| def _count(self, cmd, collation=None):
| with self._socket_for_reads() as (sock_info, slave_ok):
res = self._command(sock_info, cmd, slave_ok, allowable_errors=['ns missing'], codec_options=self.__write_response_codec_options, read_concern=self.read_concern, collation=collation)
if (res.get('errmsg', '') == 'ns missing'):
return ... |
'Get the number of documents in this collection.
All optional count parameters should be passed as keyword arguments
to this method. Valid options include:
- `hint` (string or list of tuples): The index to use. Specify either
the index name as a string or the index specification as a list of
tuples (e.g. [(\'a\', pymon... | def count(self, filter=None, **kwargs):
| cmd = SON([('count', self.__name)])
if (filter is not None):
if ('query' in kwargs):
raise ConfigurationError("can't pass both filter and query")
kwargs['query'] = filter
if (('hint' in kwargs) and (not isinstance(kwargs['hint'], string_type))):
kwargs['hin... |
'Create one or more indexes on this collection.
>>> from pymongo import IndexModel, ASCENDING, DESCENDING
>>> index1 = IndexModel([("hello", DESCENDING),
... ("world", ASCENDING)], name="hello_world")
>>> index2 = IndexModel([("goodbye", DESCENDING)])
>>> db.test.create_indexes([index1, index2])
["... | def create_indexes(self, indexes):
| if (not isinstance(indexes, list)):
raise TypeError('indexes must be a list')
names = []
def gen_indexes():
for index in indexes:
if (not isinstance(index, IndexModel)):
raise TypeError(('%r is not an instance of pymongo.operations.In... |
'Internal create index helper.
:Parameters:
- `keys`: a list of tuples [(key, type), (key, type), ...]
- `index_options`: a dict of index options.'
| def __create_index(self, keys, index_options):
| index_doc = helpers._index_document(keys)
index = {'key': index_doc}
collation = validate_collation_or_none(index_options.pop('collation', None))
index.update(index_options)
with self._socket_for_writes() as sock_info:
if (collation is not None):
if (sock_info.max_wire_version < ... |
def create_index(self, keys, **kwargs):
    """Create an index on this collection and return its name.

    `keys` is either a single key or a list of (key, direction)
    pairs.  Remaining keyword arguments are passed through to the
    server as index options; when the caller does not supply a
    ``name`` option one is derived from the keys.
    """
    # Normalize the key spec into a list of (key, direction) pairs.
    key_list = helpers._index_list(keys)
    # Honour a caller-supplied name, otherwise generate one.
    if 'name' not in kwargs:
        kwargs['name'] = helpers._gen_index_name(key_list)
    index_name = kwargs['name']
    self.__create_index(key_list, kwargs)
    return index_name
'**DEPRECATED** - Ensures that an index exists on this collection.
.. versionchanged:: 3.0
**DEPRECATED**'
| def ensure_index(self, key_or_list, cache_for=300, **kwargs):
| warnings.warn('ensure_index is deprecated. Use create_index instead.', DeprecationWarning, stacklevel=2)
if (not (isinstance(cache_for, integer_types) or isinstance(cache_for, float))):
raise TypeError('cache_for must be an integer or float.')
if ('drop_dups' in kwar... |
def drop_indexes(self):
    """Drop every index on this collection.

    Safe to call on non-existent collections or collections with no
    indexes; errors other than that raise OperationFailure.
    """
    # Invalidate any client-side cached index state first.
    client = self.__database.client
    client._purge_index(self.__database.name, self.__name)
    # The '*' wildcard asks the server to drop all indexes at once.
    self.drop_index('*')
'Drops the specified index on this collection.
Can be used on non-existant collections or collections with no
indexes. Raises OperationFailure on an error (e.g. trying to
drop an index that does not exist). `index_or_name`
can be either an index name (as returned by `create_index`),
or an index specifier (as passed to... | def drop_index(self, index_or_name):
| name = index_or_name
if isinstance(index_or_name, list):
name = helpers._gen_index_name(index_or_name)
if (not isinstance(name, string_type)):
raise TypeError('index_or_name must be an index name or list')
self.__database.client._purge_index(self.__database.name, sel... |
def reindex(self):
    """Rebuild all indexes on this collection.

    .. warning:: Indexes are rebuilt in the foreground, blocking all
       other operations on the collection; this can be slow for large
       collections.
    """
    command = SON([('reIndex', self.__name)])
    # reIndex must run on the primary; surface any
    # writeConcernError reported by the server.
    with self._socket_for_writes() as sock_info:
        return self._command(
            sock_info, command,
            read_preference=ReadPreference.PRIMARY,
            parse_write_concern_error=True)
'Get a cursor over the index documents for this collection.
>>> for index in db.test.list_indexes():
... print(index)
SON([(u\'v\', 1), (u\'key\', SON([(u\'_id\', 1)])),
(u\'name\', u\'_id_\'), (u\'ns\', u\'test.test\')])
:Returns:
An instance of :class:`~pymongo.command_cursor.CommandCursor`.
.. versionadded:: 3.0... | def list_indexes(self):
| codec_options = CodecOptions(SON)
coll = self.with_options(codec_options)
with self._socket_for_primary_reads() as (sock_info, slave_ok):
cmd = SON([('listIndexes', self.__name), ('cursor', {})])
if (sock_info.max_wire_version > 2):
try:
cursor = self._command(soc... |
def index_information(self):
    """Return a mapping of index name -> index description.

    Each value is a dict describing one index and always contains a
    ``"key"`` entry holding (key, direction) pairs; other per-index
    options (e.g. ``"unique"``) appear only when set on the index.
    """
    details = {}
    for raw in self.list_indexes():
        # Convert the key document into (key, direction) pairs.
        raw['key'] = raw['key'].items()
        as_dict = dict(raw)
        # The index name becomes the lookup key, not a field.
        details[as_dict.pop('name')] = as_dict
    return details
'Get the options set on this collection.
Returns a dictionary of options and their values - see
:meth:`~pymongo.database.Database.create_collection` for more
information on the possible options. Returns an empty
dictionary if the collection has not been created yet.'
| def options(self):
| with self._socket_for_primary_reads() as (sock_info, slave_ok):
if (sock_info.max_wire_version > 2):
criteria = {'name': self.__name}
else:
criteria = {'name': self.__full_name}
cursor = self.__database._list_collections(sock_info, slave_ok, criteria)
result = Non... |
'Perform an aggregation using the aggregation framework on this
collection.
All optional aggregate parameters should be passed as keyword arguments
to this method. Valid options include, but are not limited to:
- `allowDiskUse` (bool): Enables writing to temporary files. When set
to True, aggregation stages can write d... | def aggregate(self, pipeline, **kwargs):
| if (not isinstance(pipeline, list)):
raise TypeError('pipeline must be a list')
if ('explain' in kwargs):
raise ConfigurationError('The explain option is not supported. Use Database.command instead.')
collation = validate_collation_or_none(kwargs.pop('coll... |
'Perform a query similar to an SQL *group by* operation.
**DEPRECATED** - The group command was deprecated in MongoDB 3.4. The
:meth:`~group` method is deprecated and will be removed in PyMongo 4.0.
Use :meth:`~aggregate` with the `$group` stage or :meth:`~map_reduce`
instead.
.. versionchanged:: 3.5
Deprecated the gro... | def group(self, key, condition, initial, reduce, finalize=None, **kwargs):
| warnings.warn('The group method is deprecated and will be removed in PyMongo 4.0. Use the aggregate method with the $group stage or the map_reduce method instead.', DeprecationWarning, stacklevel=2)
group = {}
if isinstance(key, string_... |
'Rename this collection.
If operating in auth mode, client must be authorized as an
admin to perform this operation. Raises :class:`TypeError` if
`new_name` is not an instance of :class:`basestring`
(:class:`str` in python 3). Raises :class:`~pymongo.errors.InvalidName`
if `new_name` is not a valid collection name.
:Pa... | def rename(self, new_name, **kwargs):
| if (not isinstance(new_name, string_type)):
raise TypeError(('new_name must be an instance of %s' % (string_type.__name__,)))
if ((not new_name) or ('..' in new_name)):
raise InvalidName('collection names cannot be empty')
if ((new_name[0] == '.') or (new_name[(... |
'Get a list of distinct values for `key` among all documents
in this collection.
Raises :class:`TypeError` if `key` is not an instance of
:class:`basestring` (:class:`str` in python 3).
All optional distinct parameters should be passed as keyword arguments
to this method. Valid options include:
- `maxTimeMS` (int): The... | def distinct(self, key, filter=None, **kwargs):
| if (not isinstance(key, string_type)):
raise TypeError(('key must be an instance of %s' % (string_type.__name__,)))
cmd = SON([('distinct', self.__name), ('key', key)])
if (filter is not None):
if ('query' in kwargs):
raise ConfigurationError("can't pass b... |
'Perform a map/reduce operation on this collection.
If `full_response` is ``False`` (default) returns a
:class:`~pymongo.collection.Collection` instance containing
the results of the operation. Otherwise, returns the full
response from the server to the `map reduce command`_.
:Parameters:
- `map`: map function (as a Ja... | def map_reduce(self, map, reduce, out, full_response=False, **kwargs):
| if (not isinstance(out, (string_type, collections.Mapping))):
raise TypeError(("'out' must be an instance of %s or a mapping" % (string_type.__name__,)))
cmd = SON([('mapreduce', self.__name), ('map', map), ('reduce', reduce), ('out', out)])
collation = validate_collation_... |
'Perform an inline map/reduce operation on this collection.
Perform the map/reduce operation on the server in RAM. A result
collection is not created. The result set is returned as a list
of documents.
If `full_response` is ``False`` (default) returns the
result documents in a list. Otherwise, returns the full
response... | def inline_map_reduce(self, map, reduce, full_response=False, **kwargs):
| cmd = SON([('mapreduce', self.__name), ('map', map), ('reduce', reduce), ('out', {'inline': 1})])
collation = validate_collation_or_none(kwargs.pop('collation', None))
cmd.update(kwargs)
with self._socket_for_reads() as (sock_info, slave_ok):
if ((sock_info.max_wire_version >= 4) and ('readConce... |
'Internal findAndModify helper.'
| def __find_and_modify(self, filter, projection, sort, upsert=None, return_document=ReturnDocument.BEFORE, **kwargs):
| common.validate_is_mapping('filter', filter)
if (not isinstance(return_document, bool)):
raise ValueError('return_document must be ReturnDocument.BEFORE or ReturnDocument.AFTER')
collation = validate_collation_or_none(kwargs.pop('collation', None))
cmd = SON([('findAndModify', sel... |
def find_one_and_delete(self, filter, projection=None, sort=None, **kwargs):
    """Atomically delete one matching document and return it.

    :Parameters:
      - `filter`: query selecting the document to remove.
      - `projection` (optional): fields to include in the returned
        document.
      - `sort` (optional): ordering applied when several documents
        match; the first document in sort order is deleted.

    Extra keyword arguments are forwarded to the underlying
    findAndModify helper.
    """
    # findAndModify with remove=True deletes rather than updates.
    kwargs['remove'] = True
    return self.__find_and_modify(filter, projection, sort, **kwargs)
def find_one_and_replace(self, filter, replacement, projection=None, sort=None, upsert=False, return_document=ReturnDocument.BEFORE, **kwargs):
    """Atomically replace one matching document, returning either the
    original or the replacement document.

    Unlike :meth:`find_one_and_update`, the matched document is
    replaced wholesale by `replacement` rather than modified in
    place.  `return_document` selects which version is returned.
    """
    # Replacement documents must not contain update operators.
    common.validate_ok_for_replace(replacement)
    kwargs['update'] = replacement
    return self.__find_and_modify(
        filter, projection, sort, upsert, return_document, **kwargs)
def find_one_and_update(self, filter, update, projection=None, sort=None, upsert=False, return_document=ReturnDocument.BEFORE, **kwargs):
    """Atomically update one matching document, returning either the
    original or the updated document.

    `update` must be an update-operator document (e.g. ``{'$inc':
    ...}``); `return_document` selects whether the pre- or post-update
    version is returned.
    """
    # Update documents must consist of update operators only.
    common.validate_ok_for_update(update)
    kwargs['update'] = update
    return self.__find_and_modify(
        filter, projection, sort, upsert, return_document, **kwargs)
'Save a document in this collection.
**DEPRECATED** - Use :meth:`insert_one` or :meth:`replace_one` instead.
.. versionchanged:: 3.0
Removed the `safe` parameter. Pass ``w=0`` for unacknowledged write
operations.'
| def save(self, to_save, manipulate=True, check_keys=True, **kwargs):
| warnings.warn('save is deprecated. Use insert_one or replace_one instead', DeprecationWarning, stacklevel=2)
common.validate_is_document_type('to_save', to_save)
write_concern = None
collation = validate_collation_or_none(kwargs.pop('collation', None))
if kwargs:
write_c... |
'Insert a document(s) into this collection.
**DEPRECATED** - Use :meth:`insert_one` or :meth:`insert_many` instead.
.. versionchanged:: 3.0
Removed the `safe` parameter. Pass ``w=0`` for unacknowledged write
operations.'
| def insert(self, doc_or_docs, manipulate=True, check_keys=True, continue_on_error=False, **kwargs):
| warnings.warn('insert is deprecated. Use insert_one or insert_many instead.', DeprecationWarning, stacklevel=2)
write_concern = None
if kwargs:
write_concern = WriteConcern(**kwargs)
with self._socket_for_writes() as sock_info:
return self._insert(sock_info, doc_or_d... |
'Update a document(s) in this collection.
**DEPRECATED** - Use :meth:`replace_one`, :meth:`update_one`, or
:meth:`update_many` instead.
.. versionchanged:: 3.0
Removed the `safe` parameter. Pass ``w=0`` for unacknowledged write
operations.'
| def update(self, spec, document, upsert=False, manipulate=False, multi=False, check_keys=True, **kwargs):
| warnings.warn('update is deprecated. Use replace_one, update_one or update_many instead.', DeprecationWarning, stacklevel=2)
common.validate_is_mapping('spec', spec)
common.validate_is_mapping('document', document)
if document:
first = next(iter(document))
if firs... |
'Remove a document(s) from this collection.
**DEPRECATED** - Use :meth:`delete_one` or :meth:`delete_many` instead.
.. versionchanged:: 3.0
Removed the `safe` parameter. Pass ``w=0`` for unacknowledged write
operations.'
| def remove(self, spec_or_id=None, multi=True, **kwargs):
| warnings.warn('remove is deprecated. Use delete_one or delete_many instead.', DeprecationWarning, stacklevel=2)
if (spec_or_id is None):
spec_or_id = {}
if (not isinstance(spec_or_id, collections.Mapping)):
spec_or_id = {'_id': spec_or_id}
write_concern = None
co... |
'Update and return an object.
**DEPRECATED** - Use :meth:`find_one_and_delete`,
:meth:`find_one_and_replace`, or :meth:`find_one_and_update` instead.'
| def find_and_modify(self, query={}, update=None, upsert=False, sort=None, full_response=False, manipulate=False, **kwargs):
| warnings.warn('find_and_modify is deprecated, use find_one_and_delete, find_one_and_replace, or find_one_and_update instead', DeprecationWarning, stacklevel=2)
if ((not update) and (not kwargs.get('remove', None))):
raise ValueError('Must either update or remove')
... |
'This is only here so that some API misusages are easier to debug.'
| def __call__(self, *args, **kwargs):
| if ('.' not in self.__name):
raise TypeError(("'Collection' object is not callable. If you meant to call the '%s' method on a 'Database' object it is failing because no such method exists." % self.__name))
raise TypeError(("'Collect... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.