desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Sorts this cursor\'s results. Pass a field name and a direction, either :data:`~pymongo.ASCENDING` or :data:`~pymongo.DESCENDING`:: for doc in collection.find().sort(\'field\', pymongo.ASCENDING): print(doc) To sort by multiple fields, pass a list of (key, direction) pairs:: for doc in collection.find().sort([ (\'fiel...
def sort(self, key_or_list, direction=None):
    """Set the sort order for this cursor's results.

    `key_or_list` is either a single field name (with optional
    `direction`) or a list of (key, direction) pairs.  Returns ``self``
    so calls can be chained.
    """
    self.__check_okay_to_chain()
    index_keys = helpers._index_list(key_or_list, direction)
    self.__ordering = helpers._index_document(index_keys)
    return self
'Get the size of the results set for this query. Returns the number of documents in the results set for this query. Does not take :meth:`limit` and :meth:`skip` into account by default - set `with_limit_and_skip` to ``True`` if that is the desired behavior. Raises :class:`~pymongo.errors.OperationFailure` on a database...
def count(self, with_limit_and_skip=False):
validate_boolean('with_limit_and_skip', with_limit_and_skip) cmd = SON([('count', self.__collection.name), ('query', self.__spec)]) if (self.__max_time_ms is not None): cmd['maxTimeMS'] = self.__max_time_ms if self.__comment: cmd['$comment'] = self.__comment if (self.__hint is not No...
'Get a list of distinct values for `key` among all documents in the result set of this query. Raises :class:`TypeError` if `key` is not an instance of :class:`basestring` (:class:`str` in python 3). The :meth:`distinct` method obeys the :attr:`~pymongo.collection.Collection.read_preference` of the :class:`~pymongo.coll...
def distinct(self, key):
options = {} if self.__spec: options['query'] = self.__spec if (self.__max_time_ms is not None): options['maxTimeMS'] = self.__max_time_ms if self.__comment: options['$comment'] = self.__comment if (self.__collation is not None): options['collation'] = self.__collatio...
'Returns an explain plan record for this cursor. .. mongodoc:: explain'
def explain(self):
    """Run this query with the explain option and return the plan document."""
    plan_cursor = self.clone()
    plan_cursor.__explain = True
    # A positive limit would cap the result; a negative limit is a hard cap
    # that the server honors for explain, so flip the sign.
    if plan_cursor.__limit:
        plan_cursor.__limit = -abs(plan_cursor.__limit)
    return next(plan_cursor)
'Adds a \'hint\', telling Mongo the proper index to use for the query. Judicious use of hints can greatly improve query performance. When doing a query on multiple fields (at least one of which is indexed) pass the indexed field as a hint to the query. Hinting will not do anything if the corresponding index does not ex...
def hint(self, index):
    """Tell the server which index to use for this query.

    Has no effect if the named index does not exist.  Returns ``self``
    for chaining.
    """
    self.__check_okay_to_chain()
    self.__set_hint(index)
    return self
'Adds a \'comment\' to the cursor. http://docs.mongodb.org/manual/reference/operator/comment/ :Parameters: - `comment`: A string or document .. versionadded:: 2.7'
def comment(self, comment):
    """Attach a ``$comment`` (string or document) to this query; chainable."""
    self.__check_okay_to_chain()
    self.__comment = comment
    return self
'Adds a $where clause to this query. The `code` argument must be an instance of :class:`basestring` (:class:`str` in python 3) or :class:`~bson.code.Code` containing a JavaScript expression. This expression will be evaluated for each document scanned. Only those documents for which the expression evaluates to *true* wi...
def where(self, code):
    """Add a ``$where`` JavaScript predicate to this query.

    `code` may be a string or :class:`~bson.code.Code`; strings are
    wrapped in ``Code``.  Returns ``self`` for chaining.
    """
    self.__check_okay_to_chain()
    if not isinstance(code, Code):
        code = Code(code)
    self.__spec['$where'] = code
    return self
'Adds a :class:`~pymongo.collation.Collation` to this query. This option is only supported on MongoDB 3.4 and above. Raises :exc:`TypeError` if `collation` is not an instance of :class:`~pymongo.collation.Collation` or a ``dict``. Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been u...
def collation(self, collation):
    """Attach a validated collation (or ``None``) to this query; chainable."""
    self.__check_okay_to_chain()
    self.__collation = validate_collation_or_none(collation)
    return self
'Send a query or getmore operation and handles the response. If operation is ``None`` this is an exhaust cursor, which reads the next result batch off the exhaust socket instead of sending getMore messages to the server. Can raise ConnectionFailure.'
def __send_message(self, operation):
client = self.__collection.database.client listeners = client._event_listeners publish = listeners.enabled_for_commands from_command = False if operation: kwargs = {'read_preference': self.__read_preference, 'exhaust': self.__exhaust} if (self.__address is not None): kwar...
'Refreshes the cursor with more data from Mongo. Returns the length of self.__data after refresh. Will exit early if self.__data is already non-empty. Raises OperationFailure when the cursor cannot be refreshed due to an error on the query.'
def _refresh(self):
if (len(self.__data) or self.__killed): return len(self.__data) if (self.__id is None): self.__send_message(_Query(self.__query_flags, self.__collection.database.name, self.__collection.name, self.__skip, self.__query_spec(), self.__projection, self.__codec_options, self.__read_preference, self....
'Does this cursor have the potential to return more data? This is mostly useful with `tailable cursors <http://www.mongodb.org/display/DOCS/Tailable+Cursors>`_ since they will stop iterating even though they *may* return more results in the future. With regular cursors, simply use a for loop instead of :attr:`alive`:: ...
@property
def alive(self):
    """True when this cursor might still yield documents (buffered or open)."""
    has_buffered = len(self.__data) > 0
    return has_buffered or not self.__killed
'Returns the id of the cursor Useful if you need to manage cursor ids and want to handle killing cursors manually using :meth:`~pymongo.mongo_client.MongoClient.kill_cursors` .. versionadded:: 2.2'
@property
def cursor_id(self):
    """Server-assigned id of this cursor (for manual kill_cursors use)."""
    return self.__id
'The (host, port) of the server used, or None. .. versionchanged:: 3.0 Renamed from "conn_id".'
@property
def address(self):
    """The (host, port) pair of the server used, or ``None``."""
    return self.__address
'Advance the cursor.'
def next(self):
if self.__empty: raise StopIteration if (len(self.__data) or self._refresh()): if self.__manipulate: _db = self.__collection.database return _db._fix_outgoing(self.__data.popleft(), self.__collection) else: return self.__data.popleft() else: ...
'Support function for `copy.copy()`. .. versionadded:: 2.4'
def __copy__(self):
    """Shallow-copy support for :mod:`copy`."""
    return self._clone(deepcopy=False)
'Support function for `copy.deepcopy()`. .. versionadded:: 2.4'
def __deepcopy__(self, memo):
    """Deep-copy support for :mod:`copy`."""
    return self._clone(deepcopy=True)
'Deepcopy helper for the data dictionary or list. Regular expressions cannot be deep copied but as they are immutable we don\'t have to copy them when cloning.'
def _deepcopy(self, x, memo=None):
if (not hasattr(x, 'items')): (y, is_list, iterator) = ([], True, enumerate(x)) else: (y, is_list, iterator) = ({}, False, iteritems(x)) if (memo is None): memo = {} val_id = id(x) if (val_id in memo): return memo.get(val_id) memo[val_id] = y for (key, value) ...
'Create a client and grab essential information from the server.'
def __init__(self):
self.connected = False self.ismaster = {} self.w = None self.nodes = set() self.replica_set_name = None self.cmd_line = None self.version = Version((-1)) self.auth_enabled = False self.test_commands_enabled = False self.is_mongos = False self.is_rs = False self.has_ipv6 =...
'Return True if db_user/db_password is already an admin user.'
def _check_user_provided(self):
client = pymongo.MongoClient(host, port, username=db_user, password=db_pwd, serverSelectionTimeoutMS=100, **self.ssl_client_options) try: return (db_user in _all_users(client.admin)) except pymongo.errors.OperationFailure as e: msg = e.details.get('errmsg', '') if ((e.code == 18) or ...
'Run a test only if we can connect to MongoDB.'
def require_connection(self, func):
    """Decorate *func* to run only when a MongoDB connection is available."""
    reason = 'Cannot connect to MongoDB on %s' % (self.pair,)
    return self._require(self.connected, reason, func=func)
'Run a test only if the server version is at least ``version``.'
def require_version_min(self, *ver):
    """Decorator: run a test only when the server version is >= *ver*."""
    minimum = Version(*ver)
    message = 'Server version must be at least %s' % str(minimum)
    return self._require(self.version >= minimum, message)
'Run a test only if the server version is at most ``version``.'
def require_version_max(self, *ver):
    """Decorator: run a test only when the server version is <= *ver*."""
    maximum = Version(*ver)
    message = 'Server version must be at most %s' % str(maximum)
    return self._require(self.version <= maximum, message)
'Run a test only if the server is running with auth enabled.'
def require_auth(self, func):
    """Decorate *func* to run only when server auth is enabled.

    Also applies the auth-with-sharding version check on top.
    """
    wrapped = self._require(self.auth_enabled,
                            'Authentication is not enabled on the server',
                            func=func)
    return self.check_auth_with_sharding(wrapped)
'Run a test only if the server is running without auth enabled.'
def require_no_auth(self, func):
    """Decorate *func* to run only when server auth is disabled."""
    return self._require(not self.auth_enabled,
                         'Authentication must not be enabled on the server',
                         func=func)
'Run a test only if the client is connected to a replica set.'
def require_replica_set(self, func):
    """Decorate *func* to run only when connected to a replica set."""
    return self._require(self.is_rs,
                         'Not connected to a replica set',
                         func=func)
'Run a test only if the client is connected to a replica set that has `count` secondaries.'
def require_secondaries_count(self, count):
    """Decorator: run a test only when >= *count* secondaries are up."""
    if not self.client:
        available = 0
    else:
        available = len(self.client.secondaries)
    message = 'Need %d secondaries, %d available' % (count, available)
    return self._require(available >= count, message)
'Run a test if the client is *not* connected to a replica set.'
def require_no_replica_set(self, func):
    """Decorate *func* to run only against a standalone mongod."""
    return self._require(not self.is_rs,
                         'Connected to a replica set, not a standalone mongod',
                         func=func)
'Run a test only if the client can connect to a server via IPv6.'
def require_ipv6(self, func):
    """Decorate *func* to run only when the server is reachable over IPv6."""
    return self._require(self.has_ipv6, 'No IPv6', func=func)
'Run a test only if the client is not connected to a mongos.'
def require_no_mongos(self, func):
    """Decorate *func* to run only against a mongod (not mongos)."""
    return self._require(not self.is_mongos,
                         'Must be connected to a mongod, not a mongos',
                         func=func)
'Run a test only if the client is connected to a mongos.'
def require_mongos(self, func):
    """Decorate *func* to run only against a mongos."""
    return self._require(self.is_mongos,
                         'Must be connected to a mongos',
                         func=func)
'Skip a test when connected to mongos < 2.0 and running with auth.'
def check_auth_with_sharding(self, func):
    """Skip *func* when auth is enabled on a mongos older than 2.0."""
    unsupported = (self.auth_enabled and self.is_mongos
                   and self.version < (2,))
    return self._require(not unsupported,
                         'Auth with sharding requires MongoDB >= 2.0.0',
                         func=func)
'Run a test only if the server has test commands enabled.'
def require_test_commands(self, func):
    """Decorate *func* to run only when test commands are enabled."""
    return self._require(self.test_commands_enabled,
                         'Test commands must be enabled',
                         func=func)
'Run a test only if the client can connect over SSL.'
def require_ssl(self, func):
    """Decorate *func* to run only when SSL connections work."""
    return self._require(self.ssl, 'Must be able to connect via SSL',
                         func=func)
'Run a test only if the client can connect over SSL.'
def require_no_ssl(self, func):
    """Decorate *func* to run only when non-SSL connections work."""
    return self._require(not self.ssl,
                         'Must be able to connect without SSL',
                         func=func)
'Run a test only if the client can connect with ssl.CERT_NONE.'
def require_ssl_cert_none(self, func):
    """Decorate *func* to run only when ssl.CERT_NONE connections work."""
    return self._require(self.ssl_cert_none,
                         'Must be able to connect with ssl.CERT_NONE',
                         func=func)
'Run a test only if the client can connect with ssl_certfile.'
def require_ssl_certfile(self, func):
    """Decorate *func* to run only when ssl_certfile connections work."""
    return self._require(self.ssl_certfile,
                         'Must be able to connect with ssl_certfile',
                         func=func)
'Run a test only if the hostname \'server\' is resolvable.'
def require_server_resolvable(self, func):
    """Decorate *func* to run only when hostname 'server' resolves in DNS."""
    return self._require(self.server_is_resolvable,
                         "No hosts entry for 'server'. Cannot validate "
                         "hostname in the certificate",
                         func=func)
'A MongoClient connected to the default server, with a mock topology. standalones, members, mongoses determine the configuration of the topology. They are formatted like [\'a:1\', \'b:2\']. ismaster_hosts provides an alternative host list for the server\'s mocked ismaster response; see test_connect_with_internal_ips.'
def __init__(self, standalones, members, mongoses, ismaster_hosts=None, *args, **kwargs):
self.mock_standalones = standalones[:] self.mock_members = members[:] if self.mock_members: self.mock_primary = self.mock_members[0] else: self.mock_primary = None if (ismaster_hosts is not None): self.mock_ismaster_hosts = ismaster_hosts else: self.mock_ismaster_...
'Host is like \'a:1\'.'
def kill_host(self, host):
    """Mark *host* (an 'address:port' string) as down in this mock topology."""
    self.mock_down_hosts.append(host)
'Host is like \'a:1\'.'
def revive_host(self, host):
    """Remove *host* (an 'address:port' string) from the mock down list."""
    self.mock_down_hosts.remove(host)
'Return mock ismaster response (a dict) and round trip time.'
def mock_is_master(self, host):
(min_wire_version, max_wire_version) = self.mock_wire_versions.get(host, (common.MIN_WIRE_VERSION, common.MAX_WIRE_VERSION)) max_write_batch_size = self.mock_max_write_batch_sizes.get(host, common.MAX_WRITE_BATCH_SIZE) rtt = self.mock_rtts.get(host, 0) if (host in self.mock_down_hosts): raise Ne...
'This test uses MongoClient explicitly to make sure that host and port are not overloaded.'
def test_constants(self):
(host, port) = (client_context.host, client_context.port) MongoClient.HOST = 'somedomainthatdoesntexist.org' MongoClient.PORT = 123456789 with self.assertRaises(AutoReconnect): connected(MongoClient(serverSelectionTimeoutMS=10, **client_context.ssl_client_options)) connected(MongoClient(host...
'Start filtering deprecations.'
def __init__(self, action='ignore'):
    """Enter a catch_warnings context and apply *action* to DeprecationWarning."""
    self.warn_context = warnings.catch_warnings()
    self.warn_context.__enter__()
    warnings.simplefilter(action, DeprecationWarning)
'Stop filtering deprecations.'
def stop(self):
    """Exit the warnings context installed by __init__ and drop the reference."""
    context = self.warn_context
    context.__exit__()
    self.warn_context = None
'Test __iter__'
def test_iteration(self):
    """Iterating a SON yields its keys, mapped to the expected values."""
    sample = SON([(1, 100), (2, 200), (3, 300)])
    for key in sample:
        self.assertEqual(key * 100, sample[key])
'has_key and __contains__'
def test_contains_has(self):
test_son = SON([(1, 100), (2, 200), (3, 300)]) self.assertIn(1, test_son) self.assertTrue((2 in test_son), 'in failed') self.assertFalse((22 in test_son), "in succeeded when it shouldn't") self.assertTrue(test_son.has_key(2), 'has_key failed') self.assertFalse(test_son.has_key(...
'Test clear()'
def test_clears(self):
    """clear() empties the SON of keys, length, and dict form."""
    sample = SON([(1, 100), (2, 200), (3, 300)])
    sample.clear()
    self.assertNotIn(1, sample)
    self.assertEqual(0, len(sample))
    self.assertEqual(0, len(sample.keys()))
    self.assertEqual({}, sample.to_dict())
'Test len'
def test_len(self):
    """len() tracks insertions and removals on a SON."""
    sample = SON()
    self.assertEqual(0, len(sample))
    sample = SON([(1, 100), (2, 200), (3, 300)])
    self.assertEqual(3, len(sample))
    sample.popitem()
    self.assertEqual(2, len(sample))
'Fail if the two json strings are unequal. Normalize json by parsing it with the built-in json library. This accounts for discrepancies in spacing.'
def assertJsonEqual(self, first, second, msg=None):
    """Assert two JSON strings are equal once parsed (whitespace-insensitive)."""
    parsed_first = loads(first)
    parsed_second = loads(second)
    self.assertEqual(parsed_first, parsed_second, msg=msg)
'Do a find() on the client and return which host was used'
def read_from_which_host(self, client):
    """Run one find() on *client* and return the (host, port) it used."""
    cursor = client.pymongo_test.test.find()
    next(cursor)  # force the query to actually hit a server
    return cursor.address
'Do a find() on the client and return \'primary\' or \'secondary\' depending on which the client used.'
def read_from_which_kind(self, client):
address = self.read_from_which_host(client) if (address == client.primary): return 'primary' elif (address in client.secondaries): return 'secondary' else: self.fail(('Cursor used address %s, expected either primary %s or secondaries %s' % (address, ...
'Execute fn(*args, **kwargs) and return the Server instance used.'
def executed_on_which_server(self, client, fn, *args, **kwargs):
    """Call ``fn(*args, **kwargs)`` and return the single Server it read from."""
    client.has_read_from.clear()
    fn(*args, **kwargs)
    # Exactly one server must have been read from, or the test is wrong.
    self.assertEqual(1, len(client.has_read_from))
    return one(client.has_read_from)
'Compare response from bulk.execute() to expected response.'
def assertEqualResponse(self, expected, actual):
for (key, value) in expected.items(): if (key == 'nModified'): if self.has_write_commands: self.assertEqual(value, actual['nModified']) else: self.assertFalse(('nModified' in actual)) elif (key == 'upserted'): expected_upserts = val...
'Compare bulk.execute()[\'upserts\'] to expected value. Like: {\'index\': 0, \'_id\': ObjectId()}'
def assertEqualUpsert(self, expected, actual):
    """Compare one upsert result to *expected*; '...' matches any _id."""
    self.assertEqual(expected['index'], actual['index'])
    if expected['_id'] == '...':
        # Caller only requires that some _id was generated.
        self.assertTrue('_id' in actual)
    else:
        self.assertEqual(expected['_id'], actual['_id'])
'Compare bulk.execute()[\'writeErrors\'] to expected value. Like: {\'index\': 0, \'code\': 123, \'errmsg\': \'...\', \'op\': { ... }}'
def assertEqualWriteError(self, expected, actual):
self.assertEqual(expected['index'], actual['index']) self.assertEqual(expected['code'], actual['code']) if (expected['errmsg'] == '...'): self.assertTrue(('errmsg' in actual)) else: self.assertEqual(expected['errmsg'], actual['errmsg']) expected_op = expected['op'].copy() actual_...
'Subtype of this binary data.'
@property
def subtype(self):
    """Subtype code of this binary data."""
    return self.__subtype
'UUID instance wrapped by this UUIDLegacy instance.'
@property
def uuid(self):
    """The UUID instance wrapped by this UUIDLegacy value."""
    return self.__uuid
'Create a new :class:`RawBSONDocument`. :Parameters: - `bson_bytes`: the BSON bytes that compose this document - `codec_options` (optional): An instance of :class:`~bson.codec_options.CodecOptions`. .. versionchanged:: 3.5 If a :class:`~bson.codec_options.CodecOptions` is passed in, its `document_class` must be :class:...
def __init__(self, bson_bytes, codec_options=None):
self.__raw = bson_bytes self.__inflated_doc = None if (codec_options is None): codec_options = DEFAULT_RAW_BSON_OPTIONS elif (codec_options.document_class is not RawBSONDocument): raise TypeError(('RawBSONDocument cannot use CodecOptions with document class %s' % (co...
'The raw BSON bytes composing this document.'
@property
def raw(self):
    """The raw BSON bytes composing this document."""
    return self.__raw
'Lazily decode and iterate elements in this document.'
def items(self):
    """Lazily decode this raw document and iterate its (key, value) pairs."""
    return iteritems(self.__inflated)
'Representation of the arguments used to create this object.'
def _arguments_repr(self):
document_class_repr = ('dict' if (self.document_class is dict) else repr(self.document_class)) uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(self.uuid_representation, self.uuid_representation) return ('document_class=%s, tz_aware=%r, uuid_representation=%s, unicode_decode_error_handler=%r, tzinf...
'Make a copy of this CodecOptions, overriding some options:: >>> from bson.codec_options import DEFAULT_CODEC_OPTIONS >>> DEFAULT_CODEC_OPTIONS.tz_aware False >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True) >>> options.tz_aware True .. versionadded:: 3.5'
def with_options(self, **kwargs):
    """Return a copy of this CodecOptions with the given fields overridden."""
    override = kwargs.get
    return CodecOptions(
        override('document_class', self.document_class),
        override('tz_aware', self.tz_aware),
        override('uuid_representation', self.uuid_representation),
        override('unicode_decode_error_handler',
                 self.unicode_decode_error_handler),
        override('tzinfo', self.tzinfo))
'Encode a document to a new :class:`BSON` instance. A document can be any mapping type (like :class:`dict`). Raises :class:`TypeError` if `document` is not a mapping type, or contains keys that are not instances of :class:`basestring` (:class:`str` in python 3). Raises :class:`~bson.errors.InvalidDocument` if `document...
@classmethod
def encode(cls, document, check_keys=False, codec_options=DEFAULT_CODEC_OPTIONS):
    """Encode *document* (a mapping) to BSON bytes wrapped in this class.

    Raises TypeError via _CODEC_OPTIONS_TYPE_ERROR for a bad codec_options.
    """
    if not isinstance(codec_options, CodecOptions):
        raise _CODEC_OPTIONS_TYPE_ERROR
    return cls(_dict_to_bson(document, check_keys, codec_options))
'Decode this BSON data. By default, returns a BSON document represented as a Python :class:`dict`. To use a different :class:`MutableMapping` class, configure a :class:`~bson.codec_options.CodecOptions`:: >>> import collections # From Python standard library. >>> import bson >>> from bson.codec_options import CodecOpt...
def decode(self, codec_options=DEFAULT_CODEC_OPTIONS):
    """Decode these BSON bytes into a document using *codec_options*."""
    if not isinstance(codec_options, CodecOptions):
        raise _CODEC_OPTIONS_TYPE_ERROR
    return _bson_to_dict(self, codec_options)
'Scope dictionary for this instance or ``None``.'
@property
def scope(self):
    """Scope dictionary for this Code instance, or ``None``."""
    return self.__scope
'Create a new :class:`Timestamp`. This class is only for use with the MongoDB opLog. If you need to store a regular timestamp, please use a :class:`~datetime.datetime`. Raises :class:`TypeError` if `time` is not an instance of :class: `int` or :class:`~datetime.datetime`, or `inc` is not an instance of :class:`int`. Ra...
def __init__(self, time, inc):
if isinstance(time, datetime.datetime): if (time.utcoffset() is not None): time = (time - time.utcoffset()) time = int(calendar.timegm(time.timetuple())) if (not isinstance(time, integer_types)): raise TypeError('time must be an instance of int') if (not...
'Get the time portion of this :class:`Timestamp`.'
@property
def time(self):
    """The time portion of this Timestamp (seconds since the epoch)."""
    return self.__time
'Get the inc portion of this :class:`Timestamp`.'
@property
def inc(self):
    """The inc (ordinal) portion of this Timestamp."""
    return self.__inc
'Return a :class:`~datetime.datetime` instance corresponding to the time portion of this :class:`Timestamp`. The returned datetime\'s timezone is UTC.'
def as_datetime(self):
    """Return this Timestamp's time portion as a UTC-aware datetime."""
    return datetime.datetime.fromtimestamp(self.__time, utc)
'Initialize a new ObjectId. An ObjectId is a 12-byte unique identifier consisting of: - a 4-byte value representing the seconds since the Unix epoch, - a 3-byte machine identifier, - a 2-byte process id, and - a 3-byte counter, starting with a random value. By default, ``ObjectId()`` creates a new unique identifier. Th...
def __init__(self, oid=None):
    """Create an ObjectId from *oid* (12 raw bytes, 24-char hex string, or
    another ObjectId), or generate a fresh unique id when *oid* is None."""
    if oid is None:
        self.__generate()
    elif isinstance(oid, bytes) and len(oid) == 12:
        # Already the raw 12-byte form; store as-is.
        self.__id = oid
    else:
        self.__validate(oid)
'Create a dummy ObjectId instance with a specific generation time. This method is useful for doing range queries on a field containing :class:`ObjectId` instances. .. warning:: It is not safe to insert a document containing an ObjectId generated using this method. This method deliberately eliminates the uniqueness guar...
@classmethod
def from_datetime(cls, generation_time):
    """Create a dummy ObjectId whose timestamp is *generation_time*.

    Only the leading 4 timestamp bytes are meaningful; the remaining
    8 bytes are zero, so ids built this way are for range queries only
    and must never be inserted as document ids.

    :Parameters:
      - `generation_time`: a :class:`datetime.datetime` (naive values
        are treated as UTC; aware values are normalized to UTC).
    """
    if generation_time.utcoffset() is not None:
        # Normalize an aware datetime to UTC before taking the timestamp.
        generation_time = generation_time - generation_time.utcoffset()
    timestamp = calendar.timegm(generation_time.timetuple())
    # BUG FIX: the zero padding must be a bytes literal; the original
    # concatenated bytes + str, which raises TypeError on Python 3.
    oid = struct.pack('>i', int(timestamp)) + b'\x00' * 8
    return cls(oid)
'Checks if a `oid` string is valid or not. :Parameters: - `oid`: the object id to validate .. versionadded:: 2.3'
@classmethod
def is_valid(cls, oid):
    """Return True if *oid* would be accepted by the ObjectId constructor."""
    if not oid:
        return False
    try:
        ObjectId(oid)
    except (InvalidId, TypeError):
        return False
    return True
'Generate a new value for this ObjectId.'
def __generate(self):
    """Build a fresh 12-byte id: 4B timestamp + 3B machine + 2B pid + 3B counter."""
    parts = [struct.pack('>i', int(time.time())),
             ObjectId._machine_bytes,
             struct.pack('>H', os.getpid() % 65535)]
    # The counter is shared process-wide; guard increment with the lock.
    with ObjectId._inc_lock:
        parts.append(struct.pack('>i', ObjectId._inc)[1:4])
        ObjectId._inc = (ObjectId._inc + 1) % 16777215
    self.__id = b''.join(parts)
'Validate and use the given id for this ObjectId. Raises TypeError if id is not an instance of (:class:`basestring` (:class:`str` or :class:`bytes` in python 3), ObjectId) and InvalidId if it is not a valid ObjectId. :Parameters: - `oid`: a valid ObjectId'
def __validate(self, oid):
if isinstance(oid, ObjectId): self.__id = oid.binary elif isinstance(oid, string_type): if (len(oid) == 24): try: self.__id = bytes_from_hex(oid) except (TypeError, ValueError): _raise_invalid_id(oid) else: _raise_invali...
'12-byte binary representation of this ObjectId.'
@property
def binary(self):
    """The raw 12-byte representation of this ObjectId."""
    return self.__id
'A :class:`datetime.datetime` instance representing the time of generation for this :class:`ObjectId`. The :class:`datetime.datetime` is timezone aware, and represents the generation time in UTC. It is precise to the second.'
@property
def generation_time(self):
    """Generation time of this ObjectId as a UTC-aware datetime (second precision)."""
    seconds = struct.unpack('>i', self.__id[0:4])[0]
    return datetime.datetime.fromtimestamp(seconds, utc)
'return value of object for pickling. needed explicitly because __slots__() defined.'
def __getstate__(self):
    """Pickle support: the raw 12 bytes are the entire state.

    Needed explicitly because the class defines __slots__.
    """
    return self.__id
'explicit state set from pickling'
def __setstate__(self, value):
    """Restore pickled state, accepting legacy dict states and text ids."""
    if isinstance(value, dict):
        # Pickles from versions that stored an instance __dict__.
        oid = value['_ObjectId__id']
    else:
        oid = value
    if PY3 and isinstance(oid, text_type):
        # A Python 2 pickle holds the id as str; re-encode to raw bytes.
        self.__id = oid.encode('latin-1')
    else:
        self.__id = oid
'Get a hash value for this :class:`ObjectId`.'
def __hash__(self):
    """Hash on the raw 12-byte id."""
    return hash(self.__id)
'Returns an instance of :class:`decimal.Decimal` for this :class:`Decimal128`.'
def to_decimal(self):
high = self.__high low = self.__low sign = (1 if (high & _SIGN) else 0) if ((high & _SNAN) == _SNAN): return decimal.Decimal((sign, (), 'N')) elif ((high & _NAN) == _NAN): return decimal.Decimal((sign, (), 'n')) elif ((high & _INF) == _INF): return decimal.Decimal((sign, ...
'Create an instance of :class:`Decimal128` from Binary Integer Decimal string. :Parameters: - `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating point in Binary Integer Decimal (BID) format).'
@classmethod
def from_bid(cls, value):
    """Build a Decimal128 from its 16-byte BID (Binary Integer Decimal) form.

    Raises TypeError unless *value* is bytes, ValueError unless it is
    exactly 16 bytes long.
    """
    if not isinstance(value, bytes):
        raise TypeError('value must be an instance of bytes')
    if len(value) != 16:
        raise ValueError('value must be exactly 16 bytes')
    high_word = _UNPACK_64(value[8:])[0]
    low_word = _UNPACK_64(value[:8])[0]
    return cls((high_word, low_word))
'The Binary Integer Decimal (BID) encoding of this instance.'
@property
def bid(self):
    """The 16-byte BID encoding of this value (low word first, then high)."""
    return _PACK_64(self.__low) + _PACK_64(self.__high)
'Initialize a new :class:`DBRef`. Raises :class:`TypeError` if `collection` or `database` is not an instance of :class:`basestring` (:class:`str` in python 3). `database` is optional and allows references to documents to work across databases. Any additional keyword arguments will create additional fields in the result...
def __init__(self, collection, id, database=None, _extra={}, **kwargs):
if (not isinstance(collection, string_type)): raise TypeError(('collection must be an instance of %s' % string_type.__name__)) if ((database is not None) and (not isinstance(database, string_type))): raise TypeError(('database must be an instance of %s' % stri...
'Get the name of this DBRef\'s collection as unicode.'
@property
def collection(self):
    """The collection name this DBRef points into, as unicode."""
    return self.__collection
'Get this DBRef\'s _id.'
@property
def id(self):
    """The _id this DBRef points at."""
    return self.__id
'Get the name of this DBRef\'s database. Returns None if this DBRef doesn\'t specify a database.'
@property
def database(self):
    """The database name of this DBRef, or ``None`` if unspecified."""
    return self.__database
'Get the SON document representation of this DBRef. Generally not needed by application developers'
def as_doc(self):
    """Return this DBRef as a SON document: $ref, $id, optional $db, extras."""
    doc = SON([('$ref', self.collection), ('$id', self.id)])
    if self.database is not None:
        doc['$db'] = self.database
    doc.update(self.__kwargs)
    return doc
'Get a hash value for this :class:`DBRef`.'
def __hash__(self):
    """Hash all identifying fields, with extra kwargs in a stable sorted order."""
    extras = tuple(sorted(self.__kwargs.items()))
    return hash((self.__collection, self.__id, self.__database, extras))
'Support function for `copy.deepcopy()`.'
def __deepcopy__(self, memo):
    """Deep-copy support for :mod:`copy`: copy every component field."""
    return DBRef(deepcopy(self.__collection, memo),
                 deepcopy(self.__id, memo),
                 deepcopy(self.__database, memo),
                 deepcopy(self.__kwargs, memo))
'Comparison to another SON is order-sensitive while comparison to a regular dictionary is order-insensitive.'
def __eq__(self, other):
    """Order-sensitive equality vs another SON; order-insensitive vs a dict."""
    if isinstance(other, SON):
        return len(self) == len(other) and self.items() == other.items()
    return self.to_dict() == other
'Convert a SON document to a normal Python dictionary instance. This is trickier than just *dict(...)* because it needs to be recursive.'
def to_dict(self):
def transform_value(value): if isinstance(value, list): return [transform_value(v) for v in value] elif isinstance(value, collections.Mapping): return dict([(k, transform_value(v)) for (k, v) in iteritems(value)]) else: return value return transform_va...
'Convert a Python regular expression into a ``Regex`` instance. Note that in Python 3, a regular expression compiled from a :class:`str` has the ``re.UNICODE`` flag set. If it is undesirable to store this flag in a BSON regular expression, unset it first:: >>> pattern = re.compile(\'.*\') >>> regex = Regex.from_native(...
@classmethod
def from_native(cls, regex):
    """Wrap a compiled Python regex in a BSON Regex (keeps pattern + flags).

    Raises TypeError when *regex* is not a compiled pattern.
    """
    if not isinstance(regex, RE_TYPE):
        raise TypeError('regex must be a compiled regular expression, not %s'
                        % type(regex))
    return Regex(regex.pattern, regex.flags)
'BSON regular expression data. This class is useful to store and retrieve regular expressions that are incompatible with Python\'s regular expression dialect. :Parameters: - `pattern`: string - `flags`: (optional) an integer bitmask, or a string of flag characters like "im" for IGNORECASE and MULTILINE'
def __init__(self, pattern, flags=0):
if (not isinstance(pattern, (text_type, bytes))): raise TypeError(('pattern must be a string, not %s' % type(pattern))) self.pattern = pattern if isinstance(flags, string_type): self.flags = str_flags_to_int(flags) elif isinstance(flags, int): self.flags = flags...
'Compile this :class:`Regex` as a Python regular expression. .. warning:: Python regular expressions use a different syntax and different set of flags than MongoDB, which uses `PCRE`_. A regular expression retrieved from the server may not compile in Python, or may match a different set of strings in Python than when u...
def try_compile(self):
    """Compile this pattern with Python's :mod:`re`.

    May raise or match differently than the server, since MongoDB uses
    PCRE while Python has its own regex dialect.
    """
    return re.compile(self.pattern, self.flags)
'Construct a ZipFile or ContextualZipFile as appropriate.'
def __new__(cls, *args, **kwargs):
    """Return a plain ZipFile when it already supports the context-manager
    protocol; otherwise fall through to this compatibility wrapper."""
    if hasattr(zipfile.ZipFile, '__exit__'):
        return zipfile.ZipFile(*args, **kwargs)
    return super(ContextualZipFile, cls).__new__(cls)
'Save parameters into MongoDB Buckets, and save the file ID into Params Collections. Parameters params : a list of parameters args : dictionary, item meta data. Returns f_id : the Buckets ID of the parameters.'
def save_params(self, params=[], args={}):
self.__autofill(args) s = time.time() f_id = self.paramsfs.put(self.__serialization(params)) args.update({'f_id': f_id, 'time': datetime.utcnow()}) self.db.Params.insert_one(args) print '[TensorDB] Save params: SUCCESS, took: {}s'.format(round((time.time() - s), 2)) return f_i...
'Find one parameter from MongoDB Buckets. Parameters args : dictionary, find items. Returns params : the parameters, return False if nothing found. f_id : the Buckets ID of the parameters, return False if nothing found.'
@AutoFill def find_one_params(self, args={}, sort=None):
s = time.time() d = self.db.Params.find_one(filter=args, sort=sort) if (d is not None): f_id = d['f_id'] else: print '[TensorDB] FAIL! Cannot find: {}'.format(args) return (False, False) try: params = self.__deserialization(self.paramsfs.get(f_id).read()) ...
'Find all parameter from MongoDB Buckets Parameters args : dictionary, find items Returns params : the parameters, return False if nothing found.'
@AutoFill def find_all_params(self, args={}):
s = time.time() pc = self.db.Params.find(args) if (pc is not None): f_id_list = pc.distinct('f_id') params = [] for f_id in f_id_list: tmp = self.paramsfs.get(f_id).read() params.append(self.__deserialization(tmp)) else: print '[TensorDB] FAIL! ...
'Delete params in MongoDB uckets. Parameters args : dictionary, find items to delete, leave it empty to delete all parameters.'
@AutoFill
def del_params(self, args={}):
    # NOTE(review): mutable default `args={}` is shared across calls; left
    # as-is because the @AutoFill decorator may depend on this signature.
    """Delete matching parameter records and their GridFS payloads.

    Parameters
    ----------
    args : dict
        Query document selecting the Params records to delete; an empty
        dict deletes all parameters.
    """
    pc = self.db.Params.find(args)
    f_id_list = pc.distinct('f_id')
    # Remove each stored parameter blob from the GridFS bucket first.
    for f in f_id_list:
        self.paramsfs.delete(f)
    # Then drop the metadata documents themselves.
    self.db.Params.remove(args)
    print '[TensorDB] Delete params SUCCESS: {}'.format(args)
'Save the training log. Parameters args : dictionary, items to save. Examples >>> db.train_log(time=time.time(), {\'loss\': loss, \'acc\': acc})'
@AutoFill
def train_log(self, args={}):
    # NOTE(review): mutable default `args={}` is shared across calls; left
    # as-is because the @AutoFill decorator may depend on this signature.
    """Insert one training-log document into the TrainLog collection.

    Parameters
    ----------
    args : dict
        Items to save, e.g. ``{'time': ..., 'loss': ..., 'acc': ...}``.

    Returns
    -------
    The result object from ``insert_one``.
    """
    _result = self.db.TrainLog.insert_one(args)
    # _print_dict formats the logged items; result kept only for side effect.
    _log = self._print_dict(args)
    return _result