Skip to content

Commit 12b4d2d

Browse files
author
Marcus Baker
authored
Merge pull request colinhowe#7 from conversocial/get_tests_passing
fix: CON-112 - Get tests passing
2 parents 01c1e42 + 8b1f869 commit 12b4d2d

18 files changed

+1165
-687
lines changed

mongoengine/base.py

Lines changed: 101 additions & 51 deletions
Large diffs are not rendered by default.

mongoengine/connection.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,8 +32,7 @@ def register_connection(alias, host='localhost', port=27017,
3232
:param name: the name of the specific database to use
3333
:param host: the host name of the :program:`mongod` instance to connect to
3434
:param port: the port that the :program:`mongod` instance is running on
35-
:param is_slave: whether the connection can act as a slave ** Deprecated pymongo 2.0.1+
36-
:param read_preference: The read preference for the collection ** Added pymongo 2.1
35+
:param read_preference: The read preference for the collection
3736
:param slaves: a list of aliases of slave connections; each of these must
3837
be a registered connection that has :attr:`is_slave` set to ``True``
3938
:param username: username to authenticate with
@@ -109,7 +108,8 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
109108
try:
110109
_connections[alias] = MongoClient(**conn_settings)
111110
except Exception, e:
112-
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
111+
raise ConnectionError(
112+
"Cannot connect to database %s :\n%s" % (alias, e))
113113
return _connections[alias]
114114

115115

@@ -126,6 +126,7 @@ def register_db(
126126
'db_name': db_name,
127127
}
128128

129+
129130
def get_db(alias=DEFAULT_DB_ALIAS, reconnect=False, refresh=False):
130131
global _dbs
131132
global _db_settings

mongoengine/dereference.py

Lines changed: 31 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -69,21 +69,29 @@ def _find_references(self, items, depth=0):
6969
for field_name, field in item._fields.iteritems():
7070
v = item._data.get(field_name, None)
7171
if isinstance(v, (DBRef)):
72-
reference_map.setdefault(field.document_type, []).append(v.id)
72+
reference_map.setdefault(field.document_type, []) \
73+
.append(v.id)
7374
elif isinstance(v, (dict, SON)) and '_ref' in v:
74-
reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
75-
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
76-
field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
75+
reference_map.setdefault(get_document(v['_cls']), []) \
76+
.append(v['_ref'].id)
77+
elif isinstance(v, (dict, list, tuple)) and \
78+
depth <= self.max_depth:
79+
field_cls = getattr(getattr(field, 'field', None),
80+
'document_type', None)
7781
references = self._find_references(v, depth)
7882
for key, refs in references.iteritems():
79-
if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
83+
if isinstance(
84+
field_cls,
85+
(Document, TopLevelDocumentMetaclass)):
8086
key = field_cls
8187
reference_map.setdefault(key, []).extend(refs)
8288
elif isinstance(item, (DBRef)):
8389
reference_map.setdefault(item.collection, []).append(item.id)
8490
elif isinstance(item, (dict, SON)) and '_ref' in item:
85-
reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
86-
elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
91+
reference_map.setdefault(get_document(item['_cls']), []) \
92+
.append(item['_ref'].id)
93+
elif isinstance(item, (dict, list, tuple)) and \
94+
depth - 1 <= self.max_depth:
8795
references = self._find_references(item, depth - 1)
8896
for key, refs in references.iteritems():
8997
reference_map.setdefault(key, []).extend(refs)
@@ -96,14 +104,17 @@ def _fetch_objects(self, doc_type=None):
96104
object_map = {}
97105
for col, dbrefs in self.reference_map.iteritems():
98106
keys = object_map.keys()
99-
refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
107+
refs = list(set(
108+
[dbref for dbref in dbrefs if str(dbref) not in keys]))
100109
if hasattr(col, 'objects'): # We have a document class for the refs
101110
references = col.objects.in_bulk(refs)
102111
for key, doc in references.iteritems():
103112
object_map[key] = doc
104113
else: # Generic reference: use the refs data to convert to document
105-
if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ):
106-
references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
114+
if doc_type and \
115+
not isinstance(doc_type, (ListField, DictField, MapField,)): # noqa
116+
references = doc_type._get_db()[col].find(
117+
{'_id': {'$in': refs}})
107118
for ref in references:
108119
doc = doc_type._from_son(ref)
109120
object_map[doc.id] = doc
@@ -170,13 +181,18 @@ def _attach_objects(self, items, depth=0, instance=None, name=None):
170181
if isinstance(v, (DBRef)):
171182
data[k]._data[field_name] = self.object_map.get(v.id, v)
172183
elif isinstance(v, (dict, SON)) and '_ref' in v:
173-
data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
184+
data[k]._data[field_name] = \
185+
self.object_map.get(v['_ref'].id, v)
174186
elif isinstance(v, dict) and depth <= self.max_depth:
175-
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
176-
elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
177-
data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
187+
data[k]._data[field_name] = self._attach_objects(
188+
v, depth, instance=instance, name=name)
189+
elif isinstance(v, (list, tuple)) and \
190+
depth <= self.max_depth:
191+
data[k]._data[field_name] = self._attach_objects(
192+
v, depth, instance=instance, name=name)
178193
elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
179-
data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
194+
data[k] = self._attach_objects(
195+
v, depth - 1, instance=instance, name=name)
180196
elif hasattr(v, 'id'):
181197
data[k] = self.object_map.get(v.id, v)
182198

mongoengine/document.py

Lines changed: 33 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,8 @@
88
from connection import get_db, DEFAULT_CONNECTION_NAME
99

1010
__all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument',
11-
'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError']
11+
'DynamicEmbeddedDocument', 'OperationError',
12+
'InvalidCollectionError']
1213

1314

1415
class InvalidCollectionError(Exception):
@@ -81,14 +82,15 @@ def pk():
8182
"""
8283
def fget(self):
8384
return getattr(self, self._meta['id_field'])
85+
8486
def fset(self, value):
8587
return setattr(self, self._meta['id_field'], value)
8688
return property(fget, fset)
8789

8890
@classmethod
8991
def _get_db(cls):
9092
"""Some Model using other db_alias"""
91-
return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME ))
93+
return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))
9294

9395
@classmethod
9496
def _get_subdocuments(cls):
@@ -122,7 +124,8 @@ def _get_collection(cls):
122124
if options.get('max') != max_documents or \
123125
options.get('size') != max_size:
124126
msg = ('Cannot create collection "%s" as a capped '
125-
'collection as it already exists') % cls._collection
127+
'collection as it already exists'
128+
% cls._collection)
126129
raise InvalidCollectionError(msg)
127130
else:
128131
# Create the collection as a capped collection
@@ -139,7 +142,7 @@ def _get_collection(cls):
139142
return cls._collection
140143

141144
def save(self, force_insert=False, validate=True, write_options=None,
142-
cascade=None, cascade_kwargs=None, _refs=None):
145+
cascade=None, cascade_kwargs=None, _refs=None):
143146
"""Save the :class:`~mongoengine.Document` to the database. If the
144147
document already exists, it will be updated, otherwise it will be
145148
created.
@@ -150,24 +153,28 @@ def save(self, force_insert=False, validate=True, write_options=None,
150153
:param write_options: Extra keyword arguments are passed down to
151154
:meth:`~pymongo.collection.Collection.save` OR
152155
:meth:`~pymongo.collection.Collection.insert`
153-
which will be used as options for the resultant ``getLastError`` command.
154-
For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers
155-
have recorded the write and will force an fsync on each server being written to.
156-
:param cascade: Sets the flag for cascading saves. You can set a default by setting
157-
"cascade" in the document __meta__
158-
:param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves
156+
which will be used as options for the resultant
157+
``getLastError`` command.
158+
For example, ``save(..., w=2, fsync=True)`` will wait until at
159+
least two servers have recorded the write and will force an
160+
fsync on each server being written to.
161+
:param cascade: Sets the flag for cascading saves. You can set a
162+
default by setting "cascade" in the document __meta__
163+
:param cascade_kwargs: optional kwargs dictionary to be passed through
164+
to cascading saves
159165
:param _refs: A list of processed references used in cascading saves
160166
161167
.. versionchanged:: 0.5
162168
In existing documents it only saves changed fields using set / unset
163169
Saves are cascaded and any :class:`~bson.dbref.DBRef` objects
164170
that have changes are saved as well.
165171
.. versionchanged:: 0.6
166-
Cascade saves are optional = defaults to True, if you want fine grain
167-
control then you can turn off using document meta['cascade'] = False
168-
Also you can pass different kwargs to the cascade save using cascade_kwargs
169-
which overwrites the existing kwargs with custom values
170-
172+
Cascade saves are optional = defaults to True, if you want fine
173+
grain control then you can turn off using document
174+
meta['cascade'] = False
175+
Also you can pass different kwargs to the cascade save using
176+
cascade_kwargs which overwrites the existing kwargs with custom
177+
values
171178
"""
172179
signals.pre_save.send(self.__class__, document=self)
173180

@@ -201,11 +208,17 @@ def save(self, force_insert=False, validate=True, write_options=None,
201208

202209
upsert = self._created
203210
if updates:
204-
collection.update(select_dict, {"$set": updates}, upsert=upsert, **write_options)
211+
collection.update(select_dict,
212+
{"$set": updates},
213+
upsert=upsert,
214+
**write_options)
205215
if removals:
206-
collection.update(select_dict, {"$unset": removals}, upsert=upsert, **write_options)
216+
collection.update(select_dict,
217+
{"$unset": removals},
218+
upsert=upsert,
219+
**write_options)
207220

208-
cascade = self._meta.get('cascade', True) if cascade is None else cascade
221+
cascade = self._meta.get('cascade', True) if cascade is None else cascade # noqa
209222
if cascade:
210223
kwargs = {
211224
"force_insert": force_insert,
@@ -419,8 +432,8 @@ def object(self):
419432
try:
420433
self.key = id_field_type(self.key)
421434
except:
422-
raise Exception("Could not cast key as %s" % \
423-
id_field_type.__name__)
435+
raise Exception(
436+
"Could not cast key as %s" % id_field_type.__name__)
424437

425438
if not hasattr(self, "_key_object"):
426439
self._key_object = self._document.objects.with_id(self.key)

0 commit comments

Comments
 (0)