|
1 import warnings |
|
2 |
|
3 from django.conf import settings |
|
4 from django.db import connection, transaction, IntegrityError |
|
5 from django.db.models.fields import DateField, FieldDoesNotExist |
|
6 from django.db.models.query_utils import Q |
|
7 from django.db.models import signals, sql |
|
8 from django.dispatch import dispatcher |
|
9 from django.utils.datastructures import SortedDict |
|
10 |
|
# Used to control how many objects are worked with at once in some cases (e.g.
# when deleting objects).
CHUNK_SIZE = 100
# How many rows _fill_cache() pulls from the iterator per call when no
# explicit count is given.
ITER_CHUNK_SIZE = CHUNK_SIZE

# Pull into this namespace for backwards compatibility
EmptyResultSet = sql.EmptyResultSet
|
18 |
|
class QuerySet(object):
    """
    Represents a lazy database lookup for a set of objects.

    No database activity occurs until the QuerySet is evaluated (iterated,
    sliced, len()-ed, pickled, ...). Fetched rows are memoized in
    ``_result_cache``; ``_iter`` holds the partially-consumed results
    iterator that feeds the cache (``None`` once exhausted).
    """
    def __init__(self, model=None, query=None):
        self.model = model
        # The sql.Query describing this lookup; built against the default
        # connection when one is not supplied.
        self.query = query or sql.Query(self.model, connection)
        self._result_cache = None
        self._iter = None

    ########################
    # PYTHON MAGIC METHODS #
    ########################

    def __getstate__(self):
        """
        Allows the Queryset to be pickled.
        """
        # Force the cache to be fully populated.
        len(self)

        # Don't pickle the live results iterator; the fully populated cache
        # carries all the state we need.
        obj_dict = self.__dict__.copy()
        obj_dict['_iter'] = None
        return obj_dict

    def __repr__(self):
        # NOTE: this evaluates the whole queryset.
        return repr(list(self))

    def __len__(self):
        # Since __len__ is called quite frequently (for example, as part of
        # list(qs), we make some effort here to be as efficient as possible
        # whilst not messing up any existing iterators against the queryset.
        if self._result_cache is None:
            if self._iter:
                self._result_cache = list(self._iter)
            else:
                self._result_cache = list(self.iterator())
        elif self._iter:
            # Drain whatever is left of a partially-consumed iterator.
            self._result_cache.extend(list(self._iter))
        return len(self._result_cache)

    def __iter__(self):
        if self._result_cache is None:
            # First evaluation: start filling the cache lazily.
            self._iter = self.iterator()
            self._result_cache = []
        if self._iter:
            return self._result_iter()
        # Python's list iterator is better than our version when we're just
        # iterating over the cache.
        return iter(self._result_cache)

    def _result_iter(self):
        # Yield from the cache, topping it up from self._iter as needed, so
        # several concurrent iterators can share one database cursor.
        pos = 0
        while 1:
            upper = len(self._result_cache)
            while pos < upper:
                yield self._result_cache[pos]
                pos = pos + 1
            if not self._iter:
                raise StopIteration
            if len(self._result_cache) <= pos:
                self._fill_cache()

    def __nonzero__(self):
        # Truth-testing only needs one row, not a full fetch.
        if self._result_cache is not None:
            return bool(self._result_cache)
        try:
            iter(self).next()
        except StopIteration:
            return False
        return True

    def __getitem__(self, k):
        "Retrieve an item or slice from the set of results."
        if not isinstance(k, (slice, int, long)):
            raise TypeError
        assert ((not isinstance(k, slice) and (k >= 0))
                or (isinstance(k, slice) and (k.start is None or k.start >= 0)
                    and (k.stop is None or k.stop >= 0))), \
                "Negative indexing is not supported."

        if self._result_cache is not None:
            if self._iter is not None:
                # The result cache has only been partially populated, so we may
                # need to fill it out a bit more.
                if isinstance(k, slice):
                    if k.stop is not None:
                        # Some people insist on passing in strings here.
                        bound = int(k.stop)
                    else:
                        bound = None
                else:
                    bound = k + 1
                # NOTE(review): when bound is None (open-ended slice), the
                # Python 2 comparison "len(...) < None" is always False, so
                # the cache is NOT filled further and a partial slice can be
                # returned -- confirm this is intended.
                if len(self._result_cache) < bound:
                    self._fill_cache(bound - len(self._result_cache))
            return self._result_cache[k]

        # No cache yet: push the limits into SQL instead of fetching all rows.
        if isinstance(k, slice):
            qs = self._clone()
            if k.start is not None:
                start = int(k.start)
            else:
                start = None
            if k.stop is not None:
                stop = int(k.stop)
            else:
                stop = None
            qs.query.set_limits(start, stop)
            # A step can't be done in SQL, so evaluate and re-slice in Python.
            return k.step and list(qs)[::k.step] or qs
        try:
            qs = self._clone()
            qs.query.set_limits(k, k + 1)
            return list(qs)[0]
        except self.model.DoesNotExist, e:
            raise IndexError, e.args

    def __and__(self, other):
        # qs1 & qs2: AND the two queries' filters together.
        self._merge_sanity_check(other)
        combined = self._clone()
        combined.query.combine(other.query, sql.AND)
        return combined

    def __or__(self, other):
        # qs1 | qs2: OR the two queries' filters together.
        self._merge_sanity_check(other)
        combined = self._clone()
        combined.query.combine(other.query, sql.OR)
        return combined

    ####################################
    # METHODS THAT DO DATABASE QUERIES #
    ####################################

    def iterator(self):
        """
        An iterator over the results from applying this QuerySet to the
        database.
        """
        # select_related may be a bool (follow everything) or a dict of
        # explicitly requested relations.
        fill_cache = self.query.select_related
        if isinstance(fill_cache, dict):
            requested = fill_cache
        else:
            requested = None
        max_depth = self.query.max_depth
        # extra(select=...) columns come first in each result row.
        extra_select = self.query.extra_select.keys()
        index_start = len(extra_select)
        for row in self.query.results_iter():
            if fill_cache:
                obj, _ = get_cached_row(self.model, row, index_start,
                        max_depth, requested=requested)
            else:
                obj = self.model(*row[index_start:])
            # Attach the extra-select values as plain attributes.
            for i, k in enumerate(extra_select):
                setattr(obj, k, row[i])
            yield obj

    def count(self):
        """
        Performs a SELECT COUNT() and returns the number of records as an
        integer.

        If the queryset is already cached (i.e. self._result_cache is set) this
        simply returns the length of the cached results set to avoid multiple
        SELECT COUNT(*) calls.
        """
        if self._result_cache is not None:
            return len(self._result_cache)

        return self.query.get_count()

    def get(self, *args, **kwargs):
        """
        Performs the query and returns a single object matching the given
        keyword arguments.

        Raises self.model.DoesNotExist for no match and
        self.model.MultipleObjectsReturned for more than one.
        """
        clone = self.filter(*args, **kwargs)
        num = len(clone)
        if num == 1:
            return clone._result_cache[0]
        if not num:
            raise self.model.DoesNotExist("%s matching query does not exist."
                    % self.model._meta.object_name)
        raise self.model.MultipleObjectsReturned("get() returned more than one %s -- it returned %s! Lookup parameters were %s"
                % (self.model._meta.object_name, num, kwargs))

    def create(self, **kwargs):
        """
        Create a new object with the given kwargs, saving it to the database
        and returning the created object.
        """
        obj = self.model(**kwargs)
        obj.save()
        return obj

    def get_or_create(self, **kwargs):
        """
        Looks up an object with the given kwargs, creating one if necessary.
        Returns a tuple of (object, created), where created is a boolean
        specifying whether an object was created.
        """
        assert kwargs, \
                'get_or_create() must be passed at least one keyword argument'
        defaults = kwargs.pop('defaults', {})
        try:
            return self.get(**kwargs), False
        except self.model.DoesNotExist:
            try:
                # Strip lookup suffixes (e.g. name__iexact) -- only plain
                # field names can be passed to the model constructor.
                params = dict([(k, v) for k, v in kwargs.items() if '__' not in k])
                params.update(defaults)
                obj = self.model(**params)
                obj.save()
                return obj, True
            except IntegrityError, e:
                # A concurrent creator won the race; fetch their row.
                return self.get(**kwargs), False

    def latest(self, field_name=None):
        """
        Returns the latest object, according to the model's 'get_latest_by'
        option or optional given field_name.
        """
        latest_by = field_name or self.model._meta.get_latest_by
        assert bool(latest_by), "latest() requires either a field_name parameter or 'get_latest_by' in the model"
        assert self.query.can_filter(), \
                "Cannot change a query once a slice has been taken."
        obj = self._clone()
        obj.query.set_limits(high=1)
        obj.query.add_ordering('-%s' % latest_by)
        return obj.get()

    def in_bulk(self, id_list):
        """
        Returns a dictionary mapping each of the given IDs to the object with
        that ID.
        """
        assert self.query.can_filter(), \
                "Cannot use 'limit' or 'offset' with in_bulk"
        assert isinstance(id_list, (tuple, list)), \
                "in_bulk() must be provided with a list of IDs."
        if not id_list:
            return {}
        qs = self._clone()
        qs.query.add_filter(('pk__in', id_list))
        return dict([(obj._get_pk_val(), obj) for obj in qs.iterator()])

    def delete(self):
        """
        Deletes the records in the current QuerySet.
        """
        assert self.query.can_filter(), \
                "Cannot use 'limit' or 'offset' with delete."

        del_query = self._clone()

        # Disable non-supported fields.
        del_query.query.select_related = False
        del_query.query.clear_ordering()

        # Delete objects in chunks to prevent the list of related objects from
        # becoming too long.
        while 1:
            # Collect all the objects to be deleted in this chunk, and all the
            # objects that are related to the objects that are to be deleted.
            seen_objs = SortedDict()
            # NOTE(review): "object" shadows the builtin here.
            for object in del_query[:CHUNK_SIZE]:
                object._collect_sub_objects(seen_objs)

            if not seen_objs:
                break
            delete_objects(seen_objs)

        # Clear the result cache, in case this QuerySet gets reused.
        self._result_cache = None
    # Marks delete() as modifying data (used by e.g. template rendering).
    delete.alters_data = True

    def update(self, **kwargs):
        """
        Updates all elements in the current QuerySet, setting all the given
        fields to the appropriate values.
        """
        query = self.query.clone(sql.UpdateQuery)
        query.add_update_values(kwargs)
        query.execute_sql(None)
        transaction.commit_unless_managed()
        # Invalidate any cached results; the rows have changed.
        self._result_cache = None
    update.alters_data = True

    def _update(self, values):
        """
        A version of update that accepts field objects instead of field names.
        Used primarily for model saving and not intended for use by general
        code (it requires too much poking around at model internals to be
        useful at that level).
        """
        # Unlike update(), no commit_unless_managed() here -- presumably the
        # caller (model save) manages the transaction; confirm.
        query = self.query.clone(sql.UpdateQuery)
        query.add_update_fields(values)
        query.execute_sql(None)
        self._result_cache = None
    _update.alters_data = True

    ##################################################
    # PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
    ##################################################

    def values(self, *fields):
        # Each result is a dict of field name -> value.
        return self._clone(klass=ValuesQuerySet, setup=True, _fields=fields)

    def values_list(self, *fields, **kwargs):
        # Each result is a tuple (or a bare value when flat=True).
        flat = kwargs.pop('flat', False)
        if kwargs:
            raise TypeError('Unexpected keyword arguments to values_list: %s'
                    % (kwargs.keys(),))
        if flat and len(fields) > 1:
            raise TypeError("'flat' is not valid when values_list is called with more than one field.")
        return self._clone(klass=ValuesListQuerySet, setup=True, flat=flat,
                _fields=fields)

    def dates(self, field_name, kind, order='ASC'):
        """
        Returns a list of datetime objects representing all available dates
        for the given field_name, scoped to 'kind'.
        """
        assert kind in ("month", "year", "day"), \
                "'kind' must be one of 'year', 'month' or 'day'."
        assert order in ('ASC', 'DESC'), \
                "'order' must be either 'ASC' or 'DESC'."
        # Let the FieldDoesNotExist exception propagate.
        field = self.model._meta.get_field(field_name, many_to_many=False)
        assert isinstance(field, DateField), "%r isn't a DateField." \
                % field_name
        return self._clone(klass=DateQuerySet, setup=True, _field=field,
                _kind=kind, _order=order)

    def none(self):
        """
        Returns an empty queryset.
        """
        return self._clone(klass=EmptyQuerySet)

    ##################################################################
    # PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
    ##################################################################

    def all(self):
        """
        Returns a new QuerySet that is a copy of the current one. This allows a
        QuerySet to proxy for a model manager in some cases.
        """
        return self._clone()

    def filter(self, *args, **kwargs):
        """
        Returns a new QuerySet instance with the args ANDed to the existing
        set.
        """
        return self._filter_or_exclude(False, *args, **kwargs)

    def exclude(self, *args, **kwargs):
        """
        Returns a new QuerySet instance with NOT (args) ANDed to the existing
        set.
        """
        return self._filter_or_exclude(True, *args, **kwargs)

    def _filter_or_exclude(self, negate, *args, **kwargs):
        # Shared implementation for filter() and exclude(); 'negate' wraps
        # the new Q in a NOT.
        if args or kwargs:
            assert self.query.can_filter(), \
                    "Cannot filter a query once a slice has been taken."

        clone = self._clone()
        if negate:
            clone.query.add_q(~Q(*args, **kwargs))
        else:
            clone.query.add_q(Q(*args, **kwargs))
        return clone

    def complex_filter(self, filter_obj):
        """
        Returns a new QuerySet instance with filter_obj added to the filters.
        filter_obj can be a Q object (or anything with an add_to_query()
        method) or a dictionary of keyword lookup arguments.

        This exists to support framework features such as 'limit_choices_to',
        and usually it will be more natural to use other methods.
        """
        if isinstance(filter_obj, Q) or hasattr(filter_obj, 'add_to_query'):
            return self._filter_or_exclude(None, filter_obj)
        else:
            return self._filter_or_exclude(None, **filter_obj)

    def select_related(self, *fields, **kwargs):
        """
        Returns a new QuerySet instance that will select related objects. If
        fields are specified, they must be ForeignKey fields and only those
        related objects are included in the selection.
        """
        depth = kwargs.pop('depth', 0)
        if kwargs:
            raise TypeError('Unexpected keyword arguments to select_related: %s'
                    % (kwargs.keys(),))
        obj = self._clone()
        if fields:
            if depth:
                raise TypeError('Cannot pass both "depth" and fields to select_related()')
            obj.query.add_select_related(fields)
        else:
            obj.query.select_related = True
        if depth:
            obj.query.max_depth = depth
        return obj

    def dup_select_related(self, other):
        """
        Copies the related selection status from the queryset 'other' to the
        current queryset.
        """
        self.query.select_related = other.query.select_related

    def order_by(self, *field_names):
        """Returns a new QuerySet instance with the ordering changed."""
        assert self.query.can_filter(), \
                "Cannot reorder a query once a slice has been taken."
        obj = self._clone()
        obj.query.clear_ordering()
        obj.query.add_ordering(*field_names)
        return obj

    def distinct(self, true_or_false=True):
        """
        Returns a new QuerySet instance that will select only distinct results.
        """
        obj = self._clone()
        obj.query.distinct = true_or_false
        return obj

    def extra(self, select=None, where=None, params=None, tables=None,
            order_by=None, select_params=None):
        """
        Add extra SQL fragments to the query.
        """
        assert self.query.can_filter(), \
                "Cannot change a query once a slice has been taken"
        clone = self._clone()
        clone.query.add_extra(select, select_params, where, params, tables, order_by)
        return clone

    def reverse(self):
        """
        Reverses the ordering of the queryset.
        """
        clone = self._clone()
        clone.query.standard_ordering = not clone.query.standard_ordering
        return clone

    ###################
    # PRIVATE METHODS #
    ###################

    def _clone(self, klass=None, setup=False, **kwargs):
        # Copy this queryset (optionally as a different QuerySet subclass);
        # kwargs become attributes on the clone, and setup=True triggers the
        # subclass's _setup_query() hook if it has one.
        if klass is None:
            klass = self.__class__
        c = klass(model=self.model, query=self.query.clone())
        c.__dict__.update(kwargs)
        if setup and hasattr(c, '_setup_query'):
            c._setup_query()
        return c

    def _fill_cache(self, num=None):
        """
        Fills the result cache with 'num' more entries (or until the results
        iterator is exhausted).
        """
        if self._iter:
            try:
                for i in range(num or ITER_CHUNK_SIZE):
                    self._result_cache.append(self._iter.next())
            except StopIteration:
                # Iterator drained; mark it so callers stop asking for more.
                self._iter = None

    def _merge_sanity_check(self, other):
        """
        Checks that we are merging two comparable queryset classes.
        """
        if self.__class__ is not other.__class__:
            raise TypeError("Cannot merge querysets of different types ('%s' and '%s'."
                    % (self.__class__.__name__, other.__class__.__name__))
|
501 |
|
class ValuesQuerySet(QuerySet):
    """
    A QuerySet whose results are dictionaries mapping field names to values
    rather than model instances.
    """
    def __init__(self, *args, **kwargs):
        super(ValuesQuerySet, self).__init__(*args, **kwargs)
        # select_related isn't supported in values(). (FIXME -#3358)
        self.query.select_related = False

        # QuerySet.clone() will also set up the _fields attribute with the
        # names of the model fields to select.

    def iterator(self):
        # Drop extra(select=...) columns that weren't explicitly requested.
        self.query.trim_extra_select(self.extra_names)
        columns = list(self.query.extra_select.keys()) + self.field_names
        for row in self.query.results_iter():
            yield dict(zip(columns, row))

    def _setup_query(self):
        """
        Constructs the field_names list that the values query will be
        retrieving.

        Called by the _clone() method after initialising the rest of the
        instance.
        """
        self.extra_names = []
        if not self._fields:
            # Default to all fields.
            field_names = [f.attname for f in self.model._meta.fields]
        elif not self.query.extra_select:
            field_names = list(self._fields)
        else:
            # Split the requested names into extra-select columns and
            # ordinary model fields.
            field_names = []
            for name in self._fields:
                if name in self.query.extra_select:
                    self.extra_names.append(name)
                else:
                    field_names.append(name)

        self.query.add_fields(field_names, False)
        self.query.default_cols = False
        self.field_names = field_names

    def _clone(self, klass=None, setup=False, **kwargs):
        """
        Cloning a ValuesQuerySet preserves the current fields.
        """
        clone = super(ValuesQuerySet, self)._clone(klass, **kwargs)
        clone._fields = self._fields[:]
        clone.field_names = self.field_names
        clone.extra_names = self.extra_names
        if setup and hasattr(clone, '_setup_query'):
            clone._setup_query()
        return clone

    def _merge_sanity_check(self, other):
        # Merging only makes sense when both sides select the same columns.
        super(ValuesQuerySet, self)._merge_sanity_check(other)
        same_extra = set(self.extra_names) == set(other.extra_names)
        same_fields = set(self.field_names) == set(other.field_names)
        if not (same_extra and same_fields):
            raise TypeError("Merging '%s' classes must involve the same values in each case."
                    % self.__class__.__name__)
|
562 |
|
class ValuesListQuerySet(ValuesQuerySet):
    """
    A ValuesQuerySet variant yielding tuples (or bare values with flat=True)
    instead of dictionaries.
    """
    def iterator(self):
        self.query.trim_extra_select(self.extra_names)
        results = self.query.results_iter()
        if self.flat and len(self._fields) == 1:
            # Single-field flat mode: yield the bare value.
            for row in results:
                yield row[0]
            return
        if not self.query.extra_select:
            # Rows already come back in the requested field order.
            for row in results:
                yield row
            return
        # When extra(select=...) is involved, the extra cols come are
        # always at the start of the row, so we need to reorder the fields
        # to match the order in self._fields.
        columns = list(self.query.extra_select.keys()) + self.field_names
        for row in results:
            by_name = dict(zip(columns, row))
            yield tuple([by_name[name] for name in self._fields])

    def _clone(self, *args, **kwargs):
        # Carry the flat flag along to clones.
        duplicate = super(ValuesListQuerySet, self)._clone(*args, **kwargs)
        duplicate.flat = self.flat
        return duplicate
|
585 |
|
class DateQuerySet(QuerySet):
    """
    A QuerySet returning the distinct dates (truncated to 'kind') found in a
    DateField, as produced by QuerySet.dates().
    """
    def iterator(self):
        # Rows are plain date values, not model instances.
        return self.query.results_iter()

    def _setup_query(self):
        """
        Sets up any special features of the query attribute.

        Called by the _clone() method after initialising the rest of the
        instance.
        """
        self.query = self.query.clone(klass=sql.DateQuery, setup=True)
        self.query.select = []
        self.query.add_date_select(self._field.column, self._kind, self._order)
        if self._field.null:
            # Bug fix: this previously filtered isnull=True, which restricted
            # the result to ONLY rows with a NULL date (i.e. an empty date
            # list). NULL dates must be *excluded* from the aggregation.
            self.query.add_filter(('%s__isnull' % self._field.name, False))

    def _clone(self, klass=None, setup=False, **kwargs):
        c = super(DateQuerySet, self)._clone(klass, False, **kwargs)
        c._field = self._field
        c._kind = self._kind
        if not hasattr(c, '_order'):
            # Bug fix: _order was not propagated, so a later clone with
            # setup=True would crash in _setup_query(). Preserve it unless
            # the caller supplied one explicitly via kwargs.
            c._order = self._order
        if setup and hasattr(c, '_setup_query'):
            c._setup_query()
        return c
|
610 |
|
class EmptyQuerySet(QuerySet):
    """
    A QuerySet that always evaluates to no results, without ever hitting the
    database. Returned by QuerySet.none().
    """
    def __init__(self, model=None, query=None):
        super(EmptyQuerySet, self).__init__(model, query)
        # Pre-populate the cache as empty so evaluation never runs SQL.
        self._result_cache = []

    def count(self):
        # No query needed: the answer is always zero.
        return 0

    def delete(self):
        # Nothing to delete.
        pass

    def _clone(self, klass=None, setup=False, **kwargs):
        clone = super(EmptyQuerySet, self)._clone(klass, **kwargs)
        clone._result_cache = []
        return clone

    def iterator(self):
        # A generator that terminates immediately: the bare return before
        # the unreachable yield makes this function a (always-empty)
        # generator rather than returning None.
        return
        yield
|
631 |
|
# QOperator, QNot, QAnd and QOr are temporarily retained for backwards
# compatibility. All the old functionality is now part of the 'Q' class.
class QOperator(Q):
    """Deprecated alias of Q; emits a DeprecationWarning on construction."""
    def __init__(self, *args, **kwargs):
        warnings.warn('Use Q instead of QOr, QAnd or QOperation.',
                DeprecationWarning, stacklevel=2)
        super(QOperator, self).__init__(*args, **kwargs)

# Both old names now point at the same deprecated shim.
QOr = QAnd = QOperator
|
641 |
|
def QNot(q):
    """Deprecated: return the negation of ``q``. Use ``~q`` instead."""
    warnings.warn('Use ~q instead of QNot(q)', DeprecationWarning, stacklevel=2)
    # Equivalent to ~q, spelled via the protocol method.
    return q.__invert__()
|
645 |
|
def get_cached_row(klass, row, index_start, max_depth=0, cur_depth=0,
        requested=None):
    """
    Helper function that recursively returns an object with the specified
    related attributes already populated.

    Returns a (instance, index_end) pair, where index_end is the position in
    'row' just past the columns consumed by this object and its recursively
    populated relations, or None when the depth limit has been reached.
    """
    if max_depth and requested is None and cur_depth > max_depth:
        # We've recursed deeply enough; stop now.
        return None

    # 'requested' being a dict means select_related() was called with
    # explicit field names; only those relations get followed.
    restricted = requested is not None
    index_end = index_start + len(klass._meta.fields)
    obj = klass(*row[index_start:index_end])
    for f in klass._meta.fields:
        # Skip non-relations, nullable relations in unrestricted mode,
        # relations not explicitly requested, and parent links.
        if (not f.rel or (not restricted and f.null) or
                (restricted and f.name not in requested) or f.rel.parent_link):
            continue
        if restricted:
            # NOTE(review): 'next' shadows the builtin name.
            next = requested[f.name]
        else:
            next = None
        # Recurse; index_end advances past the related object's columns so
        # sibling relations read from the correct offset.
        cached_row = get_cached_row(f.rel.to, row, index_end, max_depth,
                cur_depth+1, next)
        if cached_row:
            rel_obj, index_end = cached_row
            setattr(obj, f.get_cache_name(), rel_obj)
    return obj, index_end
|
673 |
|
def delete_objects(seen_objs):
    """
    Iterate through a list of seen classes, and remove any instances that are
    referred to.

    ``seen_objs`` maps model classes to {pk: instance} mappings (as collected
    by _collect_sub_objects). Related rows are cleared first, then the actual
    rows are deleted in reverse dependency order, with pre_delete/post_delete
    signals sent around the whole operation.
    """
    # Delete in the reverse of collection order so dependents go first.
    # (Python 2: keys() returns a list, so in-place reverse() is fine.)
    ordered_classes = seen_objs.keys()
    ordered_classes.reverse()

    for cls in ordered_classes:
        # Flatten each class's {pk: instance} mapping into a sorted
        # (pk, instance) list; later code relies on this list form.
        seen_objs[cls] = seen_objs[cls].items()
        seen_objs[cls].sort()

        # Pre notify all instances to be deleted
        for pk_val, instance in seen_objs[cls]:
            dispatcher.send(signal=signals.pre_delete, sender=cls,
                    instance=instance)

        # Remove rows in related (e.g. many-to-many) tables first.
        pk_list = [pk for pk,instance in seen_objs[cls]]
        del_query = sql.DeleteQuery(cls, connection)
        del_query.delete_batch_related(pk_list)

        # NULL out nullable foreign keys pointing at soon-to-be-deleted rows
        # so the main deletes don't violate constraints.
        update_query = sql.UpdateQuery(cls, connection)
        for field in cls._meta.fields:
            if field.rel and field.null and field.rel.to in seen_objs:
                update_query.clear_related(field, pk_list)

    # Now delete the actual data
    for cls in ordered_classes:
        seen_objs[cls].reverse()
        pk_list = [pk for pk,instance in seen_objs[cls]]
        del_query = sql.DeleteQuery(cls, connection)
        del_query.delete_batch(pk_list)

        # Last cleanup; set NULLs where there once was a reference to the
        # object, NULL the primary key of the found objects, and perform
        # post-notification.
        for pk_val, instance in seen_objs[cls]:
            for field in cls._meta.fields:
                if field.rel and field.null and field.rel.to in seen_objs:
                    setattr(instance, field.attname, None)

            dispatcher.send(signal=signals.post_delete, sender=cls,
                    instance=instance)
            # Mark the in-memory instance as no longer persisted.
            setattr(instance, cls._meta.pk.attname, None)

    transaction.commit_unless_managed()
|
720 |
|
def insert_query(model, values, return_id=False, raw_values=False):
    """
    Inserts a new record for the given model. This provides an interface to
    the InsertQuery class and is how Model.save() is implemented. It is not
    part of the public API.
    """
    insert = sql.InsertQuery(model, connection)
    insert.insert_values(values, raw_values)
    return insert.execute_sql(return_id)
|
730 |