175 obj.distinct = self.distinct |
181 obj.distinct = self.distinct |
176 obj.select_related = self.select_related |
182 obj.select_related = self.select_related |
177 obj.related_select_cols = [] |
183 obj.related_select_cols = [] |
178 obj.max_depth = self.max_depth |
184 obj.max_depth = self.max_depth |
179 obj.extra_select = self.extra_select.copy() |
185 obj.extra_select = self.extra_select.copy() |
180 obj.extra_select_params = self.extra_select_params |
|
181 obj.extra_tables = self.extra_tables |
186 obj.extra_tables = self.extra_tables |
182 obj.extra_where = self.extra_where |
187 obj.extra_where = self.extra_where |
183 obj.extra_params = self.extra_params |
188 obj.extra_params = self.extra_params |
184 obj.extra_order_by = self.extra_order_by |
189 obj.extra_order_by = self.extra_order_by |
|
190 if self.filter_is_sticky and self.used_aliases: |
|
191 obj.used_aliases = self.used_aliases.copy() |
|
192 else: |
|
193 obj.used_aliases = set() |
|
194 obj.filter_is_sticky = False |
185 obj.__dict__.update(kwargs) |
195 obj.__dict__.update(kwargs) |
186 if hasattr(obj, '_setup_query'): |
196 if hasattr(obj, '_setup_query'): |
187 obj._setup_query() |
197 obj._setup_query() |
188 return obj |
198 return obj |
189 |
199 |
190 def results_iter(self): |
200 def results_iter(self): |
191 """ |
201 """ |
192 Returns an iterator over the results from executing this query. |
202 Returns an iterator over the results from executing this query. |
193 """ |
203 """ |
194 resolve_columns = hasattr(self, 'resolve_columns') |
204 resolve_columns = hasattr(self, 'resolve_columns') |
195 if resolve_columns: |
205 fields = None |
196 if self.select_fields: |
|
197 fields = self.select_fields + self.related_select_fields |
|
198 else: |
|
199 fields = self.model._meta.fields |
|
200 for rows in self.execute_sql(MULTI): |
206 for rows in self.execute_sql(MULTI): |
201 for row in rows: |
207 for row in rows: |
202 if resolve_columns: |
208 if resolve_columns: |
|
209 if fields is None: |
|
210 # We only set this up here because |
|
211 # related_select_fields isn't populated until |
|
212 # execute_sql() has been called. |
|
213 if self.select_fields: |
|
214 fields = self.select_fields + self.related_select_fields |
|
215 else: |
|
216 fields = self.model._meta.fields |
203 row = self.resolve_columns(row, fields) |
217 row = self.resolve_columns(row, fields) |
204 yield row |
218 yield row |
205 |
219 |
206 def get_count(self): |
220 def get_count(self): |
207 """ |
221 """ |
360 else: |
380 else: |
361 item = deepcopy(col) |
381 item = deepcopy(col) |
362 item.relabel_aliases(change_map) |
382 item.relabel_aliases(change_map) |
363 self.select.append(item) |
383 self.select.append(item) |
364 self.select_fields = rhs.select_fields[:] |
384 self.select_fields = rhs.select_fields[:] |
365 self.extra_select = rhs.extra_select.copy() |
385 |
366 self.extra_tables = rhs.extra_tables |
386 if connector == OR: |
367 self.extra_where = rhs.extra_where |
387 # It would be nice to be able to handle this, but the queries don't |
368 self.extra_params = rhs.extra_params |
388 # really make sense (or return consistent value sets). Not worth |
|
389 # the extra complexity when you can write a real query instead. |
|
390 if self.extra_select and rhs.extra_select: |
|
391 raise ValueError("When merging querysets using 'or', you " |
|
392 "cannot have extra(select=...) on both sides.") |
|
393 if self.extra_where and rhs.extra_where: |
|
394 raise ValueError("When merging querysets using 'or', you " |
|
395 "cannot have extra(where=...) on both sides.") |
|
396 self.extra_select.update(rhs.extra_select) |
|
397 self.extra_tables += rhs.extra_tables |
|
398 self.extra_where += rhs.extra_where |
|
399 self.extra_params += rhs.extra_params |
369 |
400 |
370 # Ordering uses the 'rhs' ordering, unless it has none, in which case |
401 # Ordering uses the 'rhs' ordering, unless it has none, in which case |
371 # the current ordering is used. |
402 # the current ordering is used. |
372 self.order_by = rhs.order_by and rhs.order_by[:] or self.order_by |
403 self.order_by = rhs.order_by and rhs.order_by[:] or self.order_by |
373 self.extra_order_by = rhs.extra_order_by or self.extra_order_by |
404 self.extra_order_by = rhs.extra_order_by or self.extra_order_by |
437 col_aliases.add(col) |
468 col_aliases.add(col) |
438 |
469 |
439 self._select_aliases = aliases |
470 self._select_aliases = aliases |
440 return result |
471 return result |
441 |
472 |
442 def get_default_columns(self, with_aliases=False, col_aliases=None): |
473 def get_default_columns(self, with_aliases=False, col_aliases=None, |
|
474 start_alias=None, opts=None, as_pairs=False): |
443 """ |
475 """ |
444 Computes the default columns for selecting every field in the base |
476 Computes the default columns for selecting every field in the base |
445 model. |
477 model. |
446 |
478 |
447 Returns a list of strings, quoted appropriately for use in SQL |
479 Returns a list of strings, quoted appropriately for use in SQL |
448 directly, as well as a set of aliases used in the select statement. |
480 directly, as well as a set of aliases used in the select statement (if |
|
481 'as_pairs' is True, returns a list of (alias, col_name) pairs instead |
|
482 of strings as the first component and None as the second component). |
449 """ |
483 """ |
450 result = [] |
484 result = [] |
451 table_alias = self.tables[0] |
485 if opts is None: |
452 root_pk = self.model._meta.pk.column |
486 opts = self.model._meta |
|
487 if start_alias: |
|
488 table_alias = start_alias |
|
489 else: |
|
490 table_alias = self.tables[0] |
|
491 root_pk = opts.pk.column |
453 seen = {None: table_alias} |
492 seen = {None: table_alias} |
454 qn = self.quote_name_unless_alias |
493 qn = self.quote_name_unless_alias |
455 qn2 = self.connection.ops.quote_name |
494 qn2 = self.connection.ops.quote_name |
456 aliases = set() |
495 aliases = set() |
457 for field, model in self.model._meta.get_fields_with_model(): |
496 for field, model in opts.get_fields_with_model(): |
458 try: |
497 try: |
459 alias = seen[model] |
498 alias = seen[model] |
460 except KeyError: |
499 except KeyError: |
461 alias = self.join((table_alias, model._meta.db_table, |
500 alias = self.join((table_alias, model._meta.db_table, |
462 root_pk, model._meta.pk.column)) |
501 root_pk, model._meta.pk.column)) |
463 seen[model] = alias |
502 seen[model] = alias |
|
503 if as_pairs: |
|
504 result.append((alias, field.column)) |
|
505 continue |
464 if with_aliases and field.column in col_aliases: |
506 if with_aliases and field.column in col_aliases: |
465 c_alias = 'Col%d' % len(col_aliases) |
507 c_alias = 'Col%d' % len(col_aliases) |
466 result.append('%s.%s AS %s' % (qn(alias), |
508 result.append('%s.%s AS %s' % (qn(alias), |
467 qn2(field.column), c_alias)) |
509 qn2(field.column), c_alias)) |
468 col_aliases.add(c_alias) |
510 col_aliases.add(c_alias) |
677 """ |
751 """ |
678 Promotes the join type of an alias to an outer join if it's possible |
752 Promotes the join type of an alias to an outer join if it's possible |
679 for the join to contain NULL values on the left. If 'unconditional' is |
753 for the join to contain NULL values on the left. If 'unconditional' is |
680 False, the join is only promoted if it is nullable, otherwise it is |
754 False, the join is only promoted if it is nullable, otherwise it is |
681 always promoted. |
755 always promoted. |
|
756 |
|
757 Returns True if the join was promoted. |
682 """ |
758 """ |
683 if ((unconditional or self.alias_map[alias][NULLABLE]) and |
759 if ((unconditional or self.alias_map[alias][NULLABLE]) and |
684 self.alias_map[alias] != self.LOUTER): |
760 self.alias_map[alias][JOIN_TYPE] != self.LOUTER): |
685 data = list(self.alias_map[alias]) |
761 data = list(self.alias_map[alias]) |
686 data[JOIN_TYPE] = self.LOUTER |
762 data[JOIN_TYPE] = self.LOUTER |
687 self.alias_map[alias] = tuple(data) |
763 self.alias_map[alias] = tuple(data) |
|
764 return True |
|
765 return False |
|
766 |
|
767 def promote_alias_chain(self, chain, must_promote=False): |
|
768 """ |
|
769 Walks along a chain of aliases, promoting the first nullable join and |
|
770 any joins following that. If 'must_promote' is True, all the aliases in |
|
771 the chain are promoted. |
|
772 """ |
|
773 for alias in chain: |
|
774 if self.promote_alias(alias, must_promote): |
|
775 must_promote = True |
|
776 |
|
777 def promote_unused_aliases(self, initial_refcounts, used_aliases): |
|
778 """ |
|
779 Given a "before" copy of the alias_refcounts dictionary (as |
|
780 'initial_refcounts') and a collection of aliases that may have been |
|
781 changed or created, works out which aliases have been created since |
|
782 then and which ones haven't been used and promotes all of those |
|
783 aliases, plus any children of theirs in the alias tree, to outer joins. |
|
784 """ |
|
785 # FIXME: There's some (a lot of!) overlap with the similar OR promotion |
|
786 # in add_filter(). It's not quite identical, but is very similar. So |
|
787 # pulling out the common bits is something for later. |
|
788 considered = {} |
|
789 for alias in self.tables: |
|
790 if alias not in used_aliases: |
|
791 continue |
|
792 if (alias not in initial_refcounts or |
|
793 self.alias_refcount[alias] == initial_refcounts[alias]): |
|
794 parent = self.alias_map[alias][LHS_ALIAS] |
|
795 must_promote = considered.get(parent, False) |
|
796 promoted = self.promote_alias(alias, must_promote) |
|
797 considered[alias] = must_promote or promoted |
688 |
798 |
689 def change_aliases(self, change_map): |
799 def change_aliases(self, change_map): |
690 """ |
800 """ |
691 Changes the aliases in change_map (which maps old-alias -> new-alias), |
801 Changes the aliases in change_map (which maps old-alias -> new-alias), |
692 relabelling any references to them in select columns and the where |
802 relabelling any references to them in select columns and the where |
849 self.join_map[t_ident] = (alias,) |
967 self.join_map[t_ident] = (alias,) |
850 self.rev_join_map[alias] = t_ident |
968 self.rev_join_map[alias] = t_ident |
851 return alias |
969 return alias |
852 |
970 |
853 def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1, |
971 def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1, |
854 used=None, requested=None, restricted=None): |
972 used=None, requested=None, restricted=None, nullable=None, |
|
973 dupe_set=None, avoid_set=None): |
855 """ |
974 """ |
856 Fill in the information needed for a select_related query. The current |
975 Fill in the information needed for a select_related query. The current |
857 depth is measured as the number of connections away from the root model |
976 depth is measured as the number of connections away from the root model |
858 (for example, cur_depth=1 means we are looking at models with direct |
977 (for example, cur_depth=1 means we are looking at models with direct |
859 connections to the root model). |
978 connections to the root model). |
860 """ |
979 """ |
861 if not restricted and self.max_depth and cur_depth > self.max_depth: |
980 if not restricted and self.max_depth and cur_depth > self.max_depth: |
862 # We've recursed far enough; bail out. |
981 # We've recursed far enough; bail out. |
863 return |
982 return |
|
983 |
864 if not opts: |
984 if not opts: |
865 opts = self.get_meta() |
985 opts = self.get_meta() |
866 root_alias = self.get_initial_alias() |
986 root_alias = self.get_initial_alias() |
867 self.related_select_cols = [] |
987 self.related_select_cols = [] |
868 self.related_select_fields = [] |
988 self.related_select_fields = [] |
869 if not used: |
989 if not used: |
870 used = set() |
990 used = set() |
|
991 if dupe_set is None: |
|
992 dupe_set = set() |
|
993 if avoid_set is None: |
|
994 avoid_set = set() |
|
995 orig_dupe_set = dupe_set |
871 |
996 |
872 # Setup for the case when only particular related fields should be |
997 # Setup for the case when only particular related fields should be |
873 # included in the related selection. |
998 # included in the related selection. |
874 if requested is None and restricted is not False: |
999 if requested is None and restricted is not False: |
875 if isinstance(self.select_related, dict): |
1000 if isinstance(self.select_related, dict): |
877 restricted = True |
1002 restricted = True |
878 else: |
1003 else: |
879 restricted = False |
1004 restricted = False |
880 |
1005 |
881 for f, model in opts.get_fields_with_model(): |
1006 for f, model in opts.get_fields_with_model(): |
882 if (not f.rel or (restricted and f.name not in requested) or |
1007 if not select_related_descend(f, restricted, requested): |
883 (not restricted and f.null) or f.rel.parent_link): |
|
884 continue |
1008 continue |
|
1009 # The "avoid" set is aliases we want to avoid just for this |
|
1010 # particular branch of the recursion. They aren't permanently |
|
1011 # forbidden from reuse in the related selection tables (which is |
|
1012 # what "used" specifies). |
|
1013 avoid = avoid_set.copy() |
|
1014 dupe_set = orig_dupe_set.copy() |
885 table = f.rel.to._meta.db_table |
1015 table = f.rel.to._meta.db_table |
|
1016 if nullable or f.null: |
|
1017 promote = True |
|
1018 else: |
|
1019 promote = False |
886 if model: |
1020 if model: |
887 int_opts = opts |
1021 int_opts = opts |
888 alias = root_alias |
1022 alias = root_alias |
889 for int_model in opts.get_base_chain(model): |
1023 for int_model in opts.get_base_chain(model): |
890 lhs_col = int_opts.parents[int_model].column |
1024 lhs_col = int_opts.parents[int_model].column |
|
1025 dedupe = lhs_col in opts.duplicate_targets |
|
1026 if dedupe: |
|
1027 avoid.update(self.dupe_avoidance.get(id(opts), lhs_col), |
|
1028 ()) |
|
1029 dupe_set.add((opts, lhs_col)) |
891 int_opts = int_model._meta |
1030 int_opts = int_model._meta |
892 alias = self.join((alias, int_opts.db_table, lhs_col, |
1031 alias = self.join((alias, int_opts.db_table, lhs_col, |
893 int_opts.pk.column), exclusions=used, |
1032 int_opts.pk.column), exclusions=used, |
894 promote=f.null) |
1033 promote=promote) |
|
1034 for (dupe_opts, dupe_col) in dupe_set: |
|
1035 self.update_dupe_avoidance(dupe_opts, dupe_col, alias) |
895 else: |
1036 else: |
896 alias = root_alias |
1037 alias = root_alias |
|
1038 |
|
1039 dedupe = f.column in opts.duplicate_targets |
|
1040 if dupe_set or dedupe: |
|
1041 avoid.update(self.dupe_avoidance.get((id(opts), f.column), ())) |
|
1042 if dedupe: |
|
1043 dupe_set.add((opts, f.column)) |
|
1044 |
897 alias = self.join((alias, table, f.column, |
1045 alias = self.join((alias, table, f.column, |
898 f.rel.get_related_field().column), exclusions=used, |
1046 f.rel.get_related_field().column), |
899 promote=f.null) |
1047 exclusions=used.union(avoid), promote=promote) |
900 used.add(alias) |
1048 used.add(alias) |
901 self.related_select_cols.extend([(alias, f2.column) |
1049 self.related_select_cols.extend(self.get_default_columns( |
902 for f2 in f.rel.to._meta.fields]) |
1050 start_alias=alias, opts=f.rel.to._meta, as_pairs=True)[0]) |
903 self.related_select_fields.extend(f.rel.to._meta.fields) |
1051 self.related_select_fields.extend(f.rel.to._meta.fields) |
904 if restricted: |
1052 if restricted: |
905 next = requested.get(f.name, {}) |
1053 next = requested.get(f.name, {}) |
906 else: |
1054 else: |
907 next = False |
1055 next = False |
|
1056 if f.null is not None: |
|
1057 new_nullable = f.null |
|
1058 else: |
|
1059 new_nullable = None |
|
1060 for dupe_opts, dupe_col in dupe_set: |
|
1061 self.update_dupe_avoidance(dupe_opts, dupe_col, alias) |
908 self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1, |
1062 self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1, |
909 used, next, restricted) |
1063 used, next, restricted, new_nullable, dupe_set, avoid) |
910 |
1064 |
911 def add_filter(self, filter_expr, connector=AND, negate=False, trim=False, |
1065 def add_filter(self, filter_expr, connector=AND, negate=False, trim=False, |
912 can_reuse=None): |
1066 can_reuse=None, process_extras=True): |
913 """ |
1067 """ |
914 Add a single filter to the query. The 'filter_expr' is a pair: |
1068 Add a single filter to the query. The 'filter_expr' is a pair: |
915 (filter_string, value). E.g. ('name__contains', 'fred') |
1069 (filter_string, value). E.g. ('name__contains', 'fred') |
916 |
1070 |
917 If 'negate' is True, this is an exclude() filter. It's important to |
1071 If 'negate' is True, this is an exclude() filter. It's important to |
946 if value is None: |
1104 if value is None: |
947 if lookup_type != 'exact': |
1105 if lookup_type != 'exact': |
948 raise ValueError("Cannot use None as a query value") |
1106 raise ValueError("Cannot use None as a query value") |
949 lookup_type = 'isnull' |
1107 lookup_type = 'isnull' |
950 value = True |
1108 value = True |
|
1109 elif (value == '' and lookup_type == 'exact' and |
|
1110 connection.features.interprets_empty_strings_as_nulls): |
|
1111 lookup_type = 'isnull' |
|
1112 value = True |
951 elif callable(value): |
1113 elif callable(value): |
952 value = value() |
1114 value = value() |
953 |
1115 |
954 opts = self.get_meta() |
1116 opts = self.get_meta() |
955 alias = self.get_initial_alias() |
1117 alias = self.get_initial_alias() |
956 allow_many = trim or not negate |
1118 allow_many = trim or not negate |
957 |
1119 |
958 try: |
1120 try: |
959 field, target, opts, join_list, last = self.setup_joins(parts, opts, |
1121 field, target, opts, join_list, last, extra_filters = self.setup_joins( |
960 alias, True, allow_many, can_reuse=can_reuse) |
1122 parts, opts, alias, True, allow_many, can_reuse=can_reuse, |
|
1123 negate=negate, process_extras=process_extras) |
961 except MultiJoin, e: |
1124 except MultiJoin, e: |
962 self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level])) |
1125 self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]), |
|
1126 can_reuse) |
963 return |
1127 return |
964 final = len(join_list) |
1128 final = len(join_list) |
965 penultimate = last.pop() |
1129 penultimate = last.pop() |
966 if penultimate == final: |
1130 if penultimate == final: |
967 penultimate = last.pop() |
1131 penultimate = last.pop() |
975 self.unref_alias(alias) |
1139 self.unref_alias(alias) |
976 else: |
1140 else: |
977 col = target.column |
1141 col = target.column |
978 alias = join_list[-1] |
1142 alias = join_list[-1] |
979 |
1143 |
980 if final > 1: |
1144 while final > 1: |
981 # An optimization: if the final join is against the same column as |
1145 # An optimization: if the final join is against the same column as |
982 # we are comparing against, we can go back one step in the join |
1146 # we are comparing against, we can go back one step in the join |
983 # chain and compare against the lhs of the join instead. The result |
1147 # chain and compare against the lhs of the join instead (and then |
984 # (potentially) involves one less table join. |
1148 # repeat the optimization). The result, potentially, involves less |
|
1149 # table joins. |
985 join = self.alias_map[alias] |
1150 join = self.alias_map[alias] |
986 if col == join[RHS_JOIN_COL]: |
1151 if col != join[RHS_JOIN_COL]: |
987 self.unref_alias(alias) |
1152 break |
988 alias = join[LHS_ALIAS] |
1153 self.unref_alias(alias) |
989 col = join[LHS_JOIN_COL] |
1154 alias = join[LHS_ALIAS] |
990 join_list = join_list[:-1] |
1155 col = join[LHS_JOIN_COL] |
991 final -= 1 |
1156 join_list = join_list[:-1] |
992 if final == penultimate: |
1157 final -= 1 |
993 penultimate = last.pop() |
1158 if final == penultimate: |
|
1159 penultimate = last.pop() |
994 |
1160 |
995 if (lookup_type == 'isnull' and value is True and not negate and |
1161 if (lookup_type == 'isnull' and value is True and not negate and |
996 final > 1): |
1162 final > 1): |
997 # If the comparison is against NULL, we need to use a left outer |
1163 # If the comparison is against NULL, we need to use a left outer |
998 # join when connecting to the previous model. We make that |
1164 # join when connecting to the previous model. We make that |
1007 # make the new additions (and any existing ones not used in the new |
1173 # make the new additions (and any existing ones not used in the new |
1008 # join list) an outer join. |
1174 # join list) an outer join. |
1009 join_it = iter(join_list) |
1175 join_it = iter(join_list) |
1010 table_it = iter(self.tables) |
1176 table_it = iter(self.tables) |
1011 join_it.next(), table_it.next() |
1177 join_it.next(), table_it.next() |
|
1178 table_promote = False |
|
1179 join_promote = False |
1012 for join in join_it: |
1180 for join in join_it: |
1013 table = table_it.next() |
1181 table = table_it.next() |
1014 if join == table and self.alias_refcount[join] > 1: |
1182 if join == table and self.alias_refcount[join] > 1: |
1015 continue |
1183 continue |
1016 self.promote_alias(join) |
1184 join_promote = self.promote_alias(join) |
1017 if table != join: |
1185 if table != join: |
1018 self.promote_alias(table) |
1186 table_promote = self.promote_alias(table) |
1019 break |
1187 break |
1020 for join in join_it: |
1188 self.promote_alias_chain(join_it, join_promote) |
1021 self.promote_alias(join) |
1189 self.promote_alias_chain(table_it, table_promote) |
1022 for table in table_it: |
|
1023 # Some of these will have been promoted from the join_list, but |
|
1024 # that's harmless. |
|
1025 self.promote_alias(table) |
|
1026 |
1190 |
1027 self.where.add((alias, col, field, lookup_type, value), connector) |
1191 self.where.add((alias, col, field, lookup_type, value), connector) |
|
1192 |
1028 if negate: |
1193 if negate: |
1029 for alias in join_list: |
1194 self.promote_alias_chain(join_list) |
1030 self.promote_alias(alias) |
1195 if lookup_type != 'isnull': |
1031 if final > 1 and lookup_type != 'isnull': |
1196 if final > 1: |
1032 for alias in join_list: |
1197 for alias in join_list: |
1033 if self.alias_map[alias] == self.LOUTER: |
1198 if self.alias_map[alias][JOIN_TYPE] == self.LOUTER: |
1034 j_col = self.alias_map[alias][RHS_JOIN_COL] |
1199 j_col = self.alias_map[alias][RHS_JOIN_COL] |
1035 entry = Node([(alias, j_col, None, 'isnull', True)]) |
1200 entry = self.where_class() |
1036 entry.negate() |
1201 entry.add((alias, j_col, None, 'isnull', True), AND) |
1037 self.where.add(entry, AND) |
1202 entry.negate() |
1038 break |
1203 self.where.add(entry, AND) |
|
1204 break |
|
1205 elif not (lookup_type == 'in' and not value) and field.null: |
|
1206 # Leaky abstraction artifact: We have to specifically |
|
1207 # exclude the "foo__in=[]" case from this handling, because |
|
1208 # it's short-circuited in the Where class. |
|
1209 entry = self.where_class() |
|
1210 entry.add((alias, col, None, 'isnull', True), AND) |
|
1211 entry.negate() |
|
1212 self.where.add(entry, AND) |
|
1213 |
1039 if can_reuse is not None: |
1214 if can_reuse is not None: |
1040 can_reuse.update(join_list) |
1215 can_reuse.update(join_list) |
|
1216 if process_extras: |
|
1217 for filter in extra_filters: |
|
1218 self.add_filter(filter, negate=negate, can_reuse=can_reuse, |
|
1219 process_extras=False) |
1041 |
1220 |
1042 def add_q(self, q_object, used_aliases=None): |
1221 def add_q(self, q_object, used_aliases=None): |
1043 """ |
1222 """ |
1044 Adds a Q-object to the current filter. |
1223 Adds a Q-object to the current filter. |
1045 |
1224 |
1046 Can also be used to add anything that has an 'add_to_query()' method. |
1225 Can also be used to add anything that has an 'add_to_query()' method. |
1047 """ |
1226 """ |
1048 if used_aliases is None: |
1227 if used_aliases is None: |
1049 used_aliases = set() |
1228 used_aliases = self.used_aliases |
1050 if hasattr(q_object, 'add_to_query'): |
1229 if hasattr(q_object, 'add_to_query'): |
1051 # Complex custom objects are responsible for adding themselves. |
1230 # Complex custom objects are responsible for adding themselves. |
1052 q_object.add_to_query(self, used_aliases) |
1231 q_object.add_to_query(self, used_aliases) |
1053 return |
|
1054 |
|
1055 if self.where and q_object.connector != AND and len(q_object) > 1: |
|
1056 self.where.start_subtree(AND) |
|
1057 subtree = True |
|
1058 else: |
1232 else: |
1059 subtree = False |
1233 if self.where and q_object.connector != AND and len(q_object) > 1: |
1060 connector = AND |
1234 self.where.start_subtree(AND) |
1061 for child in q_object.children: |
1235 subtree = True |
1062 if isinstance(child, Node): |
1236 else: |
1063 self.where.start_subtree(connector) |
1237 subtree = False |
1064 self.add_q(child, used_aliases) |
1238 connector = AND |
|
1239 for child in q_object.children: |
|
1240 if connector == OR: |
|
1241 refcounts_before = self.alias_refcount.copy() |
|
1242 if isinstance(child, Node): |
|
1243 self.where.start_subtree(connector) |
|
1244 self.add_q(child, used_aliases) |
|
1245 self.where.end_subtree() |
|
1246 else: |
|
1247 self.add_filter(child, connector, q_object.negated, |
|
1248 can_reuse=used_aliases) |
|
1249 if connector == OR: |
|
1250 # Aliases that were newly added or not used at all need to |
|
1251 # be promoted to outer joins if they are nullable relations. |
|
1252 # (they shouldn't turn the whole conditional into the empty |
|
1253 # set just because they don't match anything). |
|
1254 self.promote_unused_aliases(refcounts_before, used_aliases) |
|
1255 connector = q_object.connector |
|
1256 if q_object.negated: |
|
1257 self.where.negate() |
|
1258 if subtree: |
1065 self.where.end_subtree() |
1259 self.where.end_subtree() |
1066 else: |
1260 if self.filter_is_sticky: |
1067 self.add_filter(child, connector, q_object.negated, |
1261 self.used_aliases = used_aliases |
1068 can_reuse=used_aliases) |
|
1069 connector = q_object.connector |
|
1070 if q_object.negated: |
|
1071 self.where.negate() |
|
1072 if subtree: |
|
1073 self.where.end_subtree() |
|
1074 |
1262 |
1075 def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True, |
1263 def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True, |
1076 allow_explicit_fk=False, can_reuse=None): |
1264 allow_explicit_fk=False, can_reuse=None, negate=False, |
|
1265 process_extras=True): |
1077 """ |
1266 """ |
1078 Compute the necessary table joins for the passage through the fields |
1267 Compute the necessary table joins for the passage through the fields |
1079 given in 'names'. 'opts' is the Options class for the current model |
1268 given in 'names'. 'opts' is the Options class for the current model |
1080 (which gives the table we are joining to), 'alias' is the alias for the |
1269 (which gives the table we are joining to), 'alias' is the alias for the |
1081 table we are joining to. If dupe_multis is True, any many-to-many or |
1270 table we are joining to. If dupe_multis is True, any many-to-many or |
1082 many-to-one joins will always create a new alias (necessary for |
1271 many-to-one joins will always create a new alias (necessary for |
1083 disjunctive filters). |
1272 disjunctive filters). If can_reuse is not None, it's a list of aliases |
|
1273 that can be reused in these joins (nothing else can be reused in this |
|
1274 case). Finally, 'negate' is used in the same sense as for add_filter() |
|
1275 -- it indicates an exclude() filter, or something similar. It is only |
|
1276 passed in here so that it can be passed to a field's extra_filter() for |
|
1277 customised behaviour. |
1084 |
1278 |
1085 Returns the final field involved in the join, the target database |
1279 Returns the final field involved in the join, the target database |
1086 column (used for any 'where' constraint), the final 'opts' value and the |
1280 column (used for any 'where' constraint), the final 'opts' value and the |
1087 list of tables joined. |
1281 list of tables joined. |
1088 """ |
1282 """ |
1089 joins = [alias] |
1283 joins = [alias] |
1090 last = [0] |
1284 last = [0] |
|
1285 dupe_set = set() |
|
1286 exclusions = set() |
|
1287 extra_filters = [] |
1091 for pos, name in enumerate(names): |
1288 for pos, name in enumerate(names): |
|
1289 try: |
|
1290 exclusions.add(int_alias) |
|
1291 except NameError: |
|
1292 pass |
|
1293 exclusions.add(alias) |
1092 last.append(len(joins)) |
1294 last.append(len(joins)) |
1093 if name == 'pk': |
1295 if name == 'pk': |
1094 name = opts.pk.name |
1296 name = opts.pk.name |
1095 |
1297 |
1096 try: |
1298 try: |
1105 break |
1307 break |
1106 else: |
1308 else: |
1107 names = opts.get_all_field_names() |
1309 names = opts.get_all_field_names() |
1108 raise FieldError("Cannot resolve keyword %r into field. " |
1310 raise FieldError("Cannot resolve keyword %r into field. " |
1109 "Choices are: %s" % (name, ", ".join(names))) |
1311 "Choices are: %s" % (name, ", ".join(names))) |
|
1312 |
1110 if not allow_many and (m2m or not direct): |
1313 if not allow_many and (m2m or not direct): |
1111 for alias in joins: |
1314 for alias in joins: |
1112 self.unref_alias(alias) |
1315 self.unref_alias(alias) |
1113 raise MultiJoin(pos + 1) |
1316 raise MultiJoin(pos + 1) |
1114 if model: |
1317 if model: |
1115 # The field lives on a base class of the current model. |
1318 # The field lives on a base class of the current model. |
1116 alias_list = [] |
|
1117 for int_model in opts.get_base_chain(model): |
1319 for int_model in opts.get_base_chain(model): |
1118 lhs_col = opts.parents[int_model].column |
1320 lhs_col = opts.parents[int_model].column |
|
1321 dedupe = lhs_col in opts.duplicate_targets |
|
1322 if dedupe: |
|
1323 exclusions.update(self.dupe_avoidance.get( |
|
1324 (id(opts), lhs_col), ())) |
|
1325 dupe_set.add((opts, lhs_col)) |
1119 opts = int_model._meta |
1326 opts = int_model._meta |
1120 alias = self.join((alias, opts.db_table, lhs_col, |
1327 alias = self.join((alias, opts.db_table, lhs_col, |
1121 opts.pk.column), exclusions=joins) |
1328 opts.pk.column), exclusions=exclusions) |
1122 joins.append(alias) |
1329 joins.append(alias) |
|
1330 exclusions.add(alias) |
|
1331 for (dupe_opts, dupe_col) in dupe_set: |
|
1332 self.update_dupe_avoidance(dupe_opts, dupe_col, alias) |
1123 cached_data = opts._join_cache.get(name) |
1333 cached_data = opts._join_cache.get(name) |
1124 orig_opts = opts |
1334 orig_opts = opts |
1125 |
1335 dupe_col = direct and field.column or field.field.column |
|
1336 dedupe = dupe_col in opts.duplicate_targets |
|
1337 if dupe_set or dedupe: |
|
1338 if dedupe: |
|
1339 dupe_set.add((opts, dupe_col)) |
|
1340 exclusions.update(self.dupe_avoidance.get((id(opts), dupe_col), |
|
1341 ())) |
|
1342 |
|
1343 if process_extras and hasattr(field, 'extra_filters'): |
|
1344 extra_filters.extend(field.extra_filters(names, pos, negate)) |
1126 if direct: |
1345 if direct: |
1127 if m2m: |
1346 if m2m: |
1128 # Many-to-many field defined on the current model. |
1347 # Many-to-many field defined on the current model. |
1129 if cached_data: |
1348 if cached_data: |
1130 (table1, from_col1, to_col1, table2, from_col2, |
1349 (table1, from_col1, to_col1, table2, from_col2, |
1141 orig_opts._join_cache[name] = (table1, from_col1, |
1360 orig_opts._join_cache[name] = (table1, from_col1, |
1142 to_col1, table2, from_col2, to_col2, opts, |
1361 to_col1, table2, from_col2, to_col2, opts, |
1143 target) |
1362 target) |
1144 |
1363 |
1145 int_alias = self.join((alias, table1, from_col1, to_col1), |
1364 int_alias = self.join((alias, table1, from_col1, to_col1), |
1146 dupe_multis, joins, nullable=True, reuse=can_reuse) |
1365 dupe_multis, exclusions, nullable=True, |
1147 alias = self.join((int_alias, table2, from_col2, to_col2), |
1366 reuse=can_reuse) |
1148 dupe_multis, joins, nullable=True, reuse=can_reuse) |
1367 if int_alias == table2 and from_col2 == to_col2: |
1149 joins.extend([int_alias, alias]) |
1368 joins.append(int_alias) |
|
1369 alias = int_alias |
|
1370 else: |
|
1371 alias = self.join( |
|
1372 (int_alias, table2, from_col2, to_col2), |
|
1373 dupe_multis, exclusions, nullable=True, |
|
1374 reuse=can_reuse) |
|
1375 joins.extend([int_alias, alias]) |
1150 elif field.rel: |
1376 elif field.rel: |
1151 # One-to-one or many-to-one field |
1377 # One-to-one or many-to-one field |
1152 if cached_data: |
1378 if cached_data: |
1153 (table, from_col, to_col, opts, target) = cached_data |
1379 (table, from_col, to_col, opts, target) = cached_data |
1154 else: |
1380 else: |
1187 orig_opts._join_cache[name] = (table1, from_col1, |
1413 orig_opts._join_cache[name] = (table1, from_col1, |
1188 to_col1, table2, from_col2, to_col2, opts, |
1414 to_col1, table2, from_col2, to_col2, opts, |
1189 target) |
1415 target) |
1190 |
1416 |
1191 int_alias = self.join((alias, table1, from_col1, to_col1), |
1417 int_alias = self.join((alias, table1, from_col1, to_col1), |
1192 dupe_multis, joins, nullable=True, reuse=can_reuse) |
1418 dupe_multis, exclusions, nullable=True, |
|
1419 reuse=can_reuse) |
1193 alias = self.join((int_alias, table2, from_col2, to_col2), |
1420 alias = self.join((int_alias, table2, from_col2, to_col2), |
1194 dupe_multis, joins, nullable=True, reuse=can_reuse) |
1421 dupe_multis, exclusions, nullable=True, |
|
1422 reuse=can_reuse) |
1195 joins.extend([int_alias, alias]) |
1423 joins.extend([int_alias, alias]) |
1196 else: |
1424 else: |
1197 # One-to-many field (ForeignKey defined on the target model) |
1425 # One-to-many field (ForeignKey defined on the target model) |
1198 if cached_data: |
1426 if cached_data: |
1199 (table, from_col, to_col, opts, target) = cached_data |
1427 (table, from_col, to_col, opts, target) = cached_data |
1207 target = opts.pk |
1435 target = opts.pk |
1208 orig_opts._join_cache[name] = (table, from_col, to_col, |
1436 orig_opts._join_cache[name] = (table, from_col, to_col, |
1209 opts, target) |
1437 opts, target) |
1210 |
1438 |
1211 alias = self.join((alias, table, from_col, to_col), |
1439 alias = self.join((alias, table, from_col, to_col), |
1212 dupe_multis, joins, nullable=True, reuse=can_reuse) |
1440 dupe_multis, exclusions, nullable=True, |
|
1441 reuse=can_reuse) |
1213 joins.append(alias) |
1442 joins.append(alias) |
|
1443 |
|
1444 for (dupe_opts, dupe_col) in dupe_set: |
|
1445 try: |
|
1446 self.update_dupe_avoidance(dupe_opts, dupe_col, int_alias) |
|
1447 except NameError: |
|
1448 self.update_dupe_avoidance(dupe_opts, dupe_col, alias) |
1214 |
1449 |
1215 if pos != len(names) - 1: |
1450 if pos != len(names) - 1: |
1216 raise FieldError("Join on field %r not permitted." % name) |
1451 raise FieldError("Join on field %r not permitted." % name) |
1217 |
1452 |
1218 return field, target, opts, joins, last |
1453 return field, target, opts, joins, last, extra_filters |
1219 |
1454 |
1220 def split_exclude(self, filter_expr, prefix): |
def update_dupe_avoidance(self, opts, col, alias):
    """
    For a column that is one of multiple pointing to the same table, update
    the internal data structures to note that this alias shouldn't be used
    for those other columns.

    ``opts`` is the model Options owning the duplicate-target map, ``col``
    is the column just joined on, and ``alias`` is the table alias that
    must be avoided by the sibling columns.
    """
    # Key on id(opts) so unhashable/equal Options objects from different
    # models never collide in the avoidance map.
    ident = id(opts)
    for name in opts.duplicate_targets[col]:
        # Record the alias against every sibling column targeting the same
        # table, creating the exclusion set on first sight.
        self.dupe_avoidance.setdefault((ident, name), set()).add(alias)
|
def split_exclude(self, filter_expr, prefix, can_reuse):
    """
    When doing an exclude against any kind of N-to-many relation, we need
    to use a subquery. This method constructs the nested query, given the
    original exclude filter (filter_expr) and the portion up to the first
    N-to-many relation field.

    ``can_reuse`` is the set of joins that may be reused, threaded through
    to both the inner and outer filter so alias reuse stays consistent.
    """
    # Build an inner query matching the condition being excluded.
    query = Query(self.model, self.connection)
    query.add_filter(filter_expr, can_reuse=can_reuse)
    # Bump the inner query's alias prefixes so its table aliases cannot
    # collide with aliases already used by the outer query.
    query.bump_prefix()
    query.set_start(prefix)
    query.clear_ordering(True)
    # Exclude outer rows whose related objects appear in the inner query.
    self.add_filter(('%s__in' % prefix, query), negate=True, trim=True,
            can_reuse=can_reuse)
1233 def set_limits(self, low=None, high=None): |
1483 def set_limits(self, low=None, high=None): |
1234 """ |
1484 """ |
1235 Adjusts the limits on the rows retrieved. We use low/high to set these, |
1485 Adjusts the limits on the rows retrieved. We use low/high to set these, |
1236 as it makes it more Pythonic to read and write. When the SQL query is |
1486 as it makes it more Pythonic to read and write. When the SQL query is |