Commit 68847135 authored by Anssi Kääriäinen's avatar Anssi Kääriäinen
Browse files

Removed dupe_avoidance from sql/query.py and sql/compiler.py

The dupe avoidance logic was removed as it doesn't seem to do anything,
it is complicated, and it has nearly zero documentation.

The removal of dupe_avoidance allowed for refactoring of both the
implementation and signature of Query.join(). This refactoring cascades
again to some other parts. The most significant of these are the changes
in qs.combine() and compiler.select_related_descent().
parent 9bf0eedb
Loading
Loading
Loading
Loading
+0 −11
Original line number Diff line number Diff line
@@ -1053,11 +1053,6 @@ class ForeignKey(RelatedField, Field):
    def contribute_to_class(self, cls, name):
        super(ForeignKey, self).contribute_to_class(cls, name)
        setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
        if isinstance(self.rel.to, six.string_types):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        cls._meta.duplicate_targets[self.column] = (target, "o2m")

    def contribute_to_related_class(self, cls, related):
        # Internal FK's - i.e., those with a related name ending with '+' -
@@ -1293,12 +1288,6 @@ class ManyToManyField(RelatedField, Field):
                field.rel.through = model
            add_lazy_relation(cls, self, self.rel.through, resolve_through_model)

        if isinstance(self.rel.to, six.string_types):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        cls._meta.duplicate_targets[self.column] = (target, "m2m")

    def contribute_to_related_class(self, cls, related):
        # Internal M2Ms (i.e., those with a related name ending with '+')
        # and swapped models don't get a related descriptor.
+0 −19
Original line number Diff line number Diff line
@@ -58,7 +58,6 @@ class Options(object):
        self.concrete_model = None
        self.swappable = None
        self.parents = SortedDict()
        self.duplicate_targets = {}
        self.auto_created = False

        # To handle various inheritance situations, we need to track where
@@ -147,24 +146,6 @@ class Options(object):
                        auto_created=True)
                model.add_to_class('id', auto)

        # Determine any sets of fields that are pointing to the same targets
        # (e.g. two ForeignKeys to the same remote model). The query
        # construction code needs to know this. At the end of this,
        # self.duplicate_targets will map each duplicate field column to the
        # columns it duplicates.
        collections = {}
        for column, target in six.iteritems(self.duplicate_targets):
            try:
                collections[target].add(column)
            except KeyError:
                collections[target] = set([column])
        self.duplicate_targets = {}
        for elt in six.itervalues(collections):
            if len(elt) == 1:
                continue
            for column in elt:
                self.duplicate_targets[column] = elt.difference(set([column]))

    def add_field(self, field):
        # Insert the given field in the order in which it was created, using
        # the "creation_counter" attribute of the field.
+9 −59
Original line number Diff line number Diff line
@@ -6,7 +6,7 @@ from django.db.backends.util import truncate_name
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import select_related_descend
from django.db.models.sql.constants import (SINGLE, MULTI, ORDER_DIR,
        GET_ITERATOR_CHUNK_SIZE, SelectInfo)
        GET_ITERATOR_CHUNK_SIZE, REUSE_ALL, SelectInfo)
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.sql.query import get_order_dir, Query
@@ -457,7 +457,7 @@ class SQLCompiler(object):
        if not alias:
            alias = self.query.get_initial_alias()
        field, target, opts, joins, _, _ = self.query.setup_joins(pieces,
                opts, alias, False)
                opts, alias, REUSE_ALL)
        # We will later on need to promote those joins that were added to the
        # query afresh above.
        joins_to_promote = [j for j in joins if self.query.alias_refcount[j] < 2]
@@ -574,8 +574,7 @@ class SQLCompiler(object):
        return result, params

    def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
            used=None, requested=None, restricted=None, nullable=None,
            dupe_set=None, avoid_set=None):
            requested=None, restricted=None, nullable=None):
        """
        Fill in the information needed for a select_related query. The current
        depth is measured as the number of connections away from the root model
@@ -590,13 +589,6 @@ class SQLCompiler(object):
            opts = self.query.get_meta()
            root_alias = self.query.get_initial_alias()
            self.query.related_select_cols = []
        if not used:
            used = set()
        if dupe_set is None:
            dupe_set = set()
        if avoid_set is None:
            avoid_set = set()
        orig_dupe_set = dupe_set
        only_load = self.query.get_loaded_field_names()

        # Setup for the case when only particular related fields should be
@@ -616,12 +608,6 @@ class SQLCompiler(object):
            if not select_related_descend(f, restricted, requested,
                                          only_load.get(field_model)):
                continue
            # The "avoid" set is aliases we want to avoid just for this
            # particular branch of the recursion. They aren't permanently
            # forbidden from reuse in the related selection tables (which is
            # what "used" specifies).
            avoid = avoid_set.copy()
            dupe_set = orig_dupe_set.copy()
            table = f.rel.to._meta.db_table
            promote = nullable or f.null
            if model:
@@ -637,31 +623,17 @@ class SQLCompiler(object):
                        int_opts = int_model._meta
                        continue
                    lhs_col = int_opts.parents[int_model].column
                    dedupe = lhs_col in opts.duplicate_targets
                    if dedupe:
                        avoid.update(self.query.dupe_avoidance.get((id(opts), lhs_col),
                                ()))
                        dupe_set.add((opts, lhs_col))
                    int_opts = int_model._meta
                    alias = self.query.join((alias, int_opts.db_table, lhs_col,
                            int_opts.pk.column), exclusions=used,
                            int_opts.pk.column),
                            promote=promote)
                    alias_chain.append(alias)
                    for (dupe_opts, dupe_col) in dupe_set:
                        self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias)
            else:
                alias = root_alias

            dedupe = f.column in opts.duplicate_targets
            if dupe_set or dedupe:
                avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ()))
                if dedupe:
                    dupe_set.add((opts, f.column))

            alias = self.query.join((alias, table, f.column,
                    f.rel.get_related_field().column),
                    exclusions=used.union(avoid), promote=promote)
            used.add(alias)
                    promote=promote)
            columns, aliases = self.get_default_columns(start_alias=alias,
                    opts=f.rel.to._meta, as_pairs=True)
            self.query.related_select_cols.extend(
@@ -671,10 +643,8 @@ class SQLCompiler(object):
            else:
                next = False
            new_nullable = f.null or promote
            for dupe_opts, dupe_col in dupe_set:
                self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias)
            self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1,
                    used, next, restricted, new_nullable, dupe_set, avoid)
                    next, restricted, new_nullable)

        if restricted:
            related_fields = [
@@ -686,14 +656,8 @@ class SQLCompiler(object):
                if not select_related_descend(f, restricted, requested,
                                              only_load.get(model), reverse=True):
                    continue
                # The "avoid" set is aliases we want to avoid just for this
                # particular branch of the recursion. They aren't permanently
                # forbidden from reuse in the related selection tables (which is
                # what "used" specifies).
                avoid = avoid_set.copy()
                dupe_set = orig_dupe_set.copy()
                table = model._meta.db_table

                table = model._meta.db_table
                int_opts = opts
                alias = root_alias
                alias_chain = []
@@ -708,30 +672,16 @@ class SQLCompiler(object):
                            int_opts = int_model._meta
                            continue
                        lhs_col = int_opts.parents[int_model].column
                        dedupe = lhs_col in opts.duplicate_targets
                        if dedupe:
                            avoid.update((self.query.dupe_avoidance.get(id(opts), lhs_col),
                                ()))
                            dupe_set.add((opts, lhs_col))
                        int_opts = int_model._meta
                        alias = self.query.join(
                            (alias, int_opts.db_table, lhs_col, int_opts.pk.column),
                            exclusions=used, promote=True, reuse=used
                            promote=True,
                        )
                        alias_chain.append(alias)
                        for dupe_opts, dupe_col in dupe_set:
                            self.query.update_dupe_avoidance(dupe_opts, dupe_col, alias)
                    dedupe = f.column in opts.duplicate_targets
                    if dupe_set or dedupe:
                        avoid.update(self.query.dupe_avoidance.get((id(opts), f.column), ()))
                        if dedupe:
                            dupe_set.add((opts, f.column))
                alias = self.query.join(
                    (alias, table, f.rel.get_related_field().column, f.column),
                    exclusions=used.union(avoid),
                    promote=True
                )
                used.add(alias)
                columns, aliases = self.get_default_columns(start_alias=alias,
                    opts=model._meta, as_pairs=True, local_only=True)
                self.query.related_select_cols.extend(
@@ -743,7 +693,7 @@ class SQLCompiler(object):
                new_nullable = True

                self.fill_related_selections(model._meta, table, cur_depth+1,
                    used, next, restricted, new_nullable)
                    next, restricted, new_nullable)

    def deferred_to_columns(self):
        """
+3 −0
Original line number Diff line number Diff line
@@ -37,3 +37,6 @@ ORDER_DIR = {
    'ASC': ('ASC', 'DESC'),
    'DESC': ('DESC', 'ASC'),
}

# A marker for join-reusability.
REUSE_ALL = object()
+74 −122

File changed.

Preview size limit exceeded, changes collapsed.

Loading