Commit 056796b7 authored by Russell Keith-Magee's avatar Russell Keith-Magee
Browse files

[1.0.X] Fixed #9479 -- Corrected an edge case in bulk queryset deletion that...

[1.0.X] Fixed #9479 -- Corrected an edge case in bulk queryset deletion that could cause an infinite loop when using MySQL InnoDB.

Merge of 10913-10914 from trunk.


git-svn-id: http://code.djangoproject.com/svn/django/branches/releases/1.0.X@10915 bcc190cf-cafb-0310-a4f2-bffc1f526a37
parent 53b04361
Loading
Loading
Loading
Loading
+16 −2
Original line number Diff line number Diff line
@@ -37,11 +37,21 @@ class CollectedObjects(object):

    This is used for the database object deletion routines so that we can
    calculate the 'leaf' objects which should be deleted first.

    previously_seen is an optional argument. It must be a CollectedObjects
    instance itself; any previously_seen collected object will be blocked from
    being added to this instance.
    """

    def __init__(self):
    def __init__(self, previously_seen=None):
        self.data = {}
        self.children = {}
        if previously_seen:
            self.blocked = previously_seen.blocked
            for cls, seen in previously_seen.data.items():
                self.blocked.setdefault(cls, SortedDict()).update(seen)
        else:
            self.blocked = {}

    def add(self, model, pk, obj, parent_model, nullable=False):
        """
@@ -58,6 +68,9 @@ class CollectedObjects(object):
        Returns True if the item already existed in the structure and
        False otherwise.
        """
        if pk in self.blocked.get(model, {}):
            return True

        d = self.data.setdefault(model, SortedDict())
        retval = pk in d
        d[pk] = obj
@@ -390,10 +403,11 @@ class QuerySet(object):

        # Delete objects in chunks to prevent the list of related objects from
        # becoming too long.
        seen_objs = None
        while 1:
            # Collect all the objects to be deleted in this chunk, and all the
            # objects that are related to the objects that are to be deleted.
            seen_objs = CollectedObjects()
            seen_objs = CollectedObjects(seen_objs)
            for object in del_query[:CHUNK_SIZE]:
                object._collect_sub_objects(seen_objs)

+1 −0
Original line number Diff line number Diff line
+53 −0
Original line number Diff line number Diff line
from django.conf import settings
from django.db import models, backend, connection, transaction
from django.db.models import sql, query
from django.test import TestCase

class Book(models.Model):
    # Minimal model for the #9479 regression test below; the delete in
    # test_concurrent_delete filters on this field (pagecount__lt=250).
    pagecount = models.IntegerField()

# Can't run this test under SQLite, because you can't
# get two connections to an in-memory database.
if settings.DATABASE_ENGINE != 'sqlite3':
    class DeleteLockingTest(TestCase):
        """Regression tests for #9479.

        A bulk queryset delete that overlaps rows already deleted by a
        concurrent transaction must not loop forever (observed on MySQL
        InnoDB). Requires two independent database connections, so it is
        skipped for in-memory SQLite (see the guard above).
        """

        def setUp(self):
            # Create a second, independent connection to the database so the
            # test can simulate a concurrent transaction.
            self.conn2 = backend.DatabaseWrapper(**settings.DATABASE_OPTIONS)

            # Put the default DB connection into managed transaction mode so
            # commits happen only when the test asks for them.
            transaction.enter_transaction_management()
            transaction.managed(True)

        def tearDown(self):
            # Leave managed mode and close down the second connection.
            transaction.leave_transaction_management()
            self.conn2.close()

        def test_concurrent_delete(self):
            "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"

            # Create some dummy data with explicit primary keys so the row
            # deleted via the raw SQL below is predictable.
            b1 = Book(id=1, pagecount=100)
            b2 = Book(id=2, pagecount=200)
            b3 = Book(id=3, pagecount=300)
            b1.save()
            b2.save()
            b3.save()

            transaction.commit()

            self.assertEquals(3, Book.objects.count())

            # Delete one of the rows using connection 2, committing so the
            # deletion is visible to (and conflicts with) connection 1.
            cursor2 = self.conn2.cursor()
            cursor2.execute('DELETE from delete_regress_book WHERE id=1')
            self.conn2._commit()

            # Now perform a queryset delete that covers the object
            # deleted in connection 2. This causes an infinite loop
            # under MySQL InnoDB unless we keep track of already
            # deleted objects.
            Book.objects.filter(pagecount__lt=250).delete()
            transaction.commit()
            self.assertEquals(1, Book.objects.count())