[Zodb-checkins] CVS: ZODB4/src/zodb/storage/tests - packable.py:1.4.4.1

Barry Warsaw <barry@wooz.org>
Mon, 10 Feb 2003 18:20:31 -0500


Update of /cvs-repository/ZODB4/src/zodb/storage/tests
In directory cvs.zope.org:/tmp/cvs-serv7705/src/zodb/storage/tests

Modified Files:
      Tag: opaque-pickles-branch
	packable.py 
Log Message:
Greatly simplified by ditching all the kludgy code for storing the right
kind of pickles (those with extractable oids) in favor of the
zodb.serialize helpers.

- Don't import cStringIO, don't import pickle

- Don't define the Root and Object classes

- Get rid of dumps()

- Add a Jar class which has the proper minimal API.

- Get rid of PackableStorageBase

- PackableStorage: rewrite all the tests to use _dostore() instead of
  _dostoreNP(), and to not pickle the objects by hand.  Also use the
  zodb_unpickle() helper where appropriate and access the object's oid
  via obj._p_oid instead of obj.getoid().  A sketch of the new idiom
  follows below.
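
For reference, here is a rough sketch (not part of the checkin) of the
idiom the rewritten tests follow.  It assumes the storage test harness
that provides self._storage and self._dostore(), and it assumes the
persistence machinery calls Jar.register() when an object with a jar is
first modified; the example() name is made up for illustration:

    from persistence import Persistent
    from zodb.storage.tests.base import zodb_unpickle

    class C(Persistent):
        pass

    class Jar:
        # Minimal jar API: hand out new object ids and stamp them onto
        # objects when the persistence machinery registers them.
        def __init__(self, storage):
            self._storage = storage

        def newObjectId(self):
            return self._storage.newObjectId()

        def register(self, obj):
            obj._p_oid = self.newObjectId()

    def example(self):
        # `self` stands in for a storage test case (hypothetical).
        jar = Jar(self._storage)
        obj = C()
        obj._p_jar = jar         # first modification registers obj...
        obj.value = 1            # ...which assigns obj._p_oid
        revid = self._dostore(obj._p_oid, data=obj)  # no manual pickling
        data = self._storage.loadSerial(obj._p_oid, revid)
        assert zodb_unpickle(data).value == 1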


=== ZODB4/src/zodb/storage/tests/packable.py 1.4 => 1.4.4.1 ===
--- ZODB4/src/zodb/storage/tests/packable.py:1.4	Wed Feb  5 18:28:27 2003
+++ ZODB4/src/zodb/storage/tests/packable.py	Mon Feb 10 18:20:30 2003
@@ -14,127 +14,55 @@
 """Run some tests relevant for storages that support pack()."""
 
 import time
-from cStringIO import StringIO
-
-try:
-    import cPickle
-    pickle = cPickle
-    #import cPickle as pickle
-except ImportError:
-    import pickle
 
 from persistence import Persistent
 from transaction import get_transaction
 
 from zodb.db import DB
-from zodb.storage.base import ZERO
+from zodb.interfaces import ZERO
+from zodb.serialize import getDBRoot, ConnectionObjectReader
+from zodb.ztransaction import Transaction
 from zodb.storage.tests.minpo import MinPO
+from zodb.storage.tests.base import zodb_pickle, zodb_unpickle
 
 
-
-# This class is for the root object.  It must not contain a getoid() method
-# (really, attribute).  The persistent pickling machinery -- in the dumps()
-# function below -- will pickle Root objects as normal, but any attributes
-# which reference persistent Object instances will get pickled as persistent
-# ids, not as the object's state.  This makes the referencesf stuff work,
-# because it pickle sniffs for persistent ids (so we have to get those
-# persistent ids into the root object's pickle).
-class Root:
-    pass
-
-
-# This is the persistent Object class.  Because it has a getoid() method, the
-# persistent pickling machinery -- in the dumps() function below -- will
-# pickle the oid string instead of the object's actual state.  Yee haw, this
-# stuff is deep. ;)
-class Object:
-    def __init__(self, oid):
-        self._oid = oid
-
-    def getoid(self):
-        return self._oid
-
 class C(Persistent):
     pass
 
 
-# Here's where all the magic occurs.  Sadly, the pickle module is a bit
-# underdocumented, but here's what happens: by setting the persistent_id
-# attribute to getpersid() on the pickler, that function gets called for every
-# object being pickled.  By returning None when the object has no getoid
-# attribute, it signals pickle to serialize the object as normal.  That's how
-# the Root instance gets pickled correctly.  But, if the object has a getoid
-# attribute, then by returning that method's value, we tell pickle to
-# serialize the persistent id of the object instead of the object's state.
-# That sets the pickle up for proper sniffing by the referencesf machinery.
-# Fun, huh?
-def dumps(obj):
-    def getpersid(obj):
-        if hasattr(obj, 'getoid'):
-            return obj.getoid()
-        return None
-    s = StringIO()
-    p = pickle.Pickler(s)
-    p.persistent_id = getpersid
-    p.dump(obj)
-    return s.getvalue()
+class Jar:
+    def __init__(self, storage):
+        self._storage = storage
+        
+    def newObjectId(self):
+        return self._storage.newObjectId()
 
-
-
-class PackableStorageBase:
-    # We keep a cache of object ids to instances so that the unpickler can
-    # easily return any persistent object.
-    _cache = {}
-
-    def _newobj(self):
-        # This is a convenience method to create a new persistent Object
-        # instance.  It asks the storage for a new object id, creates the
-        # instance with the given oid, populates the cache and returns the
-        # object.
-        oid = self._storage.newObjectId()
-        obj = Object(oid)
-        self._cache[obj.getoid()] = obj
-        return obj
-
-    def _makeloader(self):
-        # This is the other side of the persistent pickling magic.  We need a
-        # custom unpickler to mirror our custom pickler above.  By setting the
-        # persistent_load function of the unpickler to self._cache.get(),
-        # whenever a persistent id is unpickled, it will actually return the
-        # Object instance out of the cache.  As far as returning a function
-        # with an argument bound to an instance attribute method, we do it
-        # this way because it makes the code in the tests more succinct.
-        #
-        # BUT!  Be careful in your use of loads() vs. pickle.loads().  loads()
-        # should only be used on the Root object's pickle since it's the only
-        # special one.  All the Object instances should use pickle.loads().
-        def loads(str, persfunc=self._cache.get):
-            fp = StringIO(str)
-            u = pickle.Unpickler(fp)
-            u.persistent_load = persfunc
-            return u.load()
-        return loads
+    def register(self, obj):
+        obj._p_oid = self.newObjectId()
 
 
 
-class PackableStorage(PackableStorageBase):
+class PackableStorage:
     def _initroot(self):
+        self._jar = jar = Jar(self._storage)
+        self._reader = ConnectionObjectReader(jar, {})
         try:
             self._storage.load(ZERO, '')
         except KeyError:
-            from persistence.dict import PersistentDict
-            from zodb.ztransaction import Transaction
-            file = StringIO()
-            p = cPickle.Pickler(file, 1)
-            p.dump((PersistentDict, None))
-            p.dump(PersistentDict().__getstate__())
+            root = C()
+            root._p_jar = jar
             t = Transaction()
             t.note("initial database creation")
             self._storage.tpcBegin(t)
-            self._storage.store(ZERO, None, file.getvalue(), '', t)
+            self._storage.store(ZERO, None, zodb_pickle(root), '', t)
             self._storage.tpcVote(t)
             self._storage.tpcFinish(t)
 
+    def _newobj(self):
+        obj = C()
+        obj._p_jar = self._jar
+        return obj
+
     def checkPackEmptyStorage(self):
         self._storage.pack(time.time())
 
@@ -152,30 +80,29 @@
         raises = self.assertRaises
         # Create a `persistent' object
         obj = self._newobj()
-        oid = obj.getoid()
         obj.value = 1
+        oid = obj._p_oid
         # Commit three different revisions
-        revid1 = self._dostoreNP(oid, data=pickle.dumps(obj))
+        revid1 = self._dostore(oid, data=obj)
         obj.value = 2
-        revid2 = self._dostoreNP(oid, revid=revid1, data=pickle.dumps(obj))
+        revid2 = self._dostore(oid, revid=revid1, data=obj)
         obj.value = 3
-        revid3 = self._dostoreNP(oid, revid=revid2, data=pickle.dumps(obj))
+        revid3 = self._dostore(oid, revid=revid2, data=obj)
         # Now make sure all three revisions can be extracted
         data = self._storage.loadSerial(oid, revid1)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 1)
         data = self._storage.loadSerial(oid, revid2)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 2)
         data = self._storage.loadSerial(oid, revid3)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 3)
         # Now pack all transactions; need to sleep a second to make
         # sure that the pack time is greater than the last commit time.
-        time.sleep(1)
+        now = int(time.time())
+        while now >= int(time.time()):
+            time.sleep(0.1)
         self._storage.pack(time.time())
         # All revisions of the object should be gone, since there is no
         # reference from the root object to this object.
@@ -186,147 +113,143 @@
     def checkPackJustOldRevisions(self):
         eq = self.assertEqual
         raises = self.assertRaises
-        loads = self._makeloader()
-        # Create a root object.  This can't be an instance of Object,
-        # otherwise the pickling machinery will serialize it as a persistent
-        # id and not as an object that contains references (persistent ids) to
-        # other objects.
-        root = Root()
-        # Create a persistent object, with some initial state
+        # Create a root object
+        self._initroot()
+        data, revid0 = self._storage.load(ZERO, '')
+        root = self._reader.getObject(data)
         obj = self._newobj()
-        oid = obj.getoid()
         # Link the root object to the persistent object, in order to keep the
-        # persistent object alive.  Store the root object.
+        # persistent object alive.  XXX Order here is important: an attribute
+        # on the root object must be set first, so that it gets oid 0, /then/
+        # the attribute on the obj can be set.
         root.obj = obj
+        obj.value = 0
         root.value = 0
-        revid0 = self._dostoreNP(ZERO, data=dumps(root))
+        root._p_jar = self._jar
+        revid0 = self._dostore(ZERO, revid=revid0, data=root)
         # Make sure the root can be retrieved
         data, revid = self._storage.load(ZERO, '')
         eq(revid, revid0)
-        eq(loads(data).value, 0)
+        eq(zodb_unpickle(data).value, 0)
         # Commit three different revisions of the other object
         obj.value = 1
-        revid1 = self._dostoreNP(oid, data=pickle.dumps(obj))
+        oid = obj._p_oid
+        revid1 = self._dostore(oid, data=obj)
         obj.value = 2
-        revid2 = self._dostoreNP(oid, revid=revid1, data=pickle.dumps(obj))
+        revid2 = self._dostore(oid, revid=revid1, data=obj)
         obj.value = 3
-        revid3 = self._dostoreNP(oid, revid=revid2, data=pickle.dumps(obj))
+        revid3 = self._dostore(oid, revid=revid2, data=obj)
         # Now make sure all three revisions can be extracted
         data = self._storage.loadSerial(oid, revid1)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 1)
         data = self._storage.loadSerial(oid, revid2)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 2)
         data = self._storage.loadSerial(oid, revid3)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 3)
         # Now pack just revisions 1 and 2.  The object's current revision
         # should stay alive because it's pointed to by the root.
-        time.sleep(1)
+        now = int(time.time())
+        while now >= int(time.time()):
+            time.sleep(0.1)
         self._storage.pack(time.time())
-        # Make sure the revisions are gone, but that object zero and revision
-        # 3 are still there and correct
+        # Make sure the revisions are gone, but that the root object and
+        # revision 3 are still there and correct
         data, revid = self._storage.load(ZERO, '')
         eq(revid, revid0)
-        eq(loads(data).value, 0)
+        eq(zodb_unpickle(data).value, 0)
         raises(KeyError, self._storage.loadSerial, oid, revid1)
         raises(KeyError, self._storage.loadSerial, oid, revid2)
         data = self._storage.loadSerial(oid, revid3)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 3)
         data, revid = self._storage.load(oid, '')
         eq(revid, revid3)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 3)
 
     def checkPackOnlyOneObject(self):
         eq = self.assertEqual
         raises = self.assertRaises
-        loads = self._makeloader()
-        # Create a root object.  This can't be an instance of Object,
-        # otherwise the pickling machinery will serialize it as a persistent
-        # id and not as an object that contains references (persistent ids) to
-        # other objects.
-        root = Root()
+        # Create a root object.
+        self._initroot()
+        data, revid0 = self._storage.load(ZERO, '')
+        root = self._reader.getObject(data)
+        root.value = -1
+        root._p_jar = self._jar
         # Create a persistent object, with some initial state
         obj1 = self._newobj()
-        oid1 = obj1.getoid()
+        obj1.value = -1
+        oid1 = obj1._p_oid
         # Create another persistent object, with some initial state.  Make
         # sure its oid is greater than the first object's oid.
         obj2 = self._newobj()
-        oid2 = obj2.getoid()
+        obj2.value = -1
+        oid2 = obj2._p_oid
         self.failUnless(oid2 > oid1)
         # Link the root object to the persistent objects, in order to keep
         # them alive.  Store the root object.
         root.obj1 = obj1
         root.obj2 = obj2
         root.value = 0
-        revid0 = self._dostoreNP(ZERO, data=dumps(root))
+        revid0 = self._dostore(ZERO, data=root, revid=revid0)
         # Make sure the root can be retrieved
         data, revid = self._storage.load(ZERO, '')
         eq(revid, revid0)
-        eq(loads(data).value, 0)
+        eq(zodb_unpickle(data).value, 0)
         # Commit three different revisions of the first object
         obj1.value = 1
-        revid1 = self._dostoreNP(oid1, data=pickle.dumps(obj1))
+        revid1 = self._dostore(oid1, data=obj1)
         obj1.value = 2
-        revid2 = self._dostoreNP(oid1, revid=revid1, data=pickle.dumps(obj1))
+        revid2 = self._dostore(oid1, revid=revid1, data=obj1)
         obj1.value = 3
-        revid3 = self._dostoreNP(oid1, revid=revid2, data=pickle.dumps(obj1))
+        revid3 = self._dostore(oid1, revid=revid2, data=obj1)
         # Now make sure all three revisions can be extracted
         data = self._storage.loadSerial(oid1, revid1)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid1)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 1)
         data = self._storage.loadSerial(oid1, revid2)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid1)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 2)
         data = self._storage.loadSerial(oid1, revid3)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid1)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 3)
         # Now commit a revision of the second object
         obj2.value = 11
-        revid4 = self._dostoreNP(oid2, data=pickle.dumps(obj2))
+        revid4 = self._dostore(oid2, data=obj2)
         # And make sure the revision can be extracted
         data = self._storage.loadSerial(oid2, revid4)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid2)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 11)
         # Now pack just revisions 1 and 2 of object1.  Object1's current
         # revision should stay alive because it's pointed to by the root, as
         # should Object2's current revision.
-        time.sleep(1)
+        now = int(time.time())
+        while now >= int(time.time()):
+            time.sleep(0.1)
         self._storage.pack(time.time())
         # Make sure the revisions are gone, but that object zero, object2, and
         # revision 3 of object1 are still there and correct.
         data, revid = self._storage.load(ZERO, '')
         eq(revid, revid0)
-        eq(loads(data).value, 0)
+        eq(zodb_unpickle(data).value, 0)
         raises(KeyError, self._storage.loadSerial, oid1, revid1)
         raises(KeyError, self._storage.loadSerial, oid1, revid2)
         data = self._storage.loadSerial(oid1, revid3)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid1)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 3)
         data, revid = self._storage.load(oid1, '')
         eq(revid, revid3)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid1)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 3)
         data, revid = self._storage.load(oid2, '')
         eq(revid, revid4)
-        eq(loads(data).value, 11)
+        eq(zodb_unpickle(data).value, 11)
         data = self._storage.loadSerial(oid2, revid4)
-        pobj = pickle.loads(data)
-        eq(pobj.getoid(), oid2)
+        pobj = zodb_unpickle(data)
         eq(pobj.value, 11)
 
     def checkPackUnlinkedFromRoot(self):