[Zope-CVS] CVS: Products/Ape/lib/apelib/zodb3 - connection.py:1.8 db.py:1.7 scanner.py:1.4 serializers.py:1.5 storage.py:1.10

Shane Hathaway shane at zope.com
Tue Feb 17 00:25:44 EST 2004


Update of /cvs-repository/Products/Ape/lib/apelib/zodb3
In directory cvs.zope.org:/tmp/cvs-serv17338/lib/apelib/zodb3

Modified Files:
	connection.py db.py scanner.py serializers.py storage.py 
Log Message:
Another big pile of changes resulted from a little time. :-)

From CHANGES.txt:

  - Removed the concepts of "classified state" and "hints", now
    relying on the existing concepts of classification and state.  This
    primarily involved changes to classifiers and the apelib.core.io
    module.

  - Implemented the folder item classification optimization.  Now,
    when Ape loads a folder, it passes along the classification of each
    subitem as part of the containing folder's state.  This means fewer
    round-trips.

Also fixed a couple of shallow bugs that prevented Ape from starting in 
Zope.



=== Products/Ape/lib/apelib/zodb3/connection.py 1.7 => 1.8 ===
--- Products/Ape/lib/apelib/zodb3/connection.py:1.7	Mon Feb  2 10:07:22 2004
+++ Products/Ape/lib/apelib/zodb3/connection.py	Tue Feb 17 00:25:13 2004
@@ -33,7 +33,7 @@
 from zLOG import LOG, ERROR
 
 from consts import HASH0, DEBUG
-from apelib.core.io import ObjectSystemIO, ClassifiedState
+from apelib.core.io import ObjectSystemIO
 from apelib.core.interfaces import IObjectDatabase, LoadError
 
 
@@ -128,12 +128,12 @@
         # unpickler.persistent_load=self._persistent_load
 
         try:
-            classified_state = unpickler.load()
+            classification = unpickler.load()
         except:
             raise "Could not load oid %s, pickled data in traceback info may\
             contain clues" % (oid)
         osio = self.getObjectSystemIO()
-        obj = osio.newObject(classified_state)
+        obj = osio.newObject(classification)
         assert obj is not None
 
         obj._p_oid=oid
@@ -147,7 +147,7 @@
         return obj
 
 
-    def _persistent_load(self, oid, hints=None):
+    def _persistent_load(self, oid, classification=None):
 
         __traceback_info__=oid
 
@@ -155,18 +155,15 @@
         if obj is not None:
             return obj
 
-        if hints:
-            mapper_name = hints.get('mapper_name')
-            if mapper_name is not None:
-                classified_state = ClassifiedState(None, None, mapper_name)
-                osio = self.getObjectSystemIO()
-                obj = osio.newObject(classified_state)
-                if obj is not None:
-                    obj._p_oid=oid
-                    obj._p_jar=self
-                    obj._p_changed=None
-                    self._cache[oid] = obj
-                    return obj
+        if classification:
+            osio = self.getObjectSystemIO()
+            obj = osio.newObject(classification)
+            if obj is not None:
+                obj._p_oid=oid
+                obj._p_jar=self
+                obj._p_changed=None
+                self._cache[oid] = obj
+                return obj
 
         # We don't have enough info for fast loading.  Load the whole object.
         return self[oid]
@@ -260,7 +257,7 @@
             # SDH: hook in the serializer.
             # state=obj.__getstate__()
             osio = self.getObjectSystemIO()
-            event, classified_state = osio.serialize(oid, obj)
+            event, classification, state = osio.serialize(oid, obj)
             ext_refs = event.external
             if ext_refs:
                 for (ext_oid, ext_ref) in ext_refs:
@@ -286,7 +283,8 @@
 
             seek(0)
             clear_memo()
-            dump(classified_state)
+            dump(classification)
+            dump(state)
             p=file(1)
             s=dbstore(oid,serial,p,version,transaction)
             self._store_count = self._store_count + 1
@@ -351,7 +349,8 @@
             unpickler=Unpickler(file)
             # SDH: external references are reassembled elsewhere.
             # unpickler.persistent_load=self._persistent_load
-            classified_state = unpickler.load()
+            classification = unpickler.load()
+            state = unpickler.load()
 
             # SDH: Let the object mapper do the state setting.
             # if hasattr(object, '__setstate__'):
@@ -360,7 +359,7 @@
             #     d=object.__dict__
             #     for k,v in state.items(): d[k]=v
             osio = self.getObjectSystemIO()
-            event = osio.deserialize(oid, obj, classified_state)
+            event = osio.deserialize(oid, obj, classification, state)
 
             if event.upos:
                 self.handleUnmanaged(obj, event.upos)


=== Products/Ape/lib/apelib/zodb3/db.py 1.6 => 1.7 ===
--- Products/Ape/lib/apelib/zodb3/db.py:1.6	Mon Feb  2 10:07:22 2004
+++ Products/Ape/lib/apelib/zodb3/db.py	Tue Feb 17 00:25:13 2004
@@ -73,7 +73,7 @@
                     raise ConfigurationError('Extra keyword args: %s' % kw)
                 if isinstance(storage, ApeStorage):
                     # Use the configuration from the storage
-                    conf_resource = storage.getConfResource()
+                    conf_resource = storage.conf_resource
                 else:
                     raise ConfigurationError(
                         'No configuration or factory specified')


=== Products/Ape/lib/apelib/zodb3/scanner.py 1.3 => 1.4 ===
--- Products/Ape/lib/apelib/zodb3/scanner.py:1.3	Mon Feb  2 10:07:22 2004
+++ Products/Ape/lib/apelib/zodb3/scanner.py	Tue Feb 17 00:25:13 2004
@@ -104,7 +104,7 @@
         """
         LOG('Ape', DEBUG, 'Scanning %d objects.' % len(self.oids))
         scanner = self.storage.scanner
-        inv = scanner.scan(prune)
+        inv = scanner.scan()
         scanner.pruneFuture()
         LOG('Ape', DEBUG,
             'Finished scanning. %d objects changed.' % len(inv))


=== Products/Ape/lib/apelib/zodb3/serializers.py 1.4 => 1.5 ===
--- Products/Ape/lib/apelib/zodb3/serializers.py:1.4	Mon Feb  2 10:07:22 2004
+++ Products/Ape/lib/apelib/zodb3/serializers.py	Tue Feb 17 00:25:13 2004
@@ -43,6 +43,7 @@
     schema = RowSequenceSchema()
     schema.addField('key', 'string', 1)
     schema.addField('oid', 'string')
+    schema.addField('classification', 'classification')
 
     def canSerialize(self, obj):
         return isinstance(obj, PersistentMapping)
@@ -55,15 +56,16 @@
             if oid is None:
                 oid = event.conf.oid_gen.new_oid(event, key, True)
             event.referenced(key, value, False, oid)
-            res.append((key, oid))
+            # No need to pass classification.
+            res.append((key, oid, None))
         event.ignore(('data', '_container'))
         return res
 
     def deserialize(self, event, state):
         assert self.canSerialize(event.obj)
         data = {}
-        for (key, oid) in state:
-            value = event.resolve(key, oid)
+        for (key, oid, classification) in state:
+            value = event.resolve(key, oid, classification)
             data[key] = value
         event.obj.__init__(data)
 


=== Products/Ape/lib/apelib/zodb3/storage.py 1.9 => 1.10 ===
--- Products/Ape/lib/apelib/zodb3/storage.py:1.9	Mon Feb  2 10:07:22 2004
+++ Products/Ape/lib/apelib/zodb3/storage.py	Tue Feb 17 00:25:13 2004
@@ -36,7 +36,7 @@
         connections is a mapping that maps names to ITPCConnections.
         """
         assert IResourceAccess.isImplementedBy(conf_resource)
-        self._conf_resource = conf_resource
+        self.conf_resource = conf_resource
         gwio = GatewayIO(conf_resource.access(self), connections)
         self._gwio = gwio
         self._conn_list = gwio.getConnectionList()
@@ -87,11 +87,12 @@
             raise POSException.Unsupported, "Versions aren't supported"
         self._lock_acquire()
         try:
-            self._conf_resource.access(self)  # Update configuration
-            event, classified_state, hash_value = self._gwio.load(oid)
+            self.conf_resource.access(self)  # Update configuration
+            event, classification, state, hash_value = self._gwio.load(oid)
             file = StringIO()
             p = Pickler(file)
-            p.dump(classified_state)
+            p.dump(classification)
+            p.dump(state)
             data = file.getvalue()
             h = self.hash64(hash_value)
             if DEBUG:
@@ -112,7 +113,7 @@
 
         self._lock_acquire()
         try:
-            self._conf_resource.access(self)  # Update configuration
+            self.conf_resource.access(self)  # Update configuration
 
             # First detect conflicts.
             # The "h64" argument, if its value is not 0,
@@ -126,19 +127,20 @@
                 # Overwriting an old object.  Use the hash to verify
                 # that the new data was derived from the old data.
                 is_new = False
-                event, old_cs, old_hash = self._gwio.load(oid)
+                event, old_c, old_state, old_hash = self._gwio.load(oid)
                 old_h64 = self.hash64(old_hash)
                 if h64 != old_h64:
                     raise POSException.ConflictError(
-                        "Storing %s based on old data. %s != %s" % (
-                        repr(oid),
-                        repr(h64), repr(old_h64)))
+                        "Storing %s based on old data.  %s != %s." % (
+                        repr(oid), repr(h64), repr(old_h64)))
 
             # Now unpickle and store the data.
             file = StringIO(data)
             u = Unpickler(file)
-            classified_state = u.load()
-            event, new_hash = self._gwio.store(oid, classified_state, is_new)
+            classification = u.load()
+            state = u.load()
+            event, new_hash = self._gwio.store(
+                oid, classification, state, is_new)
             new_h64 = self.hash64(new_hash)
             if self.scanner is not None:
                 sources = event.mapper.gateway.getPollSources(event)
@@ -198,5 +200,5 @@
     def close(self):
         for c in self._conn_list:
             c.close()
-        self._conf_resource.release(self)
+        self.conf_resource.release(self)
 




More information about the Zope-CVS mailing list