[Zope-Checkins] CVS: Zope3/lib/python/Zope/Server - ServerChannelBase.py:1.1.2.4

Shane Hathaway shane@cvs.zope.org
Thu, 4 Apr 2002 18:22:01 -0500


Update of /cvs-repository/Zope3/lib/python/Zope/Server
In directory cvs.zope.org:/tmp/cvs-serv15173

Modified Files:
      Tag: Zope3-Server-Branch
	ServerChannelBase.py 
Log Message:
There was a potential DoS vulnerability that appeared once we decided not
all requests are serviced by a task: non-task requests that are queued up
while a task is running will later be serviced in recursive calls.  An
attacker could queue a lot of non-task requests and overflow the stack.
(process_request() calls end_task(), which calls process_request().)

I had to change the API a bit.  process_request() now either returns a
task or processes the request directly, returning None.  This lets the
server base have more control over threads.


=== Zope3/lib/python/Zope/Server/ServerChannelBase.py 1.1.2.3 => 1.1.2.4 ===
     proto_request = None      # A request parser instance
     ready_requests = None     # A list
+    # ready_requests must always be empty when not running tasks.
     last_activity = 0         # Time of last activity
     running_tasks = 0         # boolean: true when any task is being executed
 
@@ -104,14 +105,17 @@
         """
         now = time.time()
         cutoff = now - self.adj.channel_timeout
+        class_ = self.__class__  # Kill only channels of our own class.
         for channel in self.active_channels.values():
             if (channel is not self and not channel.running_tasks and
-                channel.last_activity < cutoff):
+                channel.last_activity < cutoff and
+                isinstance(channel, class_)):
                 channel.close()
 
 
     def received(self, data):
-        """Receives input asynchronously and launches or queues requests.
+        """Receive input asynchronously and send requests to
+        receivedCompleteRequest().
         """
         preq = self.proto_request
         while data:
@@ -121,7 +125,7 @@
             if preq.completed:
                 # The request is ready to use.
                 if not preq.empty:
-                    self.queue_request(preq)
+                    self.receivedCompleteRequest(preq)
                 preq = None
                 self.proto_request = None
             else:
@@ -131,14 +135,16 @@
             data = data[n:]
 
 
-    def queue_request(self, req):
-        """Queues a request to be processed in sequence.
+    def receivedCompleteRequest(self, req):
+        """If there are tasks running or requests on hold, queue
+        the request, otherwise execute it.
         """
         do_now = 0
         running_lock.acquire()
         try:
             if self.running_tasks:
-                # Wait for the current tasks to finish.
+                # A task thread is working.  It will read from the queue
+                # when it is finished.
                 rr = self.ready_requests
                 if rr is None:
                     rr = []
@@ -146,12 +152,15 @@
                 rr.append(req)
             else:
                 # Do it now.
-                self.running_tasks = 1
                 do_now = 1
         finally:
             running_lock.release()
         if do_now:
-            self.process_request(req)
+            task = self.process_request(req)
+            if task is not None:
+                self.running_tasks = 1
+                self.set_sync()
+                self.server.addTask(task)
 
 
     def handle_error(self):
@@ -172,6 +181,7 @@
             # Ignore socket errors.
             self.close()
 
+
     #
     # SYNCHRONOUS METHODS
     #
@@ -180,37 +190,48 @@
         """Called at the end of a task and may launch another task.
         """
         if close:
+            # Note that self.running_tasks is left on, which has the
+            # side effect of preventing further requests from being
+            # serviced even if more appear.  A good thing.
             self.close_when_done()
             return
-        new_req = None
-        running_lock.acquire()
-        try:
-            rr = self.ready_requests
-            if rr:
-                new_req = rr.pop(0)
+        # Process requests held in the queue, if any.
+        while 1:
+            req = None
+            running_lock.acquire()
+            try:
+                rr = self.ready_requests
+                if rr:
+                    req = rr.pop(0)
+                else:
+                    # No requests to process.
+                    self.running_tasks = 0
+            finally:
+                running_lock.release()
+
+            if req:
+                task = self.process_request(req)
+                if task is not None:
+                    # Add the new task.  It will service the queue.
+                    self.server.addTask(task)
+                    break
+                # else check the queue again.
             else:
-                # No requests to service.
-                self.running_tasks = 0
-        finally:
-            running_lock.release()
+                # Idle -- Wait for another request on this connection.
+                self.set_async()
+                break
 
-        if new_req:
-            # Respond to the next request.
-            self.process_request(new_req)
-        else:
-            # Wait for another request on this connection.
-            self.set_async()
 
     #
     # BOTH MODES
     #
 
     def process_request(self, req):
-        """Creates a new task and queues it for execution.
+        """Returns a task to execute or None if the request is quick and
+        can be processed in the main thread.
 
-        The task may get executed in another thread.
+        Override to handle some requests in the main thread.
         """
-        self.set_sync()
-        task = self.task_class(self, req)
-        self.server.addTask(task)
+        return self.task_class(self, req)
+