r230 - branches/rewrite/src

Otavio Salvador partial-mirror-devel@lists.alioth.debian.org
Tue, 21 Sep 2004 16:21:39 -0600


Author: otavio
Date: Tue Sep 21 16:21:38 2004
New Revision: 230

Modified:
   branches/rewrite/src/Download.py
Log:
Closer to working code.

Modified: branches/rewrite/src/Download.py
==============================================================================
--- branches/rewrite/src/Download.py	(original)
+++ branches/rewrite/src/Download.py	Tue Sep 21 16:21:38 2004
@@ -17,12 +17,13 @@
 # $Id$
 
 import pycurl
-import sys, string
+import threading
 
 from Queue import Queue
 
-import signal
-from signal import SIGPIPE, SIG_IGN
+# Needs to be removed when the test code is removed.
+import sys, string
+import pdb
 
 class DownloadQueue(Queue):
     """ Implemente a Queue without duplicated items. """
@@ -30,22 +31,6 @@
         if item not in self.queue:
             self.queue.append(item)
 
-class Download:
-    """ Download queue """
-    queue = DownloadQueue()
-    """ Fetcher to use """
-    fetcher = None
-
-    def __init__(self, uri, destine):
-        self.queue.put((uri, destine))
-
-        print self.queue.qsize()
-
-        if not self.fetcher:
-            self.fetcher = DownloadFetcher(2)
-        else:
-            print "Usando fetcher anterior..."
-
 def progress(download_t, download_d, upload_t, upload_d):
     print "Total to download", download_t
     print "Total downloaded", download_d
@@ -53,16 +38,11 @@
     print "Total uploaded", upload_d
 
 class DownloadFetcher:
-    __objs = None
-    
     def __init__(self, max_connections = 2):
-        """ We should ignore SIGPIPE when using pycurl.NOSIGNAL - see
-        the libcurl documentation `libcurl-the-guide' for more info."""
-        signal.signal(signal.SIGPIPE, signal.SIG_IGN)
-
         """ Make the needed objects to handle the connections."""
         self.__objs = pycurl.CurlMulti()
-        self.__objs.handles = []
+        self.__handles = []
+        self.__threads = []
         for i in range(max_connections):
             c = pycurl.Curl()
             c.fp = None
@@ -71,16 +51,17 @@
             c.setopt(pycurl.MAXREDIRS, 5)
             c.setopt(pycurl.CONNECTTIMEOUT, 30)
             c.setopt(pycurl.TIMEOUT, 300)
-            c.setopt(pycurl.NOSIGNAL, 1)
             c.setopt(pycurl.PROGRESSFUNCTION, progress)
-            self.__objs.handles.append(c)
+#            c.setopt(pycurl.VERBOSE, 1)
+            self.__handles.append(c)
+
+        threading.Thread(target=self.thread).start()
 
-        free = self.__objs.handles[:]
-        num_processed = 0
+    def thread(self):
         while 1:
-            while Download.queue and free:
+            while not Download.queue.empty() and self.__handles:
                 url, filename = Download.queue.get()
-                c = free.pop()
+                c = self.__handles.pop()
                 c.fp = open(filename, "wb")
                 c.setopt(pycurl.URL, url)
                 c.setopt(pycurl.WRITEDATA, c.fp)
@@ -95,7 +76,6 @@
                 if ret != pycurl.E_CALL_MULTI_PERFORM:
                     break
 
-            # Check for curl objects which have terminated, and add them to the freelist
             while 1:
                 num_q, ok_list, err_list = self.__objs.info_read()
                 for c in ok_list:
@@ -103,13 +83,12 @@
                     c.fp = None
                     self.__objs.remove_handle(c)
                     print "Success:", c.filename, c.url, c.getinfo(pycurl.EFFECTIVE_URL)
-                    freelist.append(c)
+                    self.__handles.append(c)
                 for c, errno, errmsg in err_list:
                     c.fp.close()
                     c.fp = None
                     self.__objs.remove_handle(c)
                     print "Failed: ", c.filename, c.url, errno, errmsg
-                    freelist.append(c)
+                    self.__handles.append(c)
-                    num_processed = num_processed + len(ok_list) + len(err_list)
                 if num_q == 0:
                     break
@@ -119,6 +98,16 @@
             # We just use select() to wait until some more data is available.
             self.__objs.select()
 
+class Download:
+    """ Download queue """
+    queue = DownloadQueue()
+    """ Fetcher to use """
+    fetcher = DownloadFetcher(2)
+
+    def __init__(self, uri, destine):
+        self.queue.put((uri, destine))
+        print str(self.queue.qsize()) + " / " + str(self.fetcher)
+
 urls = open(sys.argv[1]).readlines()
 fileno = 1
 for url in urls:
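
Since the dedup trick in DownloadQueue above is easy to miss: Queue.Queue keeps
its pending items in the sequence self.queue, so overriding _put() this way
quietly drops repeated work items. A quick standalone check (the URL and
filename below are placeholders, not part of this revision):

from Queue import Queue

class DownloadQueue(Queue):
    """ Implement a Queue without duplicate items. """
    def _put(self, item):
        # put() delegates storage to _put(), so skipping the append
        # for already-queued items is enough to deduplicate.
        if item not in self.queue:
            self.queue.append(item)

q = DownloadQueue()
q.put(("http://example.org/f", "f"))
q.put(("http://example.org/f", "f"))  # duplicate, silently dropped
print q.qsize()                       # prints 1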
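
For anyone reading the new fetcher thread: it runs the standard CurlMulti
cycle, i.e. hand queued work to idle handles, pump perform() until it stops
returning E_CALL_MULTI_PERFORM, harvest finished transfers with info_read(),
and wait in select() for more data. A minimal single-threaded sketch of that
cycle, with a placeholder work list standing in for Download.queue:

import pycurl

# Placeholder work list; in the rewrite this comes from Download.queue.
work = [("http://example.org/a", "a.out"),
        ("http://example.org/b", "b.out")]

multi = pycurl.CurlMulti()
free = []
for i in range(2):
    c = pycurl.Curl()
    c.fp = None
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.MAXREDIRS, 5)
    free.append(c)

active = 0
while work or active:
    # Assign queued work to idle handles.
    while work and free:
        url, filename = work.pop(0)
        c = free.pop()
        c.fp = open(filename, "wb")
        c.setopt(pycurl.URL, url)
        c.setopt(pycurl.WRITEDATA, c.fp)
        multi.add_handle(c)
        active = active + 1
    # Drive the transfers until libcurl has no more work for now.
    while 1:
        ret, num_handles = multi.perform()
        if ret != pycurl.E_CALL_MULTI_PERFORM:
            break
    # Recycle handles whose transfers finished, successfully or not.
    while 1:
        num_q, ok_list, err_list = multi.info_read()
        for c in ok_list + [e[0] for e in err_list]:
            c.fp.close()
            c.fp = None
            multi.remove_handle(c)
            free.append(c)
            active = active - 1
        if num_q == 0:
            break
    # Wait until some more data is available.
    multi.select(1.0)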