I've been working on converting the HTTP/FTP installation chunks of
anaconda from using urllib2 directly to using urlgrabber out of yum.
The next step in this process is breaking urlgrabber out into its own
package. This way, we'll have fewer code blobs dealing with URLs. It
will also give us authentication, mirrors, and fancy retry and proxy
support when we get to that point.
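As a taste of what that buys us, here's roughly what the grabber.py
interface in yum looks like today (retry and proxies are existing
keyword options as far as I can tell; treat the exact names as
illustrative until the standalone package settles down):

    import grabber

    try:
        # retry the transfer up to 3 times, going through an HTTP proxy
        f = grabber.urlopen("http://example.com/RPMS/comps.xml",
                            retry=3,
                            proxies={"http": "http://proxy.example.com:3128/"})
        data = f.read()
        f.close()
    except grabber.URLGrabError, e:
        # one exception type carrying errno/strerror, instead of juggling
        # urllib2.HTTPError, urllib2.URLError and assorted IOErrors
        print "failed: %s" % e.strerror

Mirror support would then layer on top of this without the call sites
having to change again.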
Attached is a patch converting anaconda to use urlgrabber. It does
not include grabber.py or keepalive.py from urlgrabber; you'll have to
go look at those on your own. It is also against the latest CVS (if
that matters for these files).
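Every call site converts the same way, so to make review easier, here
is the before/after shape as a hypothetical helper (paraphrased, not
lifted from any one file):

    import grabber

    def fetch(url):
        # grabber collapses urllib2.HTTPError, urllib2.URLError and the
        # assorted IOErrors into a single URLGrabError carrying errno
        # and strerror, and retry=5 replaces the hand-rolled sleep loops
        try:
            return grabber.urlopen(url, retry=5)
        except grabber.URLGrabError, e:
            print "URLGrabError: %s occurred getting %s" % (e.strerror, url)
            raise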
Comments?
- Chris
Index: comps.py
===================================================================
RCS file: /usr/local/CVS/anaconda/comps.py,v
retrieving revision 1.150
diff -u -r1.150 comps.py
--- comps.py 20 May 2003 15:26:00 -0000 1.150
+++ comps.py 1 Feb 2005 21:33:33 -0000
@@ -20,10 +20,10 @@
import os
from string import *
import types
-import urllib2
import time
import language
import iutil
+import grabber
from rhpl.log import log
from rhpl.translate import _, N_
@@ -728,29 +728,10 @@
return truth
def readCompsFile(self, filename, packages):
- #
- # ugly - urlopen can return a variety of errors which
- # do not have same form.
- #
- connected = 0
- while not connected:
- try:
- file = urllib2.urlopen(filename)
- except urllib2.HTTPError, e:
- log("HTTPError: %s occurred getting %s", filename, e)
- except urllib2.URLError, e:
- log("URLError: %s occurred getting %s", filename, e)
- except IOError, (errnum, msg):
- log("IOError %s occurred getting %s: %s", filename,
- errnum, str(msg))
- except IOError, (errnum, msg):
- log("OSError %s occurred getting %s: %s", filename,
- errnum, str(msg))
- else:
- connected = 1
-
- if not connected:
- time.sleep(5)
+ try:
+ file = grabber.urlopen(filename)
+ except grabber.URLGrabError, e:
+ log("URLGrabError: %s occurred getting %s", e.strerror, filename)
+ raise
self.compsxml = rhpl.comps.Comps(file)
file.close()
Index: gui.py
===================================================================
RCS file: /usr/local/CVS/anaconda/gui.py,v
retrieving revision 1.384
diff -u -r1.384 gui.py
--- gui.py 26 Jan 2005 15:47:52 -0000 1.384
+++ gui.py 1 Feb 2005 21:33:33 -0000
@@ -900,12 +900,6 @@
file = open(tmpfile, "r")
self.releaseNotesContents = file.read()
file.close()
-
- # deal with stupid urllib2 creating a zero length file
- # when the specified FTP URL doesnt exist
- if len(self.releaseNotesContents) < 1:
- self.releaseNotesContents = None
- continue
except:
continue
Index: hdrlist.py
===================================================================
RCS file: /usr/local/CVS/anaconda/hdrlist.py,v
retrieving revision 1.24
diff -u -r1.24 hdrlist.py
--- hdrlist.py 29 Jul 2004 19:34:55 -0000 1.24
+++ hdrlist.py 1 Feb 2005 21:33:33 -0000
@@ -906,32 +906,15 @@
def groupSetFromCompsFile(filename, hdrlist, doSelect = 1):
- import urllib2
+ import grabber
- file = None
- tries = 0
- while tries < 5:
- try:
- file = urllib2.urlopen(filename)
- except urllib2.HTTPError, e:
- log("HTTPError: %s occurred getting %s", filename, e)
- except urllib2.URLError, e:
- log("URLError: %s occurred getting %s", filename, e)
- except IOError, (errnum, msg):
- log("IOError %s occurred getting %s: %s", filename,
- errnum, str(msg))
- except IOError, (errnum, msg):
- log("OSError %s occurred getting %s: %s", filename,
- errnum, str(msg))
- else:
- break
+ try:
+ file = grabber.urlopen(filename, retry=5)
+ except grabber.URLGrabError, e:
+ log("URLGrabError: %s occurred getting %s", e.strerror, filename)
+ raise FileCopyException
- time.sleep(5)
- tries = tries + 1
-
- if file is None:
- raise FileCopyException
-
compsxml = rhpl.comps.Comps(file)
file.close()
grpset = GroupSet(compsxml, hdrlist)
Index: kickstart.py
===================================================================
RCS file: /usr/local/CVS/anaconda/kickstart.py,v
retrieving revision 1.251
diff -u -r1.251 kickstart.py
--- kickstart.py 31 Jan 2005 18:11:42 -0000 1.251
+++ kickstart.py 1 Feb 2005 21:33:33 -0000
@@ -24,7 +24,7 @@
import raid
import string
import partRequests
-import urllib2
+import grabber
import lvm
from rhpl.translate import _
@@ -1656,7 +1656,7 @@
# pull <url> down and append to /tmp/ks.cfg. This is run before we actually
# parse the complete kickstart file.
#
-# Main use is to have the ks.cfg you send to the loader by minimal, and then
+# Main use is to have the ks.cfg you send to the loader be minimal, and then
# use %ksappend to pull via https anything private (like passwords, etc) in
# the second stage.
#
@@ -1683,11 +1683,9 @@
log("Attempting to pull second part of ks.cfg from url %s" % (ksurl,))
try:
- url = urllib2.urlopen(ksurl)
- except urllib2.HTTPError, e:
- raise KSAppendException("IOError: %s:%s" % (e.code, e.msg))
- except urllib2.URLError, e:
- raise KSAppendException("IOError: -1:%s" % (e.reason,))
+ url = grabber.urlopen(ksurl)
+ except grabber.URLGrabError, e:
+ raise KSAppendException("IOError: %s" % e.strerror)
else:
# sanity check result - sometimes FTP doesnt
# catch a file is missing
@@ -1699,7 +1697,7 @@
if clen < 1:
raise KSAppendException("IOError: -1:File not found")
- break
+ break
# if we got something then rewrite /tmp/ks.cfg with new information
if url is not None:
Index: urlinstall.py
===================================================================
RCS file: /usr/local/CVS/anaconda/urlinstall.py,v
retrieving revision 1.47
diff -u -r1.47 urlinstall.py
--- urlinstall.py 15 Oct 2004 20:22:19 -0000 1.47
+++ urlinstall.py 1 Feb 2005 21:33:33 -0000
@@ -18,13 +18,13 @@
import os
import rpm
import time
-import urllib2
import string
import struct
import socket
from snack import *
from constants import *
+import grabber
from rhpl.translate import _
@@ -41,27 +41,22 @@
def urlretrieve(location, file, callback=None):
"""Downloads from location and saves to file."""
-
if callback is not None:
callback(_("Connecting..."), 0)
-
+
try:
- url = urllib2.urlopen(location)
- except urllib2.HTTPError, e:
- raise IOError(e.code, e.msg)
- except urllib2.URLError, e:
- raise IOError(-1, e.reason)
+ url = grabber.urlopen(location)
+ except grabber.URLGrabError, e:
+ raise IOError(e.errno, e.strerror)
# see if there is a size
try:
filesize = int(url.info()["Content-Length"])
+ if filesize == 0:
+ filesize = None
except:
filesize = None
- # handle zero length case
- if filesize == 0:
- filesize = None
-
# create output file
f = open(file, 'w+')
@@ -82,7 +77,6 @@
f.close()
url.close()
-
class UrlInstallMethod(InstallMethod):
def readCompsViaMethod(self, hdlist):
@@ -170,28 +164,15 @@
os.remove(fullName)
def readHeaders(self):
- tries = 0
-
- while tries < 5:
- hdurl = "%s/%s/base/hdlist" % (self.baseUrl, productPath)
- try:
- url = urllib2.urlopen(hdurl)
- except urllib2.HTTPError, e:
- log("HTTPError: %s occurred getting %s", hdurl, e)
- except urllib2.URLError, e:
- log("URLError: %s occurred getting %s", hdurl, e)
- except IOError, (errnum, msg):
- log("IOError %s occurred getting %s: %s",
- errnum, hdurl, msg)
- else:
- break
+ hdurl = "%s/%s/base/hdlist" % (self.baseUrl, productPath)
- time.sleep(5)
- tries = tries + 1
+ try:
+ url = grabber.urlopen(hdurl, retry=5)
+ except grabber.URLGrabError, e:
+ log("URLGrabError: %s occurred getting %s", e.strerror, hdurl)
+ raise FileCopyException
- if tries >= 5:
- raise FileCopyException
-
raw = url.read(16)
if raw is None or len(raw) < 1:
raise TypeError, "header list is empty!"