#! /usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
# Copyright Red Hat Inc. 2008, 2009
#
# Author: James Antill
#
# Just install this plugin, and have an avahi-packages-server.py running on
# your local network (or something like it).
#

import os
import sys
import time

import dbus, gobject, avahi
from dbus import DBusException
from dbus.mainloop.glib import DBusGMainLoop

import urlgrabber
import urlgrabber.mirror
import urlgrabber.progress
from urlgrabber.grabber import default_grabber
from urlgrabber.grabber import URLGrabber
from urlgrabber.grabber import URLGrabError
from urlgrabber.progress import format_number

import yum
from yum.plugins import TYPE_CORE

requires_api_version = '2.5'
plugin_type = (TYPE_CORE,)

_avahi_cache_timestamp = 0
_avahi_cache_baseurls = []

def _avahi_baseurls():
    """ Return a list of URLs we can try to get checksummed data from. """

    global _avahi_cache_timestamp, _avahi_cache_baseurls

    now = time.time()
    if now <= (_avahi_cache_timestamp + 30):
        return _avahi_cache_baseurls

    __DNS__ = "_checksum_data._tcp"

    urls = []
    def found(interface, protocol, name, stype, domain, flags):
        """ Found some data from Avahi. """
        info = server.ResolveService(interface, protocol, name, stype, domain,
                                     avahi.PROTO_UNSPEC, dbus.UInt32(0))
        url = "http://%s:%s/" % (str(info[7]), str(info[8]))
        # Prefer servers on other hosts; put services published on this host
        # at the end of the list.
        if flags & avahi.LOOKUP_RESULT_LOCAL:
            urls.append(url)
        else:
            urls.insert(0, url)

    def all_done():
        # Avahi callbacks finished
        mainloop.quit()

    # FIXME: I guess we should probably cache something here...
    loop = DBusGMainLoop()
    bus = dbus.SystemBus(mainloop=loop)
    server = dbus.Interface(bus.get_object(avahi.DBUS_NAME,
                                           avahi.DBUS_PATH_SERVER),
                            avahi.DBUS_INTERFACE_SERVER)

    path = server.ServiceBrowserNew(avahi.IF_UNSPEC, avahi.PROTO_INET,
                                    __DNS__, "local", dbus.UInt32(0))
    sbrowser = dbus.Interface(bus.get_object(avahi.DBUS_NAME, path),
                              avahi.DBUS_INTERFACE_SERVICE_BROWSER)
    sbrowser.connect_to_signal('ItemNew', found)
    sbrowser.connect_to_signal('AllForNow', all_done)

    mainloop = gobject.MainLoop()
    mainloop.run()
    sbrowser.Free()

    print "JDBG: avahi urls:", now, urls

    _avahi_cache_baseurls = urls
    _avahi_cache_timestamp = now
    return urls

def _avahi_grab(repo, remote_data, local, text,
                range=(None, None), reget='simple',
                checkfunc=None, http_headers=None):
    """ Try and get the data from avahi. """
    if remote_data is None:
        return None

    relative = []
    for key in ('sha256', 'sha1', 'md5', 'len'):
        if key in remote_data:
            relative.append(key)
            relative.append(str(remote_data[key]))
    if not relative:
        return None
    relative = '/'.join(relative)

    ug = URLGrabber(progress_obj = repo.callback,
                    reget = reget,
                    interrupt_callback=repo.interrupt_callback,
                    timeout=repo.timeout,
                    checkfunc=checkfunc,
                    http_headers=http_headers,
                    failure_callback=None)
    ug.opts.user_agent = default_grabber.opts.user_agent

    try:
        urls = _avahi_baseurls()
    except:
        return None

    # NOTE: Not sure this failure_cb is right...
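    # The URLs discovered via Avahi are used as a mirror group, tried in
    # random order.  Each server is expected to publish the file under the
    # checksum-keyed path built above, e.g. "sha256/<hexdigest>/len/<bytes>"
    # for a package that only has a sha256 checksum.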
    fcb = repo.mirror_failure_obj
    mug = urlgrabber.mirror.MGRandomOrder(ug, urls) #, failure_callback=fcb)

    try:
        result = mug.urlgrab(relative, local, text=text, range=range)
    except URLGrabError, e:
        return None

    return result

def _avahi_grab_pkg(i, num, pkg, checkfunc):
    local = pkg.localPkg()

    remote_data = {}
    # FIXME: This needs work when we move to sha256/etc.
    T, V, blah = pkg.returnChecksums()[0]
    remote_data[T] = V
    remote_data['len'] = sz = pkg.packagesize

    text = os.path.basename(pkg.relativepath)
    if num != 1:
        text = '(%s/%s): %s' % (i, num, text)

    ret = _avahi_grab(pkg.repo, remote_data, local, yum.misc.to_utf8(text),
                      checkfunc=checkfunc)
    if ret:
        return sz
    return 0

def predownload_hook(conduit):
    remote_size = 0
    beg = time.time()

    yb = conduit._base

    remote_pkgs = []
    for pkg in sorted(conduit.getDownloadPackages()):
        if hasattr(pkg, 'pkgtype') and pkg.pkgtype == 'local':
            continue

        local = pkg.localPkg()
        if os.path.exists(local):
            if yb.verifyPkg(local, pkg, False):
                continue
            if os.path.getsize(local) >= pkg.size:
                os.unlink(local)
        remote_pkgs.append(pkg)
        remote_size += pkg.size

    i = 0
    num = len(remote_pkgs)
    local_size = 0
    urlgrabber.progress.text_meter_total_size(remote_size, local_size)
    for pkg in remote_pkgs:
        local = pkg.localPkg()
        i += 1

        if os.path.exists(local):
            if yb.verifyPkg(local, pkg, False):
                remote_size -= pkg.size
                urlgrabber.progress.text_meter_total_size(remote_size,
                                                          local_size)
                continue
            if os.path.getsize(local) >= pkg.size:
                os.unlink(local)

        checkfunc = (yb.verifyPkg, (pkg, 1), {})
        cur = _avahi_grab_pkg(i, num, pkg, checkfunc)
        local_size += cur
        urlgrabber.progress.text_meter_total_size(remote_size, local_size)
        if not cur:
            i -= 1

    if local_size:
        bs = local_size / (time.time() - beg)
        print "Downloaded %sB from avahi at %sB/s" % (format_number(local_size),
                                                      format_number(bs))
    if remote_size != local_size:
        left = remote_size - local_size
        print "Still need to download: %sB" % format_number(left)
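
# Note: like other yum plugins, this one only runs when plugins are enabled in
# yum.conf and a configuration file enables this plugin.  A minimal sketch,
# assuming the plugin is installed under the name "avahi-packages" (the file
# name and path are assumptions, not taken from this file):
#
#   /etc/yum/pluginconf.d/avahi-packages.conf:
#       [main]
#       enabled=1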