source: rtems-source-builder/source-builder/sb/download.py @ 158ad68

4.11
Last change on this file since 158ad68 was 158ad68, checked in by Chris Johns <chrisj@…>, on 10/03/20 at 11:53:04

sb: Back port the RTEMS 5 and 6 RSB engine.

  • Build GDB first as we do for RTEMS 5 and later
  • Update GDB to 9.1 for all archs except SPARC. The SIS patches only apply to 7.9. Disable Python for SPARC

Closes #4111

  • Property mode set to 100644
File size: 24.7 KB
Line 
1#
2# RTEMS Tools Project (http://www.rtems.org/)
3# Copyright 2010-2016 Chris Johns (chrisj@rtems.org)
4# All rights reserved.
5#
6# This file is part of the RTEMS Tools package in 'rtems-tools'.
7#
8# Permission to use, copy, modify, and/or distribute this software for any
9# purpose with or without fee is hereby granted, provided that the above
10# copyright notice and this permission notice appear in all copies.
11#
12# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
13# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
14# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
15# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
16# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
17# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
18# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
19
20#
21# This code builds a package given a config file. It only builds to be
22# installed not to be package unless you run a packager around this.
23#
24
25from __future__ import print_function
26
27import base64
28import hashlib
29import os
30import re
31import stat
32import sys
33try:
34    import urllib.request as urllib_request
35    import urllib.parse as urllib_parse
36except ImportError:
37    import urllib2 as urllib_request
38    import urlparse as urllib_parse
39
40from . import cvs
41from . import error
42from . import git
43from . import log
44from . import path
45from . import sources
46from . import version
47
48def _do_download(opts):
49    download = True
50    if opts.dry_run():
51        download = False
52        wa = opts.with_arg('download')
53        if wa is not None:
54            if wa[0] == 'with_download' and wa[1] == 'yes':
55                download = True
56    return download
57
58def _humanize_bytes(bytes, precision = 1):
59    abbrevs = (
60        (1 << 50, 'PB'),
61        (1 << 40, 'TB'),
62        (1 << 30, 'GB'),
63        (1 << 20, 'MB'),
64        (1 << 10, 'kB'),
65        (1, ' bytes')
66    )
67    if bytes == 1:
68        return '1 byte'
69    for factor, suffix in abbrevs:
70        if bytes >= factor:
71            break
72    return '%.*f%s' % (precision, float(bytes) / factor, suffix)
73
74def _sensible_url(url, used = 0):
75    space = 100
76    if len(url) > space:
77        size = int(space - 14)
78        url = url[:size] + '...<see log>'
79    return url
80
def _hash_check(file_, absfile, macros, remove = True):
    # Verify absfile against the checksum configured for file_.
    #
    # The expected hash is looked up via sources.get_hash() and has the
    # form '<algorithm> <digest>'. The digest may be hex or base64
    # encoded; both encodings are compared. Returns True when the check
    # passes (or when no hash is configured on an unreleased RSB).
    # When the check fails and remove is True the file is deleted.
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            # hashlib.algorithms is not present on all supported Python
            # versions; fall back to a fixed list of algorithms.
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            # Read error: record the failure; the (empty) digest will
            # also fail the comparison below.
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected error: close the file and propagate.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        # Accept a match against either the hex or the base64 digest.
        hash_hex = hasher.hexdigest()
        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
        log.output('checksums: %s: (hex: %s) (b64: %s) => %s' % (file_,
                                                                 hash_hex,
                                                                 hash_base64,
                                                                 hash[1]))
        if hash_hex != hash[1] and hash_base64 != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            # Remove the corrupt download so a retry fetches it again.
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No configured hash: hard error on a released RSB, a warning
        # otherwise.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
137
def _local_path(source, pathkey, config):
    """Locate the source file in the colon separated local search path.

    The first path entry becomes the default local location; when the
    file exists in an entry that entry wins, its checksum is verified
    and the search stops.
    """
    for prefix in config.define(pathkey).split(':'):
        abs_prefix = path.abspath(prefix)
        candidate = path.join(abs_prefix, source['file'])
        if source['local'] is None:
            source['local_prefix'] = abs_prefix
            source['local'] = candidate
        if path.exists(candidate):
            source['local_prefix'] = abs_prefix
            source['local'] = candidate
            _hash_check(source['file'], candidate, config.macros)
            break
150
def _http_parser(source, pathkey, config, opts):
    # Parse an http/https/ftp source URL: fix up the file name,
    # check for a local copy and detect the compression type from
    # the file extension.
    #
    # If the file has not been overridden attempt to recover a possible
    # file name.
    #
    if 'file-override' not in source['options']:
        #
        # Hack for gitweb.cgi patch downloads. We rewrite the various fields.
        #
        if 'gitweb.cgi' in source['url']:
            url = source['url']
            if '?' not in url:
                raise error.general('invalid gitweb.cgi request: %s' % (url))
            req = url.split('?')[1]
            if len(req) == 0:
                raise error.general('invalid gitweb.cgi request: %s' % (url))
            #
            # The gitweb.cgi request should have:
            #    p=<what>
            #    a=patch
            #    h=<hash>
            # so extract the p and h parts to make the local name.
            #
            p = None
            a = None
            h = None
            for r in req.split(';'):
                if '=' not in r:
                    raise error.general('invalid gitweb.cgi path: %s' % (url))
                rs = r.split('=')
                if rs[0] == 'p':
                    # Dots in the project name would confuse the
                    # extension handling below.
                    p = rs[1].replace('.', '-')
                elif rs[0] == 'a':
                    a = rs[1]
                elif rs[0] == 'h':
                    h = rs[1]
            if p is None or h is None:
                raise error.general('gitweb.cgi path missing p or h: %s' % (url))
            source['file'] = '%s-%s.patch' % (p, h)
        #
        # Wipe out everything special in the file name.
        #
        source['file'] = re.sub(r'[^a-zA-Z0-9.\-]+', '-', source['file'])
        # Guard against file systems with limited name lengths.
        max_file_len = 127
        if len(source['file']) > max_file_len:
            raise error.general('file name length is greater than %i (maybe use --rsb-file=FILE option): %s' % \
                                (max_file_len, source['file']))
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Is the file compressed ?
    #
    esl = source['ext'].split('.')
    if esl[-1:][0] == 'gz':
        source['compressed-type'] = 'gzip'
        source['compressed'] = '%{__gzip} -dc'
    elif esl[-1:][0] == 'bz2':
        source['compressed-type'] = 'bzip2'
        source['compressed'] = '%{__bzip2} -dc'
    elif esl[-1:][0] == 'zip':
        source['compressed-type'] = 'zip'
        source['compressed'] = '%{__unzip} -u'
    elif esl[-1:][0] == 'xz':
        source['compressed-type'] = 'xz'
        source['compressed'] = '%{__xz} -dc'
217
def _patchworks_parser(source, pathkey, config, opts):
    """Parse a Patchworks (pw://) source URL."""
    # Check for a local copy first.
    _local_path(source, pathkey, config)
    # Rewrite the 'pw' protocol prefix to 'http' for the downloader.
    source['url'] = 'http' + source['path'][2:]
224
def _git_parser(source, pathkey, config, opts):
    """Parse a git source URL with '?' separated options.

    Fills in the path/file/name/ext fields, records any options in
    'args' and points the local copy at the 'git' subdirectory of
    the local prefix; the clone is symlinked into the build tree.
    """
    # Check for a local copy first.
    _local_path(source, pathkey, config)
    parts = source['url'].split('?')
    repo_url = parts[0]
    options = parts[1:]
    source['path'] = path.dirname(repo_url)
    source['file'] = path.basename(repo_url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(options) > 0:
        source['args'] = options
    source['local'] = path.join(source['local_prefix'],
                                'git', source['file'])
    source['symlink'] = source['local']
242
def _cvs_parser(source, pathkey, config, opts):
    # Parse a cvs:// source URL with '?' separated options (module,
    # src-prefix, tag, date), extracting the cvsroot and building a
    # unique, file-system-safe local directory name from the parts.
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Symlink.
    #
    if not source['url'].startswith('cvs://'):
        raise error.general('invalid cvs path: %s' % (source['url']))
    us = source['url'].split('?')
    try:
        url = us[0]
        # After 'cvs://' the ':' separates the access spec from the
        # repository path; the '/' starts the server path used for
        # the cvsroot.
        source['file'] = url[url[6:].index(':') + 7:]
        source['cvsroot'] = ':%s:' % (url[6:url[6:].index('/') + 6:])
    except:
        raise error.general('invalid cvs path: %s' % (source['url']))
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            source['module'] = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            source['src_prefix'] = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            source['tag'] = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            source['date'] = _as[1]
    if 'date' in source and 'tag' in source:
        raise error.general('cvs URL cannot have a date and tag: %s' % (source['url']))
    # Do here to ensure an ordered path, the URL can include options in any order
    if 'module' in source:
        source['file'] += '_%s' % (source['module'])
    if 'tag' in source:
        source['file'] += '_%s' % (source['tag'])
    if 'date' in source:
        source['file'] += '_%s' % (source['date'])
    # Flatten special characters so the name is file system safe.
    for c in '/@#%.-':
        source['file'] = source['file'].replace(c, '_')
    source['local'] = path.join(source['local_prefix'], 'cvs', source['file'])
    if 'src_prefix' in source:
        source['symlink'] = path.join(source['local'], source['src_prefix'])
    else:
        source['symlink'] = source['local']
294
def _file_parser(source, pathkey, config, opts):
    """Parse a file:// source URL pointing at a local file."""
    # Check for a local copy first.
    _local_path(source, pathkey, config)
    # Strip the 'file:/' prefix; the remainder (keeping the second
    # slash) is used as the file name.
    source['file'] = source['url'][len('file:/'):]
304
# Map URL protocol prefixes to their parser. Matching in parse_url()
# is via str.startswith() so 'https' and 'ftps' URLs also select the
# http parser.
parsers = { 'http': _http_parser,
            'ftp':  _http_parser,
            'pw':   _patchworks_parser,
            'git':  _git_parser,
            'cvs':  _cvs_parser,
            'file': _file_parser }
311
def set_release_path(release_path, macros):
    """Define the 'release_path' macro.

    A release_path of None selects the standard RTEMS release
    sources location.
    """
    default_path = '%{rtems_release_url}/%{rsb_version}/sources'
    macros.define('release_path',
                  default_path if release_path is None else release_path)
316
def parse_url(url, pathkey, config, opts, file_override = None):
    """Split a source URL into its parts and locate any local copy.

    Returns a dict with the url, path, file, name, ext, protocol
    type and local path of the source. A file_override replaces the
    file name taken from the URL (used by --rsb-file). Raises
    error.general for a malformed URL or a bad override name.
    """
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # Fix: error message previously read 'malforned'.
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    # Drop any query part from the file name.
    question_mark = source['file'].find('?')
    if question_mark >= 0:
        source['file'] = source['file'][:question_mark]
    source['name'], source['ext'] = path.splitext(source['file'])
    # Treat '.tar.*' as a single compound extension.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            # The parsers return None so break explicitly; the
            # protocol prefixes are disjoint so at most one matches.
            parsers[p](source, pathkey, config, opts)
            break
    source['script'] = ''
    return source
355
def _http_downloader(url, local, config, opts):
    # Download url to the local file via HTTP/HTTPS/FTP with console
    # progress output. Returns True on success or if the file already
    # exists; returns False on a handled download error (the partial
    # file is removed). A checksum mismatch raises error.general.
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urllib_parse.urljoin(url, config.expand('tarball/%{version}'))
    dst = os.path.relpath(path.host(local))
    log.output('download: (full) %s -> %s' % (url, dst))
    log.notice('download: %s -> %s' % (_sensible_url(url, len(dst)), dst))
    failed = False
    if _do_download(opts):
        _in = None
        _out = None
        _length = None
        _have = 0
        _chunk_size = 256 * 1024
        _chunk = None
        _last_percent = 200.0
        _last_msg = ''
        _have_status_output = False
        _url = url
        try:
            try:
                _in = None
                _ssl_context = None
                # See #2656
                _req = urllib_request.Request(_url)
                # Some servers refuse Python's default user agent.
                _req.add_header('User-Agent', 'Wget/1.16.3 (freebsd10.1)')
                try:
                    import ssl
                    # Use an unverified TLS context; fall back to a
                    # plain request when no SSL support is available.
                    _ssl_context = ssl._create_unverified_context()
                    _in = urllib_request.urlopen(_req, context = _ssl_context)
                except:
                    log.output('download: no ssl context')
                    _ssl_context = None
                if _ssl_context is None:
                    _in = urllib_request.urlopen(_req)
                if _url != _in.geturl():
                    _url = _in.geturl()
                    log.output(' redirect: %s' % (_url))
                    log.notice(' redirect: %s' % (_sensible_url(_url)))
                _out = open(path.host(local), 'wb')
                try:
                    _length = int(_in.info()['Content-Length'].strip())
                except:
                    # No content length; progress shows bytes only.
                    pass
                while True:
                    _msg = '\rdownloading: %s - %s ' % (dst, _humanize_bytes(_have))
                    if _length:
                        _percent = round((float(_have) / _length) * 100, 2)
                        if _percent != _last_percent:
                            _msg += 'of %s (%0.0f%%) ' % (_humanize_bytes(_length), _percent)
                    if _msg != _last_msg:
                        # Pad with spaces then backspace to erase a
                        # longer previous status line.
                        extras = (len(_last_msg) - len(_msg))
                        log.stdout_raw('%s%s' % (_msg, ' ' * extras + '\b' * extras))
                        _last_msg = _msg
                        _have_status_output = True
                    _chunk = _in.read(_chunk_size)
                    if not _chunk:
                        break
                    _out.write(_chunk)
                    _have += len(_chunk)
                log.stdout_raw('\n\r')
            except:
                # Terminate the status line before propagating.
                if _have_status_output:
                    log.stdout_raw('\n\r')
                raise
        except IOError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            # Unexpected error: close the streams and propagate.
            msg = 'download: %s: error' % (_sensible_url(_url))
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            _in.close()
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' % (path.host(local)))
            # Verify but do not remove on failure (remove = False).
            if not _hash_check(path.basename(local), local, config.macros, False):
                raise error.general('checksum failure file: %s' % (dst))
    return not failed
455
def _git_downloader(url, local, config, opts):
    # Clone or update a git repository given a git URL with '?'
    # separated options (protocol, branch, checkout, submodule,
    # fetch, merge, pull, reset, clean). Always returns True; errors
    # are raised by the repo operations.
    repo = git.repo(local, opts, config.macros)
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    #
    # Handle the various git protocols.
    #
    # remove 'git' from 'git://xxxx/xxxx?protocol=...'
    #
    url_base = us[0][len('git'):]
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'protocol':
            if len(_as) != 2:
                raise error.general('invalid git protocol option: %s' % (_as))
            if _as[1] == 'none':
                # remove the rest of the protocol header leaving nothing.
                us[0] = url_base[len('://'):]
            else:
                if _as[1] not in ['ssh', 'git', 'http', 'https', 'ftp', 'ftps', 'rsync']:
                    raise error.general('unknown git protocol: %s' % (_as[1]))
                us[0] = _as[1] + url_base
    if not repo.valid():
        log.notice('git: clone: %s -> %s' % (us[0], rlp))
        if _do_download(opts):
            repo.clone(us[0], local)
    else:
        # Reuse the existing clone; drop any local changes first.
        repo.clean(['-f', '-d'])
        repo.reset('--hard')
        repo.checkout('master')
    # Apply the remaining options in the order they appear.
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch' or _as[0] == 'checkout':
            if len(_as) != 2:
                raise error.general('invalid git branch/checkout: %s' % (_as))
            log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
            if _do_download(opts):
                repo.checkout(_as[1])
        elif _as[0] == 'submodule':
            if len(_as) != 2:
                raise error.general('invalid git submodule: %s' % (_as))
            log.notice('git: submodule: %s <= %s' % (us[0], _as[1]))
            if _do_download(opts):
                repo.submodule(_as[1])
        elif _as[0] == 'fetch':
            log.notice('git: fetch: %s -> %s' % (us[0], rlp))
            if _do_download(opts):
                repo.fetch()
        elif _as[0] == 'merge':
            log.notice('git: merge: %s' % (us[0]))
            if _do_download(opts):
                repo.merge()
        elif _as[0] == 'pull':
            log.notice('git: pull: %s' % (us[0]))
            if _do_download(opts):
                repo.pull()
        elif _as[0] == 'reset':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            log.notice('git: reset: %s' % (us[0]))
            if _do_download(opts):
                repo.reset(arg)
                repo.submodule_foreach(['reset'] + arg)
        elif _as[0] == 'clean':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            log.notice('git: clean: %s' % (us[0]))
            if _do_download(opts):
                repo.clean(arg)
                repo.submodule_foreach(['clean'] + arg)
        elif _as[0] == 'protocol':
            # Already handled above.
            pass
        else:
            raise error.general('invalid git option: %s' % (_as))
    return True
533
def _cvs_downloader(url, local, config, opts):
    # Check out or update a CVS module given a cvs:// URL with '?'
    # separated options (module, src-prefix, tag, date, update,
    # reset). Always returns True; errors are raised by the repo
    # operations.
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    module = None
    tag = None
    date = None
    src_prefix = None
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            module = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            src_prefix = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            tag = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            date = _as[1]
    repo = cvs.repo(local, opts, config.macros, src_prefix)
    if not repo.valid():
        if not path.isdir(local):
            log.notice('Creating source directory: %s' % \
                           (os.path.relpath(path.host(local))))
            if _do_download(opts):
                path.mkdir(local)
            log.notice('cvs: checkout: %s -> %s' % (us[0], rlp))
            if _do_download(opts):
                # Strip the 'cvs://' prefix to form the cvsroot spec.
                repo.checkout(':%s' % (us[0][6:]), module, tag, date)
    # Apply any update/reset options.
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'update':
            log.notice('cvs: update: %s' % (us[0]))
            if _do_download(opts):
                repo.update()
        elif _as[0] == 'reset':
            log.notice('cvs: reset: %s' % (us[0]))
            if _do_download(opts):
                repo.reset()
    return True
580
def _file_downloader(url, local, config, opts):
    """Copy a file:// source to the local source directory.

    Returns True if the file is already present or the copy
    succeeds, and False when the copy fails.
    """
    if not path.exists(local):
        try:
            # Strip the 'file://' prefix to obtain the source path.
            src = url[7:]
            dst = local
            log.notice('download: copy %s -> %s' % (src, dst))
            path.copy(src, dst)
        except Exception as err:
            # Fix: was a bare 'except:' which also swallowed
            # KeyboardInterrupt/SystemExit and hid the cause. Log the
            # failure and report it via the return value as before.
            log.output('download: copy failed: %s' % (str(err)))
            return False
    return True
591
# Map URL protocol prefixes to their downloader. Matching in
# get_file() is via str.startswith(); 'pw' URLs have already been
# rewritten to http by the parser.
downloaders = { 'http': _http_downloader,
                'ftp':  _http_downloader,
                'pw':   _http_downloader,
                'git':  _git_downloader,
                'cvs':  _cvs_downloader,
                'file': _file_downloader }
598
def get_file(url, local, opts, config):
    # Fetch url to the local path, trying any release URLs and user
    # supplied URL bases before the URL itself. Raises error.general
    # when the file cannot be obtained.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if _do_download(opts):
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # released push the release path URLs to the start the RTEMS URL list
    # unless overriden by the command line option --without-release-url. The
    # variant --without-release-url can override the released check.
    #
    url_bases = opts.urls()
    if url_bases is None:
        url_bases = []
    try:
        rtems_release_url_value = config.macros.expand('%{release_path}')
    except:
        # The release path macro may not be defined.
        rtems_release_url_value = None
    rtems_release_url = None
    rtems_release_urls = []
    if version.released() and rtems_release_url_value:
        rtems_release_url = rtems_release_url_value
    with_rel_url = opts.with_arg('release-url')
    if with_rel_url[1] == 'not-found':
        if config.defined('without_release_url'):
            with_rel_url = ('without_release-url', 'yes')
    if with_rel_url[0] == 'with_release-url':
        if with_rel_url[1] == 'yes':
            if rtems_release_url_value is None:
                raise error.general('no valid release URL')
            rtems_release_url = rtems_release_url_value
        elif with_rel_url[1] == 'no':
            pass
        else:
            # A URL was supplied as the option's value.
            rtems_release_url = with_rel_url[1]
    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
        rtems_release_url = None
    if rtems_release_url is not None:
        # The release URL option may hold a comma separated list.
        rtems_release_urls = rtems_release_url.split(',')
        for release_url in rtems_release_urls:
            log.trace('release url: %s' % (release_url))
            #
            # If the URL being fetched is under the release path do not add
            # the sources release path because it is already there.
            #
            if not url.startswith(release_url):
                url_bases = [release_url] + url_bases
    urls = []
    if len(url_bases) > 0:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urllib_parse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            #
            # Hack to fix #3064 where --rsb-file is being used. This code is a
            # mess and should be refactored.
            #
            if version.released() and base in rtems_release_urls:
                url_file = path.basename(local)
            if base[-1:] != '/':
                base += '/'
            next_url = urllib_parse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # Try each candidate URL with the downloader matching its
    # protocol prefix; stop on the first success.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
Note: See TracBrowser for help on using the repository browser.