source: rtems-source-builder/source-builder/sb/download.py @ b487b7d

Branches: 4.11, 4.11.2
Last change on this file was b487b7d, checked in by Chris Johns <chrisj@…> on 07/10/17 at 01:44:32

sb/download: Fix --rsb-file options with released sources.

The released sources use the local file name so always force it when
the RSB is released.

Closes #3064.

File size: 23.7 KB
Rev  Line
[649a64c]1#
2# RTEMS Tools Project (http://www.rtems.org/)
[76188ee4]3# Copyright 2010-2016 Chris Johns (chrisj@rtems.org)
[649a64c]4# All rights reserved.
5#
6# This file is part of the RTEMS Tools package in 'rtems-tools'.
7#
8# Permission to use, copy, modify, and/or distribute this software for any
9# purpose with or without fee is hereby granted, provided that the above
10# copyright notice and this permission notice appear in all copies.
11#
12# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
13# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
14# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
15# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
16# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
17# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
18# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
19
20#
21# This code builds a package given a config file. It only builds to be
22# installed not to be package unless you run a packager around this.
23#
24
[f88fcf3]25from __future__ import print_function
26
[a083b52]27import hashlib
[649a64c]28import os
[f179dc6]29import re
[649a64c]30import stat
31import sys
[f88fcf3]32try:
33    import urllib.request as urllib_request
34    import urllib.parse as urllib_parse
35except ImportError:
[31f3304]36    import urllib2 as urllib_request
[bce0563]37    import urlparse as urllib_parse
[649a64c]38
[4ce931b]39import cvs
[649a64c]40import error
41import git
42import log
43import path
[d3629a9]44import sources
[b6d188b]45import version
[649a64c]46
[d755065]47def _do_download(opts):
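    # A dry run normally skips downloads; the with_download ('--with-download')
    # argument checked below appears to force the fetch even on a dry run.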
48    download = True
49    if opts.dry_run():
50        download = False
51        wa = opts.with_arg('download')
52        if wa is not None:
53            if wa[0] == 'with_download' and wa[1] == 'yes':
54                download = True
55    return download
56
[c49e500]57def _humanize_bytes(bytes, precision = 1):
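    # Illustrative values: _humanize_bytes(1) -> '1 byte',
    # _humanize_bytes(512) -> '512.0 bytes', _humanize_bytes(1 << 20) -> '1.0MB'.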
58    abbrevs = (
[f88fcf3]59        (1 << 50, 'PB'),
60        (1 << 40, 'TB'),
61        (1 << 30, 'GB'),
62        (1 << 20, 'MB'),
63        (1 << 10, 'kB'),
[c49e500]64        (1, ' bytes')
65    )
66    if bytes == 1:
67        return '1 byte'
68    for factor, suffix in abbrevs:
69        if bytes >= factor:
70            break
71    return '%.*f%s' % (precision, float(bytes) / factor, suffix)
72
[31f3304]73def _sensible_url(url, used = 0):
[87434f6]74    space = 200
[31f3304]75    if len(url) > space:
[87434f6]76        size = int(space - 14)
77        url = url[:size] + '...<see log>'
[31f3304]78    return url
79
[a083b52]80def _hash_check(file_, absfile, macros, remove = True):
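    # The recorded hash is expected to be a two field string of the form
    # '<algorithm> <hexdigest>', for example (hypothetical digest):
    #    sha512 88cb52cf...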
81    failed = False
[4934771]82    hash = sources.get_hash(file_.lower(), macros)
83    if hash is not None:
[a083b52]84        hash = hash.split()
85        if len(hash) != 2:
86            raise error.internal('invalid hash format: %s' % (file_))
[910081d]87        try:
88            hashlib_algorithms = hashlib.algorithms
89        except:
90            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
91        if hash[0] not in hashlib_algorithms:
[a083b52]92            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
93        hasher = None
94        _in = None
95        try:
96            hasher = hashlib.new(hash[0])
[d4eb08f]97            _in = open(path.host(absfile), 'rb')
[a083b52]98            hasher.update(_in.read())
[f88fcf3]99        except IOError as err:
[a083b52]100            log.notice('hash: %s: read error: %s' % (file_, str(err)))
101            failed = True
102        except:
103            msg = 'hash: %s: error' % (file_)
104            log.stderr(msg)
105            log.notice(msg)
106            if _in is not None:
107                _in.close()
108            raise
109        if _in is not None:
110            _in.close()
111        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
112        if hasher.hexdigest() != hash[1]:
113            log.warning('checksum error: %s' % (file_))
114            failed = True
115        if failed and remove:
116            log.warning('removing: %s' % (file_))
117            if path.exists(absfile):
[d4eb08f]118                try:
119                    os.remove(path.host(absfile))
[f88fcf3]120                except IOError as err:
[d4eb08f]121                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
122                except:
123                    raise error.general('hash: %s: remove error' % (file_))
[a083b52]124        if hasher is not None:
125            del hasher
126    else:
[b6d188b]127        if version.released():
128            raise error.general('%s: no hash found in released RSB' % (file_))
[a083b52]129        log.warning('%s: no hash found' % (file_))
130    return not failed
131
[d755065]132def _local_path(source, pathkey, config):
133    for p in config.define(pathkey).split(':'):
[f179dc6]134        local_prefix = path.abspath(p)
135        local = path.join(local_prefix, source['file'])
[d755065]136        if source['local'] is None:
[f179dc6]137            source['local_prefix'] = local_prefix
[d755065]138            source['local'] = local
139        if path.exists(local):
[f179dc6]140            source['local_prefix'] = local_prefix
[d755065]141            source['local'] = local
142            _hash_check(source['file'], local, config.macros)
143            break
144
145def _http_parser(source, pathkey, config, opts):
146    #
[f179dc6]147    # If the file has not been overridden, attempt to recover a possible file name.
[d755065]148    #
[f179dc6]149    if 'file-override' not in source['options']:
[d755065]150        #
[f179dc6]151        # Hack for gitweb.cgi patch downloads. We rewrite the various fields.
[d755065]152        #
[f179dc6]153        if 'gitweb.cgi' in source['url']:
154            url = source['url']
155            if '?' not in url:
156                raise error.general('invalid gitweb.cgi request: %s' % (url))
157            req = url.split('?')[1]
158            if len(req) == 0:
159                raise error.general('invalid gitweb.cgi request: %s' % (url))
160            #
161            # The gitweb.cgi request should have:
162            #    p=<what>
163            #    a=patch
164            #    h=<hash>
165            # so extract the p and h parts to make the local name.
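            # For example, a request ending in (hypothetical)
            #    ?p=rtems-tools.git;a=patch;h=abc1234
            # yields the local file name 'rtems-tools-git-abc1234.patch'.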
166            #
167            p = None
168            a = None
169            h = None
170            for r in req.split(';'):
171                if '=' not in r:
172                    raise error.general('invalid gitweb.cgi path: %s' % (url))
173                rs = r.split('=')
174                if rs[0] == 'p':
175                    p = rs[1].replace('.', '-')
176                elif rs[0] == 'a':
177                    a = rs[1]
178                elif rs[0] == 'h':
179                    h = rs[1]
180            if p is None or h is None:
181                raise error.general('gitweb.cgi path missing p or h: %s' % (url))
182            source['file'] = '%s-%s.patch' % (p, h)
183        #
184        # Check the source file name for any extra request query data and remove
185        # it if found. Some hosts do not like file names containing them.
186        #
187        if '?' in source['file']:
188            qmark = source['file'].find('?')
189            source['file'] = source['file'][:qmark]
[76188ee4]190    #
[d755065]191    # Check local path
192    #
193    _local_path(source, pathkey, config)
[649a64c]194    #
195    # Is the file compressed ?
196    #
197    esl = source['ext'].split('.')
198    if esl[-1:][0] == 'gz':
[9a15c40]199        source['compressed-type'] = 'gzip'
[649a64c]200        source['compressed'] = '%{__gzip} -dc'
201    elif esl[-1:][0] == 'bz2':
[9a15c40]202        source['compressed-type'] = 'bzip2'
[649a64c]203        source['compressed'] = '%{__bzip2} -dc'
[29a300a]204    elif esl[-1:][0] == 'zip':
[9a15c40]205        source['compressed-type'] = 'zip'
[e7a6292]206        source['compressed'] = '%{__unzip} -u'
[649a64c]207    elif esl[-1:][0] == 'xz':
[9a15c40]208        source['compressed-type'] = 'xz'
[649a64c]209        source['compressed'] = '%{__xz} -dc'
210
[d755065]211def _patchworks_parser(source, pathkey, config, opts):
212    #
213    # Check local path
214    #
215    _local_path(source, pathkey, config)
[587aa5f]216    source['url'] = 'http%s' % (source['path'][2:])
217
[d755065]218def _git_parser(source, pathkey, config, opts):
219    #
220    # Check local path
221    #
222    _local_path(source, pathkey, config)
[649a64c]223    #
224    # Symlink.
225    #
226    us = source['url'].split('?')
227    source['path'] = path.dirname(us[0])
228    source['file'] = path.basename(us[0])
229    source['name'], source['ext'] = path.splitext(source['file'])
230    if len(us) > 1:
231        source['args'] = us[1:]
232    source['local'] = \
[f8bdb41]233        path.join(source['local_prefix'], 'git', source['file'])
[649a64c]234    source['symlink'] = source['local']
235
[d755065]236def _cvs_parser(source, pathkey, config, opts):
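    # A cvs URL is expected to look something like (illustrative):
    #    cvs://:pserver:anoncvs@host/path?module=<mod>?tag=<tag>?src-prefix=<dir>
    # where the ?options may appear in any order and 'tag' and 'date' are
    # mutually exclusive.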
237    #
238    # Check local path
239    #
240    _local_path(source, pathkey, config)
[4ce931b]241    #
242    # Symlink.
243    #
244    if not source['url'].startswith('cvs://'):
245        raise error.general('invalid cvs path: %s' % (source['url']))
246    us = source['url'].split('?')
247    try:
248        url = us[0]
[c65b9eb]249        source['file'] = url[url[6:].index(':') + 7:]
[4ce931b]250        source['cvsroot'] = ':%s:' % (url[6:url[6:].index('/') + 6:])
251    except:
252        raise error.general('invalid cvs path: %s' % (source['url']))
[f077b2b]253    for a in us[1:]:
254        _as = a.split('=')
[c65b9eb]255        if _as[0] == 'module':
256            if len(_as) != 2:
257                raise error.general('invalid cvs module: %s' % (a))
258            source['module'] = _as[1]
259        elif _as[0] == 'src-prefix':
[f077b2b]260            if len(_as) != 2:
261                raise error.general('invalid cvs src-prefix: %s' % (a))
262            source['src_prefix'] = _as[1]
[c65b9eb]263        elif _as[0] == 'tag':
264            if len(_as) != 2:
265                raise error.general('invalid cvs tag: %s' % (a))
266            source['tag'] = _as[1]
267        elif _as[0] == 'date':
268            if len(_as) != 2:
269                raise error.general('invalid cvs date: %s' % (a))
270            source['date'] = _as[1]
271    if 'date' in source and 'tag' in source:
272        raise error.general('cvs URL cannot have a date and tag: %s' % (source['url']))
273    # Do this here to ensure an ordered path; the URL can include options in any order
274    if 'module' in source:
275        source['file'] += '_%s' % (source['module'])
276    if 'tag' in source:
277        source['file'] += '_%s' % (source['tag'])
278    if 'date' in source:
279        source['file'] += '_%s' % (source['date'])
280    for c in '/@#%.-':
281        source['file'] = source['file'].replace(c, '_')
282    source['local'] = path.join(source['local_prefix'], 'cvs', source['file'])
[4ce931b]283    if 'src_prefix' in source:
[f077b2b]284        source['symlink'] = path.join(source['local'], source['src_prefix'])
[4ce931b]285    else:
286        source['symlink'] = source['local']
[649a64c]287
[d755065]288def _file_parser(source, pathkey, config, opts):
289    #
290    # Check local path
291    #
292    _local_path(source, pathkey, config)
[649a64c]293    #
[5b5d6bf]294    # Get the paths sorted.
[649a64c]295    #
[5b5d6bf]296    source['file'] = source['url'][6:]
[649a64c]297
298parsers = { 'http': _http_parser,
299            'ftp':  _http_parser,
[587aa5f]300            'pw':   _patchworks_parser,
[649a64c]301            'git':  _git_parser,
[4ce931b]302            'cvs':  _cvs_parser,
[f8bdb41]303            'file': _file_parser }
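# The parser (and later the downloader) is selected by matching these keys
# against the start of the URL, so an 'https' URL matches the 'http' entry.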
[649a64c]304
[dca7ab2]305def set_release_path(release_path, macros):
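    # If no release path is provided the default below is used; it is expanded
    # via the macros when get_file() looks up %{release_path}.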
306    if release_path is None:
307        release_path = '%{rtems_release_url}/%{rsb_version}/sources'
308    macros.define('release_path', release_path)
309
[f179dc6]310def parse_url(url, pathkey, config, opts, file_override = None):
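    # Illustrative example: for a URL such as
    #    https://ftp.gnu.org/gnu/gdb/gdb-7.9.tar.xz
    # the returned source dict gains 'file' = 'gdb-7.9.tar.xz',
    # 'name' = 'gdb-7.9', 'ext' = '.tar.xz' and, via the http parser,
    # 'compressed-type' = 'xz'.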
[649a64c]311    #
312    # Split the source up into the parts we need.
313    #
314    source = {}
315    source['url'] = url
[f179dc6]316    source['options'] = []
[587aa5f]317    colon = url.find(':')
318    if url[colon + 1:colon + 3] != '//':
[d790668]319        raise error.general('malformed URL (no protocol prefix): %s' % (url))
[587aa5f]320    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
[f179dc6]321    if file_override is None:
322        source['file'] = path.basename(url)
323    else:
324        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
325        if len(bad_chars) > 0:
326            raise error.general('bad characters in file name: %s' % (file_override))
327        log.output('download: file-override: %s' % (file_override))
328        source['file'] = file_override
329        source['options'] += ['file-override']
[649a64c]330    source['name'], source['ext'] = path.splitext(source['file'])
[9a15c40]331    if source['name'].endswith('.tar'):
332        source['name'] = source['name'][:-4]
333        source['ext'] = '.tar' + source['ext']
[649a64c]334    #
335    # Get the file. Checks the local source directory first.
336    #
337    source['local'] = None
338    for p in parsers:
339        if url.startswith(p):
340            source['type'] = p
[d755065]341            if parsers[p](source, pathkey, config, opts):
[649a64c]342                break
[d755065]343    source['script'] = ''
[649a64c]344    return source
345
346def _http_downloader(url, local, config, opts):
347    if path.exists(local):
348        return True
349    #
350    # Hack for GitHub.
351    #
352    if url.startswith('https://api.github.com'):
[f88fcf3]353        url = urllib_parse.urljoin(url, config.expand('tarball/%{version}'))
[c49e500]354    dst = os.path.relpath(path.host(local))
[f179dc6]355    log.output('download: (full) %s -> %s' % (url, dst))
[31f3304]356    log.notice('download: %s -> %s' % (_sensible_url(url, len(dst)), dst))
[649a64c]357    failed = False
[d755065]358    if _do_download(opts):
[649a64c]359        _in = None
360        _out = None
[c49e500]361        _length = None
362        _have = 0
363        _chunk_size = 256 * 1024
364        _chunk = None
365        _last_percent = 200.0
366        _last_msg = ''
[58a8fd8]367        _have_status_output = False
[31f3304]368        _url = url
[649a64c]369        try:
[c49e500]370            try:
[d755065]371                _in = None
[3237c8e]372                _ssl_context = None
[31f3304]373                # See #2656
374                _req = urllib_request.Request(_url)
375                _req.add_header('User-Agent', 'Wget/1.16.3 (freebsd10.1)')
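                # Certificate verification is intentionally relaxed below; if an
                # unverified SSL context cannot be created the request falls back
                # to the plain opener.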
[3237c8e]376                try:
377                    import ssl
378                    _ssl_context = ssl._create_unverified_context()
[31f3304]379                    _in = urllib_request.urlopen(_req, context = _ssl_context)
[3237c8e]380                except:
[f179dc6]381                    log.output('download: no ssl context')
[bd16849]382                    _ssl_context = None
383                if _ssl_context is None:
[31f3304]384                    _in = urllib_request.urlopen(_req)
385                if _url != _in.geturl():
386                    _url = _in.geturl()
387                    log.output(' redirect: %s' % (_url))
388                    log.notice(' redirect: %s' % (_sensible_url(_url)))
[c49e500]389                _out = open(path.host(local), 'wb')
390                try:
[31f3304]391                    _length = int(_in.info()['Content-Length'].strip())
[c49e500]392                except:
393                    pass
394                while True:
395                    _msg = '\rdownloading: %s - %s ' % (dst, _humanize_bytes(_have))
396                    if _length:
397                        _percent = round((float(_have) / _length) * 100, 2)
398                        if _percent != _last_percent:
399                            _msg += 'of %s (%0.0f%%) ' % (_humanize_bytes(_length), _percent)
400                    if _msg != _last_msg:
401                        extras = (len(_last_msg) - len(_msg))
402                        log.stdout_raw('%s%s' % (_msg, ' ' * extras + '\b' * extras))
403                        _last_msg = _msg
[58a8fd8]404                        _have_status_output = True
[c49e500]405                    _chunk = _in.read(_chunk_size)
406                    if not _chunk:
407                        break
408                    _out.write(_chunk)
409                    _have += len(_chunk)
[58a8fd8]410                log.stdout_raw('\n\r')
[c49e500]411            except:
[58a8fd8]412                if _have_status_output:
413                    log.stdout_raw('\n\r')
[c49e500]414                raise
[f88fcf3]415        except IOError as err:
[31f3304]416            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
[649a64c]417            if path.exists(local):
418                os.remove(path.host(local))
419            failed = True
[f88fcf3]420        except ValueError as err:
[31f3304]421            log.notice('download: %s: error: %s' % (_sensible_url(_url), str(err)))
[649a64c]422            if path.exists(local):
423                os.remove(path.host(local))
424            failed = True
425        except:
[31f3304]426            msg = 'download: %s: error' % (_sensible_url(_url))
[fa87409]427            log.stderr(msg)
[5142bec]428            log.notice(msg)
[d755065]429            if _in is not None:
430                _in.close()
[649a64c]431            if _out is not None:
432                _out.close()
433            raise
434        if _out is not None:
435            _out.close()
436        if _in is not None:
[d755065]437            _in.close()
[649a64c]438            del _in
439        if not failed:
440            if not path.isfile(local):
441                raise error.general('source is not a file: %s' % (path.host(local)))
[a083b52]442            if not _hash_check(path.basename(local), local, config.macros, False):
443                raise error.general('checksum failure file: %s' % (dst))
[649a64c]444    return not failed
445
446def _git_downloader(url, local, config, opts):
[d790668]447    repo = git.repo(local, opts, config.macros)
[649a64c]448    rlp = os.path.relpath(path.host(local))
449    us = url.split('?')
[d790668]450    #
451    # Handle the various git protocols.
452    #
453    # remove 'git' from 'git://xxxx/xxxx?protocol=...'
454    #
455    url_base = us[0][len('git'):]
456    for a in us[1:]:
457        _as = a.split('=')
458        if _as[0] == 'protocol':
459            if len(_as) != 2:
460                raise error.general('invalid git protocol option: %s' % (_as))
461            if _as[1] == 'none':
462                # remove the rest of the protocol header leaving nothing.
463                us[0] = url_base[len('://'):]
464            else:
465                if _as[1] not in ['ssh', 'git', 'http', 'https', 'ftp', 'ftps', 'rsync']:
466                    raise error.general('unknown git protocol: %s' % (_as[1]))
467                us[0] = _as[1] + url_base
[649a64c]468    if not repo.valid():
[5142bec]469        log.notice('git: clone: %s -> %s' % (us[0], rlp))
[d755065]470        if _do_download(opts):
[649a64c]471            repo.clone(us[0], local)
[53b918e]472    else:
[8c19df2]473        repo.clean(['-f', '-d'])
[53b918e]474        repo.reset('--hard')
475        repo.checkout('master')
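    # The remaining ?options are handled below: branch, checkout, submodule,
    # fetch, merge, pull, reset and protocol; anything else raises an error.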
[649a64c]476    for a in us[1:]:
477        _as = a.split('=')
[53b918e]478        if _as[0] == 'branch' or _as[0] == 'checkout':
479            if len(_as) != 2:
480                raise error.general('invalid git branch/checkout: %s' % (_as))
[5142bec]481            log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
[d755065]482            if _do_download(opts):
[649a64c]483                repo.checkout(_as[1])
[53b918e]484        elif _as[0] == 'submodule':
485            if len(_as) != 2:
486                raise error.general('invalid git submodule: %s' % (_as))
487            log.notice('git: submodule: %s <= %s' % (us[0], _as[1]))
[d755065]488            if _do_download(opts):
[53b918e]489                repo.submodule(_as[1])
[649a64c]490        elif _as[0] == 'fetch':
[5142bec]491            log.notice('git: fetch: %s -> %s' % (us[0], rlp))
[d755065]492            if _do_download(opts):
[649a64c]493                repo.fetch()
[b0f9e30]494        elif _as[0] == 'merge':
495            log.notice('git: merge: %s' % (us[0]))
496            if _do_download(opts):
497                repo.merge()
498        elif _as[0] == 'pull':
499            log.notice('git: pull: %s' % (us[0]))
500            if _do_download(opts):
501                repo.pull()
[649a64c]502        elif _as[0] == 'reset':
503            arg = []
504            if len(_as) > 1:
505                arg = ['--%s' % (_as[1])]
[5142bec]506            log.notice('git: reset: %s' % (us[0]))
[d755065]507            if _do_download(opts):
[649a64c]508                repo.reset(arg)
[d790668]509        elif _as[0] == 'protocol':
510            pass
511        else:
512            raise error.general('invalid git option: %s' % (_as))
[649a64c]513    return True
514
[4ce931b]515def _cvs_downloader(url, local, config, opts):
516    rlp = os.path.relpath(path.host(local))
517    us = url.split('?')
518    module = None
519    tag = None
520    date = None
521    src_prefix = None
522    for a in us[1:]:
523        _as = a.split('=')
524        if _as[0] == 'module':
525            if len(_as) != 2:
526                raise error.general('invalid cvs module: %s' % (a))
527            module = _as[1]
528        elif _as[0] == 'src-prefix':
529            if len(_as) != 2:
530                raise error.general('invalid cvs src-prefix: %s' % (a))
531            src_prefix = _as[1]
532        elif _as[0] == 'tag':
533            if len(_as) != 2:
534                raise error.general('invalid cvs tag: %s' % (a))
535            tag = _as[1]
536        elif _as[0] == 'date':
537            if len(_as) != 2:
538                raise error.general('invalid cvs date: %s' % (a))
539            date = _as[1]
540    repo = cvs.repo(local, opts, config.macros, src_prefix)
541    if not repo.valid():
[5f44fcd]542        if not path.isdir(local):
543            log.notice('Creating source directory: %s' % \
544                           (os.path.relpath(path.host(local))))
[d755065]545            if _do_download(opts):
[5f44fcd]546                path.mkdir(local)
[f077b2b]547            log.notice('cvs: checkout: %s -> %s' % (us[0], rlp))
[d755065]548            if _do_download(opts):
[f077b2b]549                repo.checkout(':%s' % (us[0][6:]), module, tag, date)
[4ce931b]550    for a in us[1:]:
551        _as = a.split('=')
552        if _as[0] == 'update':
[5142bec]553            log.notice('cvs: update: %s' % (us[0]))
[d755065]554            if _do_download(opts):
[4ce931b]555                repo.update()
556        elif _as[0] == 'reset':
[5142bec]557            log.notice('cvs: reset: %s' % (us[0]))
[d755065]558            if _do_download(opts):
[4ce931b]559                repo.reset()
560    return True
561
[649a64c]562def _file_downloader(url, local, config, opts):
[12f253c]563    if not path.exists(local):
564        try:
565            src = url[7:]
566            dst = local
567            log.notice('download: copy %s -> %s' % (src, dst))
568            path.copy(src, dst)
569        except:
570            return False
[5b5d6bf]571    return True
[649a64c]572
573downloaders = { 'http': _http_downloader,
574                'ftp':  _http_downloader,
[587aa5f]575                'pw':   _http_downloader,
[649a64c]576                'git':  _git_downloader,
[4ce931b]577                'cvs':  _cvs_downloader,
578                'file': _file_downloader }
[649a64c]579
580def get_file(url, local, opts, config):
581    if local is None:
582        raise error.general('source/patch path invalid')
583    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
[5142bec]584        log.notice('Creating source directory: %s' % \
585                       (os.path.relpath(path.host(path.dirname(local)))))
586    log.output('making dir: %s' % (path.host(path.dirname(local))))
[d755065]587    if _do_download(opts):
[649a64c]588        path.mkdir(path.dirname(local))
589    if not path.exists(local) and opts.download_disabled():
590        raise error.general('source not found: %s' % (path.host(local)))
591    #
[12f253c]592    # Check if a URL has been provided on the command line. If the package is
[58a8fd8]593    # released, push the RTEMS release URL to the start unless overridden by the
594    # command line option --with-release-url. The variant --without-release-url
595    # can override the released check.
[649a64c]596    #
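    # The cases handled below: --with-release-url uses the configured
    # %{release_path}, --with-release-url=<URL> appears to take <URL> as the
    # release base, and --without-release-url (or a defined without_release_url
    # macro) drops the release URL.
    #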
597    url_bases = opts.urls()
[8f6fb61]598    try:
[dca7ab2]599        rtems_release_url_value = config.macros.expand('%{release_path}')
[8f6fb61]600    except:
601        rtems_release_url_value = None
[58a8fd8]602    rtems_release_url = None
[8f6fb61]603    if version.released() and rtems_release_url_value:
[58a8fd8]604        rtems_release_url = rtems_release_url_value
605    with_rel_url = opts.with_arg('release-url')
[2b5f69e]606    if with_rel_url[1] == 'not-found':
607        if config.defined('without_release_url'):
608            with_rel_url = ('without_release-url', 'yes')
[58a8fd8]609    if with_rel_url[0] == 'with_release-url':
610        if with_rel_url[1] == 'yes':
[8f6fb61]611            if rtems_release_url_value is None:
612                raise error.general('no valid release URL')
[58a8fd8]613            rtems_release_url = rtems_release_url_value
614        elif with_rel_url[1] == 'no':
615            pass
616        else:
617            rtems_release_url = with_rel_url[1]
[2e548833]618    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
619        rtems_release_url = None
[58a8fd8]620    if rtems_release_url is not None:
[12f253c]621        log.trace('release url: %s' % (rtems_release_url))
622        #
623        # If the URL being fetched is under the release path do not add the
624        # sources release path because it is already there.
625        #
626        if not url.startswith(rtems_release_url):
627            if url_bases is None:
628                url_bases = [rtems_release_url]
629            else:
630                url_bases.append(rtems_release_url)
[649a64c]631    urls = []
632    if url_bases is not None:
[12f253c]633        #
634        # Split up the URL we are being asked to download.
635        #
[f88fcf3]636        url_path = urllib_parse.urlsplit(url)[2]
[12f253c]637        slash = url_path.rfind('/')
638        if slash < 0:
639            url_file = url_path
640        else:
641            url_file = url_path[slash + 1:]
642        log.trace('url_file: %s' %(url_file))
[649a64c]643        for base in url_bases:
[b487b7d]644            #
645            # Hack to fix #3064 where --rsb-file is being used. This code is a
646            # mess and should be refactored.
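            # Released sources are published under the release path using the
            # local file name, so that name is forced here (see #3064).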
647            #
648            if version.released() and base == rtems_release_url:
649                url_file = path.basename(local)
[649a64c]650            if base[-1:] != '/':
651                base += '/'
[f88fcf3]652            next_url = urllib_parse.urljoin(base, url_file)
[12f253c]653            log.trace('url: %s' %(next_url))
654            urls.append(next_url)
[53b918e]655    urls += url.split()
[5142bec]656    log.trace('_url: %s -> %s' % (','.join(urls), local))
[53b918e]657    for url in urls:
658        for dl in downloaders:
659            if url.startswith(dl):
660                if downloaders[dl](url, local, config, opts):
661                    return
[d755065]662    if _do_download(opts):
[649a64c]663        raise error.general('downloading %s: all paths have failed, giving up' % (url))