source: rtems-source-builder/source-builder/sb/download.py @ f88fcf3 (4.11 branch)

Last change on this file since f88fcf3 was f88fcf3, checked in by Chris Johns <chrisj@…> on 03/07/16 at 00:56:02:

sb: Update code base to support Python3 and Python2.

Fix Windows support to allow MSYS2 Python to be used.

Updates #2619.

#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2016 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

#
# This code builds a package given a config file. It only builds to be
# installed, not to be packaged, unless you run a packager around this.
#

from __future__ import print_function

import hashlib
import os
import stat
import sys
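# Prefer the Python 3 urllib submodules; fall back to the Python 2 'urllib'
# module when they are not available.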
try:
    import urllib.request as urllib_request
    import urllib.parse as urllib_parse
    import urllib.error as urllib_error
except ImportError:
    import urllib as urllib_request
    import urllib as urllib_parse
    import urllib as urllib_error

import cvs
import error
import git
import log
import path
import sources
import version

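# Decide whether a real download should happen. Downloads are suppressed on
# a dry run unless the user explicitly enabled them, which shows up here as
# the with_arg pair ('with_download', 'yes').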
def _do_download(opts):
    download = True
    if opts.dry_run():
        download = False
        wa = opts.with_arg('download')
        if wa is not None:
            if wa[0] == 'with_download' and wa[1] == 'yes':
                download = True
    return download

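# Pretty print a byte count for the download progress output. The units are
# binary multiples, so, for example, _humanize_bytes(1500000) gives '1.4MB'
# (1500000 / 2**20 to one decimal place).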
def _humanize_bytes(bytes, precision = 1):
    abbrevs = (
        (1 << 50, 'PB'),
        (1 << 40, 'TB'),
        (1 << 30, 'GB'),
        (1 << 20, 'MB'),
        (1 << 10, 'kB'),
        (1, ' bytes')
    )
    if bytes == 1:
        return '1 byte'
    for factor, suffix in abbrevs:
        if bytes >= factor:
            break
    return '%.*f%s' % (precision, float(bytes) / factor, suffix)

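# Verify a file against the hash recorded for it in the configuration.
# sources.get_hash() is expected to return a two field string such as
# 'sha512 <hexdigest>' (illustrative): an algorithm name known to hashlib and
# the expected digest. A failed check removes the file when 'remove' is True.
# A missing hash is fatal for a released RSB and a warning otherwise.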
def _hash_check(file_, absfile, macros, remove = True):
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed

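# Look for an already downloaded copy of the file in each directory of the
# ':' separated list defined by 'pathkey'. The first directory becomes the
# default local prefix; an existing copy wins and is hash checked.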
def _local_path(source, pathkey, config):
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break

def _http_parser(source, pathkey, config, opts):
    #
    # Hack for gitweb.cgi patch downloads. We rewrite the various fields.
    #
    if 'gitweb.cgi' in source['url']:
        url = source['url']
        if '?' not in url:
            raise error.general('invalid gitweb.cgi request: %s' % (url))
        req = url.split('?')[1]
        if len(req) == 0:
            raise error.general('invalid gitweb.cgi request: %s' % (url))
        #
        # The gitweb.cgi request should have:
        #    p=<what>
        #    a=patch
        #    h=<hash>
        # so extract the p and h parts to make the local name.
        #
        p = None
        a = None
        h = None
        for r in req.split(';'):
            if '=' not in r:
                raise error.general('invalid gitweb.cgi path: %s' % (url))
            rs = r.split('=')
            if rs[0] == 'p':
                p = rs[1].replace('.', '-')
            elif rs[0] == 'a':
                a = rs[1]
            elif rs[0] == 'h':
                h = rs[1]
        if p is None or h is None:
            raise error.general('gitweb.cgi path missing p or h: %s' % (url))
        source['file'] = '%s-%s.patch' % (p, h)
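    #
    # As an illustrative example, a patch URL ending in
    # 'gitweb.cgi?p=foo.git;a=patch;h=1234abcd' is stored locally as
    # 'foo-git-1234abcd.patch'.
    #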
    #
    # Check the source file name for any extra request query data and remove if
    # found. Some hosts do not like file names containing them.
    #
    if '?' in source['file']:
        qmark = source['file'].find('?')
        source['file'] = source['file'][:qmark]
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Is the file compressed ?
    #
    esl = source['ext'].split('.')
    if esl[-1:][0] == 'gz':
        source['compressed-type'] = 'gzip'
        source['compressed'] = '%{__gzip} -dc'
    elif esl[-1:][0] == 'bz2':
        source['compressed-type'] = 'bzip2'
        source['compressed'] = '%{__bzip2} -dc'
    elif esl[-1:][0] == 'zip':
        source['compressed-type'] = 'zip'
        source['compressed'] = '%{__unzip} -u'
    elif esl[-1:][0] == 'xz':
        source['compressed-type'] = 'xz'
        source['compressed'] = '%{__xz} -dc'

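# Patchwork ('pw://') sources are rewritten to an 'http' URL built from the
# source path, so the patch is fetched by the normal HTTP downloader (see the
# downloaders table below).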
def _patchworks_parser(source, pathkey, config, opts):
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    source['url'] = 'http%s' % (source['path'][2:])

def _git_parser(source, pathkey, config, opts):
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Symlink.
    #
    us = source['url'].split('?')
    source['path'] = path.dirname(us[0])
    source['file'] = path.basename(us[0])
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(us) > 1:
        source['args'] = us[1:]
    source['local'] = \
        path.join(source['local_prefix'], 'git', source['file'])
    source['symlink'] = source['local']

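# CVS sources use a 'cvs://' pseudo URL with '?' separated options that may
# appear in any order: module=, src-prefix=, tag= and date= (tag and date are
# mutually exclusive). The pieces are folded into a flattened, filesystem
# safe name under the 'cvs' local prefix.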
def _cvs_parser(source, pathkey, config, opts):
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Symlink.
    #
    if not source['url'].startswith('cvs://'):
        raise error.general('invalid cvs path: %s' % (source['url']))
    us = source['url'].split('?')
    try:
        url = us[0]
        source['file'] = url[url[6:].index(':') + 7:]
        source['cvsroot'] = ':%s:' % (url[6:url[6:].index('/') + 6:])
    except:
        raise error.general('invalid cvs path: %s' % (source['url']))
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            source['module'] = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            source['src_prefix'] = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            source['tag'] = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            source['date'] = _as[1]
    if 'date' in source and 'tag' in source:
        raise error.general('cvs URL cannot have a date and tag: %s' % (source['url']))
    # Do here to ensure an ordered path, the URL can include options in any order
    if 'module' in source:
        source['file'] += '_%s' % (source['module'])
    if 'tag' in source:
        source['file'] += '_%s' % (source['tag'])
    if 'date' in source:
        source['file'] += '_%s' % (source['date'])
    for c in '/@#%.-':
        source['file'] = source['file'].replace(c, '_')
    source['local'] = path.join(source['local_prefix'], 'cvs', source['file'])
    if 'src_prefix' in source:
        source['symlink'] = path.join(source['local'], source['src_prefix'])
    else:
        source['symlink'] = source['local']

def _file_parser(source, pathkey, config, opts):
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Get the paths sorted.
    #
    source['file'] = source['url'][6:]

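# Map a URL scheme prefix to the parser that fills in the source details.
# parse_url() selects the entry whose key the URL starts with.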
parsers = { 'http': _http_parser,
            'ftp':  _http_parser,
            'pw':   _patchworks_parser,
            'git':  _git_parser,
            'cvs':  _cvs_parser,
            'file': _file_parser }

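# parse_url() splits a source URL into the fields used by the build scripts.
# As an illustrative example, 'https://example.org/pkg/foo-1.0.tar.bz2' would
# be split into file 'foo-1.0.tar.bz2', name 'foo-1.0' and ext '.tar.bz2',
# and _http_parser() then marks it as bzip2 compressed.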
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source

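# Download a source or patch over HTTP/FTP. An existing local file is reused
# as-is, 'https://api.github.com' URLs are rewritten to the
# 'tarball/%{version}' download path, an unverified SSL context is used when
# one can be created (falling back to a plain urlopen() otherwise), progress
# is reported when a Content-Length header is available, and the downloaded
# file is checksummed before it is accepted.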
def _http_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urllib_parse.urljoin(url, config.expand('tarball/%{version}'))
    dst = os.path.relpath(path.host(local))
    log.notice('download: %s -> %s' % (url, dst))
    failed = False
    if _do_download(opts):
        _in = None
        _out = None
        _length = None
        _have = 0
        _chunk_size = 256 * 1024
        _chunk = None
        _last_percent = 200.0
        _last_msg = ''
        _wipe_output = False
        try:
            try:
                _in = None
                _ssl_context = None
                _urllib_url = url
                try:
                    import ssl
                    _ssl_context = ssl._create_unverified_context()
                    _in = urllib_request.urlopen(_urllib_url, context = _ssl_context)
                except:
                    _ssl_context = None
                if _ssl_context is None:
                    _in = urllib_request.urlopen(_urllib_url)
                if url != _in.geturl():
                    log.notice(' redirect: %s' % (_in.geturl()))
                _out = open(path.host(local), 'wb')
                try:
                    _length = int(_in.info().getheader('Content-Length').strip())
                except:
                    pass
                while True:
                    _msg = '\rdownloading: %s - %s ' % (dst, _humanize_bytes(_have))
                    if _length:
                        _percent = round((float(_have) / _length) * 100, 2)
                        if _percent != _last_percent:
                            _msg += 'of %s (%0.0f%%) ' % (_humanize_bytes(_length), _percent)
                    if _msg != _last_msg:
                        extras = (len(_last_msg) - len(_msg))
                        log.stdout_raw('%s%s' % (_msg, ' ' * extras + '\b' * extras))
                        _last_msg = _msg
                    _chunk = _in.read(_chunk_size)
                    if not _chunk:
                        break
                    _out.write(_chunk)
                    _have += len(_chunk)
                if _wipe_output:
                    log.stdout_raw('\r%s\r' % (' ' * len(_last_msg)))
                else:
                    log.stdout_raw('\n')
            except:
                log.stdout_raw('\n')
                raise
        except IOError as err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError as err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            msg = 'download: %s: error' % (url)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            _in.close()
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' % (path.host(local)))
            if not _hash_check(path.basename(local), local, config.macros, False):
                raise error.general('checksum failure file: %s' % (dst))
    return not failed

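# Fetch a git repository. The URL may carry '?' separated options; as an
# illustrative example, 'git://gitlab.example.org/foo.git?protocol=https?branch=v1.0'
# clones over https and then checks out 'v1.0'. Other options are checkout,
# submodule, fetch, merge, pull and reset; an existing clone is cleaned,
# reset and returned to 'master' before the options are applied.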
def _git_downloader(url, local, config, opts):
    repo = git.repo(local, opts, config.macros)
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    #
    # Handle the various git protocols.
    #
    # remove 'git' from 'git://xxxx/xxxx?protocol=...'
    #
    url_base = us[0][len('git'):]
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'protocol':
            if len(_as) != 2:
                raise error.general('invalid git protocol option: %s' % (_as))
            if _as[1] == 'none':
                # remove the rest of the protocol header leaving nothing.
                us[0] = url_base[len('://'):]
            else:
                if _as[1] not in ['ssh', 'git', 'http', 'https', 'ftp', 'ftps', 'rsync']:
                    raise error.general('unknown git protocol: %s' % (_as[1]))
                us[0] = _as[1] + url_base
    if not repo.valid():
        log.notice('git: clone: %s -> %s' % (us[0], rlp))
        if _do_download(opts):
            repo.clone(us[0], local)
    else:
        repo.clean(['-f', '-d'])
        repo.reset('--hard')
        repo.checkout('master')
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch' or _as[0] == 'checkout':
            if len(_as) != 2:
                raise error.general('invalid git branch/checkout: %s' % (_as))
            log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
            if _do_download(opts):
                repo.checkout(_as[1])
        elif _as[0] == 'submodule':
            if len(_as) != 2:
                raise error.general('invalid git submodule: %s' % (_as))
            log.notice('git: submodule: %s <= %s' % (us[0], _as[1]))
            if _do_download(opts):
                repo.submodule(_as[1])
        elif _as[0] == 'fetch':
            log.notice('git: fetch: %s -> %s' % (us[0], rlp))
            if _do_download(opts):
                repo.fetch()
        elif _as[0] == 'merge':
            log.notice('git: merge: %s' % (us[0]))
            if _do_download(opts):
                repo.merge()
        elif _as[0] == 'pull':
            log.notice('git: pull: %s' % (us[0]))
            if _do_download(opts):
                repo.pull()
        elif _as[0] == 'reset':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            log.notice('git: reset: %s' % (us[0]))
            if _do_download(opts):
                repo.reset(arg)
        elif _as[0] == 'protocol':
            pass
        else:
            raise error.general('invalid git option: %s' % (_as))
    return True

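# Check out a CVS module. The module, src-prefix, tag and date options picked
# out of the URL select what is checked out into the local 'cvs' directory;
# the 'update' and 'reset' options act on an existing checkout.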
def _cvs_downloader(url, local, config, opts):
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    module = None
    tag = None
    date = None
    src_prefix = None
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            module = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            src_prefix = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            tag = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            date = _as[1]
    repo = cvs.repo(local, opts, config.macros, src_prefix)
    if not repo.valid():
        if not path.isdir(local):
            log.notice('Creating source directory: %s' % \
                           (os.path.relpath(path.host(local))))
            if _do_download(opts):
                path.mkdir(local)
            log.notice('cvs: checkout: %s -> %s' % (us[0], rlp))
            if _do_download(opts):
                repo.checkout(':%s' % (us[0][6:]), module, tag, date)
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'update':
            log.notice('cvs: update: %s' % (us[0]))
            if _do_download(opts):
                repo.update()
        elif _as[0] == 'reset':
            log.notice('cvs: reset: %s' % (us[0]))
            if _do_download(opts):
                repo.reset()
    return True

def _file_downloader(url, local, config, opts):
    if not path.exists(local):
        try:
            src = url[7:]
            dst = local
            log.notice('download: copy %s -> %s' % (src, dst))
            path.copy(src, dst)
        except:
            return False
    return True

downloaders = { 'http': _http_downloader,
                'ftp':  _http_downloader,
                'pw':   _http_downloader,
                'git':  _git_downloader,
                'cvs':  _cvs_downloader,
                'file': _file_downloader }

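# get_file() is the entry point used by the rest of the RSB to fetch a source
# or patch. It builds a list of candidate URLs from any user supplied URL
# bases plus, for released versions, the RTEMS release sources area, then
# walks the downloaders table until one handler succeeds. An error is raised
# only when a real download was requested and every URL failed.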
def get_file(url, local, opts, config):
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if _do_download(opts):
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # a release, add the RTEMS release URL to the URL bases.
    #
    url_bases = opts.urls()
    if version.released():
        rtems_release_url = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urllib_parse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urllib_parse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))