source: rtems-source-builder/source-builder/sb/download.py @ c49e500

Last change on this file since c49e500 was c49e500, checked in by Chris Johns <chrisj@…>, on 07/29/14 at 00:04:55

sb: Add visual feedback for http type downloads.

  • Property mode set to 100644
File size: 13.9 KB
#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

#
# This code fetches the source and patch files a config file refers to. A
# source can be an http/ftp/pw URL, a git or cvs repository, or a local
# file, and is placed in the local source directory for the build to use.
#

import os
import stat
import sys
import urllib2
import urlparse

import cvs
import error
import git
import log
import path

def _humanize_bytes(bytes, precision = 1):
    abbrevs = (
        (1 << 50L, 'PB'),
        (1 << 40L, 'TB'),
        (1 << 30L, 'GB'),
        (1 << 20L, 'MB'),
        (1 << 10L, 'kB'),
        (1, ' bytes')
    )
    if bytes == 1:
        return '1 byte'
    for factor, suffix in abbrevs:
        if bytes >= factor:
            break
    return '%.*f%s' % (precision, float(bytes) / factor, suffix)

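#
# Illustrative examples (not part of the original file) of the values
# _humanize_bytes() produces with the default precision of 1:
#
#   _humanize_bytes(1)                 -> '1 byte'
#   _humanize_bytes(1536)              -> '1.5kB'
#   _humanize_bytes(10 * 1024 * 1024)  -> '10.0MB'
#
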
def _http_parser(source, config, opts):
    #
    # Is the file compressed ?
    #
    esl = source['ext'].split('.')
    if esl[-1:][0] == 'gz':
        source['compressed-type'] = 'gzip'
        source['compressed'] = '%{__gzip} -dc'
    elif esl[-1:][0] == 'bz2':
        source['compressed-type'] = 'bzip2'
        source['compressed'] = '%{__bzip2} -dc'
    elif esl[-1:][0] == 'zip':
        source['compressed-type'] = 'zip'
        source['compressed'] = '%{__zip} -u'
    elif esl[-1:][0] == 'xz':
        source['compressed-type'] = 'xz'
        source['compressed'] = '%{__xz} -dc'

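#
# Illustrative example (not part of the original file): for a source whose
# 'ext' is '.tar.gz', _http_parser() leaves the source dictionary with
#
#   source['compressed-type'] = 'gzip'
#   source['compressed']      = '%{__gzip} -dc'
#
# which later stages can expand when unpacking the downloaded file.
#
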
def _patchworks_parser(source, config, opts):
    source['url'] = 'http%s' % (source['path'][2:])

def _git_parser(source, config, opts):
    #
    # Symlink.
    #
    us = source['url'].split('?')
    source['path'] = path.dirname(us[0])
    source['file'] = path.basename(us[0])
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(us) > 1:
        source['args'] = us[1:]
    source['local'] = \
        path.join(source['local_prefix'], 'git', source['file'])
    source['symlink'] = source['local']

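#
# Illustrative example (not part of the original file): git URLs use '?' to
# separate the repository from its options, for example
#
#   git://git.rtems.org/rtems-tools.git?branch=master
#
# where everything before the first '?' names the repository and each
# following part becomes an argument handled by _git_downloader().
#
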
def _cvs_parser(source, config, opts):
    #
    # Symlink.
    #
    if not source['url'].startswith('cvs://'):
        raise error.general('invalid cvs path: %s' % (source['url']))
    us = source['url'].split('?')
    try:
        url = us[0]
        source['file'] = url[url[6:].index(':') + 7:]
        source['cvsroot'] = ':%s:' % (url[6:url[6:].index('/') + 6:])
    except:
        raise error.general('invalid cvs path: %s' % (source['url']))
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            source['module'] = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            source['src_prefix'] = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            source['tag'] = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            source['date'] = _as[1]
    if 'date' in source and 'tag' in source:
        raise error.general('cvs URL cannot have a date and tag: %s' % (source['url']))
    # Do this here to ensure an ordered path; the URL can include the options
    # in any order.
    if 'module' in source:
        source['file'] += '_%s' % (source['module'])
    if 'tag' in source:
        source['file'] += '_%s' % (source['tag'])
    if 'date' in source:
        source['file'] += '_%s' % (source['date'])
    for c in '/@#%.-':
        source['file'] = source['file'].replace(c, '_')
    source['local'] = path.join(source['local_prefix'], 'cvs', source['file'])
    if 'src_prefix' in source:
        source['symlink'] = path.join(source['local'], source['src_prefix'])
    else:
        source['symlink'] = source['local']

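#
# Illustrative example (not part of the original file): a cvs URL is expected
# to take the form
#
#   cvs://:pserver:anoncvs@example.org/cvs?module=mymodule?tag=mytag
#
# where each '?' option is one of module, src-prefix, tag or date, and the
# option values are folded into the local file name built above.
#
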
def _file_parser(source, config, opts):
    #
    # Symlink.
    #
    source['symlink'] = source['local']

parsers = { 'http': _http_parser,
            'ftp':  _http_parser,
            'pw':   _patchworks_parser,
            'git':  _git_parser,
            'cvs':  _cvs_parser,
            'file': _file_parser }

def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source

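#
# Illustrative example (not part of the original file): parsing
#
#   http://ftp.gnu.org/gnu/gdb/gdb-7.9.tar.gz
#
# yields source['file'] = 'gdb-7.9.tar.gz', source['name'] = 'gdb-7.9' and
# source['ext'] = '.tar.gz', with source['local'] placed under one of the
# directories named by the pathkey macro (the first one listed, or the first
# one where the file already exists).
#
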
def _http_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    dst = os.path.relpath(path.host(local))
    log.notice('download: %s -> %s' % (url, dst))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        _length = None
        _have = 0
        _chunk_size = 256 * 1024
        _chunk = None
        _last_percent = 200.0
        _last_msg = ''
        _wipe_output = False
        try:
            try:
                _in = urllib2.urlopen(url)
                _out = open(path.host(local), 'wb')
                try:
                    _length = int(_in.info().getheader('Content-Length').strip())
                except:
                    pass
                while True:
                    _msg = '\rdownloading: %s - %s ' % (dst, _humanize_bytes(_have))
                    if _length:
                        _percent = round((float(_have) / _length) * 100, 2)
                        if _percent != _last_percent:
                            _msg += 'of %s (%0.0f%%) ' % (_humanize_bytes(_length), _percent)
                    if _msg != _last_msg:
                        extras = (len(_last_msg) - len(_msg))
                        log.stdout_raw('%s%s' % (_msg, ' ' * extras + '\b' * extras))
                        _last_msg = _msg
                    _chunk = _in.read(_chunk_size)
                    if not _chunk:
                        break
                    _out.write(_chunk)
                    _have += len(_chunk)
                if _wipe_output:
                    log.stdout_raw('\r%s\r' % (' ' * len(_last_msg)))
                else:
                    log.stdout_raw('\n')
            except:
                log.stdout_raw('\n')
                raise
        except IOError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            msg = 'download: %s: error' % (url)
            log.stderr(msg)
            log.notice(msg)
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' % (path.host(local)))
    return not failed

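#
# Illustrative note (not part of the original file): with a Content-Length of
# 1MB and 512kB received so far, the progress line written by the loop above
# looks something like
#
#   downloading: sources/gdb-7.9.tar.gz - 512.0kB of 1.0MB (50%)
#
# and is redrawn in place via the leading carriage return in _msg.
#
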
def _git_downloader(url, local, config, opts):
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    repo = git.repo(local, opts, config.macros)
    if not repo.valid():
        log.notice('git: clone: %s -> %s' % (us[0], rlp))
        if not opts.dry_run():
            repo.clone(us[0], local)
    else:
        repo.clean(['-f', '-d'])
        repo.reset('--hard')
        repo.checkout('master')
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch' or _as[0] == 'checkout':
            if len(_as) != 2:
                raise error.general('invalid git branch/checkout: %s' % (_as))
            log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.checkout(_as[1])
        elif _as[0] == 'pull':
            log.notice('git: pull: %s' % (us[0]))
            if not opts.dry_run():
                repo.pull()
        elif _as[0] == 'submodule':
            if len(_as) != 2:
                raise error.general('invalid git submodule: %s' % (_as))
            log.notice('git: submodule: %s <= %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.submodule(_as[1])
        elif _as[0] == 'fetch':
            log.notice('git: fetch: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.fetch()
        elif _as[0] == 'reset':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            log.notice('git: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset(arg)
    return True

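#
# Illustrative example (not part of the original file): for an existing clone
# a URL ending in '?fetch?reset=hard' makes the option loop above call
# repo.fetch() followed by repo.reset(['--hard']), while '?branch=4.11'
# results in repo.checkout('4.11').
#
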
def _cvs_downloader(url, local, config, opts):
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    module = None
    tag = None
    date = None
    src_prefix = None
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            module = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            src_prefix = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            tag = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            date = _as[1]
    repo = cvs.repo(local, opts, config.macros, src_prefix)
    if not repo.valid():
        if not path.isdir(local):
            log.notice('Creating source directory: %s' % \
                           (os.path.relpath(path.host(local))))
            if not opts.dry_run():
                path.mkdir(local)
            log.notice('cvs: checkout: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.checkout(':%s' % (us[0][6:]), module, tag, date)
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'update':
            log.notice('cvs: update: %s' % (us[0]))
            if not opts.dry_run():
                repo.update()
        elif _as[0] == 'reset':
            log.notice('cvs: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset()
    return True

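#
# Illustrative example (not part of the original file): appending '?update'
# to a cvs URL runs repo.update() on the checkout, and '?reset' runs
# repo.reset().
#
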
def _file_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    return path.isdir(url)

downloaders = { 'http': _http_downloader,
                'ftp':  _http_downloader,
                'pw':   _http_downloader,
                'git':  _git_downloader,
                'cvs':  _cvs_downloader,
                'file': _file_downloader }

def get_file(url, local, opts, config):
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if not opts.dry_run():
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind('/')
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
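
#
# A minimal usage sketch (assumed caller, not part of this file): a config
# file handler would typically resolve a %source or %patch line with
# something like
#
#   source = parse_url(url, '_sourcedir', config, opts)
#   get_file(source['url'], source['local'], opts, config)
#
# where '_sourcedir' is assumed to be the macro naming the local source
# directories to search.
#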