source: rtems-source-builder/source-builder/sb/download.py @ 4837350

Last change on this file since 4837350 was fa87409, checked in by Chris Johns <chrisj@…>, on 07/25/14 at 11:43:50

sb: Fix error handling.

#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

#
# This code builds a package given a config file. It only builds to be
# installed, not packaged, unless you run a packager around this.
#

import os
import stat
import sys
import urllib2
import urlparse

import cvs
import error
import git
import log
import path
def _http_parser(source, config, opts):
    #
    # Is the file compressed ?
    #
    esl = source['ext'].split('.')
    if esl[-1:][0] == 'gz':
        source['compressed-type'] = 'gzip'
        source['compressed'] = '%{__gzip} -dc'
    elif esl[-1:][0] == 'bz2':
        source['compressed-type'] = 'bzip2'
        source['compressed'] = '%{__bzip2} -dc'
    elif esl[-1:][0] == 'zip':
        source['compressed-type'] = 'zip'
        source['compressed'] = '%{__zip} -u'
    elif esl[-1:][0] == 'xz':
        source['compressed-type'] = 'xz'
        source['compressed'] = '%{__xz} -dc'

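#
# For illustration (an assumed example, not taken from a real config): a
# source ending in '.tar.xz' has an 'ext' of '.tar.xz', so esl[-1:][0] is
# 'xz' and the parser records:
#
#   source['compressed-type'] = 'xz'
#   source['compressed']      = '%{__xz} -dc'
#
# The '%{__xz}' style macros are expected to be expanded later by the
# configuration when the decompress command is actually run.
#
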
def _patchworks_parser(source, config, opts):
    source['url'] = 'http%s' % (source['path'][2:])

def _git_parser(source, config, opts):
    #
    # Symlink.
    #
    us = source['url'].split('?')
    source['path'] = path.dirname(us[0])
    source['file'] = path.basename(us[0])
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(us) > 1:
        source['args'] = us[1:]
    source['local'] = \
        path.join(source['local_prefix'], 'git', source['file'])
    source['symlink'] = source['local']

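#
# For illustration, an assumed git URL of the form:
#
#   git://gcc.gnu.org/git/gcc.git?branch=gcc-4_8-branch
#
# splits on '?' so 'path' is 'git://gcc.gnu.org/git', 'file' is 'gcc.git',
# 'name' is 'gcc', 'args' is ['branch=gcc-4_8-branch'], and the clone is
# kept under '<local_prefix>/git/gcc.git' with 'symlink' pointing at it.
#
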
def _cvs_parser(source, config, opts):
    #
    # Symlink.
    #
    if not source['url'].startswith('cvs://'):
        raise error.general('invalid cvs path: %s' % (source['url']))
    us = source['url'].split('?')
    try:
        url = us[0]
        source['file'] = url[url[6:].index(':') + 7:]
        source['cvsroot'] = ':%s:' % (url[6:url[6:].index('/') + 6:])
    except:
        raise error.general('invalid cvs path: %s' % (source['url']))
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            source['module'] = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            source['src_prefix'] = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            source['tag'] = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            source['date'] = _as[1]
    if 'date' in source and 'tag' in source:
        raise error.general('cvs URL cannot have a date and tag: %s' % (source['url']))
    # Do this here to ensure an ordered path; the URL can include options in any order
    if 'module' in source:
        source['file'] += '_%s' % (source['module'])
    if 'tag' in source:
        source['file'] += '_%s' % (source['tag'])
    if 'date' in source:
        source['file'] += '_%s' % (source['date'])
    for c in '/@#%.-':
        source['file'] = source['file'].replace(c, '_')
    source['local'] = path.join(source['local_prefix'], 'cvs', source['file'])
    if 'src_prefix' in source:
        source['symlink'] = path.join(source['local'], source['src_prefix'])
    else:
        source['symlink'] = source['local']

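#
# For illustration, an assumed cvs URL of the form:
#
#   cvs://pserver:anoncvs@sourceware.org/cvs/src?module=newlib?tag=newlib-2_1_0
#
# gives a 'cvsroot' of ':pserver:anoncvs@sourceware.org:', a 'module' of
# 'newlib' and a 'tag' of 'newlib-2_1_0'. The module and tag are appended to
# 'file', the characters '/@#%.-' become '_', and the checkout is kept under
# '<local_prefix>/cvs/<file>'.
#
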
def _file_parser(source, config, opts):
    #
    # Symlink.
    #
    source['symlink'] = source['local']

parsers = { 'http': _http_parser,
            'ftp':  _http_parser,
            'pw':   _patchworks_parser,
            'git':  _git_parser,
            'cvs':  _cvs_parser,
            'file': _file_parser }

def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source

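#
# For illustration, parsing an assumed URL such as:
#
#   http://ftp.gnu.org/gnu/gdb/gdb-7.7.tar.bz2
#
# sets 'path' to 'http://ftp.gnu.org/gnu/gdb', 'file' to 'gdb-7.7.tar.bz2',
# 'name' to 'gdb-7.7', 'ext' to '.tar.bz2', 'type' to 'http', and 'local' to
# the file under the first existing (or first listed) directory named by
# 'pathkey'.
#
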
def _http_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    log.notice('download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            _out.write(_in.read())
        except IOError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            msg = 'download: %s: error' % (url)
            log.stderr(msg)
            log.notice(msg)
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' % (path.host(local)))
    return not failed

def _git_downloader(url, local, config, opts):
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    repo = git.repo(local, opts, config.macros)
    if not repo.valid():
        log.notice('git: clone: %s -> %s' % (us[0], rlp))
        if not opts.dry_run():
            repo.clone(us[0], local)
    else:
        repo.clean(['-f', '-d'])
        repo.reset('--hard')
        repo.checkout('master')
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch' or _as[0] == 'checkout':
            if len(_as) != 2:
                raise error.general('invalid git branch/checkout: %s' % (_as))
            log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.checkout(_as[1])
        elif _as[0] == 'pull':
            log.notice('git: pull: %s' % (us[0]))
            if not opts.dry_run():
                repo.pull()
        elif _as[0] == 'submodule':
            if len(_as) != 2:
                raise error.general('invalid git submodule: %s' % (_as))
            log.notice('git: submodule: %s <= %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.submodule(_as[1])
        elif _as[0] == 'fetch':
            log.notice('git: fetch: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.fetch()
        elif _as[0] == 'reset':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            log.notice('git: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset(arg)
    return True

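#
# The query arguments handled above, shown with an assumed example URL:
#
#   git://git.rtems.org/rtems-tools.git?fetch?reset=hard?branch=master
#
# 'branch'/'checkout' and 'submodule' need a value, 'pull' and 'fetch' take
# none, and 'reset' may carry a mode such as 'hard' which is passed to the
# repository as '--hard'. The arguments are applied in the order given.
#
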
def _cvs_downloader(url, local, config, opts):
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    module = None
    tag = None
    date = None
    src_prefix = None
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            module = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            src_prefix = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            tag = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            date = _as[1]
    repo = cvs.repo(local, opts, config.macros, src_prefix)
    if not repo.valid():
        if not path.isdir(local):
            log.notice('Creating source directory: %s' % \
                           (os.path.relpath(path.host(local))))
            if not opts.dry_run():
                path.mkdir(local)
            log.notice('cvs: checkout: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.checkout(':%s' % (us[0][6:]), module, tag, date)
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'update':
            log.notice('cvs: update: %s' % (us[0]))
            if not opts.dry_run():
                repo.update()
        elif _as[0] == 'reset':
            log.notice('cvs: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset()
    return True


def _file_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    return path.isdir(url)

downloaders = { 'http': _http_downloader,
                'ftp':  _http_downloader,
                'pw':   _http_downloader,
                'git':  _git_downloader,
                'cvs':  _cvs_downloader,
                'file': _file_downloader }

def get_file(url, local, opts, config):
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if not opts.dry_run():
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind('/')
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
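
#
# For illustration, an assumed caller would pair this with parse_url(), for
# example (the '_sourcedir' path key is an assumption here):
#
#   src = parse_url(url, '_sourcedir', config, opts)
#   get_file(src['url'], src['local'], opts, config)
#
# URL bases supplied via opts.urls() are tried first, then the URL(s) from
# the config; if every candidate fails and this is not a dry run an
# error.general is raised.
#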