source: rtems-source-builder/source-builder/sb/download.py @ 8c19df2

Last change on this file since 8c19df2 was 8c19df2, checked in by Chris Johns <chrisj@…>, on 03/26/14 at 06:13:25

sb: Fix git to clean before updating.

  • Property mode set to 100644
File size: 11.8 KB
#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

#
# This code builds a package given a config file. It only builds to be
# installed, not packaged, unless you run a packager around this.
#

import os
import stat
import sys
import urllib2
import urlparse

import cvs
import error
import git
import log
import path

def _http_parser(source, config, opts):
    #
    # Is the file compressed ?
    #
    esl = source['ext'].split('.')
    if esl[-1:][0] == 'gz':
        source['compressed'] = '%{__gzip} -dc'
    elif esl[-1:][0] == 'bz2':
        source['compressed'] = '%{__bzip2} -dc'
    elif esl[-1:][0] == 'zip':
        source['compressed'] = '%{__zip} -u'
    elif esl[-1:][0] == 'xz':
        source['compressed'] = '%{__xz} -dc'

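# Example (hypothetical file name): for 'gdb-7.7.tar.bz2', parse_url() below
# sets source['ext'] to '.bz2', so esl[-1:][0] is 'bz2' and the decompressor
# macro becomes '%{__bzip2} -dc'; a '.tar.xz' file would select '%{__xz} -dc'.
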
def _patchworks_parser(source, config, opts):
    source['url'] = 'http%s' % (source['path'][2:])

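# Example (hypothetical URL): with source['path'] set by parse_url() to
# 'pw://patchwork.example.org/patch/1234', dropping the leading 'pw' and
# prefixing 'http' rewrites source['url'] to
# 'http://patchwork.example.org/patch/1234'.
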
def _git_parser(source, config, opts):
    #
    # Symlink.
    #
    us = source['url'].split('?')
    source['path'] = path.dirname(us[0])
    source['file'] = path.basename(us[0])
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(us) > 1:
        source['args'] = us[1:]
    source['local'] = \
        path.join(source['local_prefix'], 'git', source['file'])
    source['symlink'] = source['local']

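# Example (hypothetical URL): for 'git://git.example.org/tools.git?checkout=v1.0'
# us[0] is the repository URL, source['file'] is 'tools.git', source['args']
# is ['checkout=v1.0'], and source['local'] and source['symlink'] both point
# at <local_prefix>/git/tools.git.
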
def _cvs_parser(source, config, opts):
    #
    # Symlink.
    #
    if not source['url'].startswith('cvs://'):
        raise error.general('invalid cvs path: %s' % (source['url']))
    us = source['url'].split('?')
    try:
        url = us[0]
        source['file'] = url[url[6:].index(':') + 7:]
        source['cvsroot'] = ':%s:' % (url[6:url[6:].index('/') + 6:])
    except:
        raise error.general('invalid cvs path: %s' % (source['url']))
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            source['module'] = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            source['src_prefix'] = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            source['tag'] = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            source['date'] = _as[1]
    if 'date' in source and 'tag' in source:
        raise error.general('cvs URL cannot have a date and tag: %s' % (source['url']))
    # Done here to ensure an ordered path; the URL can include the options in any order.
    if 'module' in source:
        source['file'] += '_%s' % (source['module'])
    if 'tag' in source:
        source['file'] += '_%s' % (source['tag'])
    if 'date' in source:
        source['file'] += '_%s' % (source['date'])
    for c in '/@#%.-':
        source['file'] = source['file'].replace(c, '_')
    source['local'] = path.join(source['local_prefix'], 'cvs', source['file'])
    if 'src_prefix' in source:
        source['symlink'] = path.join(source['local'], source['src_prefix'])
    else:
        source['symlink'] = source['local']

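# Example (hypothetical URL): for
# 'cvs://:pserver:anoncvs@cvs.example.org/home/cvs?module=libfoo&tag=REL_1'
# the query options set source['module'] and source['tag'], and the cached
# checkout name is the file part of the URL with '_libfoo' and '_REL_1'
# appended and the characters '/@#%.-' mapped to '_', placed under
# <local_prefix>/cvs/.
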
def _file_parser(source, config, opts):
    #
    # Symlink.
    #
    source['symlink'] = source['local']

parsers = { 'http': _http_parser,
            'ftp':  _http_parser,
            'pw':   _patchworks_parser,
            'git':  _git_parser,
            'cvs':  _cvs_parser,
            'file': _file_parser }

def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source

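# Example (hypothetical values): parsing
# 'http://ftp.example.org/pub/gdb-7.7.tar.bz2' with a pathkey such as
# '_sourcedir' (whatever key the caller passes is looked up with
# config.define()) gives source['type'] = 'http', source['file'] =
# 'gdb-7.7.tar.bz2', source['local'] under the first path in that key, and
# source['compressed'] = '%{__bzip2} -dc' via _http_parser().
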
def _http_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    log.notice('download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            _out.write(_in.read())
        except IOError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            msg = 'download: %s: error' % (url)
            log.stderr(msg)
            log.notice(msg)
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' % (path.host(local)))
    return not failed

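# The True/False result matters to get_file() below: True means the file is
# now present locally (or this is a dry run), while False lets get_file()
# try the next candidate URL.
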
def _git_downloader(url, local, config, opts):
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    repo = git.repo(local, opts, config.macros)
    if not repo.valid():
        log.notice('git: clone: %s -> %s' % (us[0], rlp))
        if not opts.dry_run():
            repo.clone(us[0], local)
    else:
        repo.clean(['-f', '-d'])
        repo.reset('--hard')
        repo.checkout('master')
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch' or _as[0] == 'checkout':
            if len(_as) != 2:
                raise error.general('invalid git branch/checkout: %s' % (_as))
            log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.checkout(_as[1])
        elif _as[0] == 'pull':
            log.notice('git: pull: %s' % (us[0]))
            if not opts.dry_run():
                repo.pull()
        elif _as[0] == 'submodule':
            if len(_as) != 2:
                raise error.general('invalid git submodule: %s' % (_as))
            log.notice('git: submodule: %s <= %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.submodule(_as[1])
        elif _as[0] == 'fetch':
            log.notice('git: fetch: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.fetch()
        elif _as[0] == 'reset':
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            log.notice('git: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset(arg)
    return True

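# Example (hypothetical URL): 'git://git.example.org/tools.git?fetch&checkout=v1.0'
# clones the repository on the first run, otherwise cleans and resets the
# existing clone, then handles the query arguments in order: the bare
# 'fetch' runs repo.fetch() and 'checkout=v1.0' checks out that reference.
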
def _cvs_downloader(url, local, config, opts):
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    module = None
    tag = None
    date = None
    src_prefix = None
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            module = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            src_prefix = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            tag = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            date = _as[1]
    repo = cvs.repo(local, opts, config.macros, src_prefix)
    if not repo.valid():
        if not path.isdir(local):
            log.notice('Creating source directory: %s' % \
                           (os.path.relpath(path.host(local))))
            if not opts.dry_run():
                path.mkdir(local)
            log.notice('cvs: checkout: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.checkout(':%s' % (us[0][6:]), module, tag, date)
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'update':
            log.notice('cvs: update: %s' % (us[0]))
            if not opts.dry_run():
                repo.update()
        elif _as[0] == 'reset':
            log.notice('cvs: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset()
    return True

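# Example (hypothetical URL):
# 'cvs://:pserver:anoncvs@cvs.example.org/home/cvs?module=libfoo&update'
# checks the module out into the local cvs directory if it is not already
# there, and the 'update' option then runs repo.update() to bring the
# checkout up to date.
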
def _file_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    return path.isdir(url)

downloaders = { 'http': _http_downloader,
                'ftp':  _http_downloader,
                'pw':   _http_downloader,
                'git':  _git_downloader,
                'cvs':  _cvs_downloader,
                'file': _file_downloader }

def get_file(url, local, opts, config):
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if not opts.dry_run():
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind('/')
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1:]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
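
# Example (hypothetical caller): with source = parse_url(url, '_sourcedir',
# config, opts), a builder would call
#   get_file(source['url'], source['local'], opts, config)
# to fetch the file into the source directory, trying any URL bases supplied
# on the command line (opts.urls()) first and then each space separated URL
# from the config in turn.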