#
# RTEMS Tools Project (http://www.rtems.org/)
# Copyright 2010-2013 Chris Johns (chrisj@rtems.org)
# All rights reserved.
#
# This file is part of the RTEMS Tools package in 'rtems-tools'.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

#
# This code builds a package given a config file. It only builds to be
# installed, not to be packaged, unless you run a packager around this.
#
| 25 | import os |
---|
| 26 | import stat |
---|
| 27 | import sys |
---|
| 28 | import urllib2 |
---|
| 29 | import urlparse |
---|
| 30 | |
---|
| 31 | import error |
---|
| 32 | import git |
---|
| 33 | import log |
---|
| 34 | import path |
---|
| 35 | |
---|
def _notice(opts, text):
    # Echo to the console unless quiet mode is on or the log is already
    # attached to stdout, then always record and flush the log.
    console_off = opts.quiet() or log.default.has_stdout()
    if not console_off:
        print(text)
    log.output(text)
    log.flush()

def _output(opts, text):
    # Log-only output; suppressed entirely in quiet mode.
    if opts.quiet():
        return
    log.output(text)

| 46 | def _http_parser(source, config, opts): |
---|
| 47 | # |
---|
| 48 | # Is the file compressed ? |
---|
| 49 | # |
---|
| 50 | esl = source['ext'].split('.') |
---|
| 51 | if esl[-1:][0] == 'gz': |
---|
| 52 | source['compressed'] = '%{__gzip} -dc' |
---|
| 53 | elif esl[-1:][0] == 'bz2': |
---|
| 54 | source['compressed'] = '%{__bzip2} -dc' |
---|
| 55 | elif esl[-1:][0] == 'bz2': |
---|
| 56 | source['compressed'] = '%{__zip} -u' |
---|
| 57 | elif esl[-1:][0] == 'xz': |
---|
| 58 | source['compressed'] = '%{__xz} -dc' |
---|
| 59 | |
---|
def _git_parser(source, config, opts):
    # A git URL may carry '?'-separated request arguments after the repo
    # location; split them off and derive the name parts from the bare URL.
    parts = source['url'].split('?')
    repo_url = parts[0]
    source['path'] = path.dirname(repo_url)
    source['file'] = path.basename(repo_url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if len(parts) > 1:
        source['args'] = parts[1:]
    # Clones are kept under <local_prefix>/git and symlinked into place.
    source['local'] = path.join(source['local_prefix'],
                                config.expand('git'),
                                source['file'])
    source['symlink'] = source['local']


| 75 | def _file_parser(source, config, opts): |
---|
| 76 | # |
---|
| 77 | # Symlink. |
---|
| 78 | # |
---|
| 79 | source['symlink'] = source['local'] |
---|
| 80 | |
---|
# URL scheme prefix -> parser that completes the source record.
parsers = dict(http=_http_parser,
               ftp=_http_parser,
               git=_git_parser,
               file=_file_parser)

def parse_url(url, pathkey, config, opts):
    """Split a source URL into the fields the build code needs.

    Returns a dict with the url, path, file, name and extension parts,
    the local cache location (searched along the pathkey paths), and the
    scheme-specific fields filled in by the matching parser.
    """
    source = { 'url': url,
               'path': path.dirname(url),
               'file': path.basename(url) }
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first. The first
    # path entry is the default download location; an existing copy in
    # any later path wins.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        prefix = path.abspath(p)
        candidate = path.join(prefix, source['file'])
        if source['local'] is None or path.exists(candidate):
            source['local_prefix'] = prefix
            source['local'] = candidate
        if path.exists(candidate):
            break
    source['script'] = ''
    for scheme, parser in parsers.items():
        if url.startswith(scheme):
            source['type'] = scheme
            if parser(source, config, opts):
                break
    return source

def _http_downloader(url, local, config, opts):
    """Download `url` to the `local` path via http/ftp.

    Returns True when the file is present (already cached or fetched
    now) and False when the transfer failed; a failed transfer removes
    any partial file. Raises error.general when the downloaded result
    is not a plain file. Dry-run mode reports but does not fetch.
    """
    if path.exists(local):
        return True
    #
    # Hack for GitHub. The api.github.com URLs need the tarball path
    # appended to fetch an archive of the tagged version.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    _notice(opts, 'download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            _out.write(_in.read())
        except (IOError, ValueError) as err:
            # The two handlers were previously duplicated; a bad URL or a
            # transfer error is reported and any partial file is removed.
            msg = 'download: %s: error: %s' % (url, str(err))
            _notice(opts, msg)
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            # Unexpected error; report and re-raise after cleanup.
            msg = 'download: %s: error' % (url)
            sys.stderr.write(msg + '\n')
            raise
        finally:
            # Bug fix: the response handle was previously only del'ed,
            # never closed; close both handles on every path.
            if _out is not None:
                _out.close()
            if _in is not None:
                _in.close()
    if not failed:
        if not path.isfile(local):
            raise error.general('source is not a file: %s' % (path.host(local)))
    return not failed

def _git_downloader(url, local, config, opts):
    # The URL is '<repo>?<op>[=<value>]?...'; clone the repo if it is not
    # already present then apply each requested operation in order.
    rlp = os.path.relpath(path.host(local))
    parts = url.split('?')
    repo_url = parts[0]
    repo = git.repo(local, opts, config.macros)
    if not repo.valid():
        _notice(opts, 'git: clone: %s -> %s' % (repo_url, rlp))
        if not opts.dry_run():
            repo.clone(repo_url, local)
    for op in parts[1:]:
        fields = op.split('=')
        action = fields[0]
        if action == 'branch':
            _notice(opts, 'git: checkout: %s => %s' % (repo_url, fields[1]))
            if not opts.dry_run():
                repo.checkout(fields[1])
        elif action == 'pull':
            _notice(opts, 'git: pull: %s' % (repo_url))
            if not opts.dry_run():
                repo.pull()
        elif action == 'fetch':
            _notice(opts, 'git: fetch: %s -> %s' % (repo_url, rlp))
            if not opts.dry_run():
                repo.fetch()
        elif action == 'reset':
            arg = []
            if len(fields) > 1:
                arg = ['--%s' % (fields[1])]
            _notice(opts, 'git: reset: %s' % (repo_url))
            if not opts.dry_run():
                repo.reset(arg)
    return True

def _file_downloader(url, local, config, opts):
    # Nothing to fetch: the source is either already cached locally or
    # the URL must name a directory.
    return True if path.exists(local) else path.isdir(url)

# URL scheme prefix -> handler that fetches that kind of source.
downloaders = dict(http=_http_downloader,
                   ftp=_http_downloader,
                   git=_git_downloader,
                   file=_file_downloader)

def get_file(url, local, opts, config):
    """Fetch a source or patch file to its local cache location.

    Creates the source directory if needed, honours download-disabled
    mode, tries any command-line URL bases before the configured URL,
    and raises error.general when every path fails (unless dry-run).
    """
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        _notice(opts,
                'Creating source directory: %s' % (os.path.relpath(path.host(path.dirname(local)))))
        _output(opts, 'making dir: %s' % (path.host(path.dirname(local))))
        if not opts.dry_run():
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line; each base is
    # tried with the file part of the configured URL before the URL itself.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if not base.endswith('/'):
                base += '/'
            url_path = urlparse.urlsplit(url)[2]
            url_file = url_path.split('/')[-1]
            urls.append(urlparse.urljoin(base, url_file))
    urls.append(url)
    if opts.trace():
        print('_url: %s -> %s' % (','.join(urls), local))
    for dl_url in urls:
        for scheme in downloaders:
            if dl_url.startswith(scheme):
                if downloaders[scheme](dl_url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general('downloading %s: all paths have failed, giving up' % (url))