summaryrefslogtreecommitdiffstats
path: root/source-builder/sb/download.py
diff options
context:
space:
mode:
authorChris Johns <chrisj@rtems.org>2014-07-29 16:35:43 +1000
committerChris Johns <chrisj@rtems.org>2014-07-29 16:35:43 +1000
commita083b52921a86a3bc49e2cc1d7d102612694557c (patch)
treeb04b1fe0d3f5c037fac9f61f57f40f96712085c8 /source-builder/sb/download.py
parentsb: Add visual feedback for http type downloads. (diff)
downloadrtems-source-builder-a083b52921a86a3bc49e2cc1d7d102612694557c.tar.bz2
Add checksum support for downloaded files.
File download by http, ftp, pw support checksum. The %hash directive provides a means of setting a hash used to checksum the file. Files on disk or just downloaded are checked.
Diffstat (limited to 'source-builder/sb/download.py')
-rw-r--r--source-builder/sb/download.py45
1 file changed, 45 insertions, 0 deletions
diff --git a/source-builder/sb/download.py b/source-builder/sb/download.py
index d774879..51747b1 100644
--- a/source-builder/sb/download.py
+++ b/source-builder/sb/download.py
@@ -22,6 +22,7 @@
# installed not to be package unless you run a packager around this.
#
+import hashlib
import os
import stat
import sys
@@ -50,6 +51,47 @@ def _humanize_bytes(bytes, precision = 1):
break
return '%.*f%s' % (precision, float(bytes) / factor, suffix)
def _hash_check(file_, absfile, macros, remove = True):
    """Check a file on disk against the checksum recorded for it in the
    'hashes' macro map (populated by the %hash directive).

    file_   : base name of the file; lower-cased to key the hashes map.
    absfile : path of the file on disk to digest.
    macros  : macro store providing map_keys() and get() for the 'hashes' map.
    remove  : when True (the default) delete the file if the check fails.

    Returns True when the checksum matches, and also when no hash is
    recorded for the file (that case only warns). Returns False on a read
    error or a checksum mismatch. Raises error.internal for a malformed
    hash entry and error.general for an unknown hash algorithm.
    """
    failed = False
    if file_.lower() in macros.map_keys('hashes'):
        # A hash entry is '<algorithm> <hexdigest>'.
        m1, m2, hash_ = macros.get(file_.lower(), globals = False, maps = 'hashes')
        hash_ = hash_.split()
        if len(hash_) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        if hash_[0] not in hashlib.algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash_[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash_[0])
            _in = open(absfile, 'rb')
            # Digest in fixed sized chunks so large source archives do not
            # need to be held in memory in one piece.
            block = _in.read(16 * 1024)
            while block:
                hasher.update(block)
                block = _in.read(16 * 1024)
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected error; report it, close the file and re-raise.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        if not failed:
            # Only compare digests when the whole file was read; after a
            # read error the digest is meaningless and a second 'checksum
            # error' warning would be misleading.
            log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash_[1]))
            if hasher.hexdigest() != hash_[1]:
                log.warning('checksum error: %s' % (file_))
                failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                os.remove(path.host(absfile))
        if hasher is not None:
            del hasher
    else:
        log.warning('%s: no hash found' % (file_))
    return not failed
+
def _http_parser(source, config, opts):
#
# Is the file compressed ?
@@ -173,6 +215,7 @@ def parse_url(url, pathkey, config, opts):
if path.exists(local):
source['local_prefix'] = path.abspath(p)
source['local'] = local
+ _hash_check(source['file'], local, config.macros)
break
source['script'] = ''
for p in parsers:
@@ -257,6 +300,8 @@ def _http_downloader(url, local, config, opts):
if not failed:
if not path.isfile(local):
raise error.general('source is not a file: %s' % (path.host(local)))
+ if not _hash_check(path.basename(local), local, config.macros, False):
+ raise error.general('checksum failure file: %s' % (dst))
return not failed
def _git_downloader(url, local, config, opts):