fixed the gpg thing. WHEW. what a PITA.
also fleshed out some logging.
@@ -252,9 +252,10 @@ class _Sizer(object):
         conversion = None
         base_factors = []
         if suffix not in self.valid_bw:
-            _logger.error('Passed an invalid suffix')
+            _logger.error('Suffix {0} is invalid; must be one of {1}'.format(suffix, ','.join(self.valid_bw)))
             raise ValueError('suffix is not a valid unit notation for this conversion')
         if target and target not in self.valid_bw:
+            _logger.error('Target {0} is invalid; must be one of {1}'.format(target, ','.join(self.valid_bw)))
             raise ValueError('target is not a valid unit notation for this conversion')
         for (_unit_type, _base) in (('decimal', 10), ('binary', 2)):
             if target and base_factors:
@@ -282,8 +283,10 @@ class _Sizer(object):
         conversion = None
         base_factors = []
         if suffix not in self.valid_storage:
+            _logger.error('Suffix {0} is invalid; must be one of {1}'.format(suffix, ','.join(self.valid_storage)))
             raise ValueError('suffix is not a valid unit notation for this conversion')
         if target and target not in self.valid_storage:
+            _logger.error('Target {0} is invalid; must be one of {1}'.format(target, ','.join(self.valid_storage)))
             raise ValueError('target is not a valid unit notation for this conversion')
         for (_unit_type, _base) in (('decimal', 10), ('binary', 2)):
             if target and base_factors:
@@ -334,5 +337,6 @@ def convertSizeUnit(pos):
         _size = int(pos.group('size'))
         amt_type = pos.group('pct_unit_or_sct').strip()
     else:
-        raise ValueError('Invalid size specified: {0}'.format(orig_pos))
+        _logger.error('Size {0} is invalid; did not match {1}'.format(orig_pos, _pos_re.pattern))
+        raise ValueError('Invalid size specified')
     return((from_beginning, _size, amt_type))
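The pattern in all three hunks above is the same one this commit applies throughout: log a detailed, operator-facing error (including the offending value and the valid choices), then raise a terse exception. A minimal sketch of the idiom, with a hypothetical helper name:

import logging

_logger = logging.getLogger(__name__)


def _validate_suffix(suffix, valid):
    # Hypothetical helper; mirrors the log-then-raise idiom used in this commit.
    if suffix not in valid:
        _logger.error('Suffix {0} is invalid; must be one of {1}'.format(suffix, ','.join(valid)))
        raise ValueError('suffix is not a valid unit notation for this conversion')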
@@ -1,5 +1,6 @@
 import copy
 import io
+import logging
 import os
 import shutil
 import tempfile
@@ -8,21 +9,37 @@ import gpg
 import gpg.errors


+_logger = logging.getLogger(__name__)
+
+
 class KeyEditor(object):
     def __init__(self):
         self.trusted = False
+        _logger.info('Key editor instantiated.')

     def truster(self, kw, arg, *args, **kwargs):
+        _logger.debug('Key trust editor invoked:')
+        _logger.debug('Command: {0}'.format(kw))
+        _logger.debug('Argument: {0}'.format(arg))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if kw == 'GET_LINE':
             if arg == 'keyedit.prompt':
                 if not self.trusted:
+                    _logger.debug('Returning: "trust"')
                     return('trust')
                 else:
+                    _logger.debug('Returning: "save"')
                     return('save')
             elif arg == 'edit_ownertrust.value' and not self.trusted:
                 self.trusted = True
+                _logger.debug('Status changed to trusted')
+                _logger.debug('Returning: "4"')
                 return('4') # "Full"
             else:
+                _logger.debug('Returning: "save"')
                 return('save')
         return(None)
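For context, a hedged sketch of how this editor is driven (importKeys() below does exactly this): gpg.Context.interact() calls truster() once per key-edit prompt, and the returned strings answer gpg interactively. The homedir and fingerprint here are hypothetical.

import gpg

ctx = gpg.Context(home_dir = '/tmp/example.gpg')  # hypothetical homedir
key = ctx.get_key('A4E8D9...')                    # hypothetical fingerprint
editor = KeyEditor()
ctx.interact(key, editor.truster)  # answers: "trust", then "4" (Full), then "save"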
@@ -34,29 +51,53 @@ class GPG(object):
         self.temporary = None
         self.ctx = None
         self._imported_keys = []
+        _logger.debug('Homedir: {0}'.format(self.homedir))
+        _logger.debug('Primary key: {0}'.format(self.primary_key))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
+        _logger.info('Instantiated GPG class.')
         self._initContext()

     def _initContext(self):
         if not self.homedir:
-            self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.')
+            self.homedir = tempfile.mkdtemp(prefix = '.aif.', suffix = '.gpg')
             self.temporary = True
+            _logger.debug('Set as temporary homedir.')
         self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
+        _logger.debug('Homedir finalized: {0}'.format(self.homedir))
         if not os.path.isdir(self.homedir):
             os.makedirs(self.homedir, exist_ok = True)
             os.chmod(self.homedir, 0o0700)
+            _logger.info('Created {0}'.format(self.homedir))
         self.ctx = gpg.Context(home_dir = self.homedir)
         if self.temporary:
-            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True).fpr
+            self.primary_key = self.createKey('AIF-NG File Verification Key',
+                                              sign = True,
+                                              force = True,
+                                              certify = True).fpr
         self.primary_key = self.findKeyByID(self.primary_key, source = 'secret')
+        if self.primary_key:
+            _logger.debug('Found primary key in secret keyring: {0}'.format(self.primary_key.fpr))
+        else:
+            _logger.error('Could not find primary key in secret keyring: {0}'.format(self.primary_key))
+            raise RuntimeError('Primary key not found in secret keyring')
         self.ctx.signers = [self.primary_key]
+        if self.ctx.signers:
+            _logger.debug('Signers set to: {0}'.format(','.join([k.fpr for k in self.ctx.signers])))
+        else:
+            raise _logger.error('Could not assign signing keys; signing set empty')
         return(None)

     def clean(self):
         # This is mostly just to cleanup the stuff we did before.
+        _logger.info('Cleaning GPG homedir.')
+        self.primary_key = self.primary_key.fpr
         if self.temporary:
             self.primary_key = None
             shutil.rmtree(self.homedir)
+            _logger.info('Deleted temporary GPG homedir: {0}'.format(self.homedir))
         self.ctx = None
         return(None)
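A hedged usage sketch of the lifecycle above (assuming the constructor's homedir argument is optional, which the temporary-homedir branch implies): with no homedir, _initContext() builds a throwaway keyring plus an "AIF-NG File Verification Key" signing key, and clean() tears it back down.

g = GPG()                 # temporary homedir under a .aif.*.gpg tempdir
print(g.primary_key.fpr)  # fingerprint of the generated signing key
g.clean()                 # deletes the temporary homedir entirely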
@@ -72,10 +113,17 @@ class GPG(object):
                    'authenticate': kwargs.get('authenticate', False),
                    'passphrase': kwargs.get('passphrase'),
                    'force': kwargs.get('force')}
+        _logger.debug('Key creation parameters: {0}'.format(keyinfo))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if not keyinfo['expires_in']:
             del(keyinfo['expires_in'])
             keyinfo['expires'] = False
         k = self.ctx.create_key(**keyinfo)
+        _logger.info('Created key: {0}'.format(k.fpr))
+        _logger.debug('Key info: {0}'.format(k))
         return(k)

     def findKey(self, searchstr, secret = False, local = True, remote = True,
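For reference, gpg.Context.create_key() in the python-gpg (GPGME) bindings accepts keyword arguments along these lines; a hedged sketch of what createKey() wraps, with a hypothetical homedir:

import gpg

ctx = gpg.Context(home_dir = '/tmp/example.gpg')  # hypothetical homedir
key = ctx.create_key('AIF-NG File Verification Key',
                     sign = True,
                     certify = True,
                     force = True,
                     expires = False)
print(key.fpr)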
@@ -83,12 +131,25 @@ class GPG(object):
         fltr = 0
         if secret:
             fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET
+            _logger.debug('Added "secret" to filter; new filter value: {0}'.format(fltr))
         if local:
             fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+            _logger.debug('Added "local" to filter; new filter value: {0}'.format(fltr))
         if remote:
             fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+            _logger.debug('Added "remote" to filter; new filter value: {0}'.format(fltr))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         keys = [k for k in self.ctx.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
+        _logger.info('Found {0} keys'.format(len(keys)))
+        if keys:
+            _logger.debug('Found keys: {0}'.format(keys))
+        else:
+            _logger.warn('Found no keys.')
         if keyring_import:
+            _logger.debug('Importing enabled; importing found keys.')
             self.importKeys(keys, native = True)
         return(keys)
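The fltr value built above is just a bitwise OR of GPGME keylist-mode flags, so modes combine freely; e.g., local plus remote lookup in one pass:

import gpg.constants

fltr = gpg.constants.KEYLIST_MODE_LOCAL | gpg.constants.KEYLIST_MODE_EXTERN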
@@ -99,129 +160,243 @@ class GPG(object):
                    'local': gpg.constants.KEYLIST_MODE_LOCAL,
                    'secret': gpg.constants.KEYLIST_MODE_WITH_SECRET}
         if source not in sources.keys():
-            raise ValueError('source parameter must be one (and only one) of: {0}'.format(sources.keys()))
+            _logger.error('Invalid source parameter ({0}); must be one of: {1}'.format(source, sources.keys()))
+            raise ValueError('Invalid source parameter')
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         orig_mode = self.ctx.get_keylist_mode()
+        _logger.debug('Original keylist mode: {0}'.format(orig_mode))
         self.ctx.set_keylist_mode(sources[source])
+        _logger.info('Set keylist mode: {0} ({1})'.format(source, sources[source]))
+        _logger.debug('Searching for key ID: {0}'.format(key_id))
         try:
             key = self.ctx.get_key(key_id, secret = (True if source == 'secret' else False))
+            _logger.info('Found key object for {0}'.format(key_id))
+            _logger.debug('Found key: {0}'.format(key))
         except gpg.errors.KeyNotFound:
             key = None
+            _logger.warning('Found no keys.')
         self.ctx.set_keylist_mode(orig_mode)
+        _logger.info('Restored keylist mode ({0})'.format(orig_mode))
         if keyring_import and key:
+            _logger.debug('Importing enabled; importing found keys.')
             self.importKeys(key, native = True)
         return(key)

     def getKey(self, key_id, secret = False, strict = False, *args, **kwargs):
+        key = None
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         try:
             getattr(key_id, 'fpr')
+            _logger.info('Key specified is already a native key object.')
+            _logger.debug('Key: {0}'.format(key_id))
             return(key_id)
         except AttributeError:
             if not strict:
+                _logger.debug('Strict mode disabled; attempting import of {0} first.'.format(key_id))
                 self.findKeyByID(key_id, keyring_import = True, **kwargs)
             try:
                 key = self.ctx.get_key(key_id, secret = secret)
+                _logger.info('Found {0}.'.format(key_id))
+                _logger.debug('Key: {0}'.format(key))
             except gpg.errors.KeyNotFound:
                 key = None
-                return(key)
-        return(None)
-
-    def getKeyFile(self, keyfile, keyring_import = False, *args, **kwargs):
-        keyfile = os.path.abspath(os.path.expanduser(keyfile))
-        with open(keyfile, 'rb') as fh:
-            rawkey_data = fh.read()
-            fh.seek(0, 0)
-            keys = [k for k in self.ctx.keylist(source = fh)]
-        if keyring_import:
-            self.importKeys(keys, native = True)
-        return((keys, rawkey_data))
+                _logger.warning('Could not locate {0} in keyring'.format(key_id))
+        return(key)

     def getKeyData(self, keydata, keyring_import = False, *args, **kwargs):
         orig_keydata = keydata
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if isinstance(keydata, str):
+            _logger.debug('String passed as keydata; converting to bytes.')
             keydata = keydata.encode('utf-8')
         buf = io.BytesIO(keydata)
+        _logger.info('Parsed {0} bytes; looking for key(s).'.format(buf.getbuffer().nbytes))
         keys = [k for k in self.ctx.keylist(source = buf)]
+        _logger.info('Found {0} key(s) in data.'.format(len(keys)))
+        if keys:
+            _logger.debug('Keys found: {0}'.format(keys))
+        else:
+            _logger.warning('No keys found in data.')
         buf.close()
         if keyring_import:
+            _logger.debug('Importing enabled; importing found keys.')
             self.importKeys(keys, native = True)
         return((keys, orig_keydata))

+    def getKeyFile(self, keyfile, keyring_import = False, *args, **kwargs):
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
+        orig_keyfile = keyfile
+        keyfile = os.path.abspath(os.path.expanduser(keyfile))
+        _logger.info('Parsed absolute keyfile path: {0} => {1}'.format(orig_keyfile, keyfile))
+        with open(keyfile, 'rb') as fh:
+            rawkey_data = fh.read()
+            fh.seek(0, 0)
+            _logger.debug('Parsed {0} bytes; looking for key(s).'.format(len(rawkey_data)))
+            keys = [k for k in self.ctx.keylist(source = fh)]
+            _logger.info('Found {0} key(s) in data.'.format(len(keys)))
+        if keys:
+            _logger.debug('Keys found: {0}'.format(keys))
+        else:
+            _logger.warning('No keys found in data.')
+        if keyring_import:
+            _logger.debug('Importing enabled; importing found keys.')
+            self.importKeys(keys, native = True)
+        return((keys, rawkey_data))

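A quick hedged sketch of the data path above (g is a GPG instance from this module; the armored block is a hypothetical placeholder):

armored = '-----BEGIN PGP PUBLIC KEY BLOCK-----\n...'  # hypothetical ASCII-armored key
keys, raw = g.getKeyData(armored, keyring_import = True)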
     def importKeys(self, keydata, native = False, local = True, remote = True, *args, **kwargs):
         fltr = 0
         orig_km = None
         keys = []
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if local:
             fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+            _logger.debug('Added "local" to filter; new filter value: {0}'.format(fltr))
         if remote:
             fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+            _logger.debug('Added "remote" to filter; new filter value: {0}'.format(fltr))
         if self.ctx.get_keylist_mode() != fltr:
             orig_km = self.ctx.get_keylist_mode()
             self.ctx.set_keylist_mode(fltr)
+            _logger.info(('Current keylist mode ({0}) doesn\'t match filter ({1}); '
+                          'set to new mode.').format(orig_km, fltr))
         if not native: # It's raw key data (.gpg, .asc, etc.).
+            _logger.info('Non-native keydata specified; parsing.')
             formatted_keys = b''
             if isinstance(keydata, str):
                 formatted_keys += keydata.encode('utf-8')
+                _logger.debug('Specified keydata was a string; converted to bytes.')
             elif isinstance(keydata, list):
-                for k in keydata:
+                _logger.debug('Specified keydata was a list/list-like; iterating.')
+                for idx, k in enumerate(keydata):
+                    _logger.debug('Parsing entry {0} of {1} entries.'.format((idx + 1), len(keydata)))
                     if isinstance(k, str):
                         formatted_keys += k.encode('utf-8')
+                        _logger.debug('Keydata ({0}) was a string; converted to bytes.'.format((idx + 1)))
                     else:
+                        _logger.debug('Keydata ({0}) was already in bytes.'.format((idx + 1)))
                         formatted_keys += k
             else:
-                formatted_keys += keydata
-            for rslt in self.ctx.key_import(formatted_keys).imports:
-                keys.append(self.ctx.get_key(rslt.fpr))
+                _logger.warning('Could not identify keydata reliably; unpredictable results ahead.')
+                formatted_keys = keydata
+            rslt = self.ctx.key_import(formatted_keys).imports
+            _logger.debug('Imported keys: {0}'.format(rslt))
+            for r in rslt:
+                k = self.ctx.get_key(r.fpr)
+                if k:
+                    _logger.debug('Adding key to keylist: {0}'.format(k))
+                else:
+                    _logger.warning('Could not find key ID {0}.'.format(r.fpr))
+                keys.append(k)
         else: # It's a native Key() object (or a list of them).
+            _logger.info('Native keydata specified; parsing.')
             if not isinstance(keydata, list):
+                _logger.debug('Specified keydata was not a list/list-like; fixing.')
                 keydata = [keydata]
             keys = keydata
+            _logger.debug('Importing keys: {0}'.format(keys))
             self.ctx.op_import_keys(keydata)
         if orig_km:
             self.ctx.set_keylist_mode(orig_km)
+            _logger.info('Restored keylist mode to {0}'.format(orig_km))
         for k in keys:
+            _logger.info('Signing {0} with a local signature.'.format(k.fpr))
             self.ctx.key_sign(k, local = True)
+            _logger.debug('Adding trust for {0}.'.format(k.fpr))
             trusteditor = KeyEditor()
             self.ctx.interact(k, trusteditor.truster)
         return(None)

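For reference, the raw-data branch above feeds gpg.Context.key_import(), which takes a bytes blob and reports what landed in the keyring. A hedged sketch with a hypothetical keyfile path:

import gpg

ctx = gpg.Context()
with open('somekey.asc', 'rb') as fh:  # hypothetical keyfile
    rslt = ctx.key_import(fh.read())
for r in rslt.imports:
    print(r.fpr)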
     def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs):
         results = {}
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if keys:
+            _logger.info('Keys were specified.')
             if not isinstance(keys, list):
                 keys = [self.getKey(keys, source = 'local')]
             else:
                 keys = [self.getKey(k, source = 'local') for k in keys]
+            _logger.debug('Verifying against keys: {0}'.format(keys))
         if isinstance(data, str):
             data = data.encode('utf-8')
+            _logger.debug('Specified data was a string; converted to bytes.')
+        _logger.info('Verifying {0} bytes of data.'.format(len(data)))
         fnargs = {'signed_data': data}
         if detached:
+            _logger.info('Specified a detached signature.')
             if isinstance(detached, str):
                 detached = detached.encode('utf-8')
+                _logger.debug('Specified signature was a string; converted to bytes.')
             if not isinstance(detached, bytes) and not hasattr(detached, 'read'):
-                raise TypeError('detached must be bytes or a file-like object (make sure the position is correct!)')
+                _logger.error('Detached signature was neither bytes nor a buffer-like object.')
+                raise TypeError('detached must be bytes or buffer-like object')
+            if isinstance(detached, bytes):
+                _logger.info('Signature length: {0} bytes'.format(len(detached)))
+            else:
+                _logger.info('Signature length: {0} bytes'.format(detached.getbuffer().nbytes))
             fnargs['signature'] = detached
         if strict:
+            _logger.debug('Strict mode enabled; data must be signed by ALL specified keys.')
             fnargs['verify'] = keys
+            _logger.debug('Verifying with args: {0}'.format(fnargs))
             results[None] = self.ctx.verify(**fnargs)
         else:
             if keys:
+                _logger.debug('Keys were specified but running in non-strict; iterating over all.')
                 for k in keys:
                     _fnargs = copy.deepcopy(fnargs)
                     _fnargs['verify'] = [k]
+                    _logger.info('Verifying against key {0}'.format(k.fpr))
                     try:
-                        print(self.ctx.get_keylist_mode())
+                        _logger.debug(('Verifying with args (data-stripped): '
+                                       '{0}').format({k: (v if k not in ('signed_data',
+                                                                         'signature')
+                                                          else '(stripped)') for k, v in _fnargs.items()}))
                         sigchk = self.ctx.verify(**_fnargs)
-                        results[k.fpr] = (True, sigchk[1].results, None)
+                        _logger.info('Key {0} verification results: {1}'.format(k.fpr, sigchk))
+                        results[k.fpr] = (True, sigchk[1], None)
                     except gpg.errors.MissingSignatures as e:
+                        _logger.warning('Key {0}: missing signature'.format(k.fpr))
+                        _logger.debug('Key {0} results: {1}'.format(k.fpr, e.results))
                         results[k.fpr] = (False, e.results, 'Missing Signature')
                     except gpg.errors.BadSignatures as e:
+                        _logger.warning('Key {0}: bad signature'.format(k.fpr))
+                        _logger.debug('Key {0} results: {1}'.format(k.fpr, e.results))
                         results[k.fpr] = (False, e.results, 'Bad Signature')
             else:
+                _logger.debug('No keys specified but running in non-strict; accepting any signatures.')
+                _logger.debug(('Verifying with args (data-stripped): '
+                               '{0}').format({k: (v if k not in ('signed_data',
+                                                                 'signature')
+                                                  else '(stripped)') for k, v in fnargs.items()}))
                 results[None] = self.ctx.verify(**fnargs)
+                _logger.debug('Results for any/all signatures: {0}'.format(results[None]))
         return(results)

     def verifyFile(self, filepath, *args, **kwargs):
+        orig_filepath = filepath
         filepath = os.path.abspath(os.path.expanduser(filepath))
+        _logger.debug('File verification invoked. Transformed filepath: {0} => {1}'.format(orig_filepath, filepath))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         with open(filepath, 'rb') as fh:
             results = self.verifyData(fh.read(), **kwargs)
         return(results)

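The fnargs dict above maps directly onto gpg.Context.verify(signed_data, signature = None, verify = None), which returns a (data, result) tuple and raises gpg.errors.MissingSignatures/BadSignatures when verification fails against required keys. A hedged sketch (the blobs are hypothetical placeholders):

import gpg
import gpg.errors

ctx = gpg.Context()
signed_blob = b'...'   # the signed data (hypothetical)
detached_sig = b'...'  # its detached signature (hypothetical)
try:
    data, result = ctx.verify(signed_blob, signature = detached_sig)
except gpg.errors.MissingSignatures as e:
    print('missing signatures:', e.results)
except gpg.errors.BadSignatures as e:
    print('bad signatures:', e.results)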
@@ -1,10 +1,12 @@
 import ftplib
 import io
+import logging
 import pathlib
 import re
 ##
 import requests
 import requests.auth
+from lxml import etree
 ##
 import aif.constants_fallback
 from . import gpg_handler
@@ -12,13 +14,26 @@ from . import hash_handler
 from . import parser


+_logger = logging.getLogger(__name__)
+
+
 class ChecksumFile(object):
     _bsd_re = re.compile(r'^(?P<fname>\(.*\))\s+=\s+(?P<cksum>.*)$')

     def __init__(self, checksum_xml, filetype):
         self.xml = checksum_xml
+        if self.xml is not None:
+            _logger.debug('checksum_xml: {0}'.format(etree.tostring(self.xml).decode('utf-8')))
+        else:
+            _logger.error('checksum_xml is required but not specified')
+            raise ValueError('checksum_xml is required')
         self.uri = self.xml.text.strip()
         self.filetype = filetype
+        if filetype:
+            _logger.debug('URI and filetype: {{{0}}}{1}'.format(self.uri, self.filetype))
+        else:
+            _logger.error('filetype is required but not specified')
+            raise ValueError('filetype is required')
         self.hashes = None
         downloader = getDLHandler(self.uri) # Recursive objects for the win?
         dl = downloader(self.xml)
@@ -28,14 +43,14 @@ class ChecksumFile(object):
         self._convert()

     def _convert(self):
-        data = self.data
-        if not isinstance(data, str):
-            data = data.decode('utf-8')
-        data.strip()
+        if not isinstance(self.data, str):
+            self.data = self.data.decode('utf-8')
+        self.data.strip()
         self.hashes = {}
         if self.filetype not in ('gnu', 'bsd'):
+            _logger.error('Passed an invalid filetype: {0}'.format(self.filetype))
             raise ValueError('filetype attribute must be either "gnu" or "bsd"')
-        for line in data.splitlines():
+        for line in self.data.splitlines():
             if self.filetype == 'gnu':
                 hashtype = None # GNU style splits their hash types into separate files by default.
                 h, fname = line.split(None, 1)
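For reference, the two on-disk formats _convert() parses look like this (illustrative SHA-256 values; GNU is "<hash>  <file>", BSD is "<TYPE> (<file>) = <hash>"):

gnu_line = 'b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c  archlinux.iso'
bsd_line = 'SHA256 (archlinux.iso) = b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c'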
@@ -48,17 +63,29 @@ class ChecksumFile(object):
             if hashtype not in self.hashes:
                 self.hashes[hashtype] = {}
             self.hashes[hashtype][fname] = h
+        _logger.debug('Generated hash set: {0}'.format(self.hashes))
         return(None)


 class Downloader(object):
     def __init__(self, netresource_xml, *args, **kwargs):
         self.xml = netresource_xml
+        _logger.info('Instantiated class {0}'.format(type(self).__name__))
+        if netresource_xml is not None:
+            _logger.debug('netresource_xml: {0}'.format(etree.tostring(self.xml).decode('utf-8')))
+        else:
+            _logger.error('netresource_xml is required but not specified')
+            raise ValueError('netresource_xml is required')
+        _logger.debug('args: {0}'.format(','.join(args)))
+        _logger.debug('kwargs: {0}'.format(kwargs))
         self.uri = parser.URI(self.xml.text.strip())
+        _logger.debug('Parsed URI: {0}'.format(self.uri))
         self.user = self.xml.attrib.get('user')
         if not self.user and self.uri.user:
             self.user = self.uri.user
         self.password = self.xml.attrib.get('password')
+        _logger.debug('Parsed user: {0}'.format(self.user))
+        _logger.debug('Parsed password: {0}'.format(self.password))
         if not self.password and self.uri.password:
             self.password = self.uri.password
         self.real_uri = ('{0}://'
@@ -68,6 +95,7 @@ class Downloader(object):
                          (self.uri.base if self.uri.base else ''),
                          (':{0}'.format(self.uri.port) if self.uri.port else ''),
                          self.uri.path)
+        _logger.debug('Rebuilt URI: {0}'.format(self.real_uri))
         self.gpg = None
         self.checksum = None
         self.data = io.BytesIO()
@@ -77,11 +105,19 @@ class Downloader(object):
         return(None)

     def parseGpgVerify(self, results):
-        pass
+        pass # TODO? Might not need to.

     def verify(self, verify_xml, *args, **kwargs):
         gpg_xml = verify_xml.find('gpg')
+        if gpg_xml is not None:
+            _logger.debug('gpg_xml: {0}'.format(etree.tostring(gpg_xml).decode('utf-8')))
+        else:
+            _logger.debug('No <gpg> in verify_xml')
         hash_xml = verify_xml.find('hash')
+        if hash_xml is not None:
+            _logger.debug('Hash XML: {0}'.format(etree.tostring(hash_xml).decode('utf-8')))
+        else:
+            _logger.debug('No <hash> in verify_xml')
         results = {}
         if gpg_xml is not None:
             results['gpg'] = self.verifyGPG(gpg_xml)
@@ -94,35 +130,72 @@ class Downloader(object):
         # We don't allow custom GPG homedirs since this is probably running from a LiveCD/USB/whatever anyways.
         # This means we can *always* instantiate the GPG handler from scratch.
         self.gpg = gpg_handler.GPG()
+        _logger.info('Established GPG session.')
+        _logger.debug('GPG home dir: {0}'.format(self.gpg.homedir))
+        _logger.debug('GPG primary key: {0}'.format(self.gpg.primary_key.fpr))
         keys_xml = gpg_xml.find('keys')
+        if keys_xml is not None:
+            _logger.debug('keys_xml: {0}'.format(etree.tostring(keys_xml).decode('utf-8')))
+        else:
+            _logger.error('No required <keys> in gpg_xml')
+            raise ValueError('<keys> is required in a GPG verification block')
         sigs_xml = gpg_xml.find('sigs')
-        fnargs = {'keyring_import': True}
+        if sigs_xml is not None:
+            _logger.debug('Keys XML: {0}'.format(etree.tostring(keys_xml).decode('utf-8')))
+        else:
+            _logger.error('No required <keys> in gpg_xml')
+            raise ValueError('<sigs> is required in a GPG verification block')
+        fnargs = {'strict': keys_xml.attrib.get('detect')}
+        if fnargs['strict']: # We have to manually do this since it's in our parent's __init__
+            if fnargs['strict'].lower() in ('true', '1'):
+                fnargs['strict'] = True
+            else:
+                fnargs['strict'] = False
+        else:
+            fnargs['strict'] = False
+        fnargs.update(kwargs)
         if keys_xml is not None:
             fnargs['keys'] = []
             for key_id_xml in keys_xml.findall('keyID'):
+                _logger.debug('Found <keyID>: {0}'.format(etree.tostring(key_id_xml).decode('utf-8')))
                 if key_id_xml.text == 'auto':
-                    k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, **fnargs)
-                elif key_id_xml.text == 'detect':
-                    fnargs['strict'] = False
-                    continue
+                    _logger.debug('Key ID was set to "auto"; using {0}'.format(aif.constants_fallback.ARCH_RELENG_KEY))
+                    self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, source = 'remote',
+                                         keyring_import = True, **fnargs)
+                    k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, source = 'local', **fnargs)
                 else:
-                    k = self.gpg.findKeyByID(key_id_xml.text.strip(), **fnargs)
+                    _logger.debug('Finding key: {0}'.format(key_id_xml.text.strip()))
+                    self.gpg.findKeyByID(key_id_xml.text.strip(), source = 'remote', keyring_import = True, **fnargs)
+                    k = self.gpg.findKeyByID(key_id_xml.text.strip(), source = 'local', **fnargs)
+                if k:
+                    _logger.debug('Key {0} found'.format(k.fpr))
+                else:
+                    _logger.error('Key {0} not found'.format(key_id_xml.text.strip()))
+                    raise RuntimeError('Could not find key ID specified')
                 fnargs['keys'].append(k)
             for key_file_xml in keys_xml.findall('keyFile'):
+                _logger.debug('Found <keyFile>: {0}'.format(etree.tostring(key_file_xml).decode('utf-8')))
                 downloader = getDLHandler(key_file_xml.text.strip()) # Recursive objects for the win?
                 dl = downloader(key_file_xml)
                 dl.get()
-                k = self.gpg.getKeyData(dl.data.read(), **fnargs)[0]
+                k = self.gpg.getKeyData(dl.data.read(), keyring_import = True, **fnargs)[0]
+                if k:
+                    fnargs['keys'].extend(k)
+                else:
+                    pass # No keys found in key file. We log this in GPG.getKeyData() though.
                 dl.data.seek(0, 0)
-                fnargs['keys'].extend(k)
             if not fnargs['keys']:
+                _logger.debug('Found no keys in keys_xml')
                 raise ValueError('Could not find any keys')
         if sigs_xml is not None:
             for sig_text_xml in sigs_xml.findall('signature'):
+                _logger.debug('Found <signature>')
                 sig = sig_text_xml.text.strip()
                 sigchk = self.gpg.verifyData(self.data.read(), detached = sig, **fnargs)
                 self.data.seek(0, 0)
                 results.update(sigchk)
             for sig_file_xml in sigs_xml.findall('signatureFile'):
+                _logger.debug('Found <signatureFile>: {0}'.format(sig_file_xml.text.strip()))
                 downloader = getDLHandler(sig_file_xml.text.strip())
                 dl = downloader(sig_file_xml)
                 dl.get()
@@ -131,6 +204,7 @@ class Downloader(object):
                 self.data.seek(0, 0)
                 results.update(sigchk)
         self.gpg.clean()
+        _logger.debug('Rendered results: {0}'.format(results))
         return(results)

     def verifyHash(self, hash_xml, *args, **kwargs):
@@ -144,6 +218,7 @@ class Downloader(object):
         self.data.seek(0, 0)
         if checksum_file_xml is not None:
             for cksum_xml in checksum_file_xml:
+                _logger.debug('Found <checksumFile>: {0}'.format(etree.tostring(cksum_xml).decode('utf-8')))
                 htype = cksum_xml.attrib['hashType'].strip().lower()
                 ftype = cksum_xml.attrib['fileType'].strip().lower()
                 fname = cksum_xml.attrib.get('filePath',
@@ -154,14 +229,28 @@ class Downloader(object):
                 elif ftype == 'bsd':
                     cksum = cksum_file.hashes[htype][fname]
                 result = (cksum == checksums[htype])
+                if result:
+                    _logger.debug('Checksum type {0} matches ({1})'.format(htype, cksum))
+                else:
+                    _logger.warning(('Checksum type {0} mismatch: '
+                                     '{1} (data) vs. {2} (specified)').format(htype, checksums[htype], cksum))
                 results.append(result)
         if checksum_xml is not None:
             for cksum_xml in checksum_xml:
+                _logger.debug('Found <checksum>: {0}'.format(etree.tostring(cksum_xml).decode('utf-8')))
                 # Thankfully, this is a LOT easier.
                 htype = cksum_xml.attrib['hashType'].strip().lower()
                 result = (cksum_xml.text.strip().lower() == checksums[htype])
+                if result:
+                    _logger.debug('Checksum type {0} matches ({1})'.format(htype, checksums[htype]))
+                else:
+                    _logger.warning(('Checksum type {0} mismatch: '
+                                     '{1} (data) vs. {2} (specified)').format(htype,
+                                                                              checksums[htype],
+                                                                              cksum_xml.text.strip().lower()))
                 results.append(result)
         result = all(results)
+        _logger.debug('Overall result of checksumming: {0}'.format(result))
         return(result)
@@ -176,6 +265,7 @@ class FSDownloader(Downloader):
         with open(self.uri.path, 'rb') as fh:
             self.data.write(fh.read())
         self.data.seek(0, 0)
+        _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes))
         return(None)

@@ -188,11 +278,15 @@ class FTPDownloader(Downloader):
         self.password = ''
         self.port = (self.uri.port if self.uri.port else 0)
         self._conn = None
+        _logger.debug('User: {0}'.format(self.user))
+        _logger.debug('Password: {0}'.format(self.password))
+        _logger.debug('Port: {0}'.format(self.port))

     def _connect(self):
         self._conn = ftplib.FTP()
         self._conn.connect(host = self.uri.base, port = self.port)
         self._conn.login(user = self.user, passwd = self.password)
+        _logger.info('Connected.')
         return(None)

     def get(self):
@@ -201,10 +295,12 @@ class FTPDownloader(Downloader):
         self._conn.retrbinary('RETR {0}'.format(self.uri.path), self.data.write)
         self.data.seek(0, 0)
         self._close()
+        _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes))
         return(None)

     def _close(self):
         self._conn.quit()
+        _logger.info('Closed connection')
         return(None)

@@ -217,6 +313,7 @@ class FTPSDownloader(FTPDownloader):
         self._conn.connect(host = self.uri.base, port = self.port)
         self._conn.login(user = self.user, passwd = self.password)
         self._conn.prot_p()
+        _logger.info('Connected.')
         return(None)

@@ -225,6 +322,7 @@ class HTTPDownloader(Downloader):
         super().__init__(netresource_xml, *args, **kwargs)
         self.auth = self.xml.attrib.get('authType', 'none').lower()
         if self.auth == 'none':
+            _logger.debug('No auth.')
             self.auth = None
             self.realm = None
             self.user = None
@@ -232,14 +330,17 @@ class HTTPDownloader(Downloader):
         else:
             if self.auth == 'basic':
                 self.auth = requests.auth.HTTPBasicAuth(self.user, self.password)
+                _logger.info('HTTP basic auth configured.')
             elif self.auth == 'digest':
                 self.auth = requests.auth.HTTPDigestAuth(self.user, self.password)
+                _logger.info('HTTP digest auth configured.')

     def get(self):
         self.data.seek(0, 0)
         req = requests.get(self.real_uri, auth = self.auth)
         self.data.write(req.content)
         self.data.seek(0, 0)
+        _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes))
         return(None)

@@ -254,5 +355,6 @@ def getDLHandler(uri):
     elif re.search(r'^ftps://', uri, re.IGNORECASE):
         return(FTPSDownloader)
     else:
+        _logger.error('Unable to detect which download handler to instantiate.')
         raise RuntimeError('Could not detect which download handler to use')
     return(None)
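Finally, a hedged sketch of the dispatch idiom this function supports (netresource_xml is a hypothetical lxml element whose text is the URI; HTTPDownloader handles the http/https branch shown earlier in this file):

downloader_cls = getDLHandler('https://example.com/archlinux.iso')  # -> HTTPDownloader
dl = downloader_cls(netresource_xml)
dl.get()
payload = dl.data.read()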