starting to roll in some logging. still need to figure out what's going on with that gpg verifyData
@@ -1,3 +1,4 @@
+import logging
 import math
 import os
 import pathlib
@@ -7,14 +8,24 @@ import subprocess
 ##
 import psutil
 ##
+from . import parser
 from . import file_handler
 from . import gpg_handler
 from . import hash_handler
 from . import sources


+_logger = logging.getLogger('utils.__init__')
+
+
 def checkMounted(devpath):
-    if devpath in [p.device for p in psutil.disk_partitions(all = True)]:
-        raise RuntimeError('{0} is mounted; we are cowardly refusing to destructive operations on it'.format(devpath))
+    for p in psutil.disk_partitions(all = True):
+        if p.device == devpath:
+            _logger.error(('{0} is mounted at {1} but was specified as a target. '
+                           'Cowardly refusing to run potentially destructive operations on it.').format(devpath,
+                                                                                                        p.mountpoint))
+            # TODO: raise only if not dryrun? Raise warning instead if so?
+            raise RuntimeError('Device mounted in live environment')
     return(None)
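For readers tracing the new checkMounted() behavior: psutil.disk_partitions(all = True) yields one entry per mount with .device and .mountpoint attributes, which is what lets the new error message name the mountpoint instead of just refusing. A standalone sketch of the same check (the device path below is only an example, not from the commit):

import psutil

def mounted_at(devpath):
    # Return the mountpoint if devpath is currently mounted, else None.
    for p in psutil.disk_partitions(all = True):
        if p.device == devpath:
            return(p.mountpoint)
    return(None)

print(mounted_at('/dev/sda1'))  # e.g. '/boot' if mounted, None otherwise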
@@ -104,6 +115,7 @@ def kernelFilesystems():
                 FS_FSTYPES.append(l[0])
             else:
                 FS_FSTYPES.append(l[1])
+    _logger.debug('Built list of pre-loaded filesystem types: {0}'.format(','.join(FS_FSTYPES)))
     _mod_dir = os.path.join('/lib/modules',
                             os.uname().release,
                             'kernel/fs')
@@ -119,14 +131,23 @@ def kernelFilesystems():
                 fs_name = mod_name.group('fsname')
                 if fs_name:
                     # The kernel *probably* has autoloading enabled, but in case it doesn't...
-                    # TODO: logging!
                     if os.getuid() == 0:
-                        subprocess.run(['modprobe', fs_name])
+                        cmd = subprocess.run(['modprobe', fs_name], stderr = subprocess.PIPE, stdout = subprocess.PIPE)
+                        _logger.debug('Executed: {0}'.format(' '.join(cmd.args)))
+                        if cmd.returncode != 0:
+                            _logger.warning('Command returned non-zero status')
+                            _logger.debug('Exit status: {0}'.format(str(cmd.returncode)))
+                            for a in ('stdout', 'stderr'):
+                                x = getattr(cmd, a)
+                                if x:
+                                    _logger.debug('{0}: {1}'.format(a.upper(), x.decode('utf-8').strip()))
                     FS_FSTYPES.append(fs_name)
     except FileNotFoundError:
         # We're running on a kernel that doesn't have modules
+        _logger.info('Kernel has no modules available')
         pass
     FS_FSTYPES = sorted(list(set(FS_FSTYPES)))
+    _logger.debug('Generated full list of FS_FSTYPES: {0}'.format(','.join(FS_FSTYPES)))
     return(FS_FSTYPES)
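The modprobe block above is the logging idiom this commit starts rolling out: capture both pipes, log the argv at debug level, and on a non-zero exit log the status plus any stdout/stderr. A minimal sketch of that idiom as a reusable helper (the helper name is illustrative, not something the commit adds):

import logging
import subprocess

_logger = logging.getLogger(__name__)

def run_logged(argv):
    # PIPE both streams so a failure can be inspected from the logs.
    cmd = subprocess.run(argv, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    _logger.debug('Executed: {0}'.format(' '.join(cmd.args)))
    if cmd.returncode != 0:
        _logger.warning('Command returned non-zero status')
        _logger.debug('Exit status: {0}'.format(str(cmd.returncode)))
        for a in ('stdout', 'stderr'):
            x = getattr(cmd, a)
            if x:
                _logger.debug('{0}: {1}'.format(a.upper(), x.decode('utf-8').strip()))
    return(cmd)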
@@ -143,16 +164,16 @@ def xmlBool(xmlobj):


 class _Sizer(object):
-    def __init__(self):
-        # We use different methods for converting between storage and BW, and different multipliers for each subtype.
-        # https://stackoverflow.com/a/12912296/733214
-        # https://stackoverflow.com/a/52684562/733214
-        # https://stackoverflow.com/questions/5194057/better-way-to-convert-file-sizes-in-python
-        # https://en.wikipedia.org/wiki/Orders_of_magnitude_(data)
-        # https://en.wikipedia.org/wiki/Binary_prefix
-        # 'decimal' is base-10, 'binary' is base-2. (Duh.)
-        # "b" = bytes, "n" = given value, and "u" = unit suffix's key in below notes.
-        self.storageUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
+    # We use different methods for converting between storage and BW, and different multipliers for each subtype.
+    # https://stackoverflow.com/a/12912296/733214
+    # https://stackoverflow.com/a/52684562/733214
+    # https://stackoverflow.com/questions/5194057/better-way-to-convert-file-sizes-in-python
+    # https://en.wikipedia.org/wiki/Orders_of_magnitude_(data)
+    # https://en.wikipedia.org/wiki/Binary_prefix
+    # 'decimal' is base-10, 'binary' is base-2. (Duh.)
+    # "b" = bytes, "n" = given value, and "u" = unit suffix's key in below notes.
+    storageUnits = {
+        'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
             0: (None, 'B', 'byte'),
             3: ('k', 'kB', 'kilobyte'),
             6: ('M', 'MB', 'megabyte'),
@@ -163,19 +184,20 @@ class _Sizer(object):
             18: ('Z', 'ZB', 'zettabyte'),
             19: ('Y', 'YB', 'yottabyte')
             },
-        'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
-            -1: ('nybble', 'nibble', 'nyble', 'half-byte', 'tetrade', 'nibble'),
-            10: ('Ki', 'KiB', 'kibibyte'),
-            20: ('Mi', 'MiB', 'mebibyte'),
-            30: ('Gi', 'GiB', 'gibibyte'),
-            40: ('Ti', 'TiB', 'tebibyte'),
-            50: ('Pi', 'PiB', 'pebibyte'),
-            60: ('Ei', 'EiB', 'exbibyte'),
-            70: ('Zi', 'ZiB', 'zebibyte'),
-            80: ('Yi', 'YiB', 'yobibyte')
-            }}
-        # https://en.wikipedia.org/wiki/Bit#Multiple_bits - note that 8 bits = 1 byte
-        self.bwUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
+        'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
+            -1: ('nybble', 'nibble', 'nyble', 'half-byte', 'tetrade', 'nibble'),
+            10: ('Ki', 'KiB', 'kibibyte'),
+            20: ('Mi', 'MiB', 'mebibyte'),
+            30: ('Gi', 'GiB', 'gibibyte'),
+            40: ('Ti', 'TiB', 'tebibyte'),
+            50: ('Pi', 'PiB', 'pebibyte'),
+            60: ('Ei', 'EiB', 'exbibyte'),
+            70: ('Zi', 'ZiB', 'zebibyte'),
+            80: ('Yi', 'YiB', 'yobibyte')
+            }}
+    # https://en.wikipedia.org/wiki/Bit#Multiple_bits - note that 8 bits = 1 byte
+    bwUnits = {
+        'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
             0: (None, 'b', 'bit'),
             3: ('k', 'kb', 'kilobit'),
             6: ('M', 'Mb', 'megabit'),
@@ -186,29 +208,32 @@ class _Sizer(object):
             18: ('Z', 'Zb', 'zettabit'),
             19: ('Y', 'Yb', 'yottabit')
             },
-        'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
-            -1: ('semi-octet', 'quartet', 'quadbit'),
-            10: ('Ki', 'Kib', 'kibibit'),
-            20: ('Mi', 'Mib', 'mebibit'),
-            30: ('Gi', 'Gib', 'gibibit'),
-            40: ('Ti', 'Tib', 'tebibit'),
-            50: ('Pi', 'Pib', 'pebibit'),
-            60: ('Ei', 'Eib', 'exbibit'),
-            70: ('Zi', 'Zib', 'zebibit'),
-            80: ('Yi', 'Yib', 'yobibit')
-            }}
-        self.valid_storage = []
-        for unit_type, convpair in self.storageUnits.items():
-            for f, l in convpair.items():
-                for suffix in l:
-                    if suffix not in self.valid_storage and suffix:
-                        self.valid_storage.append(suffix)
-        self.valid_bw = []
-        for unit_type, convpair in self.bwUnits.items():
-            for f, l in convpair.items():
-                for suffix in l:
-                    if suffix not in self.valid_bw and suffix:
-                        self.valid_bw.append(suffix)
+        'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
+            -1: ('semi-octet', 'quartet', 'quadbit'),
+            10: ('Ki', 'Kib', 'kibibit'),
+            20: ('Mi', 'Mib', 'mebibit'),
+            30: ('Gi', 'Gib', 'gibibit'),
+            40: ('Ti', 'Tib', 'tebibit'),
+            50: ('Pi', 'Pib', 'pebibit'),
+            60: ('Ei', 'Eib', 'exbibit'),
+            70: ('Zi', 'Zib', 'zebibit'),
+            80: ('Yi', 'Yib', 'yobibit')
+            }}
+    valid_storage = []
+    for unit_type, convpair in storageUnits.items():
+        for f, l in convpair.items():
+            for suffix in l:
+                if suffix not in valid_storage and suffix:
+                    valid_storage.append(suffix)
+    valid_bw = []
+    for unit_type, convpair in bwUnits.items():
+        for f, l in convpair.items():
+            for suffix in l:
+                if suffix not in valid_bw and suffix:
+                    valid_bw.append(suffix)

+    def __init__(self):
+        pass
+
     def convert(self, n, suffix):
         conversion = {}
@@ -227,6 +252,7 @@ class _Sizer(object):
         conversion = None
         base_factors = []
         if suffix not in self.valid_bw:
+            _logger.error('Passed an invalid suffix')
             raise ValueError('suffix is not a valid unit notation for this conversion')
         if target and target not in self.valid_bw:
             raise ValueError('target is not a valid unit notation for this conversion')
@@ -310,4 +336,3 @@ def convertSizeUnit(pos):
     else:
         raise ValueError('Invalid size specified: {0}'.format(orig_pos))
     return((from_beginning, _size, amt_type))
-
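The dict keys above are exponents, so the whole conversion model reduces to the formulas in the inline comments: n * (10 ** u) bytes for decimal suffixes and n * (2 ** u) bytes for binary ones. A quick worked example in plain Python (not part of the commit):

n = 4
b = n * (2 ** 30)    # 4 GiB -> 4294967296 bytes ('Gi' maps to exponent 30)
mb = b / (10 ** 6)   # -> 4294.967296 decimal MB ('M' maps to exponent 6)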
@@ -9,7 +9,7 @@ class File(object):
         self.path_rel = pathlib.PurePosixPath(self.orig_path)
         self.path_full = pathlib.PurePosixPath(self.fullpath)

-    def __str(self):
+    def __str__(self):
         return(self.fullpath)
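The fix above is just the two missing trailing underscores, but it matters: __str is an ordinary method Python never calls implicitly, while __str__ is the hook str() and print() use. A tiny illustration with a throwaway class (not from the commit):

class Demo(object):
    def __str__(self):
        return('/some/full/path')

print(Demo())  # prints '/some/full/path'; with the misspelled __str it would print the default repr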
@@ -8,28 +8,47 @@ import gpg
 import gpg.errors


+class KeyEditor(object):
+    def __init__(self):
+        self.trusted = False
+
+    def truster(self, kw, arg, *args, **kwargs):
+        if kw == 'GET_LINE':
+            if arg == 'keyedit.prompt':
+                if not self.trusted:
+                    return('trust')
+                else:
+                    return('save')
+            elif arg == 'edit_ownertrust.value' and not self.trusted:
+                self.trusted = True
+                return('4')  # "Full"
+            else:
+                return('save')
+        return(None)
+
+
 class GPG(object):
-    def __init__(self, homedir = None, primary_key = None):
+    def __init__(self, homedir = None, primary_key = None, *args, **kwargs):
         self.homedir = homedir
         self.primary_key = primary_key
         self.temporary = None
-        self.gpg = None
+        self.ctx = None
         self._imported_keys = []
         self._initContext()

     def _initContext(self):
         if not self.homedir:
             self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.')
-        self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
-        self.temporary = False
-        if not os.path.isdir(self.homedir):
             self.temporary = True
+        self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
+        if not os.path.isdir(self.homedir):
             os.makedirs(self.homedir, exist_ok = True)
             os.chmod(self.homedir, 0o0700)
-        self.gpg = gpg.Context(home_dir = self.homedir)
+        self.ctx = gpg.Context(home_dir = self.homedir)
         if self.temporary:
-            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True)
-        else:
-            self.primary_key = self.getKey(self.primary_key, secret = True)
+            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True).fpr
+        self.primary_key = self.findKeyByID(self.primary_key, source = 'secret')
+        self.ctx.signers = [self.primary_key]
         return(None)
@@ -38,7 +57,7 @@ class GPG(object):
     def clean(self):
         if self.temporary:
             self.primary_key = None
             shutil.rmtree(self.homedir)
-        self.gpg = None
+        self.ctx = None
         return(None)
@@ -56,11 +75,11 @@ class GPG(object):
     def createKey(self, userid, *args, **kwargs):
         if not keyinfo['expires_in']:
             del(keyinfo['expires_in'])
             keyinfo['expires'] = False
-        k = self.gpg.create_key(**keyinfo)
-        return(k.fpr)
+        k = self.ctx.create_key(**keyinfo)
+        return(k)

     def findKey(self, searchstr, secret = False, local = True, remote = True,
                 secret_only = False, keyring_import = False):
-                secret_only = False, keyring_import = False):
+                secret_only = False, keyring_import = False, *args, **kwargs):
         fltr = 0
         if secret:
             fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET
@@ -68,83 +87,137 @@ class GPG(object):
             fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
         if remote:
             fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
-        keys = [k for k in self.gpg.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
+        keys = [k for k in self.ctx.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
         if keyring_import:
             self.importKeys(keys, native = True)
         return(keys)

-    def getKey(self, key_id, secret = False, strict = False):
+    def findKeyByID(self, key_id, source = 'remote', keyring_import = False, *args, **kwargs):
+        # So .get_key() CAN get a remote key from a keyserver... but you can't have ANY other keylist modes defined.
+        # Ugh.
+        sources = {'remote': gpg.constants.KEYLIST_MODE_EXTERN,
+                   'local': gpg.constants.KEYLIST_MODE_LOCAL,
+                   'secret': gpg.constants.KEYLIST_MODE_WITH_SECRET}
+        if source not in sources.keys():
+            raise ValueError('source parameter must be one (and only one) of: {0}'.format(sources.keys()))
+        orig_mode = self.ctx.get_keylist_mode()
+        self.ctx.set_keylist_mode(sources[source])
+        try:
+            key = self.ctx.get_key(key_id, secret = (True if source == 'secret' else False))
+        except gpg.errors.KeyNotFound:
+            key = None
+        self.ctx.set_keylist_mode(orig_mode)
+        if keyring_import and key:
+            self.importKeys(key, native = True)
+        return(key)
+
+    def getKey(self, key_id, secret = False, strict = False, *args, **kwargs):
         try:
             getattr(key_id, 'fpr')
             return(key_id)
         except AttributeError:
             if not strict:
-                self.findKey(key_id, keyring_import = True)
+                self.findKeyByID(key_id, keyring_import = True, **kwargs)
             try:
-                key = self.gpg.get_key(key_id, secret = secret)
+                key = self.ctx.get_key(key_id, secret = secret)
             except gpg.errors.KeyNotFound:
                 key = None
             return(key)
         return(None)

-    def getKeyFile(self, keyfile, keyring_import = False):
+    def getKeyFile(self, keyfile, keyring_import = False, *args, **kwargs):
         keyfile = os.path.abspath(os.path.expanduser(keyfile))
         with open(keyfile, 'rb') as fh:
             rawkey_data = fh.read()
             fh.seek(0, 0)
-            keys = [k for k in self.gpg.keylist(source = fh)]
+            keys = [k for k in self.ctx.keylist(source = fh)]
         if keyring_import:
             self.importKeys(keys, native = True)
         return((keys, rawkey_data))

-    def getKeyStr(self, keydata, keyring_import = False):
+    def getKeyData(self, keydata, keyring_import = False, *args, **kwargs):
         orig_keydata = keydata
         if isinstance(keydata, str):
             keydata = keydata.encode('utf-8')
         buf = io.BytesIO(keydata)
-        keys = [k for k in self.gpg.keylist(source = buf)]
+        keys = [k for k in self.ctx.keylist(source = buf)]
         buf.close()
         if keyring_import:
             self.importKeys(keys, native = True)
         return((keys, orig_keydata))

-    def importKeys(self, keydata, native = False):
-        if not native:
-            self.gpg.key_import(keydata)
-        else:
+    def importKeys(self, keydata, native = False, local = True, remote = True, *args, **kwargs):
+        fltr = 0
+        orig_km = None
+        keys = []
+        if local:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+        if remote:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+        if self.ctx.get_keylist_mode() != fltr:
+            orig_km = self.ctx.get_keylist_mode()
+            self.ctx.set_keylist_mode(fltr)
+        if not native:  # It's raw key data (.gpg, .asc, etc.).
+            formatted_keys = b''
+            if isinstance(keydata, str):
+                formatted_keys += keydata.encode('utf-8')
+            elif isinstance(keydata, list):
+                for k in keydata:
+                    if isinstance(k, str):
+                        formatted_keys += k.encode('utf-8')
+                    else:
+                        formatted_keys += k
+            else:
+                formatted_keys += keydata
+            for rslt in self.ctx.key_import(formatted_keys).imports:
+                keys.append(self.ctx.get_key(rslt.fpr))
+        else:  # It's a native Key() object (or a list of them).
             if not isinstance(keydata, list):
                 keydata = [keydata]
-            self.gpg.op_import_keys(keydata)
+            keys = keydata
+            self.ctx.op_import_keys(keydata)
+        if orig_km:
+            self.ctx.set_keylist_mode(orig_km)
+        for k in keys:
+            self.ctx.key_sign(k, local = True)
+            trusteditor = KeyEditor()
+            self.ctx.interact(k, trusteditor.truster)
         return(None)

     def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs):
         results = {}
         if keys:
             if not isinstance(keys, list):
-                keys = [self.getKey(keys)]
+                keys = [self.getKey(keys, source = 'local')]
             else:
-                keys = [self.getKey(k) for k in keys]
+                keys = [self.getKey(k, source = 'local') for k in keys]
         if isinstance(data, str):
             data = data.encode('utf-8')
-        args = {'signed_data': data}
+        fnargs = {'signed_data': data}
         if detached:
             if isinstance(detached, str):
                 detached = detached.encode('utf-8')
-            args['signature'] = detached
+            if not isinstance(detached, bytes) and not hasattr(detached, 'read'):
+                raise TypeError('detached must be bytes or a file-like object (make sure the position is correct!)')
+            fnargs['signature'] = detached
         if strict:
             if keys:
                 if not isinstance(keys, list):
                     keys = [keys]
-                args['verify'] = keys
-                results[None] = self.gpg.verify(**args)
+                fnargs['verify'] = keys
+                results[None] = self.ctx.verify(**fnargs)
         else:
             if keys:
                 for k in keys:
-                    _args = copy.deepcopy(args)
-                    _args['verify'] = [k]
-                    results[k.fpr] = self.gpg.verify(**_args)
+                    _fnargs = copy.deepcopy(fnargs)
+                    _fnargs['verify'] = [k]
+                    try:
+                        print(self.ctx.get_keylist_mode())
+                        sigchk = self.ctx.verify(**_fnargs)
+                        results[k.fpr] = (True, sigchk[1].results, None)
+                    except gpg.errors.MissingSignatures as e:
+                        results[k.fpr] = (False, e.results, 'Missing Signature')
+                    except gpg.errors.BadSignatures as e:
+                        results[k.fpr] = (False, e.results, 'Bad Signature')
             else:
-                results[None] = self.gpg.verify(**args)
+                results[None] = self.ctx.verify(**fnargs)
         return(results)

     def verifyFile(self, filepath, *args, **kwargs):
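On the commit message's open question about verifyData: in the GPGME Python bindings, Context.verify() returns a (data, result) tuple on success but raises on failure, and the two paths carry different attributes. As far as I can tell, the VerifyResult inside the tuple exposes .signatures, while .results lives on the MissingSignatures/BadSignatures exceptions, so sigchk[1].results in the success path above is worth a close look. A minimal sketch of the call shape (signed_data, detached_sig, and key are placeholders for values obtained elsewhere):

import gpg
import gpg.errors

ctx = gpg.Context(home_dir = '/tmp/.aif.example.gpg')  # hypothetical homedir
try:
    data, result = ctx.verify(signed_data, signature = detached_sig, verify = [key])
    for sig in result.signatures:  # success path: inspect VerifyResult.signatures
        print(sig.fpr, sig.status)
except gpg.errors.BadSignatures as e:
    print(e.results)  # failure path: results ride on the exception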
@@ -1,24 +1,28 @@
+import copy
 import hashlib
 import os
 import pathlib
 import zlib
 ##
 import aif.constants_fallback
-from . import file_handler


 class Hash(object):
-    def __init__(self, file_path):
+    def __init__(self, hash_algos = None, *args, **kwargs):
+        self.hashers = None
+        self.valid_hashtypes = list(aif.constants_fallback.HASH_SUPPORTED_TYPES)
+        self.hash_algos = hash_algos
+        self.configure()

-    def configure(self, hashalgo = None):
+    def configure(self, *args, **kwargs):
         self.hashers = {}
-        if hashalgo:
-            if not isinstance(hashalgo, list):
-                hashalgo = [hashalgo]
+        if self.hash_algos:
+            if not isinstance(self.hash_algos, list):
+                self.hash_algos = [self.hash_algos]
         else:
-            hashalgo = list(aif.constants_fallback.HASH_SUPPORTED_TYPES)
-        for h in hashalgo:
-            if h not in aif.constants_fallback.HASH_SUPPORTED_TYPES:
+            self.hash_algos = copy.deepcopy(self.valid_hashtypes)
+        for h in self.hash_algos:
+            if h not in self.valid_hashtypes:
                 raise ValueError('Hash algorithm not supported')
             if h not in aif.constants_fallback.HASH_EXTRA_SUPPORTED_TYPES:
                 hasher = hashlib.new(h)
@@ -27,22 +31,44 @@ class Hash(object):
             self.hashers[h] = hasher
         return()

-    def hashData(self, data):
+    def hashData(self, data, *args, **kwargs):
         results = {}
-        if not self.hashers:
+        if not self.hashers or not self.hash_algos:
             self.configure()
         for hashtype, hasher in self.hashers.items():
             if hashtype in aif.constants_fallback.HASH_EXTRA_SUPPORTED_TYPES:
                 results[hashtype] = hasher(data)
             else:
-                rslt = hasher.update(data)
-                results[hashtype] = rslt.hexdigest()
+                hasher.update(data)
+                results[hashtype] = hasher.hexdigest()
         return(results)

-    def hashFile(self, file_path):
-        if not isinstance(file_path, (str, file_handler.File, pathlib.Path, pathlib.PurePath)):
+    def hashFile(self, file_path, *args, **kwargs):
+        if not isinstance(file_path, (str, pathlib.Path, pathlib.PurePath)):
             raise ValueError('file_path must be a path expression')
         file_path = str(file_path)
         with open(file_path, 'rb') as fh:
             results = self.hashData(fh.read())
         return(results)
+
+    def verifyData(self, data, checksum, checksum_type, *args, **kwargs):
+        if isinstance(data, str):
+            data = data.encode('utf-8')
+        if not isinstance(checksum, str):
+            checksum = checksum.decode('utf-8')
+        if checksum_type not in self.hash_algos:
+            raise ValueError('Hash algorithm not supported; try reconfiguring')
+        self.configure()
+        cksum = self.hashData(data)
+        cksum_htype = cksum[checksum_type]
+        if cksum_htype == checksum:
+            result = True
+        else:
+            result = False
+        return(result)
+
+    def verifyFile(self, filepath, checksum, checksum_type, *args, **kwargs):
+        filepath = os.path.abspath(os.path.expanduser(filepath))
+        with open(filepath, 'rb') as fh:
+            result = self.verifyData(fh.read(), checksum, checksum_type, **kwargs)
+        return(result)
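One detail worth calling out in verifyData() above: self.configure() is re-run before hashing because hashlib objects are stateful; update() appends to a running digest, so reusing a hasher across calls silently hashes the concatenation. A short demonstration (plain hashlib, not from the commit):

import hashlib

h = hashlib.new('sha256')
h.update(b'foo')
first = h.hexdigest()
h.update(b'foo')  # cumulative: this is now the digest of b'foofoo'
assert h.hexdigest() != first
assert hashlib.new('sha256', b'foo').hexdigest() == first  # a fresh hasher matches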
aif/utils/parser.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+import logging
+import re
+
+
+_logger = logging.getLogger('utils:{0}'.format(__name__))
+
+
+_uri_re = re.compile((r'^(?P<scheme>[\w]+)://'
+                      r'(?:(?P<user>[^:@]+)(?::(?P<password>[^@]+)?)?@)?'
+                      r'(?P<base>[^/:]+)?'
+                      r'(?::(?P<port>[0-9]+))?'
+                      r'(?P<path>/.*)$'),
+                     re.IGNORECASE)
+
+
+class URI(object):
+    def __init__(self, uri):
+        self.orig_uri = uri
+        r = _uri_re.search(self.orig_uri)
+        if not r:
+            raise ValueError('Not a valid URI')
+        for k, v in dict(zip(list(_uri_re.groupindex.keys()), r.groups())).items():
+            setattr(self, k, v)
+        if self.port:
+            self.port = int(self.port)
+        for a in ('base', 'scheme'):
+            v = getattr(self, a)
+            if v:
+                setattr(self, a, v.lower())
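A quick usage sketch for the URI class above; the attribute values follow directly from the named groups in _uri_re (the host below is hypothetical):

u = URI('FTP://anonymous:secret@mirror.example.com:2121/pub/file.tar')
print(u.scheme)    # 'ftp' (scheme and base are lowercased)
print(u.user)      # 'anonymous'
print(u.password)  # 'secret'
print(u.base)      # 'mirror.example.com'
print(u.port)      # 2121 (cast to int)
print(u.path)      # '/pub/file.tar'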
aif/utils/sources.py (new file, 258 lines)
@@ -0,0 +1,258 @@
+import ftplib
+import io
+import pathlib
+import re
+##
+import requests
+import requests.auth
+##
+import aif.constants_fallback
+from . import gpg_handler
+from . import hash_handler
+from . import parser
+
+
+class ChecksumFile(object):
+    _bsd_re = re.compile(r'^(?P<fname>\(.*\))\s+=\s+(?P<cksum>.*)$')
+
+    def __init__(self, checksum_xml, filetype):
+        self.xml = checksum_xml
+        self.uri = self.xml.text.strip()
+        self.filetype = filetype
+        self.hashes = None
+        downloader = getDLHandler(self.uri)  # Recursive objects for the win?
+        dl = downloader(self.xml)
+        dl.get()
+        self.data = dl.data.read()
+        dl.data.seek(0, 0)
+        self._convert()
+
+    def _convert(self):
+        data = self.data
+        if not isinstance(data, str):
+            data = data.decode('utf-8')
+        data = data.strip()
+        self.hashes = {}
+        if self.filetype not in ('gnu', 'bsd'):
+            raise ValueError('filetype attribute must be either "gnu" or "bsd"')
+        for line in data.splitlines():
+            if self.filetype == 'gnu':
+                hashtype = None  # GNU style splits their hash types into separate files by default.
+                h, fname = line.split(None, 1)
+            elif self.filetype == 'bsd':
+                l = line.split(None, 1)
+                hashtype = l.pop(0).lower()
+                r = self._bsd_re.search(l[0])
+                h = r.group('cksum')
+                fname = r.group('fname')
+            if hashtype not in self.hashes:
+                self.hashes[hashtype] = {}
+            self.hashes[hashtype][fname] = h
+        return(None)
+
+
+class Downloader(object):
+    def __init__(self, netresource_xml, *args, **kwargs):
+        self.xml = netresource_xml
+        self.uri = parser.URI(self.xml.text.strip())
+        self.user = self.xml.attrib.get('user')
+        if not self.user and self.uri.user:
+            self.user = self.uri.user
+        self.password = self.xml.attrib.get('password')
+        if not self.password and self.uri.password:
+            self.password = self.uri.password
+        self.real_uri = ('{0}://'
+                         '{1}'
+                         '{2}'
+                         '{3}').format(self.uri.scheme,
+                                       (self.uri.base if self.uri.base else ''),
+                                       (':{0}'.format(self.uri.port) if self.uri.port else ''),
+                                       self.uri.path)
+        self.gpg = None
+        self.checksum = None
+        self.data = io.BytesIO()
+
+    def get(self):
+        pass  # Dummy method.
+        return(None)
+
+    def parseGpgVerify(self, results):
+        pass
+
+    def verify(self, verify_xml, *args, **kwargs):
+        gpg_xml = verify_xml.find('gpg')
+        hash_xml = verify_xml.find('hash')
+        results = {}
+        if gpg_xml is not None:
+            results['gpg'] = self.verifyGPG(gpg_xml)
+        if hash_xml is not None:
+            results['hash'] = self.verifyHash(hash_xml)
+        return(results)
+
+    def verifyGPG(self, gpg_xml, *args, **kwargs):
+        results = {}
+        # We don't allow custom GPG homedirs since this is probably running from a LiveCD/USB/whatever anyways.
+        # This means we can *always* instantiate the GPG handler from scratch.
+        self.gpg = gpg_handler.GPG()
+        keys_xml = gpg_xml.find('keys')
+        sigs_xml = gpg_xml.find('sigs')
+        fnargs = {'keyring_import': True}
+        fnargs.update(kwargs)
+        if keys_xml is not None:
+            fnargs['keys'] = []
+            for key_id_xml in keys_xml.findall('keyID'):
+                if key_id_xml.text == 'auto':
+                    k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, **fnargs)
+                elif key_id_xml.text == 'detect':
+                    fnargs['strict'] = False
+                    continue
+                else:
+                    k = self.gpg.findKeyByID(key_id_xml.text.strip(), **fnargs)
+                fnargs['keys'].append(k)
+            for key_file_xml in keys_xml.findall('keyFile'):
+                downloader = getDLHandler(key_file_xml.text.strip())  # Recursive objects for the win?
+                dl = downloader(key_file_xml)
+                dl.get()
+                k = self.gpg.getKeyData(dl.data.read(), **fnargs)[0]
+                dl.data.seek(0, 0)
+                fnargs['keys'].extend(k)
+        if sigs_xml is not None:
+            for sig_text_xml in sigs_xml.findall('signature'):
+                sig = sig_text_xml.text.strip()
+                sigchk = self.gpg.verifyData(self.data.read(), detached = sig, **fnargs)
+                self.data.seek(0, 0)
+                results.update(sigchk)
+            for sig_file_xml in sigs_xml.findall('signatureFile'):
+                downloader = getDLHandler(sig_file_xml.text.strip())
+                dl = downloader(sig_file_xml)
+                dl.get()
+                sigchk = self.gpg.verifyData(self.data.read(), detached = dl.data.read(), **fnargs)
+                dl.data.seek(0, 0)
+                self.data.seek(0, 0)
+                results.update(sigchk)
+        self.gpg.clean()
+        return(results)
+
+    def verifyHash(self, hash_xml, *args, **kwargs):
+        results = []
+        algos = [str(ht) for ht in hash_xml.xpath('//checksum/@hashType|//checksumFile/@hashType')]
+        self.checksum = hash_handler.Hash(hash_algos = algos)
+        self.checksum.configure()
+        checksum_xml = hash_xml.findall('checksum')
+        checksum_file_xml = hash_xml.findall('checksumFile')
+        checksums = self.checksum.hashData(self.data.read())
+        self.data.seek(0, 0)
+        if checksum_file_xml is not None:
+            for cksum_xml in checksum_file_xml:
+                htype = cksum_xml.attrib['hashType'].strip().lower()
+                ftype = cksum_xml.attrib['fileType'].strip().lower()
+                fname = cksum_xml.attrib.get('filePath',
+                                             pathlib.PurePath(self.uri.path).name)
+                cksum_file = ChecksumFile(cksum_xml, ftype)
+                if ftype == 'gnu':
+                    cksum = cksum_file.hashes[None][fname]
+                elif ftype == 'bsd':
+                    cksum = cksum_file.hashes[htype][fname]
+                result = (cksum == checksums[htype])
+                results.append(result)
+        if checksum_xml is not None:
+            for cksum_xml in checksum_xml:
+                # Thankfully, this is a LOT easier.
+                htype = cksum_xml.attrib['hashType'].strip().lower()
+                result = (cksum_xml.text.strip().lower() == checksums[htype])
+                results.append(result)
+        result = all(results)
+        return(result)
+
+
+class FSDownloader(Downloader):
+    def __init__(self, netresource_xml, *args, **kwargs):
+        super().__init__(netresource_xml, *args, **kwargs)
+        delattr(self, 'user')
+        delattr(self, 'password')
+
+    def get(self):
+        self.data.seek(0, 0)
+        with open(self.uri.path, 'rb') as fh:
+            self.data.write(fh.read())
+        self.data.seek(0, 0)
+        return(None)
+
+
+class FTPDownloader(Downloader):
+    def __init__(self, netresource_xml, *args, **kwargs):
+        super().__init__(netresource_xml, *args, **kwargs)
+        if not self.user:
+            self.user = ''
+        if not self.password:
+            self.password = ''
+        self.port = (self.uri.port if self.uri.port else 0)
+        self._conn = None
+
+    def _connect(self):
+        self._conn = ftplib.FTP()
+        self._conn.connect(host = self.uri.base, port = self.port)
+        self._conn.login(user = self.user, passwd = self.password)
+        return(None)
+
+    def get(self):
+        self._connect()
+        self.data.seek(0, 0)
+        self._conn.retrbinary('RETR {0}'.format(self.uri.path), self.data.write)
+        self.data.seek(0, 0)
+        self._close()
+        return(None)
+
+    def _close(self):
+        self._conn.quit()
+        return(None)
+
+
+class FTPSDownloader(FTPDownloader):
+    def __init__(self, netresource_xml, *args, **kwargs):
+        super().__init__(netresource_xml, *args, **kwargs)
+
+    def _connect(self):
+        self._conn = ftplib.FTP_TLS()
+        self._conn.connect(host = self.uri.base, port = self.port)
+        self._conn.login(user = self.user, passwd = self.password)
+        self._conn.prot_p()
+        return(None)
+
+
+class HTTPDownloader(Downloader):
+    def __init__(self, netresource_xml, *args, **kwargs):
+        super().__init__(netresource_xml, *args, **kwargs)
+        self.auth = self.xml.attrib.get('authType', 'none').lower()
+        if self.auth == 'none':
+            self.auth = None
+            self.realm = None
+            self.user = None
+            self.password = None
+        else:
+            if self.auth == 'basic':
+                self.auth = requests.auth.HTTPBasicAuth(self.user, self.password)
+            elif self.auth == 'digest':
+                self.auth = requests.auth.HTTPDigestAuth(self.user, self.password)
+
+    def get(self):
+        self.data.seek(0, 0)
+        req = requests.get(self.real_uri, auth = self.auth)
+        self.data.write(req.content)
+        self.data.seek(0, 0)
+        return(None)
+
+
+def getDLHandler(uri):
+    uri = uri.strip()
+    if re.search(r'^file://', uri, re.IGNORECASE):
+        return(FSDownloader)
+    elif re.search(r'^https?://', uri, re.IGNORECASE):
+        return(HTTPDownloader)
+    elif re.search(r'^ftp://', uri, re.IGNORECASE):
+        return(FTPDownloader)
+    elif re.search(r'^ftps://', uri, re.IGNORECASE):
+        return(FTPSDownloader)
+    else:
+        raise RuntimeError('Could not detect which download handler to use')
+    return(None)
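End to end, these handlers are driven the same way ChecksumFile already drives them: pick a class from the URI scheme, instantiate it with the XML element, call get(), then read from the BytesIO buffer. A hedged sketch (netresource_xml stands in for a real lxml element whose text is the resource URI):

downloader = getDLHandler(netresource_xml.text.strip())  # e.g. HTTPDownloader for https://
dl = downloader(netresource_xml)
dl.get()
payload = dl.data.read()  # raw bytes of the fetched resource
dl.data.seek(0, 0)        # rewind so verify()/verifyHash() can re-read it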